diff -Nru showq-0.4.1+git20090622/debian/changelog showq-0.4.1+git20090622+dfsg0/debian/changelog
--- showq-0.4.1+git20090622/debian/changelog	2011-12-03 04:21:05.000000000 +0000
+++ showq-0.4.1+git20090622+dfsg0/debian/changelog	2012-01-10 13:41:33.000000000 +0000
@@ -1,8 +1,11 @@
-showq (0.4.1+git20090622-1build1) precise; urgency=low
+showq (0.4.1+git20090622+dfsg0-1) unstable; urgency=low
 
-  * No-change rebuild to drop spurious libsfgcc1 dependency on armhf.
+  * Team upload.
+  * Repack upstream tarball to get rid of the waf binary (Closes: #654502).
+  * debian/rules: Properly prune *.pyc files.
+  * debian/watch: Mangle debian's versioning.
 
- -- Adam Conrad  Fri, 02 Dec 2011 21:21:05 -0700
+ -- Alessio Treglia  Tue, 10 Jan 2012 14:41:29 +0100
 
 showq (0.4.1+git20090622-1) unstable; urgency=low
diff -Nru showq-0.4.1+git20090622/debian/rules showq-0.4.1+git20090622+dfsg0/debian/rules
--- showq-0.4.1+git20090622/debian/rules	2011-04-12 21:04:36.000000000 +0000
+++ showq-0.4.1+git20090622+dfsg0/debian/rules	2012-01-10 13:39:20.000000000 +0000
@@ -17,7 +17,7 @@
 
 override_dh_auto_clean:
 	$(WAF) distclean
-	rm -f autowaf.pyc
+	find -name "*.pyc" -delete
 	dh_auto_clean
 
 override_dh_auto_install:
diff -Nru showq-0.4.1+git20090622/debian/watch showq-0.4.1+git20090622+dfsg0/debian/watch
--- showq-0.4.1+git20090622/debian/watch	2011-04-11 12:34:22.000000000 +0000
+++ showq-0.4.1+git20090622+dfsg0/debian/watch	2012-01-10 13:39:51.000000000 +0000
@@ -1,2 +1,3 @@
 version=3
+opts=dversionmangle=s/.dfsg[0-9]*// \
 http://download.berlios.de/showq/showq(.*)\.tar\.gz
Binary files /tmp/kNa0uXfJDV/showq-0.4.1+git20090622/waf and /tmp/0hljJABKlo/showq-0.4.1+git20090622+dfsg0/waf differ
diff -Nru showq-0.4.1+git20090622/wafadmin/Build.py showq-0.4.1+git20090622+dfsg0/wafadmin/Build.py
--- showq-0.4.1+git20090622/wafadmin/Build.py	1970-01-01 00:00:00.000000000 +0000
+++ showq-0.4.1+git20090622+dfsg0/wafadmin/Build.py	2009-05-01 23:35:38.000000000 +0000
@@ -0,0 +1,618 @@
+#! 
/usr/bin/env python +# encoding: utf-8 +import sys +if sys.hexversion < 0x020400f0: from sets import Set as set +import os,sys,errno,re,glob,gc,datetime,shutil +try:import cPickle +except:import pickle as cPickle +import Runner,TaskGen,Node,Scripting,Utils,Environment,Task,Logs,Options +from Logs import debug,error,info +from Constants import* +SAVED_ATTRS='root srcnode bldnode node_sigs node_deps raw_deps task_sigs id_nodes'.split() +bld=None +class BuildError(Utils.WafError): + def __init__(self,b=None,t=[]): + self.bld=b + self.tasks=t + self.ret=1 + Utils.WafError.__init__(self,self.format_error()) + def format_error(self): + lst=['Build failed'] + for tsk in self.tasks: + txt=tsk.format_error() + if txt:lst.append(txt) + return'\n'.join(lst) +class BuildContext(Utils.Context): + def __init__(self): + global bld + bld=self + self.task_manager=Task.TaskManager() + self.id_nodes=0 + self.idx={} + self.all_envs={} + self.bdir='' + self.path=None + self.deps_man=Utils.DefaultDict(list) + self.cache_node_abspath={} + self.cache_scanned_folders={} + self.uninstall=[] + for v in'cache_node_abspath task_sigs node_deps raw_deps node_sigs'.split(): + var={} + setattr(self,v,var) + self.cache_dir_contents={} + self.all_task_gen=[] + self.task_gen_cache_names={} + self.cache_sig_vars={} + self.log=None + self.root=None + self.srcnode=None + self.bldnode=None + class node_class(Node.Node): + pass + self.node_class=node_class + self.node_class.__module__="Node" + self.node_class.__name__="Nodu" + self.node_class.bld=self + self.is_install=None + def __copy__(self): + raise Utils.WafError('build contexts are not supposed to be cloned') + def load(self): + try: + env=Environment.Environment(os.path.join(self.cachedir,'build.config.py')) + except(IOError,OSError): + pass + else: + if env['version']1:raise + if data: + for x in SAVED_ATTRS:setattr(self,x,data[x]) + else: + debug('build: Build cache loading failed') + finally: + if f:f.close() + gc.enable() + def save(self): + gc.disable() + self.root.__class__.bld=None + Node.Nodu=self.node_class + db=os.path.join(self.bdir,DBFILE) + file=open(db+'.tmp','wb') + data={} + for x in SAVED_ATTRS:data[x]=getattr(self,x) + cPickle.dump(data,file,-1) + file.close() + try:os.unlink(db) + except OSError:pass + os.rename(db+'.tmp',db) + self.root.__class__.bld=self + gc.enable() + def clean(self): + debug('build: clean called') + precious=set([]) + for env in self.all_envs.values(): + for x in env[CFG_FILES]: + node=self.srcnode.find_resource(x) + if node: + precious.add(node.id) + def clean_rec(node): + for x in node.childs.keys(): + nd=node.childs[x] + tp=nd.id&3 + if tp==Node.DIR: + clean_rec(nd) + elif tp==Node.BUILD: + if nd.id in precious:continue + for env in self.all_envs.values(): + try:os.remove(nd.abspath(env)) + except OSError:pass + node.childs.__delitem__(x) + clean_rec(self.srcnode) + for v in'node_sigs node_deps task_sigs raw_deps cache_node_abspath'.split(): + setattr(self,v,{}) + def compile(self): + debug('build: compile called') + self.flush() + self.generator=Runner.Parallel(self,Options.options.jobs) + def dw(on=True): + if Options.options.progress_bar: + if on:sys.stderr.write(Logs.colors.cursor_on) + else:sys.stderr.write(Logs.colors.cursor_off) + debug('build: executor starting') + back=os.getcwd() + os.chdir(self.bldnode.abspath()) + try: + try: + dw(on=False) + self.generator.start() + except KeyboardInterrupt: + dw() + if self.generator.consumers: + self.save() + raise + except Exception: + dw() + raise + else: + dw() + if 
self.generator.consumers: + self.save() + if self.generator.error: + raise BuildError(self,self.task_manager.tasks_done) + finally: + os.chdir(back) + def install(self): + debug('build: install called') + self.flush() + if self.is_install<0: + lst=[] + for x in self.uninstall: + dir=os.path.dirname(x) + if not dir in lst:lst.append(dir) + lst.sort() + lst.reverse() + nlst=[] + for y in lst: + x=y + while len(x)>4: + if not x in nlst:nlst.append(x) + x=os.path.dirname(x) + nlst.sort() + nlst.reverse() + for x in nlst: + try:os.rmdir(x) + except OSError:pass + def new_task_gen(self,*k,**kw): + kw['bld']=self + if len(k)==0: + ret=TaskGen.task_gen(*k,**kw) + else: + cls_name=k[0] + try:cls=TaskGen.task_gen.classes[cls_name] + except KeyError:raise Utils.WscriptError('%s is not a valid task generator -> %s'%(cls_name,[x for x in TaskGen.task_gen.classes])) + ret=cls(*k,**kw) + return ret + def load_envs(self): + try: + lst=Utils.listdir(self.cachedir) + except OSError,e: + if e.errno==errno.ENOENT: + raise Utils.WafError('The project was not configured: run "waf configure" first!') + else: + raise + if not lst: + raise Utils.WafError('The cache directory is empty: reconfigure the project') + for file in lst: + if file.endswith(CACHE_SUFFIX): + env=Environment.Environment(os.path.join(self.cachedir,file)) + name=file[:-len(CACHE_SUFFIX)] + self.all_envs[name]=env + self.init_variants() + for env in self.all_envs.values(): + for f in env[CFG_FILES]: + newnode=self.path.find_or_declare(f) + try: + hash=Utils.h_file(newnode.abspath(env)) + except(IOError,AttributeError): + error("cannot find "+f) + hash=SIG_NIL + self.node_sigs[env.variant()][newnode.id]=hash + self.bldnode=self.root.find_dir(self.bldnode.abspath()) + self.path=self.srcnode=self.root.find_dir(self.srcnode.abspath()) + self.cwd=self.bldnode.abspath() + def setup(self,tool,tooldir=None,funs=None): + if isinstance(tool,list): + for i in tool:self.setup(i,tooldir) + return + if not tooldir:tooldir=Options.tooldir + file=None + module=Utils.load_tool(tool,tooldir) + if hasattr(module,"setup"):module.setup(self) + if file:file.close() + def init_variants(self): + debug('build: init variants') + lstvariants=[] + for env in self.all_envs.values(): + if not env.variant()in lstvariants: + lstvariants.append(env.variant()) + self.lst_variants=lstvariants + debug('build: list of variants is %r'%lstvariants) + for name in lstvariants+[0]: + for v in'node_sigs cache_node_abspath'.split(): + var=getattr(self,v) + if not name in var: + var[name]={} + def load_dirs(self,srcdir,blddir,load_cache=1): + assert(os.path.isabs(srcdir)) + assert(os.path.isabs(blddir)) + self.cachedir=os.path.join(blddir,CACHE_DIR) + if srcdir==blddir: + raise Utils.WafError("build dir must be different from srcdir: %s <-> %s "%(srcdir,blddir)) + self.bdir=blddir + self.load() + if not self.root: + Node.Nodu=self.node_class + self.root=Node.Nodu('',None,Node.DIR) + if not self.srcnode: + self.srcnode=self.root.ensure_dir_node_from_path(srcdir) + debug('build: srcnode is %s and srcdir %s'%(self.srcnode.name,srcdir)) + self.path=self.srcnode + try:os.makedirs(blddir) + except OSError:pass + if not self.bldnode: + self.bldnode=self.root.ensure_dir_node_from_path(blddir) + self.init_variants() + def rescan(self,src_dir_node): + if self.cache_scanned_folders.get(src_dir_node.id,None):return + self.cache_scanned_folders[src_dir_node.id]=1 + if hasattr(self,'repository'):self.repository(src_dir_node) + if sys.platform=="win32"and not src_dir_node.name: + return + 
self.listdir_src(src_dir_node) + h1=self.srcnode.height() + h2=src_dir_node.height() + lst=[] + child=src_dir_node + while h2>h1: + lst.append(child.name) + child=child.parent + h2-=1 + lst.reverse() + for variant in self.lst_variants: + sub_path=os.path.join(self.bldnode.abspath(),variant,*lst) + try: + self.listdir_bld(src_dir_node,sub_path,variant) + except OSError: + dict=self.node_sigs[variant] + for node in src_dir_node.childs.values(): + if node.id in dict: + dict.__delitem__(node.id) + if node.id!=self.bldnode.id: + src_dir_node.childs.__delitem__(node.name) + os.makedirs(sub_path) + def listdir_src(self,parent_node): + parent_path=parent_node.abspath() + try: + lst=set(Utils.listdir(parent_path)) + except OSError: + if not parent_node.childs: + raise + for x in parent_node.childs.values(): + if x.id&3==Node.FILE: + raise + lst=set([]) + self.cache_dir_contents[parent_node.id]=lst + debug('build: folder contents %r'%lst) + node_names=set([x.name for x in parent_node.childs.values()if x.id&3 in(Node.FILE,Node.DIR)]) + cache=self.node_sigs[0] + to_keep=lst&node_names + for x in to_keep: + node=parent_node.childs[x] + if node.id&3==Node.DIR:continue + try: + cache[node.id]=Utils.h_file(parent_path+os.sep+node.name) + except IOError: + raise Utils.WafError("The file %s is not readable or has become a dir"%node.abspath()) + to_remove=node_names-lst + if to_remove: + for name in to_remove: + nd=parent_node.childs[name] + if nd.id&3==Node.DIR: + for x in nd.childs.values(): + if x.id&3==Node.FILE: + break + else: + continue + self.remove_node(nd) + def remove_node(self,node): + if node.id&3==Node.DIR: + for x in node.childs.values(): + self.remove_node(x) + if node.id!=self.bldnode.id: + node.parent.childs.__delitem__(node.name) + elif node.id&3==Node.FILE: + if node.id in self.node_sigs[0]: + self.node_sigs[0].__delitem__(node.id) + node.parent.childs.__delitem__(node.name) + else: + for variant in self.lst_variants: + if node.id in self.node_sigs[variant]: + self.node_sigs[variant].__delitem__(node.id) + node.parent.childs.__delitem__(node.name) + def listdir_bld(self,parent_node,path,variant): + i_existing_nodes=[x for x in parent_node.childs.values()if x.id&3==Node.BUILD] + lst=set(Utils.listdir(path)) + node_names=set([x.name for x in i_existing_nodes]) + remove_names=node_names-lst + ids_to_remove=[x.id for x in i_existing_nodes if x.name in remove_names] + cache=self.node_sigs[variant] + for nid in ids_to_remove: + if nid in cache: + cache.__delitem__(nid) + def get_env(self): + return self.env_of_name('default') + def set_env(self,name,val): + self.all_envs[name]=val + env=property(get_env,set_env) + def add_manual_dependency(self,path,value): + if isinstance(path,Node.Node): + node=path + elif os.path.isabs(path): + node=self.root.find_resource(path) + else: + node=self.path.find_resource(path) + self.deps_man[node.id].append(value) + def launch_node(self): + try: + return self.p_ln + except AttributeError: + self.p_ln=self.root.find_dir(Options.launch_dir) + return self.p_ln + def glob(self,pattern,relative=True): + path=self.path.abspath() + files=[self.root.find_resource(x)for x in glob.glob(path+os.sep+pattern)] + if relative: + files=[x.path_to_parent(self.path)for x in files if x] + else: + files=[x.abspath()for x in files if x] + return files + def add_group(self,*k): + self.task_manager.add_group(*k) + def set_group(self,*k,**kw): + self.task_manager.set_group(*k,**kw) + def hash_env_vars(self,env,vars_lst): + idx=str(id(env))+str(vars_lst) + try:return 
self.cache_sig_vars[idx] + except KeyError:pass + lst=[str(env[a])for a in vars_lst] + ret=Utils.h_list(lst) + debug("envhash: %r %r"%(ret,lst)) + self.cache_sig_vars[idx]=ret + return ret + def name_to_obj(self,name,env): + cache=self.task_gen_cache_names + if not cache: + for x in self.all_task_gen: + vt=x.env.variant()+'_' + if x.name: + cache[vt+x.name]=x + else: + if isinstance(x.target,str): + target=x.target + else: + target=' '.join(x.target) + v=vt+target + if not cache.get(v,None): + cache[v]=x + return cache.get(env.variant()+'_'+name,None) + def flush(self,all=1): + self.ini=datetime.datetime.now() + self.task_gen_cache_names={} + self.name_to_obj('',self.env) + debug('build: delayed operation TaskGen.flush() called') + if Options.options.compile_targets: + debug('task_gen: posting objects listed in compile_targets') + target_objects=Utils.DefaultDict(list) + for target_name in Options.options.compile_targets.split(','): + target_name=target_name.strip() + for env in self.all_envs.values(): + obj=self.name_to_obj(target_name,env) + if obj: + target_objects[target_name].append(obj) + if not target_name in target_objects and all: + raise Utils.WafError("target '%s' does not exist"%target_name) + to_compile=[] + for x in target_objects.values(): + for y in x: + to_compile.append(id(y)) + for i in xrange(len(self.task_manager.groups)): + g=self.task_manager.groups[i] + self.task_manager.current_group=i + for tg in g.tasks_gen: + if id(tg)in to_compile: + tg.post() + else: + debug('task_gen: posting objects (normal)') + ln=self.launch_node() + if ln.is_child_of(self.bldnode)or not ln.is_child_of(self.srcnode): + ln=self.srcnode + proj_node=self.root.find_dir(os.path.split(Utils.g_module.root_path)[0]) + if proj_node.id!=self.srcnode.id: + ln=self.srcnode + for i in xrange(len(self.task_manager.groups)): + g=self.task_manager.groups[i] + self.task_manager.current_group=i + for tg in g.tasks_gen: + if not tg.path.is_child_of(ln): + continue + tg.post() + def env_of_name(self,name): + try: + return self.all_envs[name] + except KeyError: + error('no such environment: '+name) + return None + def progress_line(self,state,total,col1,col2): + n=len(str(total)) + Utils.rot_idx+=1 + ind=Utils.rot_chr[Utils.rot_idx%4] + ini=self.ini + pc=(100.*state)/total + eta=Utils.get_elapsed_time(ini) + fs="[%%%dd/%%%dd][%%s%%2d%%%%%%s][%s]["%(n,n,ind) + left=fs%(state,total,col1,pc,col2) + right='][%s%s%s]'%(col1,eta,col2) + cols=Utils.get_term_cols()-len(left)-len(right)+2*len(col1)+2*len(col2) + if cols<7:cols=7 + ratio=int((cols*state)/total)-1 + bar=('='*ratio+'>').ljust(cols) + msg=Utils.indicator%(left,bar,right) + return msg + def do_install(self,src,tgt,chmod=O644): + if self.is_install>0: + if not Options.options.force: + try: + st1=os.stat(tgt) + st2=os.stat(src) + except OSError: + pass + else: + if st1.st_mtime>=st2.st_mtime and st1.st_size==st2.st_size: + return False + srclbl=src.replace(self.srcnode.abspath(None)+os.sep,'') + info("* installing %s as %s"%(srclbl,tgt)) + try:os.remove(tgt) + except OSError:pass + try: + shutil.copy2(src,tgt) + os.chmod(tgt,chmod) + except IOError: + try: + os.stat(src) + except(OSError,IOError): + error('File %r does not exist'%src) + raise Utils.WafError('Could not install the file %r'%tgt) + return True + elif self.is_install<0: + info("* uninstalling %s"%tgt) + self.uninstall.append(tgt) + try:os.remove(tgt) + except OSError:pass + return True + def get_install_path(self,path,env=None): + if not env:env=self.env + destdir=env.get_destdir() + 
path=path.replace('/',os.sep) + destpath=Utils.subst_vars(path,env) + if destdir: + destpath=os.path.join(destdir,destpath.lstrip(os.sep)) + return destpath + def install_files(self,path,files,env=None,chmod=O644,relative_trick=False): + if env: + assert isinstance(env,Environment.Environment),"invalid parameter" + if not self.is_install:return[] + if not path:return[] + node=self.path + if isinstance(files,str)and'*'in files: + gl=node.abspath()+os.sep+files + lst=glob.glob(gl) + else: + lst=Utils.to_list(files) + env=env or self.env + destpath=self.get_install_path(path,env) + Utils.check_dir(destpath) + installed_files=[] + for filename in lst: + if not os.path.isabs(filename): + nd=node.find_resource(filename) + if not nd: + raise Utils.WafError("Unable to install the file `%s': not found in %s"%(filename,node)) + if relative_trick: + destfile=os.path.join(destpath,filename) + Utils.check_dir(os.path.dirname(destfile)) + else: + destfile=os.path.join(destpath,nd.name) + filename=nd.abspath(env) + else: + alst=Utils.split_path(filename) + destfile=os.path.join(destpath,alst[-1]) + if self.do_install(filename,destfile,chmod): + installed_files.append(destfile) + return installed_files + def install_as(self,path,srcfile,env=None,chmod=O644): + if env: + assert isinstance(env,Environment.Environment),"invalid parameter" + if not self.is_install:return False + if not path:return False + if not env:env=self.env + node=self.path + destpath=self.get_install_path(path,env) + dir,name=os.path.split(destpath) + Utils.check_dir(dir) + if not os.path.isabs(srcfile): + filenode=node.find_resource(srcfile) + src=filenode.abspath(env) + else: + src=srcfile + return self.do_install(src,destpath,chmod) + def symlink_as(self,path,src,env=None): + if not self.is_install:return + if not path:return + tgt=self.get_install_path(path,env) + dir,name=os.path.split(tgt) + Utils.check_dir(dir) + if self.is_install>0: + link=False + if not os.path.islink(tgt): + link=True + elif os.readlink(tgt)!=src: + link=True + try:os.remove(tgt) + except OSError:pass + if link: + info('* symlink %s (-> %s)'%(tgt,src)) + os.symlink(src,tgt) + return 0 + else: + try: + info("* removing %s"%(tgt)) + os.remove(tgt) + return 0 + except OSError: + return 1 + def exec_command(self,cmd,**kw): + debug('runner: system command -> %s'%cmd) + if self.log: + self.log.write('%s\n'%cmd) + kw['log']=self.log + try: + if not kw.get('cwd',None): + kw['cwd']=self.cwd + except AttributeError: + self.cwd=kw['cwd']=self.bldnode.abspath() + return Utils.exec_command(cmd,**kw) + def printout(self,s): + f=self.log or sys.stderr + f.write(s) + f.flush() + def add_subdirs(self,dirs): + self.recurse(dirs,'build') + def pre_recurse(self,name_or_mod,path,nexdir): + if not hasattr(self,'oldpath'): + self.oldpath=[] + self.oldpath.append(self.path) + self.path=self.root.find_dir(nexdir) + return{'bld':self,'ctx':self} + def post_recurse(self,name_or_mod,path,nexdir): + self.path=self.oldpath.pop() + def pre_build(self): + if hasattr(self,'pre_funs'): + for m in self.pre_funs: + m(self) + def post_build(self): + if hasattr(self,'post_funs'): + for m in self.post_funs: + m(self) + def add_pre_fun(self,meth): + try:self.pre_funs.append(meth) + except AttributeError:self.pre_funs=[meth] + def add_post_fun(self,meth): + try:self.post_funs.append(meth) + except AttributeError:self.post_funs=[meth] + def use_the_magic(self): + Task.algotype=Task.MAXPARALLEL + Task.file_deps=Task.extract_deps + diff -Nru showq-0.4.1+git20090622/wafadmin/Configure.py 
showq-0.4.1+git20090622+dfsg0/wafadmin/Configure.py --- showq-0.4.1+git20090622/wafadmin/Configure.py 1970-01-01 00:00:00.000000000 +0000 +++ showq-0.4.1+git20090622+dfsg0/wafadmin/Configure.py 2009-05-01 23:35:38.000000000 +0000 @@ -0,0 +1,209 @@ +#! /usr/bin/env python +# encoding: utf-8 + +import os,shlex,sys,time +try:import cPickle +except ImportError:import pickle as cPickle +import Environment,Utils,Options +from Logs import warn +from Constants import* +conf_template='''# project %(app)s configured on %(now)s by +# waf %(wafver)s (abi %(abi)s, python %(pyver)x on %(systype)s) +# using %(args)s +# +''' +class ConfigurationError(Utils.WscriptError): + pass +autoconfig=False +def find_file(filename,path_list): + for directory in Utils.to_list(path_list): + if os.path.exists(os.path.join(directory,filename)): + return directory + return'' +def find_program_impl(env,filename,path_list=[],var=None,environ=None): + if not environ: + environ=os.environ + try:path_list=path_list.split() + except AttributeError:pass + if var: + if var in environ:env[var]=environ[var] + if env[var]:return env[var] + if not path_list:path_list=environ['PATH'].split(os.pathsep) + ext=(Options.platform=='win32')and'.exe,.com,.bat,.cmd'or'' + for y in[filename+x for x in ext.split(',')]: + for directory in path_list: + x=os.path.join(directory,y) + if os.path.isfile(x): + if var:env[var]=x + return x + return'' +class ConfigurationContext(Utils.Context): + tests={} + error_handlers=[] + def __init__(self,env=None,blddir='',srcdir=''): + self.env=None + self.envname='' + self.environ=dict(os.environ) + self.line_just=40 + self.blddir=blddir + self.srcdir=srcdir + self.all_envs={} + self.cwd=self.curdir=os.getcwd() + self.tools=[] + self.setenv(DEFAULT) + self.lastprog='' + self.hash=0 + self.files=[] + self.tool_cache=[] + if self.blddir: + self.post_init() + def post_init(self): + self.cachedir=os.path.join(self.blddir,CACHE_DIR) + path=os.path.join(self.blddir,WAF_CONFIG_LOG) + try:os.unlink(path) + except(OSError,IOError):pass + self.log=open(path,'w') + app=getattr(Utils.g_module,'APPNAME','') + if app: + ver=getattr(Utils.g_module,'VERSION','') + if ver: + app="%s (%s)"%(app,ver) + now=time.ctime() + pyver=sys.hexversion + systype=sys.platform + args=" ".join(sys.argv) + wafver=WAFVERSION + abi=ABI + self.log.write(conf_template%vars()) + def __del__(self): + if hasattr(self,'log')and self.log: + self.log.close() + def fatal(self,msg): + raise ConfigurationError(msg) + def check_tool(self,input,tooldir=None,funs=None): + tools=Utils.to_list(input) + if tooldir:tooldir=Utils.to_list(tooldir) + for tool in tools: + tool=tool.replace('++','xx') + mag=(tool,id(self.env),funs) + if mag in self.tool_cache: + continue + self.tool_cache.append(mag) + module=Utils.load_tool(tool,tooldir) + func=getattr(module,'detect',None) + if func: + if type(func)is type(find_file):func(self) + else:self.eval_rules(funs or func) + self.tools.append({'tool':tool,'tooldir':tooldir,'funs':funs}) + def sub_config(self,k): + self.recurse(k,name='configure') + def pre_recurse(self,name_or_mod,path,nexdir): + return{'conf':self,'ctx':self} + def post_recurse(self,name_or_mod,path,nexdir): + if not autoconfig: + return + self.hash=hash((self.hash,getattr(name_or_mod,'waf_hash_val',name_or_mod))) + self.files.append(path) + def store(self,file=''): + if not os.path.isdir(self.cachedir): + os.makedirs(self.cachedir) + if not file: + file=open(os.path.join(self.cachedir,'build.config.py'),'w') + file.write('version = 0x%x\n'%HEXVERSION) + 
file.write('tools = %r\n'%self.tools) + file.close() + if not self.all_envs: + self.fatal('nothing to store in the configuration context!') + for key in self.all_envs: + tmpenv=self.all_envs[key] + tmpenv.store(os.path.join(self.cachedir,key+CACHE_SUFFIX)) + def set_env_name(self,name,env): + self.all_envs[name]=env + return env + def retrieve(self,name,fromenv=None): + try: + env=self.all_envs[name] + except KeyError: + env=Environment.Environment() + env['PREFIX']=os.path.abspath(os.path.expanduser(Options.options.prefix)) + self.all_envs[name]=env + else: + if fromenv:warn("The environment %s may have been configured already"%name) + return env + def setenv(self,name): + self.env=self.retrieve(name) + self.envname=name + def add_os_flags(self,var,dest=None): + try:self.env.append_value(dest or var,Utils.to_list(self.environ[var])) + except KeyError:pass + def check_message_1(self,sr): + self.line_just=max(self.line_just,len(sr)) + self.log.write(sr+'\n\n') + Utils.pprint('NORMAL',"%s :"%sr.ljust(self.line_just),sep='') + def check_message_2(self,sr,color='GREEN'): + Utils.pprint(color,sr) + def check_message(self,th,msg,state,option=''): + sr='Checking for %s %s'%(th,msg) + self.check_message_1(sr) + p=self.check_message_2 + if state:p('ok '+option) + else:p('not found','YELLOW') + def check_message_custom(self,th,msg,custom,option='',color='PINK'): + sr='Checking for %s %s'%(th,msg) + self.check_message_1(sr) + self.check_message_2(custom,color) + def find_program(self,filename,path_list=[],var=None,mandatory=False): + ret=find_program_impl(self.env,filename,path_list,var,environ=self.environ) + self.check_message('program',filename,ret,ret) + self.log.write('find program=%r paths=%r var=%r -> %r\n\n'%(filename,path_list,var,ret)) + if not ret and mandatory: + self.fatal('The program %s could not be found'%filename) + return ret + def cmd_to_list(self,cmd): + if isinstance(cmd,str)and cmd.find(' '): + try: + os.stat(cmd) + except OSError: + return shlex.split(cmd) + else: + return[cmd] + return cmd + def __getattr__(self,name): + r=self.__class__.__dict__.get(name,None) + if r:return r + if name and name.startswith('require_'): + for k in['check_','find_']: + n=name.replace('require_',k) + ret=self.__class__.__dict__.get(n,None) + if ret: + def run(*k,**kw): + r=ret(self,*k,**kw) + if not r: + self.fatal('requirement failure') + return r + return run + self.fatal('No such method %r'%name) + def eval_rules(self,rules): + self.rules=Utils.to_list(rules) + for x in self.rules: + f=getattr(self,x) + if not f:self.fatal("No such method '%s'."%x) + try: + f() + except Exception,e: + ret=self.err_handler(x,e) + if ret==BREAK: + break + elif ret==CONTINUE: + continue + else: + self.fatal(e) + def err_handler(self,fun,error): + pass +def conf(f): + setattr(ConfigurationContext,f.__name__,f) + return f +def conftest(f): + ConfigurationContext.tests[f.__name__]=f + return conf(f) + diff -Nru showq-0.4.1+git20090622/wafadmin/Constants.py showq-0.4.1+git20090622+dfsg0/wafadmin/Constants.py --- showq-0.4.1+git20090622/wafadmin/Constants.py 1970-01-01 00:00:00.000000000 +0000 +++ showq-0.4.1+git20090622+dfsg0/wafadmin/Constants.py 2009-05-01 23:35:38.000000000 +0000 @@ -0,0 +1,47 @@ +#! 
/usr/bin/env python +# encoding: utf-8 + +HEXVERSION=0x10506 +WAFVERSION="1.5.6" +WAFREVISION="6178M" +ABI=7 +O644=420 +O755=493 +MAXJOBS=99999999 +CACHE_DIR='c4che' +CACHE_SUFFIX='.cache.py' +DBFILE='.wafpickle-%d'%ABI +WSCRIPT_FILE='wscript' +WSCRIPT_BUILD_FILE='wscript_build' +WAF_CONFIG_LOG='config.log' +WAF_CONFIG_H='config.h' +SIG_NIL='iluvcuteoverload' +VARIANT='_VARIANT_' +DEFAULT='default' +SRCDIR='srcdir' +BLDDIR='blddir' +APPNAME='APPNAME' +VERSION='VERSION' +DEFINES='defines' +UNDEFINED=() +BREAK="break" +CONTINUE="continue" +JOBCONTROL="JOBCONTROL" +MAXPARALLEL="MAXPARALLEL" +NORMAL="NORMAL" +NOT_RUN=0 +MISSING=1 +CRASHED=2 +EXCEPTION=3 +SKIPPED=8 +SUCCESS=9 +ASK_LATER=-1 +SKIP_ME=-2 +RUN_ME=-3 +LOG_FORMAT="%(asctime)s %(c1)s%(zone)s%(c2)s %(message)s" +HOUR_FORMAT="%H:%M:%S" +TEST_OK=True +CFG_FILES='cfg_files' +INSTALL=1337 +UNINSTALL=-1337 + diff -Nru showq-0.4.1+git20090622/wafadmin/Environment.py showq-0.4.1+git20090622+dfsg0/wafadmin/Environment.py --- showq-0.4.1+git20090622/wafadmin/Environment.py 1970-01-01 00:00:00.000000000 +0000 +++ showq-0.4.1+git20090622+dfsg0/wafadmin/Environment.py 2009-05-01 23:35:38.000000000 +0000 @@ -0,0 +1,143 @@ +#! /usr/bin/env python +# encoding: utf-8 +import sys +if sys.hexversion < 0x020400f0: from sets import Set as set +import os,copy,re +import Logs,Options,Utils +from Constants import* +re_imp=re.compile('^(#)*?([^#=]*?)\ =\ (.*?)$',re.M) +class Environment(object): + __slots__=("table","parent") + def __init__(self,filename=None): + self.table={} + if filename: + self.load(filename) + def __contains__(self,key): + if key in self.table:return True + try:return self.parent.__contains__(key) + except AttributeError:return False + def __str__(self): + keys=set() + cur=self + while cur: + keys.update(cur.table.keys()) + cur=getattr(cur,'parent',None) + keys=list(keys) + keys.sort() + return"\n".join(["%r %r"%(x,self.__getitem__(x))for x in keys]) + def __getitem__(self,key): + try: + while 1: + x=self.table.get(key,None) + if not x is None: + return x + self=self.parent + except AttributeError: + return[] + def __setitem__(self,key,value): + self.table[key]=value + def __delitem__(self,key,value): + del self.table[key] + def set_variant(self,name): + self.table[VARIANT]=name + def variant(self): + try: + while 1: + x=self.table.get(VARIANT,None) + if not x is None: + return x + self=self.parent + except AttributeError: + return DEFAULT + def copy(self): + newenv=Environment() + newenv.parent=self + return newenv + def get_flat(self,key): + s=self[key] + if isinstance(s,str):return s + return' '.join(s) + def _get_list_value_for_modification(self,key): + try: + value=self.table[key] + except KeyError: + try:value=self.parent[key] + except AttributeError:value=[] + if isinstance(value,list): + value=copy.copy(value) + else: + value=[value] + else: + if not isinstance(value,list): + value=[value] + self.table[key]=value + return value + def append_value(self,var,value): + current_value=self._get_list_value_for_modification(var) + if isinstance(value,list): + current_value.extend(value) + else: + current_value.append(value) + def prepend_value(self,var,value): + current_value=self._get_list_value_for_modification(var) + if isinstance(value,list): + current_value=value+current_value + self.table[var]=current_value + else: + current_value.insert(0,value) + def append_unique(self,var,value): + current_value=self._get_list_value_for_modification(var) + if isinstance(value,list): + for value_item in value: + if value_item not in 
current_value: + current_value.append(value_item) + else: + if value not in current_value: + current_value.append(value) + def get_merged_dict(self): + table_list=[] + env=self + while 1: + table_list.insert(0,env.table) + try:env=env.parent + except AttributeError:break + merged_table={} + for table in table_list: + merged_table.update(table) + return merged_table + def store(self,filename): + file=open(filename,'w') + merged_table=self.get_merged_dict() + keys=merged_table.keys() + keys.sort() + for k in keys:file.write('%s = %r\n'%(k,merged_table[k])) + file.close() + def load(self,filename): + tbl=self.table + code=Utils.readf(filename) + for m in re_imp.finditer(code): + g=m.group + tbl[g(2)]=eval(g(3)) + Logs.debug('env: %s'%str(self.table)) + def get_destdir(self): + if self.__getitem__('NOINSTALL'):return'' + return Options.options.destdir + def update(self,d): + for k,v in d.iteritems(): + self[k]=v + def __getattr__(self,name): + if name in self.__slots__: + return object.__getattr__(self,name) + else: + return self[name] + def __setattr__(self,name,value): + if name in self.__slots__: + object.__setattr__(self,name,value) + else: + self[name]=value + def __detattr__(self,name): + if name in self.__slots__: + object.__detattr__(self,name) + else: + del self[name] + diff -Nru showq-0.4.1+git20090622/wafadmin/__init__.py showq-0.4.1+git20090622+dfsg0/wafadmin/__init__.py --- showq-0.4.1+git20090622/wafadmin/__init__.py 1970-01-01 00:00:00.000000000 +0000 +++ showq-0.4.1+git20090622+dfsg0/wafadmin/__init__.py 2009-05-01 23:35:38.000000000 +0000 @@ -0,0 +1,4 @@ +#! /usr/bin/env python +# encoding: utf-8 + + diff -Nru showq-0.4.1+git20090622/wafadmin/Logs.py showq-0.4.1+git20090622+dfsg0/wafadmin/Logs.py --- showq-0.4.1+git20090622/wafadmin/Logs.py 1970-01-01 00:00:00.000000000 +0000 +++ showq-0.4.1+git20090622+dfsg0/wafadmin/Logs.py 2009-05-01 23:35:38.000000000 +0000 @@ -0,0 +1,89 @@ +#! 
/usr/bin/env python +# encoding: utf-8 + +import os,re,logging,traceback,sys +from Constants import* +zones='' +verbose=0 +colors_lst={'USE':True,'BOLD':'\x1b[01;1m','RED':'\x1b[01;91m','GREEN':'\x1b[32m','YELLOW':'\x1b[33m','PINK':'\x1b[35m','BLUE':'\x1b[01;34m','CYAN':'\x1b[36m','NORMAL':'\x1b[0m','cursor_on':'\x1b[?25h','cursor_off':'\x1b[?25l',} +got_tty=not os.environ.get('TERM','dumb')in['dumb','emacs'] +if got_tty: + try: + got_tty=sys.stderr.isatty() + except AttributeError: + got_tty=False +import Utils +if not got_tty or sys.platform=='win32'or'NOCOLOR'in os.environ: + colors_lst['USE']=False +def get_color(cl): + if not colors_lst['USE']:return'' + return colors_lst.get(cl,'') +class foo(object): + def __getattr__(self,a): + return get_color(a) + def __call__(self,a): + return get_color(a) +colors=foo() +re_log=re.compile(r'(\w+): (.*)',re.M) +class log_filter(logging.Filter): + def __init__(self,name=None): + pass + def filter(self,rec): + rec.c1=colors.PINK + rec.c2=colors.NORMAL + rec.zone=rec.module + if rec.levelno>=logging.INFO: + if rec.levelno>=logging.ERROR: + rec.c1=colors.RED + elif rec.levelno>=logging.WARNING: + rec.c1=colors.YELLOW + else: + rec.c1=colors.GREEN + return True + zone='' + m=re_log.match(rec.msg) + if m: + zone=rec.zone=m.group(1) + rec.msg=m.group(2) + if zones: + return getattr(rec,'zone','')in zones or'*'in zones + elif not verbose>2: + return False + return True +class formatter(logging.Formatter): + def __init__(self): + logging.Formatter.__init__(self,LOG_FORMAT,HOUR_FORMAT) + def format(self,rec): + if rec.levelno>=logging.WARNING or rec.levelno==logging.INFO: + return'%s%s%s'%(rec.c1,rec.msg,rec.c2) + return logging.Formatter.format(self,rec) +def debug(msg): + if verbose: + msg=msg.replace('\n',' ') + logging.debug(msg) +def error(msg): + logging.error(msg) + if verbose>1: + if isinstance(msg,Utils.WafError): + st=msg.stack + else: + st=traceback.extract_stack() + if st: + st=st[:-1] + buf=[] + for filename,lineno,name,line in st: + buf.append(' File "%s", line %d, in %s'%(filename,lineno,name)) + if line: + buf.append(' %s'%line.strip()) + if buf:logging.error("\n".join(buf)) +warn=logging.warn +info=logging.info +def init_log(): + log=logging.getLogger() + log.handlers=[] + hdlr=logging.StreamHandler() + hdlr.setFormatter(formatter()) + log.addHandler(hdlr) + log.addFilter(log_filter()) + log.setLevel(logging.DEBUG) + diff -Nru showq-0.4.1+git20090622/wafadmin/Node.py showq-0.4.1+git20090622+dfsg0/wafadmin/Node.py --- showq-0.4.1+git20090622/wafadmin/Node.py 1970-01-01 00:00:00.000000000 +0000 +++ showq-0.4.1+git20090622+dfsg0/wafadmin/Node.py 2009-05-01 23:35:38.000000000 +0000 @@ -0,0 +1,405 @@ +#! 
/usr/bin/env python +# encoding: utf-8 + +import os,sys,fnmatch,re +import Utils +UNDEFINED=0 +DIR=1 +FILE=2 +BUILD=3 +type_to_string={UNDEFINED:"unk",DIR:"dir",FILE:"src",BUILD:"bld"} +exclude_regs=''' +**/*~ +**/#*# +**/.#* +**/%*% +**/._* +**/CVS +**/CVS/** +**/.cvsignore +**/SCCS +**/SCCS/** +**/vssver.scc +**/.svn +**/.svn/** +**/.DS_Store'''.split() +exc_fun=None +def default_excludes(): + global exc_fun + if exc_fun: + return exc_fun + regs=[Utils.jar_regexp(x)for x in exclude_regs] + def mat(path): + for x in regs: + if x.match(path): + return True + return False + exc_fun=mat + return exc_fun +class Node(object): + __slots__=("name","parent","id","childs") + def __init__(self,name,parent,node_type=UNDEFINED): + self.name=name + self.parent=parent + self.__class__.bld.id_nodes+=4 + self.id=self.__class__.bld.id_nodes+node_type + if node_type==DIR:self.childs={} + if parent and name in parent.childs: + raise Utils.WafError('node %s exists in the parent files %r already'%(name,parent)) + if parent:parent.childs[name]=self + def __setstate__(self,data): + if len(data)==4: + (self.parent,self.name,self.id,self.childs)=data + else: + (self.parent,self.name,self.id)=data + def __getstate__(self): + if getattr(self,'childs',None)is None: + return(self.parent,self.name,self.id) + else: + return(self.parent,self.name,self.id,self.childs) + def __str__(self): + if not self.parent:return'' + return"%s://%s"%(type_to_string[self.id&3],self.abspath()) + def __repr__(self): + return self.__str__() + def __hash__(self): + raise Utils.WafError('nodes, you are doing it wrong') + def __copy__(self): + raise Utils.WafError('nodes are not supposed to be cloned') + def get_type(self): + return self.id&3 + def set_type(self,t): + self.id=self.id+t-self.id&3 + def dirs(self): + return[x for x in self.childs.values()if x.id&3==DIR] + def files(self): + return[x for x in self.childs.values()if x.id&3==FILE] + def get_dir(self,name,default=None): + node=self.childs.get(name,None) + if not node or node.id&3!=DIR:return default + return node + def get_file(self,name,default=None): + node=self.childs.get(name,None) + if not node or node.id&3!=FILE:return default + return node + def get_build(self,name,default=None): + node=self.childs.get(name,None) + if not node or node.id&3!=BUILD:return default + return node + def find_resource(self,lst): + if isinstance(lst,str): + lst=Utils.split_path(lst) + if len(lst)==1: + parent=self + else: + parent=self.find_dir(lst[:-1]) + if not parent:return None + self.__class__.bld.rescan(parent) + name=lst[-1] + node=parent.childs.get(name,None) + if node: + tp=node.id&3 + if tp==FILE or tp==BUILD: + return node + else: + return None + tree=self.__class__.bld + if not name in tree.cache_dir_contents[parent.id]: + return None + path=parent.abspath()+os.sep+name + try: + st=Utils.h_file(path) + except IOError: + return None + child=self.__class__(name,parent,FILE) + tree.node_sigs[0][child.id]=st + return child + def find_or_declare(self,lst): + if isinstance(lst,str): + lst=Utils.split_path(lst) + if len(lst)==1: + parent=self + else: + parent=self.find_dir(lst[:-1]) + if not parent:return None + self.__class__.bld.rescan(parent) + name=lst[-1] + node=parent.childs.get(name,None) + if node: + tp=node.id&3 + if tp!=BUILD: + raise Utils.WafError("find_or_declare returns a build node, not a source nor a directory %r"%lst) + return node + node=self.__class__(name,parent,BUILD) + return node + def find_dir(self,lst): + if isinstance(lst,str): + lst=Utils.split_path(lst) + 
current=self + for name in lst: + self.__class__.bld.rescan(current) + prev=current + if not current.parent and name==current.name: + continue + elif not name: + continue + elif name=='.': + continue + elif name=='..': + current=current.parent or current + else: + current=prev.childs.get(name,None) + if current is None: + dir_cont=self.__class__.bld.cache_dir_contents + if prev.id in dir_cont and name in dir_cont[prev.id]: + if not prev.name: + if os.sep=='/': + dirname=os.sep+name + else: + dirname=name + else: + dirname=prev.abspath()+os.sep+name + if not os.path.isdir(dirname): + return None + current=self.__class__(name,prev,DIR) + elif(not prev.name and len(name)==2 and name[1]==':')or name.startswith('\\\\'): + current=self.__class__(name,prev,DIR) + else: + return None + else: + if current.id&3!=DIR: + return None + return current + def ensure_dir_node_from_path(self,lst): + if isinstance(lst,str): + lst=Utils.split_path(lst) + current=self + for name in lst: + if not name: + continue + elif name=='.': + continue + elif name=='..': + current=current.parent or current + else: + prev=current + current=prev.childs.get(name,None) + if current is None: + current=self.__class__(name,prev,DIR) + return current + def exclusive_build_node(self,path): + lst=Utils.split_path(path) + name=lst[-1] + if len(lst)>1: + parent=None + try: + parent=self.find_dir(lst[:-1]) + except OSError: + pass + if not parent: + parent=self.ensure_dir_node_from_path(lst[:-1]) + self.__class__.bld.cache_scanned_folders[parent.id]=1 + else: + try: + self.__class__.bld.rescan(parent) + except OSError: + pass + else: + parent=self + node=parent.childs.get(name,None) + if not node: + node=self.__class__(name,parent,BUILD) + return node + def path_to_parent(self,parent): + lst=[] + p=self + h1=parent.height() + h2=p.height() + while h2>h1: + h2-=1 + lst.append(p.name) + p=p.parent + if lst: + lst.reverse() + ret=os.path.join(*lst) + else: + ret='' + return ret + def find_ancestor(self,node): + dist=self.height()-node.height() + if dist<0:return node.find_ancestor(self) + cand=self + while dist>0: + cand=cand.parent + dist-=1 + if cand==node:return cand + cursor=node + while cand.parent: + cand=cand.parent + cursor=cursor.parent + if cand==cursor:return cand + def relpath_gen(self,going_to): + if self==going_to:return'.' + if going_to.parent==self:return'..' 
+ ancestor=self.find_ancestor(going_to) + lst=[] + cand=self + while not cand.id==ancestor.id: + lst.append(cand.name) + cand=cand.parent + cand=going_to + while not cand.id==ancestor.id: + lst.append('..') + cand=cand.parent + lst.reverse() + return os.sep.join(lst) + def nice_path(self,env=None): + tree=self.__class__.bld + ln=tree.launch_node() + if self.id&3==FILE:return self.relpath_gen(ln) + else:return os.path.join(tree.bldnode.relpath_gen(ln),env.variant(),self.relpath_gen(tree.srcnode)) + def is_child_of(self,node): + p=self + diff=self.height()-node.height() + while diff>0: + diff-=1 + p=p.parent + return p.id==node.id + def variant(self,env): + if not env:return 0 + elif self.id&3==FILE:return 0 + else:return env.variant() + def height(self): + d=self + val=-1 + while d: + d=d.parent + val+=1 + return val + def abspath(self,env=None): + variant=(env and(self.id&3!=FILE)and env.variant())or 0 + ret=self.__class__.bld.cache_node_abspath[variant].get(self.id,None) + if ret:return ret + if not variant: + if not self.parent: + val=os.sep=='/'and os.sep or'' + elif not self.parent.name: + val=(os.sep=='/'and os.sep or'')+self.name + else: + val=self.parent.abspath()+os.sep+self.name + else: + val=os.sep.join((self.__class__.bld.bldnode.abspath(),env.variant(),self.path_to_parent(self.__class__.bld.srcnode))) + self.__class__.bld.cache_node_abspath[variant][self.id]=val + return val + def change_ext(self,ext): + name=self.name + k=name.rfind('.') + if k>=0: + name=name[:k]+ext + else: + name=name+ext + return self.parent.find_or_declare([name]) + def src_dir(self,env): + return self.parent.srcpath(env) + def bld_dir(self,env): + return self.parent.bldpath(env) + def bld_base(self,env): + s=os.path.splitext(self.name)[0] + return os.path.join(self.bld_dir(env),s) + def bldpath(self,env=None): + if self.id&3==FILE: + return self.relpath_gen(self.__class__.bld.bldnode) + if self.path_to_parent(self.__class__.bld.srcnode)is not'': + return os.path.join(env.variant(),self.path_to_parent(self.__class__.bld.srcnode)) + return env.variant() + def srcpath(self,env=None): + if self.id&3==BUILD: + return self.bldpath(env) + return self.relpath_gen(self.__class__.bld.bldnode) + def read(self,env): + return Utils.readf(self.abspath(env)) + def dir(self,env): + return self.parent.abspath(env) + def file(self): + return self.name + def file_base(self): + return os.path.splitext(self.name)[0] + def suffix(self): + k=max(0,self.name.rfind('.')) + return self.name[k:] + def find_iter_impl(self,src=True,bld=True,dir=True,accept_name=None,is_prune=None,maxdepth=25): + self.__class__.bld.rescan(self) + for name in self.__class__.bld.cache_dir_contents[self.id]: + if accept_name(self,name): + node=self.find_resource(name) + if node: + if src and node.id&3==FILE: + yield node + else: + node=self.find_dir(name) + if node and node.id!=self.__class__.bld.bldnode.id: + if dir: + yield node + if not is_prune(self,name): + if maxdepth: + for k in node.find_iter_impl(src,bld,dir,accept_name,is_prune,maxdepth=maxdepth-1): + yield k + else: + if not is_prune(self,name): + node=self.find_resource(name) + if not node: + node=self.find_dir(name) + if node and node.id!=self.__class__.bld.bldnode.id: + if dir: + yield node + if maxdepth: + for k in node.find_iter_impl(src,bld,dir,accept_name,is_prune,maxdepth=maxdepth-1): + yield k + if bld: + for node in self.childs.values(): + if node.id==self.__class__.bld.bldnode.id: + continue + if node.id&3==BUILD: + if accept_name(self,node.name): + yield node + raise StopIteration 
+ def find_iter(self,in_pat=['*'],ex_pat=[],prune_pat=['.svn'],src=True,bld=True,dir=False,maxdepth=25,flat=False): + if not(src or bld or dir): + raise StopIteration + if self.id&3!=DIR: + raise StopIteration + in_pat=Utils.to_list(in_pat) + ex_pat=Utils.to_list(ex_pat) + prune_pat=Utils.to_list(prune_pat) + def accept_name(node,name): + for pat in ex_pat: + if fnmatch.fnmatchcase(name,pat): + return False + for pat in in_pat: + if fnmatch.fnmatchcase(name,pat): + return True + return False + def is_prune(node,name): + for pat in prune_pat: + if fnmatch.fnmatchcase(name,pat): + return True + return False + ret=self.find_iter_impl(src,bld,dir,accept_name,is_prune,maxdepth=maxdepth) + if flat: + return" ".join([x.relpath_gen(self)for x in ret]) + return ret + def ant_glob(self,*k,**kw): + regex=Utils.jar_regexp(k[0]) + def accept(node,name): + ts=node.relpath_gen(self)+'/'+name + return regex.match(ts) + def reject(node,name): + ts=node.relpath_gen(self)+'/'+name + return default_excludes()(ts) + ret=[x for x in self.find_iter_impl(accept_name=accept,is_prune=reject,src=kw.get('src',1),bld=kw.get('bld',1),dir=kw.get('dir',0),maxdepth=kw.get('maxdepth',25))] + if kw.get('flat',True): + return" ".join([x.relpath_gen(self)for x in ret]) + return ret +class Nodu(Node): + pass + diff -Nru showq-0.4.1+git20090622/wafadmin/Options.py showq-0.4.1+git20090622+dfsg0/wafadmin/Options.py --- showq-0.4.1+git20090622/wafadmin/Options.py 1970-01-01 00:00:00.000000000 +0000 +++ showq-0.4.1+git20090622+dfsg0/wafadmin/Options.py 2009-05-01 23:35:38.000000000 +0000 @@ -0,0 +1,148 @@ +#! /usr/bin/env python +# encoding: utf-8 + +import os,sys,imp,types,tempfile,optparse +import Logs,Utils,Configure +from Constants import* +cmds='distclean configure build install clean uninstall check dist distcheck'.split() +commands={} +is_install=False +options={} +arg_line=[] +launch_dir='' +tooldir='' +lockfile=os.environ.get('WAFLOCK','.lock-wscript') +try:cache_global=os.path.abspath(os.environ['WAFCACHE']) +except KeyError:cache_global='' +platform=Utils.detect_platform() +conf_file='conf-runs-%s-%d.pickle'%(platform,ABI) +default_prefix=os.environ.get('PREFIX') +if not default_prefix: + if platform=='win32':default_prefix=tempfile.gettempdir() + else:default_prefix='/usr/local/' +default_jobs=os.environ.get('JOBS',-1) +if default_jobs<1: + try: + if os.sysconf_names.has_key('SC_NPROCESSORS_ONLN'): + default_jobs=os.sysconf('SC_NPROCESSORS_ONLN') + else: + default_jobs=int(Utils.cmd_output(['sysctl','-n','hw.ncpu'])) + except: + default_jobs=int(os.environ.get('NUMBER_OF_PROCESSORS',1)) +default_destdir=os.environ.get('DESTDIR','') +def get_usage(self): + cmds_str=[] + module=Utils.g_module + if module: + tbl=module.__dict__ + keys=tbl.keys() + keys.sort() + if'build'in tbl: + if not module.build.__doc__: + module.build.__doc__='builds the project' + if'configure'in tbl: + if not module.configure.__doc__: + module.configure.__doc__='configures the project' + ban=['set_options','init','shutdown'] + optlst=[x for x in keys if not x in ban and type(tbl[x])is type(parse_args_impl)and tbl[x].__doc__ and not x.startswith('_')] + just=max([len(x)for x in optlst]) + for x in optlst: + cmds_str.append(' %s: %s'%(x.ljust(just),tbl[x].__doc__)) + ret='\n'.join(cmds_str) + else: + ret=' '.join(cmds) + return'''waf [command] [options] + +Main commands (example: ./waf build -j4) +%s +'''%ret +setattr(optparse.OptionParser,'get_usage',get_usage) +def create_parser(module=None): + Logs.debug('options: create_parser is called') + 
parser=optparse.OptionParser(conflict_handler="resolve",version='waf %s (%s)'%(WAFVERSION,WAFREVISION)) + parser.formatter.width=Utils.get_term_cols() + p=parser.add_option + p('-j','--jobs',type='int',default=default_jobs,help='amount of parallel jobs (%r)'%default_jobs,dest='jobs') + p('-k','--keep',action='store_true',default=False,help='keep running happily on independent task groups',dest='keep') + p('-v','--verbose',action='count',default=0,help='verbosity level -v -vv or -vvv [default: 0]',dest='verbose') + p('--nocache',action='store_true',default=False,help='ignore the WAFCACHE (if set)',dest='nocache') + p('--zones',action='store',default='',help='debugging zones (task_gen, deps, tasks, etc)',dest='zones') + p('-p','--progress',action='count',default=0,help='-p: progress bar; -pp: ide output',dest='progress_bar') + p('--targets',action='store',default='',help='build given task generators, e.g. "target1,target2"',dest='compile_targets') + gr=optparse.OptionGroup(parser,'configuration options') + parser.add_option_group(gr) + gr.add_option('-b','--blddir',action='store',default='',help='build dir for the project (configuration)',dest='blddir') + gr.add_option('-s','--srcdir',action='store',default='',help='src dir for the project (configuration)',dest='srcdir') + gr.add_option('--prefix',help='installation prefix (configuration) [default: %r]'%default_prefix,default=default_prefix,dest='prefix') + gr=optparse.OptionGroup(parser,'installation options') + parser.add_option_group(gr) + gr.add_option('--destdir',help='installation root [default: %r]'%default_destdir,default=default_destdir,dest='destdir') + gr.add_option('-f','--force',action='store_true',default=False,help='force file installation',dest='force') + return parser +def parse_args_impl(parser,_args=None): + global options,commands,arg_line + (options,args)=parser.parse_args(args=_args) + arg_line=args + commands={} + for var in cmds:commands[var]=0 + if not args: + commands['build']=1 + args.append('build') + for arg in args: + commands[arg]=True + if'check'in args: + idx=args.index('check') + try: + bidx=args.index('build') + if bidx>idx: + raise ValueError,'build before check' + except ValueError,e: + args.insert(idx,'build') + if args[0]!='init': + args.insert(0,'init') + if options.keep:options.jobs=1 + if options.jobs<1:options.jobs=1 + if'install'in sys.argv or'uninstall'in sys.argv: + options.destdir=options.destdir and os.path.abspath(os.path.expanduser(options.destdir)) + Logs.verbose=options.verbose + Logs.init_log() + if options.zones: + Logs.zones=options.zones.split(',') + if not Logs.verbose:Logs.verbose=1 + elif Logs.verbose>0: + Logs.zones=['runner'] + if Logs.verbose>2: + Logs.zones=['*'] +class Handler(Utils.Context): + parser=None + def __init__(self,module=None): + self.parser=create_parser(module) + self.cwd=os.getcwd() + Handler.parser=self + def add_option(self,*k,**kw): + self.parser.add_option(*k,**kw) + def add_option_group(self,*k,**kw): + return self.parser.add_option_group(*k,**kw) + def get_option_group(self,opt_str): + return self.parser.get_option_group(opt_str) + def sub_options(self,*k,**kw): + if not k:raise Utils.WscriptError('folder expected') + self.recurse(k[0],name='set_options') + def tool_options(self,*k,**kw): + + if not k[0]: + raise Utils.WscriptError('invalid tool_options call %r %r'%(k,kw)) + tools=Utils.to_list(k[0]) + path=Utils.to_list(kw.get('tdir',kw.get('tooldir',tooldir))) + for tool in tools: + tool=tool.replace('++','xx') + module=Utils.load_tool(tool,path) + try: + 
fun=module.set_options + except AttributeError: + pass + else: + fun(kw.get('option_group',self)) + def parse_args(self,args=None): + parse_args_impl(self.parser,args) + diff -Nru showq-0.4.1+git20090622/wafadmin/pproc.py showq-0.4.1+git20090622+dfsg0/wafadmin/pproc.py --- showq-0.4.1+git20090622/wafadmin/pproc.py 1970-01-01 00:00:00.000000000 +0000 +++ showq-0.4.1+git20090622+dfsg0/wafadmin/pproc.py 2009-05-01 23:35:38.000000000 +0000 @@ -0,0 +1,496 @@ +#! /usr/bin/env python +# encoding: utf-8 + +import sys +mswindows=(sys.platform=="win32") +import os +import types +import traceback +import gc +class CalledProcessError(Exception): + def __init__(self,returncode,cmd): + self.returncode=returncode + self.cmd=cmd + def __str__(self): + return"Command '%s' returned non-zero exit status %d"%(self.cmd,self.returncode) +if mswindows: + import threading + import msvcrt + if 0: + import pywintypes + from win32api import GetStdHandle,STD_INPUT_HANDLE,STD_OUTPUT_HANDLE,STD_ERROR_HANDLE + from win32api import GetCurrentProcess,DuplicateHandle,GetModuleFileName,GetVersion + from win32con import DUPLICATE_SAME_ACCESS,SW_HIDE + from win32pipe import CreatePipe + from win32process import CreateProcess,STARTUPINFO,GetExitCodeProcess,STARTF_USESTDHANDLES,STARTF_USESHOWWINDOW,CREATE_NEW_CONSOLE + from win32event import WaitForSingleObject,INFINITE,WAIT_OBJECT_0 + else: + from _subprocess import* + class STARTUPINFO: + dwFlags=0 + hStdInput=None + hStdOutput=None + hStdError=None + wShowWindow=0 + class pywintypes: + error=IOError +else: + import select + import errno + import fcntl + import pickle +__all__=["Popen","PIPE","STDOUT","call","check_call","CalledProcessError"] +try: + MAXFD=os.sysconf("SC_OPEN_MAX") +except: + MAXFD=256 +try: + False +except NameError: + False=0 + True=1 +_active=[] +def _cleanup(): + for inst in _active[:]: + if inst.poll(_deadstate=sys.maxint)>=0: + try: + _active.remove(inst) + except ValueError: + pass +PIPE=-1 +STDOUT=-2 +def call(*popenargs,**kwargs): + return Popen(*popenargs,**kwargs).wait() +def check_call(*popenargs,**kwargs): + retcode=call(*popenargs,**kwargs) + cmd=kwargs.get("args") + if cmd is None: + cmd=popenargs[0] + if retcode: + raise CalledProcessError(retcode,cmd) + return retcode +def list2cmdline(seq): + result=[] + needquote=False + for arg in seq: + bs_buf=[] + if result: + result.append(' ') + needquote=(" "in arg)or("\t"in arg)or arg=="" + if needquote: + result.append('"') + for c in arg: + if c=='\\': + bs_buf.append(c) + elif c=='"': + result.append('\\'*len(bs_buf)*2) + bs_buf=[] + result.append('\\"') + else: + if bs_buf: + result.extend(bs_buf) + bs_buf=[] + result.append(c) + if bs_buf: + result.extend(bs_buf) + if needquote: + result.extend(bs_buf) + result.append('"') + return''.join(result) +class Popen(object): + def __init__(self,args,bufsize=0,executable=None,stdin=None,stdout=None,stderr=None,preexec_fn=None,close_fds=False,shell=False,cwd=None,env=None,universal_newlines=False,startupinfo=None,creationflags=0): + _cleanup() + self._child_created=False + if not isinstance(bufsize,(int,long)): + raise TypeError("bufsize must be an integer") + if mswindows: + if preexec_fn is not None: + raise ValueError("preexec_fn is not supported on Windows platforms") + if close_fds: + raise ValueError("close_fds is not supported on Windows platforms") + else: + if startupinfo is not None: + raise ValueError("startupinfo is only supported on Windows platforms") + if creationflags!=0: + raise ValueError("creationflags is only supported on Windows 
platforms") + self.stdin=None + self.stdout=None + self.stderr=None + self.pid=None + self.returncode=None + self.universal_newlines=universal_newlines + (p2cread,p2cwrite,c2pread,c2pwrite,errread,errwrite)=self._get_handles(stdin,stdout,stderr) + self._execute_child(args,executable,preexec_fn,close_fds,cwd,env,universal_newlines,startupinfo,creationflags,shell,p2cread,p2cwrite,c2pread,c2pwrite,errread,errwrite) + if mswindows: + if stdin is None and p2cwrite is not None: + os.close(p2cwrite) + p2cwrite=None + if stdout is None and c2pread is not None: + os.close(c2pread) + c2pread=None + if stderr is None and errread is not None: + os.close(errread) + errread=None + if p2cwrite: + self.stdin=os.fdopen(p2cwrite,'wb',bufsize) + if c2pread: + if universal_newlines: + self.stdout=os.fdopen(c2pread,'rU',bufsize) + else: + self.stdout=os.fdopen(c2pread,'rb',bufsize) + if errread: + if universal_newlines: + self.stderr=os.fdopen(errread,'rU',bufsize) + else: + self.stderr=os.fdopen(errread,'rb',bufsize) + def _translate_newlines(self,data): + data=data.replace("\r\n","\n") + data=data.replace("\r","\n") + return data + def __del__(self,sys=sys): + if not self._child_created: + return + self.poll(_deadstate=sys.maxint) + if self.returncode is None and _active is not None: + _active.append(self) + def communicate(self,input=None): + if[self.stdin,self.stdout,self.stderr].count(None)>=2: + stdout=None + stderr=None + if self.stdin: + if input: + self.stdin.write(input) + self.stdin.close() + elif self.stdout: + stdout=self.stdout.read() + elif self.stderr: + stderr=self.stderr.read() + self.wait() + return(stdout,stderr) + return self._communicate(input) + if mswindows: + def _get_handles(self,stdin,stdout,stderr): + if stdin is None and stdout is None and stderr is None: + return(None,None,None,None,None,None) + p2cread,p2cwrite=None,None + c2pread,c2pwrite=None,None + errread,errwrite=None,None + if stdin is None: + p2cread=GetStdHandle(STD_INPUT_HANDLE) + if p2cread is not None: + pass + elif stdin is None or stdin==PIPE: + p2cread,p2cwrite=CreatePipe(None,0) + p2cwrite=p2cwrite.Detach() + p2cwrite=msvcrt.open_osfhandle(p2cwrite,0) + elif isinstance(stdin,int): + p2cread=msvcrt.get_osfhandle(stdin) + else: + p2cread=msvcrt.get_osfhandle(stdin.fileno()) + p2cread=self._make_inheritable(p2cread) + if stdout is None: + c2pwrite=GetStdHandle(STD_OUTPUT_HANDLE) + if c2pwrite is not None: + pass + elif stdout is None or stdout==PIPE: + c2pread,c2pwrite=CreatePipe(None,0) + c2pread=c2pread.Detach() + c2pread=msvcrt.open_osfhandle(c2pread,0) + elif isinstance(stdout,int): + c2pwrite=msvcrt.get_osfhandle(stdout) + else: + c2pwrite=msvcrt.get_osfhandle(stdout.fileno()) + c2pwrite=self._make_inheritable(c2pwrite) + if stderr is None: + errwrite=GetStdHandle(STD_ERROR_HANDLE) + if errwrite is not None: + pass + elif stderr is None or stderr==PIPE: + errread,errwrite=CreatePipe(None,0) + errread=errread.Detach() + errread=msvcrt.open_osfhandle(errread,0) + elif stderr==STDOUT: + errwrite=c2pwrite + elif isinstance(stderr,int): + errwrite=msvcrt.get_osfhandle(stderr) + else: + errwrite=msvcrt.get_osfhandle(stderr.fileno()) + errwrite=self._make_inheritable(errwrite) + return(p2cread,p2cwrite,c2pread,c2pwrite,errread,errwrite) + def _make_inheritable(self,handle): + return DuplicateHandle(GetCurrentProcess(),handle,GetCurrentProcess(),0,1,DUPLICATE_SAME_ACCESS) + def _find_w9xpopen(self): + w9xpopen=os.path.join(os.path.dirname(GetModuleFileName(0)),"w9xpopen.exe") + if not os.path.exists(w9xpopen): + 
w9xpopen=os.path.join(os.path.dirname(sys.exec_prefix),"w9xpopen.exe") + if not os.path.exists(w9xpopen): + raise RuntimeError("Cannot locate w9xpopen.exe, which is needed for Popen to work with your shell or platform.") + return w9xpopen + def _execute_child(self,args,executable,preexec_fn,close_fds,cwd,env,universal_newlines,startupinfo,creationflags,shell,p2cread,p2cwrite,c2pread,c2pwrite,errread,errwrite): + if not isinstance(args,types.StringTypes): + args=list2cmdline(args) + if startupinfo is None: + startupinfo=STARTUPINFO() + if None not in(p2cread,c2pwrite,errwrite): + startupinfo.dwFlags|=STARTF_USESTDHANDLES + startupinfo.hStdInput=p2cread + startupinfo.hStdOutput=c2pwrite + startupinfo.hStdError=errwrite + if shell: + startupinfo.dwFlags|=STARTF_USESHOWWINDOW + startupinfo.wShowWindow=SW_HIDE + comspec=os.environ.get("COMSPEC","cmd.exe") + args=comspec+" /c "+args + if(GetVersion()>=0x80000000L or os.path.basename(comspec).lower()=="command.com"): + w9xpopen=self._find_w9xpopen() + args='"%s" %s'%(w9xpopen,args) + creationflags|=CREATE_NEW_CONSOLE + try: + hp,ht,pid,tid=CreateProcess(executable,args,None,None,1,creationflags,env,cwd,startupinfo) + except pywintypes.error,e: + raise WindowsError(*e.args) + self._child_created=True + self._handle=hp + self.pid=pid + ht.Close() + if p2cread is not None: + p2cread.Close() + if c2pwrite is not None: + c2pwrite.Close() + if errwrite is not None: + errwrite.Close() + def poll(self,_deadstate=None): + if self.returncode is None: + if WaitForSingleObject(self._handle,0)==WAIT_OBJECT_0: + self.returncode=GetExitCodeProcess(self._handle) + return self.returncode + def wait(self): + if self.returncode is None: + obj=WaitForSingleObject(self._handle,INFINITE) + self.returncode=GetExitCodeProcess(self._handle) + return self.returncode + def _readerthread(self,fh,buffer): + buffer.append(fh.read()) + def _communicate(self,input): + stdout=None + stderr=None + if self.stdout: + stdout=[] + stdout_thread=threading.Thread(target=self._readerthread,args=(self.stdout,stdout)) + stdout_thread.setDaemon(True) + stdout_thread.start() + if self.stderr: + stderr=[] + stderr_thread=threading.Thread(target=self._readerthread,args=(self.stderr,stderr)) + stderr_thread.setDaemon(True) + stderr_thread.start() + if self.stdin: + if input is not None: + self.stdin.write(input) + self.stdin.close() + if self.stdout: + stdout_thread.join() + if self.stderr: + stderr_thread.join() + if stdout is not None: + stdout=stdout[0] + if stderr is not None: + stderr=stderr[0] + if self.universal_newlines and hasattr(file,'newlines'): + if stdout: + stdout=self._translate_newlines(stdout) + if stderr: + stderr=self._translate_newlines(stderr) + self.wait() + return(stdout,stderr) + else: + def _get_handles(self,stdin,stdout,stderr): + p2cread,p2cwrite=None,None + c2pread,c2pwrite=None,None + errread,errwrite=None,None + if stdin is None: + pass + elif stdin==PIPE: + p2cread,p2cwrite=os.pipe() + elif isinstance(stdin,int): + p2cread=stdin + else: + p2cread=stdin.fileno() + if stdout is None: + pass + elif stdout==PIPE: + c2pread,c2pwrite=os.pipe() + elif isinstance(stdout,int): + c2pwrite=stdout + else: + c2pwrite=stdout.fileno() + if stderr is None: + pass + elif stderr==PIPE: + errread,errwrite=os.pipe() + elif stderr==STDOUT: + errwrite=c2pwrite + elif isinstance(stderr,int): + errwrite=stderr + else: + errwrite=stderr.fileno() + return(p2cread,p2cwrite,c2pread,c2pwrite,errread,errwrite) + def _set_cloexec_flag(self,fd): + try: + cloexec_flag=fcntl.FD_CLOEXEC + except 
AttributeError: + cloexec_flag=1 + old=fcntl.fcntl(fd,fcntl.F_GETFD) + fcntl.fcntl(fd,fcntl.F_SETFD,old|cloexec_flag) + def _close_fds(self,but): + for i in xrange(3,MAXFD): + if i==but: + continue + try: + os.close(i) + except: + pass + def _execute_child(self,args,executable,preexec_fn,close_fds,cwd,env,universal_newlines,startupinfo,creationflags,shell,p2cread,p2cwrite,c2pread,c2pwrite,errread,errwrite): + if isinstance(args,types.StringTypes): + args=[args] + else: + args=list(args) + if shell: + args=["/bin/sh","-c"]+args + if executable is None: + executable=args[0] + errpipe_read,errpipe_write=os.pipe() + self._set_cloexec_flag(errpipe_write) + gc_was_enabled=gc.isenabled() + gc.disable() + try: + self.pid=os.fork() + except: + if gc_was_enabled: + gc.enable() + raise + self._child_created=True + if self.pid==0: + try: + if p2cwrite: + os.close(p2cwrite) + if c2pread: + os.close(c2pread) + if errread: + os.close(errread) + os.close(errpipe_read) + if p2cread: + os.dup2(p2cread,0) + if c2pwrite: + os.dup2(c2pwrite,1) + if errwrite: + os.dup2(errwrite,2) + if p2cread and p2cread not in(0,): + os.close(p2cread) + if c2pwrite and c2pwrite not in(p2cread,1): + os.close(c2pwrite) + if errwrite and errwrite not in(p2cread,c2pwrite,2): + os.close(errwrite) + if close_fds: + self._close_fds(but=errpipe_write) + if cwd is not None: + os.chdir(cwd) + if preexec_fn: + apply(preexec_fn) + if env is None: + os.execvp(executable,args) + else: + os.execvpe(executable,args,env) + except: + exc_type,exc_value,tb=sys.exc_info() + exc_lines=traceback.format_exception(exc_type,exc_value,tb) + exc_value.child_traceback=''.join(exc_lines) + os.write(errpipe_write,pickle.dumps(exc_value)) + os._exit(255) + if gc_was_enabled: + gc.enable() + os.close(errpipe_write) + if p2cread and p2cwrite: + os.close(p2cread) + if c2pwrite and c2pread: + os.close(c2pwrite) + if errwrite and errread: + os.close(errwrite) + data=os.read(errpipe_read,1048576) + os.close(errpipe_read) + if data!="": + os.waitpid(self.pid,0) + child_exception=pickle.loads(data) + raise child_exception + def _handle_exitstatus(self,sts): + if os.WIFSIGNALED(sts): + self.returncode=-os.WTERMSIG(sts) + elif os.WIFEXITED(sts): + self.returncode=os.WEXITSTATUS(sts) + else: + raise RuntimeError("Unknown child exit status!") + def poll(self,_deadstate=None): + if self.returncode is None: + try: + pid,sts=os.waitpid(self.pid,os.WNOHANG) + if pid==self.pid: + self._handle_exitstatus(sts) + except os.error: + if _deadstate is not None: + self.returncode=_deadstate + return self.returncode + def wait(self): + if self.returncode is None: + pid,sts=os.waitpid(self.pid,0) + self._handle_exitstatus(sts) + return self.returncode + def _communicate(self,input): + read_set=[] + write_set=[] + stdout=None + stderr=None + if self.stdin: + self.stdin.flush() + if input: + write_set.append(self.stdin) + else: + self.stdin.close() + if self.stdout: + read_set.append(self.stdout) + stdout=[] + if self.stderr: + read_set.append(self.stderr) + stderr=[] + input_offset=0 + while read_set or write_set: + rlist,wlist,xlist=select.select(read_set,write_set,[]) + if self.stdin in wlist: + bytes_written=os.write(self.stdin.fileno(),buffer(input,input_offset,512)) + input_offset+=bytes_written + if input_offset>=len(input): + self.stdin.close() + write_set.remove(self.stdin) + if self.stdout in rlist: + data=os.read(self.stdout.fileno(),1024) + if data=="": + self.stdout.close() + read_set.remove(self.stdout) + stdout.append(data) + if self.stderr in rlist: + 
data=os.read(self.stderr.fileno(),1024) + if data=="": + self.stderr.close() + read_set.remove(self.stderr) + stderr.append(data) + if stdout is not None: + stdout=''.join(stdout) + if stderr is not None: + stderr=''.join(stderr) + if self.universal_newlines and hasattr(file,'newlines'): + if stdout: + stdout=self._translate_newlines(stdout) + if stderr: + stderr=self._translate_newlines(stderr) + self.wait() + return(stdout,stderr) + diff -Nru showq-0.4.1+git20090622/wafadmin/py3kfixes.py showq-0.4.1+git20090622+dfsg0/wafadmin/py3kfixes.py --- showq-0.4.1+git20090622/wafadmin/py3kfixes.py 1970-01-01 00:00:00.000000000 +0000 +++ showq-0.4.1+git20090622+dfsg0/wafadmin/py3kfixes.py 2009-05-01 23:35:38.000000000 +0000 @@ -0,0 +1,61 @@ +#! /usr/bin/env python +# encoding: utf-8 + +import os +all_modifs={} +def modif(filename,fun): + f=open(filename,'r') + txt=f.read() + f.close() + txt=fun(txt) + f=open(filename,'w') + f.write(txt) + f.close() +def subst(filename): + def do_subst(fun): + global all_modifs + try: + all_modifs[filename]+=fun + except KeyError: + all_modifs[filename]=[fun] + return fun + return do_subst +def r1(code): + code=code.replace("'iluvcuteoverload'","b'iluvcuteoverload'") + code=code.replace("ABI=7","ABI=37") + return code +def r2(code): + code=code.replace("p.stdin.write('\\n')","p.stdin.write(b'\\n')") + code=code.replace("out=str(out)","out=out.decode('utf-8')") + return code +def r3(code): + code=code.replace("m.update(str(lst))","m.update(str(lst).encode())") + return code +def r4(code): + code=code.replace("up(self.__class__.__name__)","up(self.__class__.__name__.encode())") + code=code.replace("up(self.env.variant())","up(self.env.variant().encode())") + code=code.replace("up(x.parent.abspath())","up(x.parent.abspath().encode())") + code=code.replace("up(x.name)","up(x.name.encode())") + return code +def r5(code): + code=code.replace("cPickle.dump(data,file,-1)","cPickle.dump(data,file)") + return code +def fixdir(dir): + import subprocess + try: + proc=subprocess.Popen("2to3 -x imports -x imports2 -x import -w -n wafadmin".split(),stdout=subprocess.PIPE,stderr=subprocess.PIPE) + stdout,setderr=proc.communicate() + except: + import sys,shutil + shutil.rmtree(dir) + raise + global all_modifs + for k in all_modifs: + for v in all_modifs[k]: + modif(os.path.join(dir,'wafadmin',k),v) + +subst('Constants.py')(r1) +subst('Tools/ccroot.py')(r2) +subst('Utils.py')(r3) +subst('Task.py')(r4) +subst('Build.py')(r5) diff -Nru showq-0.4.1+git20090622/wafadmin/Runner.py showq-0.4.1+git20090622+dfsg0/wafadmin/Runner.py --- showq-0.4.1+git20090622/wafadmin/Runner.py 1970-01-01 00:00:00.000000000 +0000 +++ showq-0.4.1+git20090622+dfsg0/wafadmin/Runner.py 2009-05-01 23:35:38.000000000 +0000 @@ -0,0 +1,152 @@ +#! 
/usr/bin/env python +# encoding: utf-8 +import sys +if sys.hexversion < 0x020400f0: from sets import Set as set +import sys,random,time,threading,traceback +try:from Queue import Queue +except ImportError:from queue import Queue +import Build,Utils,Logs,Options +from Logs import debug,error +from Constants import* +GAP=15 +run_old=threading.Thread.run +def run(*args,**kwargs): + try: + run_old(*args,**kwargs) + except(KeyboardInterrupt,SystemExit): + raise + except: + sys.excepthook(*sys.exc_info()) +threading.Thread.run=run +class TaskConsumer(threading.Thread): + def __init__(self,m): + threading.Thread.__init__(self) + self.setDaemon(1) + self.master=m + self.start() + def run(self): + try: + self.loop() + except: + pass + def loop(self): + m=self.master + while 1: + tsk=m.ready.get() + if m.stop: + m.out.put(tsk) + continue + try: + tsk.generator.bld.printout(tsk.display()) + if tsk.__class__.stat:ret=tsk.__class__.stat(tsk) + else:ret=tsk.call_run() + except Exception,e: + tsk.err_msg=Utils.ex_stack() + tsk.hasrun=EXCEPTION + m.error_handler(tsk) + m.out.put(tsk) + continue + if ret: + tsk.err_code=ret + tsk.hasrun=CRASHED + else: + try: + tsk.post_run() + except Utils.WafError: + pass + except Exception: + tsk.err_msg=Utils.ex_stack() + tsk.hasrun=EXCEPTION + else: + tsk.hasrun=SUCCESS + if tsk.hasrun!=SUCCESS: + m.error_handler(tsk) + m.out.put(tsk) +class Parallel(object): + def __init__(self,bld,j=2): + self.numjobs=j + self.manager=bld.task_manager + self.manager.current_group=0 + self.total=self.manager.total() + self.outstanding=[] + self.maxjobs=MAXJOBS + self.frozen=[] + self.ready=Queue(0) + self.out=Queue(0) + self.count=0 + self.processed=1 + self.consumers=None + self.stop=False + self.error=False + def get_next(self): + if not self.outstanding: + return None + return self.outstanding.pop(0) + def postpone(self,tsk): + if random.randint(0,1): + self.frozen.insert(0,tsk) + else: + self.frozen.append(tsk) + def refill_task_list(self): + while self.count>self.numjobs+GAP or self.count>=self.maxjobs: + self.get_out() + while not self.outstanding: + if self.count: + self.get_out() + if self.frozen: + self.outstanding+=self.frozen + self.frozen=[] + elif not self.count: + (self.maxjobs,tmp)=self.manager.get_next_set() + if tmp:self.outstanding+=tmp + break + def get_out(self): + ret=self.out.get() + self.manager.add_finished(ret) + if not self.stop and getattr(ret,'more_tasks',None): + self.outstanding+=ret.more_tasks + self.total+=len(ret.more_tasks) + self.count-=1 + def error_handler(self,tsk): + if not Options.options.keep: + self.stop=True + self.error=True + def start(self): + while not self.stop: + self.refill_task_list() + tsk=self.get_next() + if not tsk: + if self.count: + continue + else: + break + if tsk.hasrun: + self.processed+=1 + self.manager.add_finished(tsk) + continue + try: + st=tsk.runnable_status() + except Exception,e: + tsk.err_msg=Utils.ex_stack() + tsk.hasrun=EXCEPTION + self.processed+=1 + self.error_handler(tsk) + self.manager.add_finished(tsk) + continue + if st==ASK_LATER: + self.postpone(tsk) + elif st==SKIP_ME: + self.processed+=1 + tsk.hasrun=SKIPPED + self.manager.add_finished(tsk) + else: + tsk.position=(self.processed,self.total) + self.count+=1 + self.ready.put(tsk) + self.processed+=1 + if not self.consumers: + self.consumers=[TaskConsumer(self)for i in xrange(self.numjobs)] + while self.error and self.count: + self.get_out() + assert(self.count==0 or self.stop) + diff -Nru showq-0.4.1+git20090622/wafadmin/Scripting.py 
showq-0.4.1+git20090622+dfsg0/wafadmin/Scripting.py --- showq-0.4.1+git20090622/wafadmin/Scripting.py 1970-01-01 00:00:00.000000000 +0000 +++ showq-0.4.1+git20090622+dfsg0/wafadmin/Scripting.py 2009-05-01 23:35:38.000000000 +0000 @@ -0,0 +1,375 @@ +#! /usr/bin/env python +# encoding: utf-8 + +import os,sys,shutil,traceback,datetime,inspect +import Utils,Configure,Build,Logs,Options,Environment,Task +from Logs import error,warn,info +from Constants import* +g_gz='bz2' +commands=[] +def prepare_impl(t,cwd,ver,wafdir): + Options.tooldir=[t] + Options.launch_dir=cwd + if'--version'in sys.argv: + opt_obj=Options.Handler() + opt_obj.curdir=cwd + opt_obj.parse_args() + sys.exit(0) + msg1='Waf: Please run waf from a directory containing a file named "%s" or run distclean'%WSCRIPT_FILE + build_dir_override=None + candidate=None + lst=os.listdir(cwd) + search_for_candidate=True + if WSCRIPT_FILE in lst: + candidate=cwd + elif'configure'in sys.argv and not WSCRIPT_BUILD_FILE in lst: + calldir=os.path.abspath(os.path.dirname(sys.argv[0])) + if WSCRIPT_FILE in os.listdir(calldir): + candidate=calldir + search_for_candidate=False + else: + error('arg[0] directory does not contain a wscript file') + sys.exit(1) + build_dir_override=cwd + while search_for_candidate: + if len(cwd)<=3: + break + dirlst=os.listdir(cwd) + if WSCRIPT_FILE in dirlst: + candidate=cwd + if'configure'in sys.argv and candidate: + break + if Options.lockfile in dirlst: + env=Environment.Environment() + env.load(os.path.join(cwd,Options.lockfile)) + candidate=env['cwd']or cwd + break + cwd=os.path.dirname(cwd) + if not candidate: + if'-h'in sys.argv or'--help'in sys.argv: + warn('No wscript file found: the help message may be incomplete') + opt_obj=Options.Handler() + opt_obj.curdir=cwd + opt_obj.parse_args() + else: + error(msg1) + sys.exit(0) + try: + os.chdir(candidate) + except OSError: + raise Utils.WafError("the folder %r is unreadable"%candidate) + Utils.set_main_module(os.path.join(candidate,WSCRIPT_FILE)) + if build_dir_override: + d=getattr(Utils.g_module,BLDDIR,None) + if d: + msg=' Overriding build directory %s with %s'%(d,build_dir_override) + warn(msg) + Utils.g_module.blddir=build_dir_override + def set_def(obj,name=''): + n=name or obj.__name__ + if not n in Utils.g_module.__dict__: + setattr(Utils.g_module,n,obj) + for k in[dist,distclean,distcheck,build,clean,install,uninstall]: + set_def(k) + set_def(Configure.ConfigurationContext,'configure_context') + for k in['build','clean','install','uninstall']: + set_def(Build.BuildContext,k+'_context') + opt_obj=Options.Handler(Utils.g_module) + opt_obj.curdir=candidate + try: + f=Utils.g_module.set_options + except AttributeError: + pass + else: + opt_obj.sub_options(['']) + opt_obj.parse_args() + if not'init'in Utils.g_module.__dict__: + Utils.g_module.init=Utils.nada + if not'shutdown'in Utils.g_module.__dict__: + Utils.g_module.shutdown=Utils.nada + main() +def prepare(t,cwd,ver,wafdir): + if WAFVERSION!=ver: + msg='Version mismatch: waf %s <> wafadmin %s (wafdir %s)'%(ver,WAFVERSION,wafdir) + print('\033[91mError: %s\033[0m'%msg) + sys.exit(1) + try: + prepare_impl(t,cwd,ver,wafdir) + except Utils.WafError,e: + error(e) + sys.exit(1) + except KeyboardInterrupt: + Utils.pprint('RED','Interrupted') + sys.exit(68) +def main(): + global commands + commands=Options.arg_line[:] + while commands: + x=commands.pop(0) + ini=datetime.datetime.now() + if x=='configure': + fun=configure + elif x=='build': + fun=build + else: + fun=getattr(Utils.g_module,x,None) + if not fun: + 
raise Utils.WscriptError('No such command %r'%x) + ctx=getattr(Utils.g_module,x+'_context',Utils.Context)() + if x in['init','shutdown','dist','distclean','distcheck']: + try: + fun(ctx) + except TypeError: + fun() + else: + fun(ctx) + ela='' + if not Options.options.progress_bar: + ela=' (%s)'%Utils.get_elapsed_time(ini) + if x!='init'and x!='shutdown': + info('%r finished successfully%s'%(x,ela)) + if not commands and x!='shutdown': + commands.append('shutdown') +def configure(conf): + src=getattr(Options.options,SRCDIR,None) + if not src:src=getattr(Utils.g_module,SRCDIR,None) + if not src: + src='.' + incomplete_src=1 + src=os.path.abspath(src) + bld=getattr(Options.options,BLDDIR,None) + if not bld: + bld=getattr(Utils.g_module,BLDDIR,None) + if bld=='.': + raise Utils.WafError('Setting blddir="." may cause distclean problems') + if not bld: + bld='build' + incomplete_bld=1 + bld=os.path.abspath(bld) + try:os.makedirs(bld) + except OSError:pass + targets=Options.options.compile_targets + Options.options.compile_targets=None + Options.is_install=False + conf.srcdir=src + conf.blddir=bld + conf.post_init() + if'incomplete_src'in vars(): + conf.check_message_1('Setting srcdir to') + conf.check_message_2(src) + if'incomplete_bld'in vars(): + conf.check_message_1('Setting blddir to') + conf.check_message_2(bld) + conf.sub_config(['']) + conf.store() + env=Environment.Environment() + env[BLDDIR]=bld + env[SRCDIR]=src + env['argv']=sys.argv + env['commands']=Options.commands + env['options']=Options.options.__dict__ + env['hash']=conf.hash + env['files']=conf.files + env['environ']=dict(conf.environ) + env['cwd']=os.path.split(Utils.g_module.root_path)[0] + if Utils.g_module.root_path!=src: + env.store(os.path.join(src,Options.lockfile)) + env.store(Options.lockfile) + Options.options.compile_targets=targets +def clean(bld): + '''removes the build files''' + try: + proj=Environment.Environment(Options.lockfile) + except IOError: + raise Utils.WafError('Nothing to clean (project not configured)') + bld.load_dirs(proj[SRCDIR],proj[BLDDIR]) + bld.load_envs() + bld.is_install=0 + bld.add_subdirs([os.path.split(Utils.g_module.root_path)[0]]) + try: + bld.clean() + finally: + bld.save() +def check_configured(bld): + if not Configure.autoconfig: + return bld + conf_cls=getattr(Utils.g_module,'configure_context',Utils.Context) + bld_cls=getattr(Utils.g_module,'build_context',Utils.Context) + def reconf(proj): + back=(Options.commands,Options.options.__dict__,Logs.zones,Logs.verbose) + Options.commands=proj['commands'] + Options.options.__dict__=proj['options'] + conf=conf_cls() + conf.environ=proj['environ'] + configure(conf) + (Options.commands,Options.options.__dict__,Logs.zones,Logs.verbose)=back + try: + proj=Environment.Environment(Options.lockfile) + except IOError: + conf=conf_cls() + configure(conf) + else: + try: + bld=bld_cls() + bld.load_dirs(proj[SRCDIR],proj[BLDDIR]) + bld.load_envs() + except Utils.WafError: + reconf(proj) + return bld_cls() + try: + proj=Environment.Environment(Options.lockfile) + except IOError: + raise Utils.WafError('Auto-config: project does not configure (bug)') + h=0 + try: + for file in proj['files']: + if file.endswith('configure'): + h=hash((h,Utils.readf(file))) + else: + mod=Utils.load_module(file) + h=hash((h,mod.waf_hash_val)) + except(OSError,IOError): + warn('Reconfiguring the project: a file is unavailable') + reconf(proj) + else: + if(h!=proj['hash']): + warn('Reconfiguring the project: the configuration has changed') + reconf(proj) + return bld_cls() 
+def install(bld): + '''installs the build files''' + bld=check_configured(bld) + Options.commands['install']=True + Options.commands['uninstall']=False + Options.is_install=True + bld.is_install=INSTALL + build_impl(bld) + bld.install() +def uninstall(bld): + '''removes the installed files''' + Options.commands['install']=False + Options.commands['uninstall']=True + Options.is_install=True + bld.is_install=UNINSTALL + try: + def runnable_status(self): + return SKIP_ME + setattr(Task.Task,'runnable_status_back',Task.Task.runnable_status) + setattr(Task.Task,'runnable_status',runnable_status) + build_impl(bld) + bld.install() + finally: + setattr(Task.Task,'runnable_status',Task.Task.runnable_status_back) +def build(bld): + bld=check_configured(bld) + Options.commands['install']=False + Options.commands['uninstall']=False + Options.is_install=False + bld.is_install=0 + return build_impl(bld) +def build_impl(bld): + try: + proj=Environment.Environment(Options.lockfile) + except IOError: + raise Utils.WafError("Project not configured (run 'waf configure' first)") + bld.load_dirs(proj[SRCDIR],proj[BLDDIR]) + bld.load_envs() + info("Waf: Entering directory `%s'"%bld.bldnode.abspath()) + bld.add_subdirs([os.path.split(Utils.g_module.root_path)[0]]) + bld.pre_build() + try: + bld.compile() + finally: + if Options.options.progress_bar:print('') + info("Waf: Leaving directory `%s'"%bld.bldnode.abspath()) + bld.post_build() + bld.install() +excludes='.bzr .bzrignore .git .gitignore .svn CVS .cvsignore .arch-ids {arch} SCCS BitKeeper .hg Makefile Makefile.in config.log'.split() +dist_exts='~ .rej .orig .pyc .pyo .bak .tar.bz2 tar.gz .zip .swp'.split() +def dont_dist(name,src,build_dir): + global excludes,dist_exts + if(name.startswith(',,')or name.startswith('++')or name.startswith('.waf-1.')or(src=='.'and name==Options.lockfile)or name in excludes or name==build_dir): + return True + for ext in dist_exts: + if name.endswith(ext): + return True + return False +def copytree(src,dst,build_dir): + names=os.listdir(src) + os.makedirs(dst) + for name in names: + srcname=os.path.join(src,name) + dstname=os.path.join(dst,name) + if dont_dist(name,src,build_dir): + continue + if os.path.isdir(srcname): + copytree(srcname,dstname,build_dir) + else: + shutil.copy2(srcname,dstname) +def distclean(ctx=None): + '''removes the build directory''' + lst=os.listdir('.') + for f in lst: + if f==Options.lockfile: + try: + proj=Environment.Environment(f) + shutil.rmtree(proj[BLDDIR]) + except(OSError,IOError): + pass + try: + os.remove(f) + except(OSError,IOError): + pass + if f.startswith('.waf-'): + shutil.rmtree(f,ignore_errors=True) +def dist(appname='',version=''): + '''makes a tarball for redistributing the sources''' + import tarfile + if not appname:appname=getattr(Utils.g_module,APPNAME,'noname') + if not version:version=getattr(Utils.g_module,VERSION,'1.0') + tmp_folder=appname+'-'+version + arch_name=tmp_folder+'.tar.'+g_gz + try: + shutil.rmtree(tmp_folder) + except(OSError,IOError): + pass + try: + os.remove(arch_name) + except(OSError,IOError): + pass + copytree('.',tmp_folder,getattr(Utils.g_module,BLDDIR,None)) + dist_hook=getattr(Utils.g_module,'dist_hook',None) + if dist_hook: + back=os.getcwd() + os.chdir(tmp_folder) + try: + dist_hook() + finally: + os.chdir(back) + tar=tarfile.open(arch_name,'w:'+g_gz) + tar.add(tmp_folder) + tar.close() + info('The archive is ready: %s'%arch_name) + if os.path.exists(tmp_folder):shutil.rmtree(tmp_folder) + return arch_name +def distcheck(appname='',version=''): + 
'''checks if the sources compile (tarball from 'dist')''' + import tempfile,tarfile + if not appname:appname=getattr(Utils.g_module,APPNAME,'noname') + if not version:version=getattr(Utils.g_module,VERSION,'1.0') + waf=os.path.abspath(sys.argv[0]) + tarball=dist(appname,version) + t=tarfile.open(tarball) + for x in t:t.extract(x) + t.close() + path=appname+'-'+version + instdir=tempfile.mkdtemp('.inst','%s-%s'%(appname,version)) + ret=Utils.pproc.Popen([waf,'configure','install','uninstall','--destdir='+instdir],cwd=path).wait() + if ret: + raise Utils.WafError('distcheck failed with code %i'%ret) + if os.path.exists(instdir): + raise Utils.WafError('distcheck succeeded, but files were left in %s'%instdir) + shutil.rmtree(path) +def add_subdir(dir,bld): + bld.recurse(dir,'build') + diff -Nru showq-0.4.1+git20090622/wafadmin/TaskGen.py showq-0.4.1+git20090622+dfsg0/wafadmin/TaskGen.py --- showq-0.4.1+git20090622/wafadmin/TaskGen.py 1970-01-01 00:00:00.000000000 +0000 +++ showq-0.4.1+git20090622+dfsg0/wafadmin/TaskGen.py 2009-05-01 23:35:38.000000000 +0000 @@ -0,0 +1,335 @@ +#! /usr/bin/env python +# encoding: utf-8 +import sys +if sys.hexversion < 0x020400f0: from sets import Set as set +import os,traceback,copy +import Build,Task,Utils,Logs,Options +from Logs import debug,error,warn +from Constants import* +typos={'sources':'source','targets':'target','include':'includes','define':'defines','importpath':'importpaths','install_var':'install_path','install_subdir':'install_path','inst_var':'install_path','inst_dir':'install_path',} +class register_obj(type): + def __init__(cls,name,bases,dict): + super(register_obj,cls).__init__(name,bases,dict) + name=cls.__name__ + suffix='_taskgen' + if name.endswith(suffix): + task_gen.classes[name.replace(suffix,'')]=cls +class task_gen(object): + __metaclass__=register_obj + mappings={} + mapped={} + prec=Utils.DefaultDict(list) + traits=Utils.DefaultDict(set) + classes={} + def __init__(self,*kw,**kwargs): + self.prec=Utils.DefaultDict(list) + self.source='' + self.target='' + self.meths=[] + self.mappings={} + self.features=list(kw) + self.tasks=[] + self.default_chmod=O644 + self.default_install_path=None + self.allnodes=[] + self.bld=kwargs.get('bld',Build.bld) + self.env=self.bld.env.copy() + self.path=self.bld.path + self.name='' + self.idx=self.bld.idx[self.path.id]=self.bld.idx.get(self.path.id,0)+1 + for key,val in kwargs.iteritems(): + setattr(self,key,val) + self.bld.task_manager.add_task_gen(self) + self.bld.all_task_gen.append(self) + def __str__(self): + return(""%(self.name or self.target,self.__class__.__name__,str(self.path))) + def __setattr__(self,name,attr): + real=typos.get(name,name) + if real!=name: + warn('typo %s -> %s'%(name,real)) + if Logs.verbose>0: + traceback.print_stack() + object.__setattr__(self,real,attr) + def to_list(self,value): + if isinstance(value,str):return value.split() + else:return value + def apply(self): + keys=set(self.meths) + self.features=Utils.to_list(self.features) + for x in self.features+['*']: + st=task_gen.traits[x] + if not st: + warn('feature %r does not exist - bind at least one method to it'%x) + keys.update(st) + prec={} + prec_tbl=self.prec or task_gen.prec + for x in prec_tbl: + if x in keys: + prec[x]=prec_tbl[x] + tmp=[] + for a in keys: + for x in prec.values(): + if a in x:break + else: + tmp.append(a) + out=[] + while tmp: + e=tmp.pop() + if e in keys:out.append(e) + try: + nlst=prec[e] + except KeyError: + pass + else: + del prec[e] + for x in nlst: + for y in prec: + if x in 
prec[y]: + break + else: + tmp.append(x) + if prec:raise Utils.WafError("graph has a cycle %s"%str(prec)) + out.reverse() + self.meths=out + debug('task_gen: posting %s %d'%(self,id(self))) + for x in out: + try: + v=getattr(self,x) + except AttributeError: + raise Utils.WafError("tried to retrieve %s which is not a valid method"%x) + debug('task_gen: -> %s (%d)'%(x,id(self))) + v() + def post(self): + if not self.name: + if isinstance(self.target,list): + self.name=' '.join(self.target) + else: + self.name=self.target + if getattr(self,'posted',None): + return + self.apply() + debug('task_gen: posted %s'%self.name) + self.posted=True + def get_hook(self,ext): + try:return self.mappings[ext] + except KeyError: + try:return task_gen.mappings[ext] + except KeyError:return None + def create_task(self,name,env=None): + task=Task.TaskBase.classes[name](env or self.env,generator=self) + self.tasks.append(task) + return task + def name_to_obj(self,name): + return self.bld.name_to_obj(name,self.env) + def find_sources_in_dirs(self,dirnames,excludes=[],exts=[]): + err_msg="'%s' attribute must be a list" + if not isinstance(excludes,list): + raise Utils.WscriptError(err_msg%'excludes') + if not isinstance(exts,list): + raise Utils.WscriptError(err_msg%'exts') + lst=[] + dirnames=self.to_list(dirnames) + ext_lst=exts or self.mappings.keys()+task_gen.mappings.keys() + for name in dirnames: + anode=self.path.find_dir(name) + if not anode or not anode.is_child_of(self.bld.srcnode): + raise Utils.WscriptError("Unable to use '%s' - either because it's not a relative path"", or it's not child of '%s'."%(name,self.bld.srcnode)) + self.bld.rescan(anode) + for name in self.bld.cache_dir_contents[anode.id]: + if name.startswith('.'): + continue + (base,ext)=os.path.splitext(name) + if ext in ext_lst and not name in lst and not name in excludes: + lst.append((anode.relpath_gen(self.path)or'.')+os.path.sep+name) + lst.sort() + self.source=self.to_list(self.source) + if not self.source:self.source=lst + else:self.source+=lst + def clone(self,env): + newobj=task_gen(bld=self.bld) + for x in self.__dict__: + if x in['env','bld']: + continue + elif x in["path","features"]: + setattr(newobj,x,getattr(self,x)) + else: + setattr(newobj,x,copy.copy(getattr(self,x))) + newobj.__class__=self.__class__ + if isinstance(env,str): + newobj.env=self.bld.all_envs[env].copy() + else: + newobj.env=env.copy() + return newobj + def get_inst_path(self): + return getattr(self,'_install_path',getattr(self,'default_install_path','')) + def set_inst_path(self,val): + self._install_path=val + install_path=property(get_inst_path,set_inst_path) + def get_chmod(self): + return getattr(self,'_chmod',getattr(self,'default_chmod',O644)) + def set_chmod(self,val): + self._chmod=val + chmod=property(get_chmod,set_chmod) +def declare_extension(var,func): + try: + for x in Utils.to_list(var): + task_gen.mappings[x]=func + except: + raise Utils.WscriptError('declare_extension takes either a list or a string %r'%var) + task_gen.mapped[func.__name__]=func +def declare_order(*k): + assert(len(k)>1) + n=len(k)-1 + for i in xrange(n): + f1=k[i] + f2=k[i+1] + if not f1 in task_gen.prec[f2]: + task_gen.prec[f2].append(f1) +def declare_chain(name='',action='',ext_in='',ext_out='',reentrant=1,color='BLUE',install=0,before=[],after=[],decider=None,rule=None,scan=None): + action=action or rule + if isinstance(action,str): + act=Task.simple_task_type(name,action,color=color) + else: + act=Task.task_type_from_func(name,action,color=color) + 
act.ext_in=tuple(Utils.to_list(ext_in)) + act.ext_out=tuple(Utils.to_list(ext_out)) + act.before=Utils.to_list(before) + act.after=Utils.to_list(after) + act.scan=scan + def x_file(self,node): + if decider: + ext=decider(self,node) + elif isinstance(ext_out,str): + ext=ext_out + if isinstance(ext,str): + out_source=node.change_ext(ext) + if reentrant: + self.allnodes.append(out_source) + elif isinstance(ext,list): + out_source=[node.change_ext(x)for x in ext] + if reentrant: + for i in xrange(reentrant): + self.allnodes.append(out_source[i]) + else: + raise Utils.WafError("do not know how to process %s"%str(ext)) + tsk=self.create_task(name) + tsk.set_inputs(node) + tsk.set_outputs(out_source) + if node.__class__.bld.is_install==INSTALL: + tsk.install=install + declare_extension(act.ext_in,x_file) +def bind_feature(name,methods): + lst=Utils.to_list(methods) + task_gen.traits[name].update(lst) +def taskgen(func): + setattr(task_gen,func.__name__,func) +def feature(*k): + def deco(func): + setattr(task_gen,func.__name__,func) + for name in k: + task_gen.traits[name].update([func.__name__]) + return func + return deco +def before(*k): + def deco(func): + setattr(task_gen,func.__name__,func) + for fun_name in k: + if not func.__name__ in task_gen.prec[fun_name]: + task_gen.prec[fun_name].append(func.__name__) + return func + return deco +def after(*k): + def deco(func): + setattr(task_gen,func.__name__,func) + for fun_name in k: + if not fun_name in task_gen.prec[func.__name__]: + task_gen.prec[func.__name__].append(fun_name) + return func + return deco +def extension(var): + def deco(func): + setattr(task_gen,func.__name__,func) + try: + for x in Utils.to_list(var): + task_gen.mappings[x]=func + except: + raise Utils.WafError('extension takes either a list or a string %r'%var) + task_gen.mapped[func.__name__]=func + return func + return deco +def apply_core(self): + find_resource=self.path.find_resource + for filename in self.to_list(self.source): + x=self.get_hook(filename) + if x: + x(self,filename) + else: + node=find_resource(filename) + if not node:raise Utils.WafError("source not found: '%s' in '%s'"%(filename,str(self.path))) + self.allnodes.append(node) + for node in self.allnodes: + x=self.get_hook(node.suffix()) + if not x: + raise Utils.WafError("Do not know how to process %s in %s, mappings are %s"%(str(node),str(self.__class__),str(self.__class__.mappings))) + x(self,node) +feature('*')(apply_core) +def exec_rule(self): + if not getattr(self,'rule',None): + return + try: + self.meths.remove('apply_core') + except ValueError: + pass + func=self.rule + vars2=[] + if isinstance(func,str): + (func,vars2)=Task.compile_fun('',self.rule,shell=getattr(self,'shell',True)) + func.code=self.rule + vars=getattr(self,'vars',vars2) + if not vars: + if isinstance(self.rule,str): + vars=self.rule + else: + vars=Utils.h_fun(self.rule) + name=getattr(self,'name',None)or self.target or self.rule + cls=Task.task_type_from_func(name,func,vars) + tsk=self.create_task(name) + if getattr(self,'target',None): + cls.quiet=True + tsk.outputs=[self.path.find_or_declare(x)for x in self.to_list(self.target)] + if getattr(self,'source',None): + cls.quiet=True + tsk.inputs=[] + for x in self.to_list(self.source): + y=self.path.find_resource(x) + if not y: + raise Utils.WafError('input file %r could not be found (%r)'%(x,self.path.abspath())) + tsk.inputs.append(y) + if getattr(self,'always',None): + Task.always_run(cls) + if getattr(self,'scan',None): + cls.scan=self.scan + if 
getattr(self,'install_path',None): + tsk.install_path=self.install_path + if getattr(self,'cwd',None): + tsk.cwd=self.cwd + if getattr(self,'on_results',None): + Task.update_outputs(cls) + for x in['after','before']: + setattr(cls,x,getattr(self,x,[])) +feature('*')(exec_rule) +before('apply_core')(exec_rule) +def sequence_order(self): + if self.meths and self.meths[-1]!='sequence_order': + self.meths.append('sequence_order') + return + if getattr(self,'seq_start',None): + return + if getattr(self.bld,'prev',None): + self.bld.prev.post() + for x in self.bld.prev.tasks: + for y in self.tasks: + y.set_run_after(x) + self.bld.prev=self +feature('seq')(sequence_order) + diff -Nru showq-0.4.1+git20090622/wafadmin/Task.py showq-0.4.1+git20090622+dfsg0/wafadmin/Task.py --- showq-0.4.1+git20090622/wafadmin/Task.py 1970-01-01 00:00:00.000000000 +0000 +++ showq-0.4.1+git20090622+dfsg0/wafadmin/Task.py 2009-05-01 23:35:38.000000000 +0000 @@ -0,0 +1,676 @@ +#! /usr/bin/env python +# encoding: utf-8 +import sys +if sys.hexversion < 0x020400f0: from sets import Set as set +import os,shutil,sys,re,random,datetime +from Utils import md5 +import Build,Runner,Utils,Node,Logs,Options +from Logs import debug,warn,error +from Constants import* +algotype=NORMAL +COMPILE_TEMPLATE_SHELL=''' +def f(task): + env = task.env + wd = getattr(task, 'cwd', None) + p = env.get_flat + cmd = \'\'\' %s \'\'\' % s + return task.exec_command(cmd, cwd=wd) +''' +COMPILE_TEMPLATE_NOSHELL=''' +def f(task): + env = task.env + wd = getattr(task, 'cwd', None) + def to_list(xx): + if isinstance(xx, str): return [xx] + return xx + lst = [] + %s + lst = [x for x in lst if x] + return task.exec_command(lst, cwd=wd) +''' +file_deps=Utils.nada +class TaskManager(object): + def __init__(self): + self.groups=[] + self.tasks_done=[] + self.current_group=0 + self.groups_names={} + def get_next_set(self): + ret=None + while not ret and self.current_group0: + self.set_order(keys[i],keys[j]) + elif val<0: + self.set_order(keys[j],keys[i]) + def tasks_in_parallel(self): + if not self.ready:self.prepare() + keys=self.cstr_groups.keys() + unconnected=[] + remainder=[] + for u in keys: + for k in self.cstr_order.values(): + if u in k: + remainder.append(u) + break + else: + unconnected.append(u) + toreturn=[] + for y in unconnected: + toreturn.extend(self.cstr_groups[y]) + for y in unconnected: + try:self.cstr_order.__delitem__(y) + except KeyError:pass + self.cstr_groups.__delitem__(y) + if not toreturn and remainder: + raise Utils.WafError("circular order constraint detected %r"%remainder) + return toreturn + def tasks_by_max_jobs(self): + if not self.ready:self.prepare() + if not self.temp_tasks:self.temp_tasks=self.tasks_in_parallel() + if not self.temp_tasks:return(None,None) + maxjobs=MAXJOBS + ret=[] + remaining=[] + for t in self.temp_tasks: + m=getattr(t,"maxjobs",getattr(self.__class__,"maxjobs",MAXJOBS)) + if m>maxjobs: + remaining.append(t) + elif m task failed (err #%d): %r"%(self.err_code,self) + except AttributeError: + return" -> task failed: %r"%self + elif self.hasrun==MISSING: + return" -> missing files: %r"%self + else: + return'' + def install(self): + bld=self.generator.bld + d=self.attr('install') + if self.attr('install_path'): + lst=[a.relpath_gen(bld.srcnode)for a in self.outputs] + perm=self.attr('chmod',O644) + if self.attr('src'): + lst+=[a.relpath_gen(bld.srcnode)for a in self.inputs] + if self.attr('filename'): + dir=self.install_path.rstrip(os.sep)+os.sep+self.attr('filename') + bld.install_as(dir,lst[0],self.env,perm) 
+ else: + bld.install_files(self.install_path,lst,self.env,perm) +class Task(TaskBase): + vars=[] + def __init__(self,env,**kw): + TaskBase.__init__(self,**kw) + self.env=env + self.inputs=[] + self.outputs=[] + self.deps_nodes=[] + self.run_after=[] + def __str__(self): + env=self.env + src_str=' '.join([a.nice_path(env)for a in self.inputs]) + tgt_str=' '.join([a.nice_path(env)for a in self.outputs]) + if self.outputs:sep=' -> ' + else:sep='' + return'%s: %s%s%s\n'%(self.__class__.__name__.replace('_task',''),src_str,sep,tgt_str) + def __repr__(self): + return"".join(['\n\t{task: ',self.__class__.__name__," ",",".join([x.name for x in self.inputs])," -> ",",".join([x.name for x in self.outputs]),'}']) + def unique_id(self): + try: + return self.uid + except AttributeError: + m=md5() + up=m.update + up(self.__class__.__name__) + up(self.env.variant()) + p=None + for x in self.inputs+self.outputs: + if p!=x.parent.id: + p=x.parent.id + up(x.parent.abspath()) + up(x.name) + self.uid=m.digest() + return self.uid + def set_inputs(self,inp): + if isinstance(inp,list):self.inputs+=inp + else:self.inputs.append(inp) + def set_outputs(self,out): + if isinstance(out,list):self.outputs+=out + else:self.outputs.append(out) + def set_run_after(self,task): + assert isinstance(task,TaskBase) + self.run_after.append(task) + def add_file_dependency(self,filename): + node=self.generator.bld.current.find_resource(filename) + self.deps_nodes.append(node) + def signature(self): + try:return self.cache_sig[0] + except AttributeError:pass + m=md5() + exp_sig=self.sig_explicit_deps() + m.update(exp_sig) + imp_sig=self.scan and self.sig_implicit_deps()or SIG_NIL + m.update(imp_sig) + var_sig=self.sig_vars() + m.update(var_sig) + ret=m.digest() + self.cache_sig=(ret,exp_sig,imp_sig,var_sig) + return ret + def runnable_status(self): + if self.inputs and(not self.outputs): + if not getattr(self.__class__,'quiet',None): + warn("invalid task (no inputs OR outputs): override in a Task subclass or set the attribute 'quiet' %r"%self) + for t in self.run_after: + if not t.hasrun: + return ASK_LATER + env=self.env + bld=self.generator.bld + try: + new_sig=self.signature() + except KeyError: + debug("task: something is wrong, computing the task %r signature failed"%self) + return RUN_ME + key=self.unique_id() + try: + prev_sig=bld.task_sigs[key][0] + except KeyError: + debug("task: task %r must run as it was never run before or the task code changed"%self) + return RUN_ME + try: + for node in self.outputs: + variant=node.variant(env) + if bld.node_sigs[variant][node.id]!=new_sig: + return RUN_ME + except KeyError: + debug("task: task %r must run as the output nodes do not exist"%self) + return RUN_ME + if Logs.verbose:self.debug_why(bld.task_sigs[key]) + if new_sig!=prev_sig: + return RUN_ME + return SKIP_ME + def post_run(self): + bld=self.generator.bld + env=self.env + sig=self.signature() + cnt=0 + variant=env.variant() + for node in self.outputs: + try: + os.stat(node.abspath(env)) + except OSError: + self.has_run=MISSING + self.err_msg='-> missing file: %r'%node.abspath(env) + raise Utils.WafError + bld.node_sigs[variant][node.id]=sig + if Options.cache_global: + ssig=sig.encode('hex') + dest=os.path.join(Options.cache_global,'%s_%d_%s'%(ssig,cnt,node.name)) + try:shutil.copy2(node.abspath(env),dest) + except IOError:warn('Could not write the file to the cache') + cnt+=1 + bld.task_sigs[self.unique_id()]=self.cache_sig + def can_retrieve_cache(self): + if not Options.cache_global:return None + if 
Options.options.nocache:return None + if not self.outputs:return None + env=self.env + sig=self.signature() + cnt=0 + for node in self.outputs: + variant=node.variant(env) + ssig=sig.encode('hex') + orig=os.path.join(Options.cache_global,'%s_%d_%s'%(ssig,cnt,node.name)) + try: + shutil.copy2(orig,node.abspath(env)) + os.utime(orig,None) + except(OSError,IOError): + debug('task: failed retrieving file') + return None + else: + cnt+=1 + for node in self.outputs: + self.generator.bld.node_sigs[variant][node.id]=sig + self.generator.bld.printout('restoring from cache %r\n'%node.bldpath(env)) + return 1 + def debug_why(self,old_sigs): + new_sigs=self.cache_sig + def v(x): + return x.encode('hex') + debug("Task %r"%self) + msgs=['Task must run','* Source file or manual dependency','* Implicit dependency','* Environment variable'] + tmp='task: -> %s: %s %s' + for x in xrange(len(msgs)): + if(new_sigs[x]!=old_sigs[x]): + debug(tmp%(msgs[x],v(old_sigs[x]),v(new_sigs[x]))) + def sig_explicit_deps(self): + bld=self.generator.bld + m=md5() + for x in self.inputs+getattr(self,'dep_nodes',[]): + if not x.parent.id in bld.cache_scanned_folders: + bld.rescan(x.parent) + variant=x.variant(self.env) + m.update(bld.node_sigs[variant][x.id]) + if bld.deps_man: + additional_deps=bld.deps_man + for x in self.inputs+self.outputs: + try: + d=additional_deps[x.id] + except KeyError: + continue + for v in d: + if isinstance(v,Node.Node): + bld.rescan(v.parent) + variant=v.variant(self.env) + try: + v=bld.node_sigs[variant][v.id] + except KeyError: + v='' + elif hasattr(v,'__call__'): + v=v() + m.update(v) + return m.digest() + def sig_vars(self): + m=md5() + bld=self.generator.bld + env=self.env + act_sig=bld.hash_env_vars(env,self.__class__.vars) + m.update(act_sig) + dep_vars=getattr(self,'dep_vars',None) + if dep_vars: + m.update(bld.hash_env_vars(env,dep_vars)) + return m.digest() + scan=None + def sig_implicit_deps(self): + bld=self.generator.bld + key=self.unique_id() + prev_sigs=bld.task_sigs.get(key,()) + if prev_sigs: + try: + if prev_sigs[2]==self.compute_sig_implicit_deps(): + return prev_sigs[2] + except(KeyError,OSError): + pass + (nodes,names)=self.scan() + if Logs.verbose: + debug('deps: scanner for %s returned %s %s'%(str(self),str(nodes),str(names))) + bld.node_deps[key]=nodes + bld.raw_deps[key]=names + sig=self.compute_sig_implicit_deps() + return sig + def compute_sig_implicit_deps(self): + m=md5() + upd=m.update + bld=self.generator.bld + tstamp=bld.node_sigs + env=self.env + for k in bld.node_deps.get(self.unique_id(),[]): + if not k.parent.id in bld.cache_scanned_folders: + bld.rescan(k.parent) + if k.id&3==2: + upd(tstamp[0][k.id]) + else: + upd(tstamp[env.variant()][k.id]) + return m.digest() +def funex(c): + dc={} + exec(c,dc) + return dc['f'] +reg_act=re.compile(r"(?P\\)|(?P\$\$)|(?P\$\{(?P\w+)(?P.*?)\})",re.M) +def compile_fun_shell(name,line): + extr=[] + def repl(match): + g=match.group + if g('dollar'):return"$" + elif g('backslash'):return'\\\\' + elif g('subst'):extr.append((g('var'),g('code')));return"%s" + return None + line=reg_act.sub(repl,line) + parm=[] + dvars=[] + app=parm.append + for(var,meth)in extr: + if var=='SRC': + if meth:app('task.inputs%s'%meth) + else:app('" ".join([a.srcpath(env) for a in task.inputs])') + elif var=='TGT': + if meth:app('task.outputs%s'%meth) + else:app('" ".join([a.bldpath(env) for a in task.outputs])') + else: + if not var in dvars:dvars.append(var) + app("p('%s')"%var) + if parm:parm="%% (%s) "%(',\n\t\t'.join(parm)) + else:parm='' + 
c=COMPILE_TEMPLATE_SHELL%(line,parm) + debug('action: %s'%c) + return(funex(c),dvars) +def compile_fun_noshell(name,line): + extr=[] + def repl(match): + g=match.group + if g('dollar'):return"$" + elif g('subst'):extr.append((g('var'),g('code')));return"<<|@|>>" + return None + line2=reg_act.sub(repl,line) + params=line2.split('<<|@|>>') + buf=[] + dvars=[] + app=buf.append + for x in xrange(len(extr)): + params[x]=params[x].strip() + if params[x]: + app("lst.extend(%r)"%params[x].split()) + (var,meth)=extr[x] + if var=='SRC': + if meth:app('lst.append(task.inputs%s)'%meth) + else:app("lst.extend([a.srcpath(env) for a in task.inputs])") + elif var=='TGT': + if meth:app('lst.append(task.outputs%s)'%meth) + else:app("lst.extend([a.bldpath(env) for a in task.outputs])") + else: + app('lst.extend(to_list(env[%r]))'%var) + if not var in dvars:dvars.append(var) + if extr: + if params[-1]: + app("lst.extend(%r)"%params[-1].split()) + fun=COMPILE_TEMPLATE_NOSHELL%"\n\t".join(buf) + debug('action: %s'%fun) + return(funex(fun),dvars) +def compile_fun(name,line,shell=None): + if line.find('<')>0 or line.find('>')>0 or line.find('&&')>0: + shell=True + if shell is None: + if sys.platform=='win32': + shell=False + else: + shell=True + if shell: + return compile_fun_shell(name,line) + else: + return compile_fun_noshell(name,line) +def simple_task_type(name,line,color='GREEN',vars=[],ext_in=[],ext_out=[],before=[],after=[],shell=None): + (fun,dvars)=compile_fun(name,line,shell) + fun.code=line + return task_type_from_func(name,fun,vars or dvars,color,ext_in,ext_out,before,after) +def task_type_from_func(name,func,vars=[],color='GREEN',ext_in=[],ext_out=[],before=[],after=[]): + params={'run':func,'vars':vars,'color':color,'name':name,'ext_in':Utils.to_list(ext_in),'ext_out':Utils.to_list(ext_out),'before':Utils.to_list(before),'after':Utils.to_list(after),} + cls=type(Task)(name,(Task,),params) + TaskBase.classes[name]=cls + return cls +def always_run(cls): + old=cls.runnable_status + def always(self): + old(self) + return RUN_ME + cls.runnable_status=always +def update_outputs(cls): + old_post_run=cls.post_run + def post_run(self): + old_post_run(self) + bld=self.outputs[0].__class__.bld + bld.node_sigs[self.env.variant()][self.outputs[0].id]=Utils.h_file(self.outputs[0].abspath(self.env)) + cls.post_run=post_run +def extract_outputs(tasks): + v={} + for x in tasks: + try: + (ins,outs)=v[x.env.variant()] + except KeyError: + ins={} + outs={} + v[x.env.variant()]=(ins,outs) + for a in getattr(x,'inputs',[]): + try:ins[a.id].append(x) + except KeyError:ins[a.id]=[x] + for a in getattr(x,'outputs',[]): + try:outs[a.id].append(x) + except KeyError:outs[a.id]=[x] + for(ins,outs)in v.values(): + links=set(ins.iterkeys()).intersection(outs.iterkeys()) + for k in links: + for a in ins[k]: + for b in outs[k]: + a.set_run_after(b) +def extract_deps(tasks): + extract_outputs(tasks) + out_to_task={} + for x in tasks: + v=x.env.variant() + try: + lst=x.outputs + except AttributeError: + pass + else: + for node in lst: + out_to_task[(v,node.id)]=x + dep_to_task={} + for x in tasks: + try: + x.signature() + except: + pass + variant=x.env.variant() + key=x.unique_id() + for k in x.generator.bld.node_deps.get(x.unique_id(),[]): + try:dep_to_task[(v,k.id)].append(x) + except KeyError:dep_to_task[(v,k.id)]=[x] + deps=set(dep_to_task.keys()).intersection(set(out_to_task.keys())) + for idx in deps: + for k in dep_to_task[idx]: + k.set_run_after(out_to_task[idx]) + for x in tasks: + try: + delattr(x,'cache_sig') + except 
AttributeError: + pass + diff -Nru showq-0.4.1+git20090622/wafadmin/Tools/ar.py showq-0.4.1+git20090622+dfsg0/wafadmin/Tools/ar.py --- showq-0.4.1+git20090622/wafadmin/Tools/ar.py 1970-01-01 00:00:00.000000000 +0000 +++ showq-0.4.1+git20090622+dfsg0/wafadmin/Tools/ar.py 2009-05-01 23:35:37.000000000 +0000 @@ -0,0 +1,35 @@ +#! /usr/bin/env python +# encoding: utf-8 + +import os,sys +import Task +from Configure import conftest +ar_str='${AR} ${ARFLAGS} ${TGT} ${SRC}' +cls=Task.simple_task_type('ar_link_static',ar_str,color='YELLOW',ext_in='.o',shell=False) +cls.maxjobs=1 +old=cls.run +def wrap(self): + try:os.remove(self.outputs[0].abspath(self.env)) + except OSError:pass + return old(self) +setattr(cls,'run',wrap) +def detect(conf): + comp=conf.environ.get('AR','') + if not comp:comp=conf.env['AR'] + if not comp:comp=conf.find_program('ar',var='AR') + if not comp:return + ranlib=conf.environ.get('RANLIB','') + if not ranlib:ranlib=conf.env['RANLIB'] + if not ranlib:ranlib=conf.find_program('ranlib',var='RANLIB') + if not ranlib:return + v=conf.env + v['AR']=comp + v['ARFLAGS']='rcs' + v['RANLIB']=ranlib + v['RANLIBFLAGS']='' +def find_ar(conf): + v=conf.env + conf.check_tool('ar') + if not v['AR']:conf.fatal('ar is required for static libraries - not found') + +conftest(find_ar) diff -Nru showq-0.4.1+git20090622/wafadmin/Tools/bison.py showq-0.4.1+git20090622+dfsg0/wafadmin/Tools/bison.py --- showq-0.4.1+git20090622/wafadmin/Tools/bison.py 1970-01-01 00:00:00.000000000 +0000 +++ showq-0.4.1+git20090622+dfsg0/wafadmin/Tools/bison.py 2009-05-01 23:35:37.000000000 +0000 @@ -0,0 +1,17 @@ +#! /usr/bin/env python +# encoding: utf-8 + +import TaskGen +def decide_ext(self,node): + c_ext='.tab.c' + if node.name.endswith('.yc'):c_ext='.tab.cc' + if'-d'in self.env['BISONFLAGS']: + return[c_ext,c_ext.replace('c','h')] + else: + return c_ext +TaskGen.declare_chain(name='bison',rule='cd ${SRC[0].bld_dir(env)} && ${BISON} ${BISONFLAGS} ${SRC[0].abspath()} -o ${TGT[0].name}',ext_in='.y .yc .yy',decider=decide_ext,before='cc cxx',) +def detect(conf): + bison=conf.find_program('bison',var='BISON',mandatory=True) + v=conf.env + v['BISONFLAGS']='-d' + diff -Nru showq-0.4.1+git20090622/wafadmin/Tools/boost.py showq-0.4.1+git20090622+dfsg0/wafadmin/Tools/boost.py --- showq-0.4.1+git20090622/wafadmin/Tools/boost.py 1970-01-01 00:00:00.000000000 +0000 +++ showq-0.4.1+git20090622+dfsg0/wafadmin/Tools/boost.py 2009-05-01 23:35:37.000000000 +0000 @@ -0,0 +1,216 @@ +#! /usr/bin/env python +# encoding: utf-8 + +import os.path,glob,types,re,sys +import Configure,config_c,Options,Utils,Logs +from Logs import warn +from Configure import conf +boost_code=''' +#include +#include +int main() { std::cout << BOOST_VERSION << std::endl; } +''' +boost_libpath=['/usr/lib','/usr/local/lib','/opt/local/lib','/sw/lib','/lib'] +boost_cpppath=['/usr/include','/usr/local/include','/opt/local/include','/sw/include'] +STATIC_NOSTATIC='nostatic' +STATIC_BOTH='both' +STATIC_ONLYSTATIC='onlystatic' +is_versiontag=re.compile('^\d+_\d+_?\d*$') +is_threadingtag=re.compile('^mt$') +is_abitag=re.compile('^[sgydpn]+$') +is_toolsettag=re.compile('^(acc|borland|como|cw|dmc|darwin|gcc|hp_cxx|intel|kylix|msvc|qcc|sun|vacpp)\d*$') +def set_options(opt): + opt.add_option('--boost-includes',type='string',default='',dest='boostincludes',help='path to the boost directory where the includes are e.g. 
/usr/local/include/boost-1_35') + opt.add_option('--boost-libs',type='string',default='',dest='boostlibs',help='path to the directory where the boost libs are e.g. /usr/local/lib') +def string_to_version(s): + version=s.split('.') + if len(version)<3:return 0 + return int(version[0])*100000+int(version[1])*100+int(version[2]) +def version_string(version): + major=version/100000 + minor=version/100%1000 + minor_minor=version%100 + if minor_minor==0: + return"%d_%d"%(major,minor) + else: + return"%d_%d_%d"%(major,minor,minor_minor) +def libfiles(lib,pattern,lib_paths): + result=[] + for lib_path in lib_paths: + libname=pattern%('boost_'+lib+'*') + result+=glob.glob(lib_path+'/'+libname) + return result +def get_boost_version_number(self,dir): + try: + return self.run_c_code(compiler='cxx',code=boost_code,includes=dir,execute=1,env=self.env.copy(),type='cprogram',compile_mode='cxx',compile_filename='test.cpp') + except Configure.ConfigurationError,e: + return-1 +def set_default(kw,var,val): + if not var in kw: + kw[var]=val +def tags_score(tags,kw): + score=0 + needed_tags={'threading':kw['tag_threading'],'abi':kw['tag_abi'],'toolset':kw['tag_toolset'],'version':kw['tag_version']} + if kw['tag_toolset']is None: + v=kw['env'] + toolset=v['CXX_NAME'] + if v['CXX_VERSION']: + version_no=v['CXX_VERSION'].split('.') + toolset+=version_no[0] + if len(version_no)>1: + toolset+=version_no[1] + needed_tags['toolset']=toolset + found_tags={} + for tag in tags: + if is_versiontag.match(tag):found_tags['version']=tag + if is_threadingtag.match(tag):found_tags['threading']=tag + if is_abitag.match(tag):found_tags['abi']=tag + if is_toolsettag.match(tag):found_tags['toolset']=tag + for tagname in needed_tags.iterkeys(): + if needed_tags[tagname]is not None and tagname in found_tags: + if re.compile(needed_tags[tagname]).match(found_tags[tagname]): + score+=kw['score_'+tagname][0] + else: + score+=kw['score_'+tagname][1] + return score +def validate_boost(self,kw): + ver=kw.get('version','') + for x in'min_version max_version version'.split(): + set_default(kw,x,ver) + set_default(kw,'lib','') + kw['lib']=Utils.to_list(kw['lib']) + set_default(kw,'env',self.env) + set_default(kw,'libpath',boost_libpath) + set_default(kw,'cpppath',boost_cpppath) + for x in'tag_threading tag_version tag_toolset'.split(): + set_default(kw,x,None) + set_default(kw,'tag_abi','^[^d]*$') + set_default(kw,'score_threading',(10,-10)) + set_default(kw,'score_abi',(10,-10)) + set_default(kw,'score_toolset',(1,-1)) + set_default(kw,'score_version',(100,-100)) + set_default(kw,'score_min',0) + set_default(kw,'static',STATIC_NOSTATIC) + set_default(kw,'found_includes',False) + set_default(kw,'min_score',0) + set_default(kw,'errmsg','not found') + set_default(kw,'okmsg','ok') +def find_boost_includes(self,kw): + boostPath=getattr(Options.options,'boostincludes','') + if boostPath: + boostPath=[os.path.normpath(os.path.expandvars(os.path.expanduser(boostPath)))] + else: + boostPath=Utils.to_list(kw['cpppath']) + min_version=string_to_version(kw.get('min_version','')) + max_version=string_to_version(kw.get('max_version',''))or(sys.maxint-1) + version=0 + for include_path in boostPath: + boost_paths=glob.glob(os.path.join(include_path,'boost*')) + for path in boost_paths: + pathname=os.path.split(path)[-1] + ret=-1 + if pathname=='boost': + path=include_path + ret=self.get_boost_version_number(path) + elif pathname.startswith('boost-'): + ret=self.get_boost_version_number(path) + ret=int(ret) + if ret!=-1 and ret>=min_version and 
ret<=max_version and ret>version: + boost_path=path + version=ret + if not version: + self.fatal('boost headers not found! (required version min: %s max: %s)'%(kw['min_version'],kw['max_version'])) + return False + found_version=version_string(version) + versiontag='^'+found_version+'$' + if kw['tag_version']is None: + kw['tag_version']=versiontag + elif kw['tag_version']!=versiontag: + warn('boost header version %r and tag_version %r do not match!'%(versiontag,kw['tag_version'])) + env=self.env + env['CPPPATH_BOOST']=boost_path + env['BOOST_VERSION']=found_version + self.found_includes=1 + ret='Version %s (%s)'%(found_version,boost_path) + return ret +def find_boost_library(self,lib,kw): + def find_library_from_list(lib,files): + lib_pattern=re.compile('.*boost_(.*?)\..*') + result=(None,None) + resultscore=kw['min_score']-1 + for file in files: + m=lib_pattern.search(file,1) + if m: + libname=m.group(1) + libtags=libname.split('-')[1:] + currentscore=tags_score(libtags,kw) + if currentscore>resultscore: + result=(libname,file) + resultscore=currentscore + return result + lib_paths=getattr(Options.options,'boostlibs','') + if lib_paths: + lib_paths=[os.path.normpath(os.path.expandvars(os.path.expanduser(lib_paths)))] + else: + lib_paths=Utils.to_list(kw['libpath']) + v=kw.get('env',self.env) + (libname,file)=(None,None) + if kw['static']in[STATIC_NOSTATIC,STATIC_BOTH]: + st_env_prefix='LIB' + files=libfiles(lib,v['shlib_PATTERN'],lib_paths) + (libname,file)=find_library_from_list(lib,files) + if libname is None and kw['static']in[STATIC_ONLYSTATIC,STATIC_BOTH]: + st_env_prefix='STATICLIB' + staticLibPattern=v['staticlib_PATTERN'] + if self.env['CC_NAME']=='msvc': + staticLibPattern='lib'+staticLibPattern + files=libfiles(lib,staticLibPattern,lib_paths) + (libname,file)=find_library_from_list(lib,files) + if libname is not None: + v['LIBPATH_BOOST_'+lib.upper()]=os.path.split(file)[0] + if self.env['CC_NAME']=='msvc'and os.path.splitext(file)[1]=='.lib': + v[st_env_prefix+'_BOOST_'+lib.upper()]='libboost_'+libname + else: + v[st_env_prefix+'_BOOST_'+lib.upper()]='boost_'+libname + return + self.fatal('lib boost_'+lib+' not found!') +def check_boost(self,*k,**kw): + self.validate_boost(kw) + ret=None + try: + if not kw.get('found_includes',None): + self.check_message_1(kw.get('msg_includes','boost headers')) + ret=self.find_boost_includes(kw) + except Configure.ConfigurationError,e: + if'errmsg'in kw: + self.check_message_2(kw['errmsg'],'YELLOW') + if'mandatory'in kw: + if Logs.verbose>1: + raise + else: + self.fatal('the configuration failed (see %r)'%self.log.name) + else: + if'okmsg'in kw: + self.check_message_2(kw.get('okmsg_includes',ret)) + for lib in kw['lib']: + self.check_message_1('library boost_'+lib) + try: + self.find_boost_library(lib,kw) + except Configure.ConfigurationError,e: + if'errmsg'in kw: + self.check_message_2(kw['errmsg'],'YELLOW') + if'mandatory'in kw: + if Logs.verbose>1: + raise + else: + self.fatal('the configuration failed (see %r)'%self.log.name) + else: + if'okmsg'in kw: + self.check_message_2(kw['okmsg']) + return ret + +conf(get_boost_version_number) +conf(validate_boost) +conf(find_boost_includes) +conf(find_boost_library) +conf(check_boost) diff -Nru showq-0.4.1+git20090622/wafadmin/Tools/cc.py showq-0.4.1+git20090622+dfsg0/wafadmin/Tools/cc.py --- showq-0.4.1+git20090622/wafadmin/Tools/cc.py 1970-01-01 00:00:00.000000000 +0000 +++ showq-0.4.1+git20090622+dfsg0/wafadmin/Tools/cc.py 2009-05-01 23:35:37.000000000 +0000 @@ -0,0 +1,70 @@ +#! 
/usr/bin/env python +# encoding: utf-8 +import sys +if sys.hexversion < 0x020400f0: from sets import Set as set +import os +import TaskGen,Build,Utils,Task +from Logs import debug +import ccroot +from TaskGen import feature,before,extension,after +g_cc_flag_vars=['CCDEPS','FRAMEWORK','FRAMEWORKPATH','STATICLIB','LIB','LIBPATH','LINKFLAGS','RPATH','CCFLAGS','CPPPATH','CPPFLAGS','CCDEFINES'] +EXT_CC=['.c'] +g_cc_type_vars=['CCFLAGS','LINKFLAGS'] +class cc_taskgen(ccroot.ccroot_abstract): + pass +def init_cc(self): + self.p_flag_vars=set(self.p_flag_vars).union(g_cc_flag_vars) + self.p_type_vars=set(self.p_type_vars).union(g_cc_type_vars) + if not self.env['CC_NAME']: + raise Utils.WafError("At least one compiler (gcc, ..) must be selected") +def apply_obj_vars_cc(self): + env=self.env + app=env.append_unique + cpppath_st=env['CPPPATH_ST'] + for i in env['INC_PATHS']: + app('_CCINCFLAGS',cpppath_st%i.bldpath(env)) + app('_CCINCFLAGS',cpppath_st%i.srcpath(env)) + for i in env['CPPPATH']: + app('_CCINCFLAGS',cpppath_st%i) +def apply_defines_cc(self): + self.defines=getattr(self,'defines',[]) + lst=self.to_list(self.defines)+self.to_list(self.env['CCDEFINES']) + milst=[] + for defi in lst: + if not defi in milst: + milst.append(defi) + libs=self.to_list(self.uselib) + for l in libs: + val=self.env['CCDEFINES_'+l] + if val:milst+=val + self.env['DEFLINES']=["%s %s"%(x[0],Utils.trimquotes('='.join(x[1:])))for x in[y.split('=')for y in milst]] + y=self.env['CCDEFINES_ST'] + self.env['_CCDEFFLAGS']=[y%x for x in milst] +def c_hook(self,node): + task=self.create_task('cc') + if getattr(self,'obj_ext',None): + obj_ext=self.obj_ext + else: + obj_ext='_%d.o'%self.idx + task.inputs=[node] + task.outputs=[node.change_ext(obj_ext)] + self.compiled_tasks.append(task) + return task +cc_str='${CC} ${CCFLAGS} ${CPPFLAGS} ${_CCINCFLAGS} ${_CCDEFFLAGS} ${CC_SRC_F}${SRC} ${CC_TGT_F}${TGT}' +cls=Task.simple_task_type('cc',cc_str,'GREEN',ext_out='.o',ext_in='.c',shell=False) +cls.scan=ccroot.scan +cls.vars.append('CCDEPS') +link_str='${LINK_CC} ${CCLNK_SRC_F}${SRC} ${CCLNK_TGT_F}${TGT} ${LINKFLAGS}' +cls=Task.simple_task_type('cc_link',link_str,color='YELLOW',ext_in='.o',shell=False) +cls.maxjobs=1 +cls2=Task.task_type_from_func('vnum_cc_link',ccroot.link_vnum,cls.vars,color='CYAN',ext_in='.o') +cls2.maxjobs=1 + +feature('cc')(init_cc) +before('apply_type_vars')(init_cc) +after('default_cc')(init_cc) +feature('cc')(apply_obj_vars_cc) +after('apply_incpaths')(apply_obj_vars_cc) +feature('cc')(apply_defines_cc) +after('apply_lib_vars')(apply_defines_cc) +extension(EXT_CC)(c_hook) diff -Nru showq-0.4.1+git20090622/wafadmin/Tools/ccroot.py showq-0.4.1+git20090622+dfsg0/wafadmin/Tools/ccroot.py --- showq-0.4.1+git20090622/wafadmin/Tools/ccroot.py 1970-01-01 00:00:00.000000000 +0000 +++ showq-0.4.1+git20090622+dfsg0/wafadmin/Tools/ccroot.py 2009-05-01 23:35:37.000000000 +0000 @@ -0,0 +1,354 @@ +#! 
/usr/bin/env python +# encoding: utf-8 + +import os,sys,re +import TaskGen,Task,Utils,preproc,Logs,Build,Options +from Logs import error,debug,warn +from Utils import md5 +from TaskGen import taskgen,after,before,feature +from Constants import* +try: + from cStringIO import StringIO +except ImportError: + from io import StringIO +import config_c +USE_TOP_LEVEL=False +def get_cc_version(conf,cc,gcc=False,icc=False): + cmd=cc+['-dM','-E','-'] + try: + p=Utils.pproc.Popen(cmd,stdin=Utils.pproc.PIPE,stdout=Utils.pproc.PIPE,stderr=Utils.pproc.PIPE) + p.stdin.write('\n') + out=p.communicate()[0] + except: + conf.fatal('could not determine the compiler version %r'%cmd) + out=str(out) + if gcc: + if out.find('__INTEL_COMPILER')>=0: + conf.fatal('The intel compiler pretends to be gcc') + if out.find('__GNUC__')<0: + conf.fatal('Could not determine the compiler type') + if icc and out.find('__INTEL_COMPILER')<0: + conf.fatal('Not icc/icpc') + k={} + if icc or gcc: + out=out.split('\n') + import shlex + for line in out: + lst=shlex.split(line) + if len(lst)>2: + key=lst[1] + val=lst[2] + k[key]=val + conf.env['CC_VERSION']=(k['__GNUC__'],k['__GNUC_MINOR__'],k['__GNUC_PATCHLEVEL__']) + return k +class DEBUG_LEVELS: + ULTRADEBUG="ultradebug" + DEBUG="debug" + RELEASE="release" + OPTIMIZED="optimized" + CUSTOM="custom" + ALL=[ULTRADEBUG,DEBUG,RELEASE,OPTIMIZED,CUSTOM] +def scan(self): + debug('ccroot: _scan_preprocessor(self, node, env, path_lst)') + if len(self.inputs)==1: + node=self.inputs[0] + (nodes,names)=preproc.get_deps(node,self.env,nodepaths=self.env['INC_PATHS']) + if Logs.verbose: + debug('deps: deps for %s: %r; unresolved %r'%(str(node),nodes,names)) + return(nodes,names) + all_nodes=[] + all_names=[] + seen=[] + for node in self.inputs: + (nodes,names)=preproc.get_deps(node,self.env,nodepaths=self.env['INC_PATHS']) + if Logs.verbose: + debug('deps: deps for %s: %r; unresolved %r'%(str(node),nodes,names)) + for x in nodes: + if id(x)in seen:continue + seen.append(id(x)) + all_nodes.append(x) + for x in names: + if not x in all_names: + all_names.append(x) + return(all_nodes,all_names) +class ccroot_abstract(TaskGen.task_gen): + def __init__(self,*k,**kw): + if len(k)>1: + k=list(k) + if k[1][0]!='c': + k[1]='c'+k[1] + TaskGen.task_gen.__init__(self,*k,**kw) +def get_target_name(self): + tp='program' + for x in self.features: + if x in['cshlib','cstaticlib']: + tp=x.lstrip('c') + pattern=self.env[tp+'_PATTERN'] + if not pattern:pattern='%s' + dir,name=os.path.split(self.target) + return os.path.join(dir,pattern%name) +def install_shlib(self): + nums=self.vnum.split('.') + path=self.install_path + if not path:return + libname=self.outputs[0].name + name3=libname+'.'+self.vnum + name2=libname+'.'+nums[0] + name1=libname + filename=self.outputs[0].abspath(self.env) + bld=self.outputs[0].__class__.bld + bld.install_as(os.path.join(path,name3),filename,env=self.env) + bld.symlink_as(os.path.join(path,name2),name3) + bld.symlink_as(os.path.join(path,name1),name3) +def default_cc(self): + Utils.def_attrs(self,includes='',defines='',rpaths='',uselib='',uselib_local='',add_objects='',p_flag_vars=[],p_type_vars=[],compiled_tasks=[],link_task=None) +def apply_verif(self): + if not(self.source or getattr(self,'add_objects',None)): + raise Utils.WafError('no source files specified for %s'%self) + if not self.target: + raise Utils.WafError('no target for %s'%self) +def vars_target_cprogram(self): + self.default_install_path=self.env['BINDIR']or'${PREFIX}/bin' + self.default_chmod=O755 +def 
vars_target_cstaticlib(self): + self.default_install_path=self.env['LIBDIR']or'${PREFIX}/lib${LIB_EXT}' + if sys.platform in['win32','cygwin']: + self.default_chmod=O755 +def install_target_cstaticlib(self): + if not self.bld.is_install:return + self.link_task.install_path=self.install_path +def install_target_cshlib(self): + if getattr(self,'vnum','')and sys.platform!='win32': + tsk=self.link_task + tsk.vnum=self.vnum + tsk.install=install_shlib +def apply_incpaths(self): + lst=[] + for lib in self.to_list(self.uselib): + for path in self.env['CPPPATH_'+lib]: + if not path in lst: + lst.append(path) + if preproc.go_absolute: + for path in preproc.standard_includes: + if not path in lst: + lst.append(path) + for path in self.to_list(self.includes): + if not path in lst: + if preproc.go_absolute or not os.path.isabs(path): + lst.append(path) + else: + self.env.prepend_value('CPPPATH',path) + for path in lst: + node=None + if os.path.isabs(path): + if preproc.go_absolute: + node=self.bld.root.find_dir(path) + elif path[0]=='#': + node=self.bld.srcnode + if len(path)>1: + node=node.find_dir(path[1:]) + else: + node=self.path.find_dir(path) + if node: + self.env.append_value('INC_PATHS',node) + if USE_TOP_LEVEL: + self.env.append_value('INC_PATHS',self.bld.srcnode) +def apply_type_vars(self): + for x in self.features: + if not x in['cprogram','cstaticlib','cshlib']: + continue + x=x.lstrip('c') + st=self.env[x+'_USELIB'] + if st:self.uselib=self.uselib+' '+st + for var in self.p_type_vars: + compvar='%s_%s'%(x,var) + value=self.env[compvar] + if value:self.env.append_value(var,value) +def apply_link(self): + link=getattr(self,'link',None) + if not link: + if'cstaticlib'in self.features:link='ar_link_static' + elif'cxx'in self.features:link='cxx_link' + else:link='cc_link' + if'cshlib'in self.features and getattr(self,'vnum',None): + if sys.platform=='darwin'or sys.platform=='win32': + self.vnum='' + else: + link='vnum_'+link + tsk=self.create_task(link) + outputs=[t.outputs[0]for t in self.compiled_tasks] + tsk.set_inputs(outputs) + tsk.set_outputs(self.path.find_or_declare(get_target_name(self))) + tsk.chmod=self.chmod + self.link_task=tsk +def apply_lib_vars(self): + env=self.env + uselib=self.to_list(self.uselib) + seen=[] + names=self.to_list(self.uselib_local)[:] + while names: + x=names.pop(0) + if x in seen: + continue + y=self.name_to_obj(x) + if not y: + raise Utils.WafError("object '%s' was not found in uselib_local (required by '%s')"%(x,self.name)) + if getattr(y,'uselib_local',None): + lst=y.to_list(y.uselib_local) + for u in lst: + if not u in seen: + names.append(u) + y.post() + seen.append(x) + libname=y.target[y.target.rfind(os.sep)+1:] + if'cshlib'in y.features or'cprogram'in y.features: + env.append_value('LIB',libname) + elif'cstaticlib'in y.features: + env.append_value('STATICLIB',libname) + if y.link_task is not None: + self.link_task.set_run_after(y.link_task) + dep_nodes=getattr(self.link_task,'dep_nodes',[]) + self.link_task.dep_nodes=dep_nodes+y.link_task.outputs + tmp_path=y.link_task.outputs[0].parent.bldpath(self.env) + if not tmp_path in env['LIBPATH']:env.prepend_value('LIBPATH',tmp_path) + morelibs=y.to_list(y.uselib) + for v in morelibs: + if v in uselib:continue + uselib=[v]+uselib + if getattr(y,'export_incdirs',None): + cpppath_st=self.env['CPPPATH_ST'] + for x in self.to_list(y.export_incdirs): + node=y.path.find_dir(x) + if not node: + raise Utils.WafError('object %s: invalid folder %s in export_incdirs'%(y.target,x)) + 
self.env.append_unique('INC_PATHS',node) + for x in uselib: + for v in self.p_flag_vars: + val=self.env[v+'_'+x] + if val:self.env.append_value(v,val) +def apply_objdeps(self): + if not getattr(self,'add_objects',None):return + seen=[] + names=self.to_list(self.add_objects) + while names: + x=names[0] + if x in seen: + names=names[1:] + continue + y=self.name_to_obj(x) + if not y: + raise Utils.WafError("object '%s' was not found in uselib_local (required by add_objects '%s')"%(x,self.name)) + if getattr(y,'add_objects',None): + added=0 + lst=y.to_list(y.add_objects) + lst.reverse() + for u in lst: + if u in seen:continue + added=1 + names=[u]+names + if added:continue + y.post() + seen.append(x) + for t in y.compiled_tasks: + self.link_task.inputs.extend(t.outputs) +def apply_obj_vars(self): + v=self.env + lib_st=v['LIB_ST'] + staticlib_st=v['STATICLIB_ST'] + libpath_st=v['LIBPATH_ST'] + staticlibpath_st=v['STATICLIBPATH_ST'] + rpath_st=v['RPATH_ST'] + app=v.append_unique + if v['FULLSTATIC']: + v.append_value('LINKFLAGS',v['FULLSTATIC_MARKER']) + for i in v['RPATH']: + if i and rpath_st: + app('LINKFLAGS',rpath_st%i) + for i in v['LIBPATH']: + app('LINKFLAGS',libpath_st%i) + app('LINKFLAGS',staticlibpath_st%i) + if v['STATICLIB']: + v.append_value('LINKFLAGS',v['STATICLIB_MARKER']) + k=[(staticlib_st%i)for i in v['STATICLIB']] + app('LINKFLAGS',k) + if not v['FULLSTATIC']: + if v['STATICLIB']or v['LIB']: + v.append_value('LINKFLAGS',v['SHLIB_MARKER']) + app('LINKFLAGS',[lib_st%i for i in v['LIB']]) +def apply_vnum(self): + if sys.platform!='darwin'and sys.platform!='win32': + try: + nums=self.vnum.split('.') + except AttributeError: + pass + else: + try:name3=self.soname + except AttributeError:name3=self.link_task.outputs[0].name+'.'+nums[0] + self.link_task.outputs.append(self.link_task.outputs[0].parent.find_or_declare(name3)) + self.env.append_value('LINKFLAGS',(self.env['SONAME_ST']%name3).split()) +def process_obj_files(self): + if not hasattr(self,'obj_files'):return + for x in self.obj_files: + node=self.path.find_resource(x) + self.link_task.inputs.append(node) +def add_obj_file(self,file): + if not hasattr(self,'obj_files'):self.obj_files=[] + if not'process_obj_files'in self.meths:self.meths.append('process_obj_files') + self.obj_files.append(file) +c_attrs={'cxxflag':'CXXFLAGS','cflag':'CCFLAGS','ccflag':'CCFLAGS','linkflag':'LINKFLAGS','ldflag':'LINKFLAGS','lib':'LIB','libpath':'LIBPATH','staticlib':'STATICLIB','staticlibpath':'STATICLIBPATH','rpath':'RPATH','framework':'FRAMEWORK','frameworkpath':'FRAMEWORKPATH'} +def add_extra_flags(self): + for x in self.__dict__.keys(): + y=x.lower() + if y[-1]=='s': + y=y[:-1] + if c_attrs.get(y,None): + self.env.append_unique(c_attrs[y],getattr(self,x)) +def link_vnum(self): + clsname=self.__class__.__name__.replace('vnum_','') + out=self.outputs + self.outputs=out[1:] + ret=Task.TaskBase.classes[clsname].__dict__['run'](self) + self.outputs=out + if ret: + return ret + try: + os.remove(self.outputs[0].abspath(self.env)) + except OSError: + pass + try: + os.symlink(self.outputs[1].name,self.outputs[0].bldpath(self.env)) + except: + return 1 + +feature('cc','cxx')(default_cc) +before('apply_core')(default_cc) +feature('cprogram','dprogram','cstaticlib','dstaticlib','cshlib','dshlib')(apply_verif) +feature('cprogram','dprogram')(vars_target_cprogram) +before('apply_core')(vars_target_cprogram) +feature('cstaticlib','dstaticlib','cshlib','dshlib')(vars_target_cstaticlib) +before('apply_core')(vars_target_cstaticlib) 
+feature('cprogram','dprogram','cstaticlib','dstaticlib','cshlib','dshlib')(install_target_cstaticlib) +after('apply_objdeps','apply_link')(install_target_cstaticlib) +feature('cshlib','dshlib')(install_target_cshlib) +after('apply_link')(install_target_cshlib) +feature('cc','cxx')(apply_incpaths) +after('apply_type_vars','apply_lib_vars','apply_core')(apply_incpaths) +feature('cc','cxx')(apply_type_vars) +after('init_cc','init_cxx')(apply_type_vars) +before('apply_lib_vars')(apply_type_vars) +feature('cprogram','cshlib','cstaticlib')(apply_link) +after('apply_core')(apply_link) +feature('cc','cxx')(apply_lib_vars) +after('apply_link','init_cc','init_cxx')(apply_lib_vars) +feature('cprogram','cstaticlib','cshlib')(apply_objdeps) +after('apply_obj_vars','apply_vnum','apply_link')(apply_objdeps) +feature('cprogram','cshlib','cstaticlib')(apply_obj_vars) +after('apply_lib_vars')(apply_obj_vars) +feature('cshlib')(apply_vnum) +after('apply_link')(apply_vnum) +before('apply_lib_vars')(apply_vnum) +after('apply_link')(process_obj_files) +taskgen(add_obj_file) +feature('cc','cxx')(add_extra_flags) +before('init_cxx','init_cc')(add_extra_flags) +before('apply_lib_vars','apply_obj_vars','apply_incpaths','init_cc')(add_extra_flags) diff -Nru showq-0.4.1+git20090622/wafadmin/Tools/compiler_cc.py showq-0.4.1+git20090622+dfsg0/wafadmin/Tools/compiler_cc.py --- showq-0.4.1+git20090622/wafadmin/Tools/compiler_cc.py 1970-01-01 00:00:00.000000000 +0000 +++ showq-0.4.1+git20090622+dfsg0/wafadmin/Tools/compiler_cc.py 2009-05-01 23:35:37.000000000 +0000 @@ -0,0 +1,37 @@ +#! /usr/bin/env python +# encoding: utf-8 + +import os,sys,imp,types,ccroot +import optparse +import Utils,Configure,Options +c_compiler={'win32':['msvc','gcc'],'cygwin':['gcc'],'darwin':['gcc'],'aix5':['gcc'],'linux':['gcc','icc','suncc'],'sunos':['gcc','suncc'],'irix':['gcc'],'hpux':['gcc'],'default':['gcc']} +def __list_possible_compiler(platform): + try: + return c_compiler[platform] + except KeyError: + return c_compiler["default"] +def detect(conf): + try:test_for_compiler=Options.options.check_c_compiler + except AttributeError:conf.fatal("Add set_options(opt): opt.tool_options('compiler_cc')") + for c_compiler in test_for_compiler.split(): + try: + conf.check_tool(c_compiler) + except Configure.ConfigurationError: + pass + else: + if conf.env['CC']: + conf.check_message("%s"%c_compiler,'',True) + conf.env["COMPILER_CC"]="%s"%c_compiler + return + conf.check_message("%s"%c_compiler,'',False) + break + conf.env["COMPILER_CC"]=None +def set_options(opt): + detected_platform=Options.platform + possible_compiler_list=__list_possible_compiler(detected_platform) + test_for_compiler=str(" ").join(possible_compiler_list) + cc_compiler_opts=opt.add_option_group("C Compiler Options") + cc_compiler_opts.add_option('--check-c-compiler',default="%s"%test_for_compiler,help='On this platform (%s) the following C-Compiler will be checked by default: "%s"'%(detected_platform,test_for_compiler),dest="check_c_compiler") + for c_compiler in test_for_compiler.split(): + opt.tool_options('%s'%c_compiler,option_group=cc_compiler_opts) + diff -Nru showq-0.4.1+git20090622/wafadmin/Tools/compiler_cxx.py showq-0.4.1+git20090622+dfsg0/wafadmin/Tools/compiler_cxx.py --- showq-0.4.1+git20090622/wafadmin/Tools/compiler_cxx.py 1970-01-01 00:00:00.000000000 +0000 +++ showq-0.4.1+git20090622+dfsg0/wafadmin/Tools/compiler_cxx.py 2009-05-01 23:35:37.000000000 +0000 @@ -0,0 +1,37 @@ +#! 
/usr/bin/env python +# encoding: utf-8 + +import os,sys,imp,types,ccroot +import optparse +import Utils,Configure,Options +cxx_compiler={'win32':['msvc','g++'],'cygwin':['g++'],'darwin':['g++'],'aix5':['g++'],'linux':['g++','icpc','sunc++'],'sunos':['g++','sunc++'],'irix':['g++'],'hpux':['g++'],'default':['g++']} +def __list_possible_compiler(platform): + try: + return(cxx_compiler[platform]) + except KeyError: + return(cxx_compiler["default"]) +def detect(conf): + try:test_for_compiler=Options.options.check_cxx_compiler + except AttributeError:raise Configure.ConfigurationError("Add set_options(opt): opt.tool_options('compiler_cxx')") + for cxx_compiler in test_for_compiler.split(): + try: + conf.check_tool(cxx_compiler) + except Configure.ConfigurationError: + pass + else: + if conf.env['CXX']: + conf.check_message("%s"%cxx_compiler,'',True) + conf.env["COMPILER_CXX"]="%s"%cxx_compiler + return + conf.check_message("%s"%cxx_compiler,'',False) + break + conf.env["COMPILER_CXX"]=None +def set_options(opt): + detected_platform=Options.platform + possible_compiler_list=__list_possible_compiler(detected_platform) + test_for_compiler=str(" ").join(possible_compiler_list) + cxx_compiler_opts=opt.add_option_group("C++ Compiler Options") + cxx_compiler_opts.add_option('--check-cxx-compiler',default="%s"%test_for_compiler,help='On this platform (%s) the following C++ Compiler will be checked by default: "%s"'%(detected_platform,test_for_compiler),dest="check_cxx_compiler") + for cxx_compiler in test_for_compiler.split(): + opt.tool_options('%s'%cxx_compiler,option_group=cxx_compiler_opts) + diff -Nru showq-0.4.1+git20090622/wafadmin/Tools/compiler_d.py showq-0.4.1+git20090622+dfsg0/wafadmin/Tools/compiler_d.py --- showq-0.4.1+git20090622/wafadmin/Tools/compiler_d.py 1970-01-01 00:00:00.000000000 +0000 +++ showq-0.4.1+git20090622+dfsg0/wafadmin/Tools/compiler_d.py 2009-05-01 23:35:37.000000000 +0000 @@ -0,0 +1,23 @@ +#! /usr/bin/env python +# encoding: utf-8 + +import os,sys,imp,types +import Utils,Configure,Options +def detect(conf): + if getattr(Options.options,'check_dmd_first',None): + test_for_compiler=['dmd','gdc'] + else: + test_for_compiler=['gdc','dmd'] + for d_compiler in test_for_compiler: + conf.check_tool(d_compiler) + if conf.env['D_COMPILER']: + conf.check_message("%s"%d_compiler,'',True) + conf.env["COMPILER_D"]=d_compiler + return + conf.check_message("%s"%d_compiler,'',False) +def set_options(opt): + d_compiler_opts=opt.add_option_group("D Compiler Options") + d_compiler_opts.add_option('--check-dmd-first',action="store_true",help='checks for the gdc compiler before dmd (default is the other way round)',dest='check_dmd_first',default=False) + for d_compiler in['gdc','dmd']: + opt.tool_options('%s'%d_compiler,option_group=d_compiler_opts) + diff -Nru showq-0.4.1+git20090622/wafadmin/Tools/config_c.py showq-0.4.1+git20090622+dfsg0/wafadmin/Tools/config_c.py --- showq-0.4.1+git20090622/wafadmin/Tools/config_c.py 1970-01-01 00:00:00.000000000 +0000 +++ showq-0.4.1+git20090622+dfsg0/wafadmin/Tools/config_c.py 2009-05-01 23:35:37.000000000 +0000 @@ -0,0 +1,469 @@ +#! 
/usr/bin/env python +# encoding: utf-8 +import sys +if sys.hexversion < 0x020400f0: from sets import Set as set +import os,imp,sys,shlex,shutil +from Utils import md5 +import Build,Utils,Configure,Task,Options,Logs,TaskGen +from Constants import* +from Configure import conf,conftest +cfg_ver={'atleast-version':'>=','exact-version':'==','max-version':'<=',} +SNIP1=''' + int main() { + void *p; + p=(void*)(%s); + return 0; +} +''' +SNIP2=''' +int main() { + if ((%(type_name)s *) 0) return 0; + if (sizeof (%(type_name)s)) return 0; +} +''' +SNIP3=''' +int main() { + return 0; +} +''' +def parse_flags(line,uselib,env): + lst=shlex.split(line) + while lst: + x=lst.pop(0) + st=x[:2] + ot=x[2:] + if st=='-I'or st=='/I': + if not ot:ot=lst.pop(0) + env.append_unique('CPPPATH_'+uselib,ot) + elif st=='-D': + if not ot:ot=lst.pop(0) + env.append_unique('CXXDEFINES_'+uselib,ot) + env.append_unique('CCDEFINES_'+uselib,ot) + elif st=='-l': + if not ot:ot=lst.pop(0) + env.append_unique('LIB_'+uselib,ot) + elif st=='-L': + if not ot:ot=lst.pop(0) + env.append_unique('LIBPATH_'+uselib,ot) + elif x=='-pthread'or x.startswith('+'): + env.append_unique('CCFLAGS_'+uselib,x) + env.append_unique('CXXFLAGS_'+uselib,x) + env.append_unique('LINKFLAGS_'+uselib,x) + elif x.startswith('-std'): + env.append_unique('CCFLAGS_'+uselib,x) + env.append_unique('LINKFLAGS_'+uselib,x) + elif x.startswith('-Wl'): + env.append_unique('LINKFLAGS_'+uselib,x) + elif x.startswith('-m'): + env.append_unique('CCFLAGS_'+uselib,x) + env.append_unique('CXXFLAGS_'+uselib,x) +def ret_msg(self,f,kw): + if isinstance(f,str): + return f + return f(kw) +def validate_cfg(self,kw): + if not'path'in kw: + kw['path']='pkg-config --errors-to-stdout --print-errors' + if'atleast_pkgconfig_version'in kw: + if not'msg'in kw: + kw['msg']='Checking for pkg-config version >= %s'%kw['atleast_pkgconfig_version'] + return + if'modversion'in kw: + return + for x in cfg_ver.keys(): + y=x.replace('-','_') + if y in kw: + if not'package'in kw: + raise ValueError('%s requires a package'%x) + if not'msg'in kw: + kw['msg']='Checking for %s %s %s'%(kw['package'],cfg_ver[x],kw[y]) + return + if not'msg'in kw: + kw['msg']='Checking for %s'%kw['package'] + if not'okmsg'in kw: + kw['okmsg']='ok' + if not'errmsg'in kw: + kw['errmsg']='not found' +def cmd_and_log(self,cmd,kw): + Logs.debug('runner: %s\n'%cmd) + if self.log:self.log.write('%s\n'%cmd) + try: + p=Utils.pproc.Popen(cmd,stdout=Utils.pproc.PIPE,shell=True) + output=p.communicate()[0] + except WindowsError: + self.fatal('fail') + if p.returncode: + if not kw.get('errmsg',''): + if kw.get('mandatory',False): + kw['errmsg']=output.strip() + else: + kw['errmsg']='fail' + self.fatal('fail') + return output +def exec_cfg(self,kw): + if'atleast_pkgconfig_version'in kw: + cmd='%s --atleast-pkgconfig-version=%s'%(kw['path'],kw['atleast_pkgconfig_version']) + self.cmd_and_log(cmd,kw) + if not'okmsg'in kw: + kw['okmsg']='ok' + return + for x in cfg_ver: + y=x.replace('-','_') + if y in kw: + self.cmd_and_log('%s --%s=%s %s'%(kw['path'],x,kw[y],kw['package']),kw) + if not'okmsg'in kw: + kw['okmsg']='ok' + self.define(self.have_define(kw.get('uselib_store',kw['package'])),1,0) + break + if'modversion'in kw: + version=self.cmd_and_log('%s --modversion %s'%(kw['path'],kw['modversion']),kw).strip() + self.define('%s_VERSION'%Utils.quote_define_name(kw.get('uselib_store',kw['modversion'])),version) + return version + lst=[kw['path']] + for key,val in kw.get('define_variable',{}).iteritems(): + 
lst.append('--define-variable=%s=%s'%(key,val)) + lst.append(kw.get('args','')) + lst.append(kw['package']) + cmd=' '.join(lst) + ret=self.cmd_and_log(cmd,kw) + if not'okmsg'in kw: + kw['okmsg']='ok' + self.define(self.have_define(kw.get('uselib_store',kw['package'])),1,0) + parse_flags(ret,kw.get('uselib_store',kw['package'].upper()),kw.get('env',self.env)) + return ret +def check_cfg(self,*k,**kw): + self.validate_cfg(kw) + if'msg'in kw: + self.check_message_1(kw['msg']) + ret=None + try: + ret=self.exec_cfg(kw) + except Configure.ConfigurationError,e: + if'errmsg'in kw: + self.check_message_2(kw['errmsg'],'YELLOW') + if'mandatory'in kw and kw['mandatory']: + if Logs.verbose>1: + raise + else: + self.fatal('the configuration failed (see %r)'%self.log.name) + else: + kw['success']=ret + if'okmsg'in kw: + self.check_message_2(self.ret_msg(kw['okmsg'],kw)) + return ret +def validate_c(self,kw): + if not'env'in kw: + kw['env']=self.env.copy() + env=kw['env'] + if not'compiler'in kw: + kw['compiler']='cc' + if env['CXX_NAME']and Task.TaskBase.classes.get('cxx',None): + kw['compiler']='cxx' + if not'type'in kw: + kw['type']='cprogram' + assert not(kw['type']!='cprogram'and kw.get('execute',0)),'can only execute programs' + def to_header(dct): + if'header_name'in dct: + dct=Utils.to_list(dct['header_name']) + return''.join(['#include <%s>\n'%x for x in dct]) + return'' + if not'compile_mode'in kw: + kw['compile_mode']=(kw['compiler']=='cxx')and'cxx'or'cc' + if not'compile_filename'in kw: + kw['compile_filename']='test.c'+((kw['compile_mode']=='cxx')and'pp'or'') + if'framework_name'in kw: + try:TaskGen.task_gen.create_task_macapp + except AttributeError:self.fatal('frameworks require the osx tool') + fwkname=kw['framework_name'] + if not'uselib_store'in kw: + kw['uselib_store']=fwkname.upper() + if not kw.get('no_header',False): + if not'header_name'in kw: + kw['header_name']=[] + fwk='%s/%s.h'%(fwkname,fwkname) + if kw.get('remove_dot_h',None): + fwk=fwk[:-2] + kw['header_name']=Utils.to_list(kw['header_name'])+[fwk] + kw['msg']='Checking for framework %s'%fwkname + kw['framework']=fwkname + if'function_name'in kw: + fu=kw['function_name'] + if not'msg'in kw: + kw['msg']='Checking for function %s'%fu + kw['code']=to_header(kw)+SNIP1%fu + if not'uselib_store'in kw: + kw['uselib_store']=fu.upper() + if not'define_name'in kw: + kw['define_name']=self.have_define(fu) + elif'type_name'in kw: + tu=kw['type_name'] + if not'msg'in kw: + kw['msg']='Checking for type %s'%tu + if not'header_name'in kw: + kw['header_name']='stdint.h' + kw['code']=to_header(kw)+SNIP2%{'type_name':tu} + if not'define_name'in kw: + kw['define_name']=self.have_define(tu.upper()) + elif'header_name'in kw: + if not'msg'in kw: + kw['msg']='Checking for header %s'%kw['header_name'] + l=Utils.to_list(kw['header_name']) + assert len(l)>0,'list of headers in header_name is empty' + kw['code']=to_header(kw)+SNIP3 + if not'uselib_store'in kw: + kw['uselib_store']=l[0].upper() + if not'define_name'in kw: + kw['define_name']=self.have_define(l[0]) + if'lib'in kw: + if not'msg'in kw: + kw['msg']='Checking for library %s'%kw['lib'] + if not'uselib_store'in kw: + kw['uselib_store']=kw['lib'].upper() + if'staticlib'in kw: + if not'msg'in kw: + kw['msg']='Checking for static library %s'%kw['staticlib'] + if not'uselib_store'in kw: + kw['uselib_store']=kw['staticlib'].upper() + if'fragment'in kw: + kw['code']=kw['fragment'] + if not'msg'in kw: + kw['msg']='Checking for custom code' + if not'errmsg'in kw: + kw['errmsg']='fail' + 
for(flagsname,flagstype)in[('cxxflags','compiler'),('cflags','compiler'),('linkflags','linker')]: + if flagsname in kw: + if not'msg'in kw: + kw['msg']='Checking for %s flags %s'%(flagstype,kw[flagsname]) + if not'errmsg'in kw: + kw['errmsg']='fail' + if not'execute'in kw: + kw['execute']=False + if not'errmsg'in kw: + kw['errmsg']='not found' + if not'okmsg'in kw: + kw['okmsg']='ok' + if not'code'in kw: + kw['code']=SNIP3 + if not kw.get('success'):kw['success']=None + assert'msg'in kw,'invalid parameters, read http://freehackers.org/~tnagy/wafbook/single.html#config_helpers_c' +def post_check(self,*k,**kw): + is_success=0 + if kw['execute']: + if kw['success']: + is_success=kw['success'] + else: + is_success=(kw['success']==0) + def define_or_stuff(): + nm=kw['define_name'] + if kw['execute']and kw.get('define_ret',None)and isinstance(is_success,str): + self.define(kw['define_name'],is_success) + else: + self.define_cond(kw['define_name'],is_success) + if'define_name'in kw: + if'header_name'in kw or'function_name'in kw or'type_name'in kw or'fragment'in kw: + define_or_stuff() + if is_success and'uselib_store'in kw: + import cc,cxx + for k in set(cc.g_cc_flag_vars).union(cxx.g_cxx_flag_vars): + lk=k.lower() + if k=='CPPPATH':lk='includes' + if k=='CXXDEFINES':lk='defines' + if k=='CCDEFINES':lk='defines' + if lk in kw: + val=kw[lk] + if isinstance(val,str): + val=val.rstrip(os.path.sep) + self.env.append_unique(k+'_'+kw['uselib_store'],val) +def check(self,*k,**kw): + self.validate_c(kw) + self.check_message_1(kw['msg']) + ret=None + try: + ret=self.run_c_code(*k,**kw) + except Configure.ConfigurationError,e: + self.check_message_2(kw['errmsg'],'YELLOW') + if'mandatory'in kw and kw['mandatory']: + if Logs.verbose>1: + raise + else: + self.fatal('the configuration failed (see %r)'%self.log.name) + else: + kw['success']=ret + self.check_message_2(self.ret_msg(kw['okmsg'],kw)) + self.post_check(*k,**kw) + if not kw.get('execute',False): + return ret==0 + return ret +def run_c_code(self,*k,**kw): + test_f_name=kw['compile_filename'] + dir=os.path.join(self.blddir,'.wscript-trybuild') + try: + shutil.rmtree(dir) + except OSError: + pass + os.makedirs(dir) + bdir=os.path.join(dir,'testbuild') + if not os.path.exists(bdir): + os.makedirs(bdir) + env=kw['env'] + dest=open(os.path.join(dir,test_f_name),'w') + dest.write(kw['code']) + dest.close() + back=os.path.abspath('.') + bld=Build.BuildContext() + bld.log=self.log + bld.all_envs.update(self.all_envs) + bld.all_envs['default']=env + bld.lst_variants=bld.all_envs.keys() + bld.load_dirs(dir,bdir) + os.chdir(dir) + bld.rescan(bld.srcnode) + o=bld.new_task_gen(features=[kw['compile_mode'],kw['type']],source=test_f_name,target='testprog') + for k,v in kw.iteritems(): + setattr(o,k,v) + self.log.write("==>\n%s\n<==\n"%kw['code']) + try: + bld.compile() + except Utils.WafError: + ret=Utils.ex_stack() + else: + ret=0 + os.chdir(back) + if ret: + self.log.write('command returned %r'%ret) + self.fatal(str(ret)) + if kw['execute']: + lastprog=o.link_task.outputs[0].abspath(env) + if kw['execute']: + args=Utils.to_list(kw.get('exec_args',[])) + try: + data=Utils.cmd_output([lastprog]+args).strip() + except ValueError,e: + self.fatal(Utils.ex_stack()) + ret=data + return ret +def check_cxx(self,*k,**kw): + kw['compiler']='cxx' + return self.check(*k,**kw) +def check_cc(self,*k,**kw): + kw['compiler']='cc' + return self.check(*k,**kw) +def define(self,define,value,quote=1): + assert define and isinstance(define,str) + tbl=self.env[DEFINES]or 
Utils.ordered_dict() + if isinstance(value,str): + if quote==1: + tbl[define]='"%s"'%str(value) + else: + tbl[define]=value + elif isinstance(value,int): + tbl[define]=value + else: + raise TypeError('define %r -> %r must be a string or an int'%(define,value)) + self.env[DEFINES]=tbl + self.env[define]=value +def undefine(self,define): + assert define and isinstance(define,str) + tbl=self.env[DEFINES]or Utils.ordered_dict() + value=UNDEFINED + tbl[define]=value + self.env[DEFINES]=tbl + self.env[define]=value +def define_cond(self,name,value): + if value: + self.define(name,1) + else: + self.undefine(name) +def is_defined(self,key): + defines=self.env[DEFINES] + if not defines: + return False + try: + value=defines[key] + except KeyError: + return False + else: + return value!=UNDEFINED +def get_define(self,define): + try:return self.env[DEFINES][define] + except KeyError:return None +def have_define(self,name): + return self.__dict__.get('HAVE_PAT','HAVE_%s')%Utils.quote_define_name(name) +def write_config_header(self,configfile='',env='',guard='',top=False): + if not configfile:configfile=WAF_CONFIG_H + waf_guard=guard or'_%s_WAF'%Utils.quote_define_name(configfile) + if not env:env=self.env + if top: + diff='' + else: + diff=Utils.diff_path(self.srcdir,self.curdir) + full=os.sep.join([self.blddir,env.variant(),diff,configfile]) + full=os.path.normpath(full) + (dir,base)=os.path.split(full) + try:os.makedirs(dir) + except:pass + dest=open(full,'w') + dest.write('/* Configuration header created by Waf - do not edit */\n') + dest.write('#ifndef %s\n#define %s\n\n'%(waf_guard,waf_guard)) + dest.write(self.get_config_header()) + env.append_value(CFG_FILES,os.path.join(diff,configfile)) + dest.write('\n#endif /* %s */\n'%waf_guard) + dest.close() +def get_config_header(self): + config_header=[] + tbl=self.env[DEFINES]or Utils.ordered_dict() + for key in tbl.allkeys: + value=tbl[key] + if value is None: + config_header.append('#define %s'%key) + elif value is UNDEFINED: + config_header.append('/* #undef %s */'%key) + else: + config_header.append('#define %s %s'%(key,value)) + return"\n".join(config_header) +def find_cpp(conf): + v=conf.env + cpp=None + if v['CPP']:cpp=v['CPP'] + elif'CPP'in conf.environ:cpp=conf.environ['CPP'] + if not cpp:cpp=conf.find_program('cpp',var='CPP') + if not cpp:cpp=v['CC'] + if not cpp:cpp=v['CXX'] + v['CPP']=cpp +def cc_add_flags(conf): + conf.add_os_flags('CFLAGS','CCFLAGS') + conf.add_os_flags('CPPFLAGS') + conf.add_os_flags('LINKFLAGS') +def cxx_add_flags(conf): + conf.add_os_flags('CXXFLAGS') + conf.add_os_flags('CPPFLAGS') + conf.add_os_flags('LINKFLAGS') +def cc_load_tools(conf): + conf.check_tool('cc') +def cxx_load_tools(conf): + conf.check_tool('cxx') + +conf(ret_msg) +conf(validate_cfg) +conf(cmd_and_log) +conf(exec_cfg) +conf(check_cfg) +conf(validate_c) +conf(post_check) +conf(check) +conf(run_c_code) +conf(check_cxx) +conf(check_cc) +conf(define) +conf(undefine) +conf(define_cond) +conf(is_defined) +conf(get_define) +conf(have_define) +conf(write_config_header) +conf(get_config_header) +conftest(find_cpp) +conftest(cc_add_flags) +conftest(cxx_add_flags) +conftest(cc_load_tools) +conftest(cxx_load_tools) diff -Nru showq-0.4.1+git20090622/wafadmin/Tools/cs.py showq-0.4.1+git20090622+dfsg0/wafadmin/Tools/cs.py --- showq-0.4.1+git20090622/wafadmin/Tools/cs.py 1970-01-01 00:00:00.000000000 +0000 +++ showq-0.4.1+git20090622+dfsg0/wafadmin/Tools/cs.py 2009-05-01 23:35:37.000000000 +0000 @@ -0,0 +1,45 @@ +#! 
/usr/bin/env python +# encoding: utf-8 + +import TaskGen,Utils,Task +from Logs import error +from TaskGen import before,after,taskgen,feature +flag_vars=['FLAGS','ASSEMBLIES'] +def init_cs(self): + Utils.def_attrs(self,flags='',assemblies='',resources='',uselib='') +def apply_uselib_cs(self): + if not self.uselib: + return + global flag_vars + for var in self.to_list(self.uselib): + for v in self.flag_vars: + val=self.env[v+'_'+var] + if val:self.env.append_value(v,val) +def apply_cs(self): + try:self.meths.remove('apply_core') + except ValueError:pass + assemblies_flags=[] + for i in self.to_list(self.assemblies)+self.env['ASSEMBLIES']: + assemblies_flags+='/r:'+i + self.env['_ASSEMBLIES']+=assemblies_flags + for i in self.to_list(self.resources): + self.env['_RESOURCES'].append('/resource:'+i) + self.env['_FLAGS']+=self.to_list(self.flags)+self.env['FLAGS'] + curnode=self.path + nodes=[] + for i in self.to_list(self.source): + nodes.append(curnode.find_resource(i)) + task=self.create_task('mcs') + task.inputs=nodes + task.set_outputs(self.path.find_or_declare(self.target)) +Task.simple_task_type('mcs','${MCS} ${SRC} /out:${TGT} ${_FLAGS} ${_ASSEMBLIES} ${_RESOURCES}',color='YELLOW') +def detect(conf): + mcs=conf.find_program('mcs',var='MCS') + if not mcs:mcs=conf.find_program('gmcs',var='MCS') + +feature('cs')(init_cs) +feature('cs')(apply_uselib_cs) +after('init_cs')(apply_uselib_cs) +feature('cs')(apply_cs) +after('apply_uselib_cs')(apply_cs) +before('apply_core')(apply_cs) diff -Nru showq-0.4.1+git20090622/wafadmin/Tools/cxx.py showq-0.4.1+git20090622+dfsg0/wafadmin/Tools/cxx.py --- showq-0.4.1+git20090622/wafadmin/Tools/cxx.py 1970-01-01 00:00:00.000000000 +0000 +++ showq-0.4.1+git20090622+dfsg0/wafadmin/Tools/cxx.py 2009-05-01 23:35:37.000000000 +0000 @@ -0,0 +1,71 @@ +#! /usr/bin/env python +# encoding: utf-8 +import sys +if sys.hexversion < 0x020400f0: from sets import Set as set +import TaskGen,Task,Utils +from Logs import debug +import ccroot +from TaskGen import feature,before,extension,after +g_cxx_flag_vars=['CXXDEPS','FRAMEWORK','FRAMEWORKPATH','STATICLIB','LIB','LIBPATH','LINKFLAGS','RPATH','CXXFLAGS','CCFLAGS','CPPPATH','CPPFLAGS','CXXDEFINES'] +EXT_CXX=['.cpp','.cc','.cxx','.C','.c++'] +g_cxx_type_vars=['CXXFLAGS','LINKFLAGS'] +class cxx_taskgen(ccroot.ccroot_abstract): + pass +def init_cxx(self): + if not'cc'in self.features: + self.mappings['.c']=TaskGen.task_gen.mappings['.cxx'] + self.p_flag_vars=set(self.p_flag_vars).union(g_cxx_flag_vars) + self.p_type_vars=set(self.p_type_vars).union(g_cxx_type_vars) + if not self.env['CXX_NAME']: + raise Utils.WafError("At least one compiler (g++, ..) 
must be selected") +def apply_obj_vars_cxx(self): + env=self.env + app=env.append_unique + cxxpath_st=env['CPPPATH_ST'] + for i in env['INC_PATHS']: + app('_CXXINCFLAGS',cxxpath_st%i.bldpath(env)) + app('_CXXINCFLAGS',cxxpath_st%i.srcpath(env)) + for i in env['CPPPATH']: + app('_CXXINCFLAGS',cxxpath_st%i) +def apply_defines_cxx(self): + self.defines=getattr(self,'defines',[]) + lst=self.to_list(self.defines)+self.to_list(self.env['CXXDEFINES']) + milst=[] + for defi in lst: + if not defi in milst: + milst.append(defi) + libs=self.to_list(self.uselib) + for l in libs: + val=self.env['CXXDEFINES_'+l] + if val:milst+=self.to_list(val) + self.env['DEFLINES']=["%s %s"%(x[0],Utils.trimquotes('='.join(x[1:])))for x in[y.split('=')for y in milst]] + y=self.env['CXXDEFINES_ST'] + self.env['_CXXDEFFLAGS']=[y%x for x in milst] +def cxx_hook(self,node): + task=self.create_task('cxx') + if getattr(self,'obj_ext',None): + obj_ext=self.obj_ext + else: + obj_ext='_%d.o'%self.idx + task.inputs=[node] + task.outputs=[node.change_ext(obj_ext)] + self.compiled_tasks.append(task) + return task +cxx_str='${CXX} ${CXXFLAGS} ${CPPFLAGS} ${_CXXINCFLAGS} ${_CXXDEFFLAGS} ${CXX_SRC_F}${SRC} ${CXX_TGT_F}${TGT}' +cls=Task.simple_task_type('cxx',cxx_str,color='GREEN',ext_out='.o',ext_in='.cxx',shell=False) +cls.scan=ccroot.scan +cls.vars.append('CXXDEPS') +link_str='${LINK_CXX} ${CXXLNK_SRC_F}${SRC} ${CXXLNK_TGT_F}${TGT} ${LINKFLAGS}' +cls=Task.simple_task_type('cxx_link',link_str,color='YELLOW',ext_in='.o',shell=False) +cls.maxjobs=1 +cls2=Task.task_type_from_func('vnum_cxx_link',ccroot.link_vnum,cls.vars,color='CYAN',ext_in='.o') +cls2.maxjobs=1 + +feature('cxx')(init_cxx) +before('apply_type_vars')(init_cxx) +after('default_cc')(init_cxx) +feature('cxx')(apply_obj_vars_cxx) +after('apply_incpaths')(apply_obj_vars_cxx) +feature('cxx')(apply_defines_cxx) +after('apply_lib_vars')(apply_defines_cxx) +extension(EXT_CXX)(cxx_hook) diff -Nru showq-0.4.1+git20090622/wafadmin/Tools/dbus.py showq-0.4.1+git20090622+dfsg0/wafadmin/Tools/dbus.py --- showq-0.4.1+git20090622/wafadmin/Tools/dbus.py 1970-01-01 00:00:00.000000000 +0000 +++ showq-0.4.1+git20090622+dfsg0/wafadmin/Tools/dbus.py 2009-05-01 23:35:37.000000000 +0000 @@ -0,0 +1,27 @@ +#! 
/usr/bin/env python +# encoding: utf-8 + +import Task,Utils +from TaskGen import taskgen,before,after,feature +def add_dbus_file(self,filename,prefix,mode): + if not hasattr(self,'dbus_lst'): + self.dbus_lst=[] + self.meths.append('process_dbus') + self.dbus_lst.append([filename,prefix,mode]) +def process_dbus(self): + for filename,prefix,mode in getattr(self,'dbus_lst',[]): + env=self.env.copy() + node=self.path.find_resource(filename) + if not node: + raise Utils.WafError('file not found '+filename) + env['DBUS_BINDING_TOOL_PREFIX']=prefix + env['DBUS_BINDING_TOOL_MODE']=mode + task=self.create_task('dbus_binding_tool',env) + task.set_inputs(node) + task.set_outputs(node.change_ext('.h')) +Task.simple_task_type('dbus_binding_tool','${DBUS_BINDING_TOOL} --prefix=${DBUS_BINDING_TOOL_PREFIX} --mode=${DBUS_BINDING_TOOL_MODE} --output=${TGT} ${SRC}',color='BLUE',before='cc') +def detect(conf): + dbus_binding_tool=conf.find_program('dbus-binding-tool',var='DBUS_BINDING_TOOL') + +taskgen(add_dbus_file) +before('apply_core')(process_dbus) diff -Nru showq-0.4.1+git20090622/wafadmin/Tools/dmd.py showq-0.4.1+git20090622+dfsg0/wafadmin/Tools/dmd.py --- showq-0.4.1+git20090622/wafadmin/Tools/dmd.py 1970-01-01 00:00:00.000000000 +0000 +++ showq-0.4.1+git20090622+dfsg0/wafadmin/Tools/dmd.py 2009-05-01 23:35:37.000000000 +0000 @@ -0,0 +1,47 @@ +#! /usr/bin/env python +# encoding: utf-8 + +import sys +import ar +def find_dmd(conf): + v=conf.env + d_compiler=None + if v['D_COMPILER']: + d_compiler=v['D_COMPILER'] + if not d_compiler:d_compiler=conf.find_program('dmd',var='D_COMPILER') + if not d_compiler:return 0 + v['D_COMPILER']=d_compiler +def common_flags(conf): + v=conf.env + v['DFLAGS']=['-version=Posix'] + v['D_SRC_F']='' + v['D_TGT_F']=['-c','-of'] + v['DPATH_ST']='-I%s' + v['D_LINKER']=v['D_COMPILER'] + v['DLNK_SRC_F']='' + v['DLNK_TGT_F']='-of' + v['DLIB_ST']='-L-l%s' + v['DLIBPATH_ST']='-L-L%s' + v['DFLAGS_OPTIMIZED']=['-O'] + v['DFLAGS_DEBUG']=['-g','-debug'] + v['DFLAGS_ULTRADEBUG']=['-g','-debug'] + v['DLINKFLAGS']=['-quiet'] + v['D_shlib_DFLAGS']=['-fPIC'] + v['D_shlib_LINKFLAGS']=['-L-shared'] + v['DHEADER_ext']='.di' + v['D_HDR_F']=['-H','-Hf'] + if sys.platform=="win32": + v['D_program_PATTERN']='%s.exe' + v['D_shlib_PATTERN']='lib%s.dll' + v['D_staticlib_PATTERN']='lib%s.a' + else: + v['D_program_PATTERN']='%s' + v['D_shlib_PATTERN']='lib%s.so' + v['D_staticlib_PATTERN']='lib%s.a' +def detect(conf): + v=conf.env + find_dmd(conf) + ar.find_ar(conf) + conf.check_tool('d') + common_flags(conf) + diff -Nru showq-0.4.1+git20090622/wafadmin/Tools/d.py showq-0.4.1+git20090622+dfsg0/wafadmin/Tools/d.py --- showq-0.4.1+git20090622/wafadmin/Tools/d.py 1970-01-01 00:00:00.000000000 +0000 +++ showq-0.4.1+git20090622+dfsg0/wafadmin/Tools/d.py 2009-05-01 23:35:37.000000000 +0000 @@ -0,0 +1,335 @@ +#! 
/usr/bin/env python +# encoding: utf-8 + +import os,sys,re,optparse +import ccroot +import TaskGen,Utils,Task,Configure,Logs,Build +from Logs import debug,error +from TaskGen import taskgen,feature,after,before,extension +EXT_D=['.d','.di','.D'] +D_METHS=['apply_core','apply_vnum','apply_objdeps'] +def filter_comments(filename): + txt=Utils.readf(filename) + buf=[] + i=0 + max=len(txt) + while i1: + self.features.append('d'+k[1]) +TaskGen.bind_feature('d',D_METHS) +def init_d(self): + Utils.def_attrs(self,dflags='',importpaths='',libs='',libpaths='',uselib='',uselib_local='',generate_headers=False,compiled_tasks=[],add_objects=[],link_task=None) +def apply_d_libs(self): + uselib=self.to_list(self.uselib) + seen=[] + local_libs=self.to_list(self.uselib_local) + libs=[] + libpaths=[] + env=self.env + while local_libs: + x=local_libs.pop() + if x in seen: + continue + else: + seen.append(x) + y=self.name_to_obj(x) + if not y: + raise Utils.WafError('object not found in uselib_local: obj %s uselib %s'%(self.name,x)) + if y.uselib_local: + added=0 + lst=y.to_list(y.uselib_local) + lst.reverse() + for u in lst: + if u in seen:continue + added=1 + local_libs=[u]+local_libs + if added:continue + y.post() + seen.append(x) + libname=y.target[y.target.rfind(os.sep)+1:] + if'dshlib'in y.features or'dstaticlib'in y.features: + env.append_unique('DLINKFLAGS',env['DLIBPATH_ST']%y.link_task.outputs[0].parent.bldpath(env)) + env.append_unique('DLINKFLAGS',env['DLIB_ST']%libname) + tmp_path=y.path.bldpath(env) + if not tmp_path in libpaths:libpaths=[tmp_path]+libpaths + if y.link_task is not None: + self.link_task.set_run_after(y.link_task) + dep_nodes=getattr(self.link_task,'dep_nodes',[]) + self.link_task.dep_nodes=dep_nodes+y.link_task.outputs + morelibs=y.to_list(y.uselib) + for v in morelibs: + if v in uselib:continue + uselib=[v]+uselib + self.uselib=uselib +def apply_d_link(self): + link=getattr(self,'link',None) + if not link: + if'dstaticlib'in self.features:link='ar_link_static' + else:link='d_link' + linktask=self.create_task(link) + outputs=[t.outputs[0]for t in self.compiled_tasks] + linktask.set_inputs(outputs) + linktask.set_outputs(self.path.find_or_declare(get_target_name(self))) + self.link_task=linktask +def apply_d_vars(self): + env=self.env + dpath_st=env['DPATH_ST'] + lib_st=env['DLIB_ST'] + libpath_st=env['DLIBPATH_ST'] + importpaths=self.to_list(self.importpaths) + libpaths=[] + libs=[] + uselib=self.to_list(self.uselib) + for i in uselib: + if env['DFLAGS_'+i]: + env.append_unique('DFLAGS',env['DFLAGS_'+i]) + for x in self.features: + if not x in['dprogram','dstaticlib','dshlib']: + continue + x.lstrip('d') + d_shlib_dflags=env['D_'+x+'_DFLAGS'] + if d_shlib_dflags: + env.append_unique('DFLAGS',d_shlib_dflags) + for i in uselib: + if env['DPATH_'+i]: + for entry in self.to_list(env['DPATH_'+i]): + if not entry in importpaths: + importpaths.append(entry) + for path in importpaths: + if os.path.isabs(path): + env.append_unique('_DIMPORTFLAGS',dpath_st%path) + else: + node=self.path.find_dir(path) + self.env.append_unique('INC_PATHS',node) + env.append_unique('_DIMPORTFLAGS',dpath_st%node.srcpath(env)) + env.append_unique('_DIMPORTFLAGS',dpath_st%node.bldpath(env)) + for i in uselib: + if env['LIBPATH_'+i]: + for entry in self.to_list(env['LIBPATH_'+i]): + if not entry in libpaths: + libpaths+=[entry] + libpaths=self.to_list(self.libpaths)+libpaths + for path in libpaths: + env.append_unique('DLINKFLAGS',libpath_st%path) + for i in uselib: + if env['LIB_'+i]: + for entry in 
self.to_list(env['LIB_'+i]): + if not entry in libs: + libs+=[entry] + libs=libs+self.to_list(self.libs) + for lib in libs: + env.append_unique('DLINKFLAGS',lib_st%lib) + for i in uselib: + dlinkflags=env['DLINKFLAGS_'+i] + if dlinkflags: + for linkflag in dlinkflags: + env.append_unique('DLINKFLAGS',linkflag) +def add_shlib_d_flags(self): + for linkflag in self.env['D_shlib_LINKFLAGS']: + self.env.append_unique('DLINKFLAGS',linkflag) +def d_hook(self,node): + task=self.create_task(self.generate_headers and'd_with_header'or'd') + try:obj_ext=self.obj_ext + except AttributeError:obj_ext='_%d.o'%self.idx + task.inputs=[node] + task.outputs=[node.change_ext(obj_ext)] + self.compiled_tasks.append(task) + if self.generate_headers: + header_node=node.change_ext(self.env['DHEADER_ext']) + task.outputs+=[header_node] +d_str='${D_COMPILER} ${DFLAGS} ${_DIMPORTFLAGS} ${D_SRC_F}${SRC} ${D_TGT_F}${TGT}' +d_with_header_str='${D_COMPILER} ${DFLAGS} ${_DIMPORTFLAGS} \ +${D_HDR_F}${TGT[1].bldpath(env)} \ +${D_SRC_F}${SRC} \ +${D_TGT_F}${TGT[0].bldpath(env)}' +link_str='${D_LINKER} ${DLNK_SRC_F}${SRC} ${DLNK_TGT_F}${TGT} ${DLINKFLAGS}' +cls=Task.simple_task_type('d',d_str,'GREEN',before='ar_link_static d_link',shell=False) +cls.scan=scan +Task.simple_task_type('d_with_header',d_with_header_str,'GREEN',before='ar_link_static d_link',shell=False) +Task.simple_task_type('d_link',link_str,color='YELLOW',shell=False) +def generate_header(self,filename,install_path): + if not hasattr(self,'header_lst'):self.header_lst=[] + self.meths.append('process_header') + self.header_lst.append([filename,install_path]) +def process_header(self): + env=self.env + for i in getattr(self,'header_lst',[]): + node=self.path.find_resource(i[0]) + if not node: + raise Utils.WafError('file not found on d obj '+i[0]) + task=self.create_task('d_header') + task.set_inputs(node) + task.set_outputs(node.change_ext('.di')) +d_header_str='${D_COMPILER} ${D_HEADER} ${SRC}' +Task.simple_task_type('d_header',d_header_str,color='BLUE',shell=False) + +feature('d')(init_d) +before('apply_type_vars')(init_d) +feature('d')(init_d) +before('apply_d_libs')(init_d) +feature('d')(apply_d_libs) +after('apply_d_link')(apply_d_libs) +before('apply_vnum')(apply_d_libs) +feature('dprogram','dshlib','dstaticlib')(apply_d_link) +after('apply_core')(apply_d_link) +feature('d')(apply_d_vars) +after('apply_core')(apply_d_vars) +feature('dshlib')(add_shlib_d_flags) +after('apply_d_vars')(add_shlib_d_flags) +extension(EXT_D)(d_hook) +taskgen(generate_header) +before('apply_core')(process_header) diff -Nru showq-0.4.1+git20090622/wafadmin/Tools/flex.py showq-0.4.1+git20090622+dfsg0/wafadmin/Tools/flex.py --- showq-0.4.1+git20090622/wafadmin/Tools/flex.py 1970-01-01 00:00:00.000000000 +0000 +++ showq-0.4.1+git20090622+dfsg0/wafadmin/Tools/flex.py 2009-05-01 23:35:37.000000000 +0000 @@ -0,0 +1,13 @@ +#! 
/usr/bin/env python +# encoding: utf-8 + +import TaskGen +def decide_ext(self,node): + if'cxx'in self.features:return'.lex.cc' + else:return'.lex.c' +TaskGen.declare_chain(name='flex',rule='${FLEX} -o${TGT} ${FLEXFLAGS} ${SRC}',ext_in='.l',decider=decide_ext,before='cc cxx',) +def detect(conf): + conf.find_program('flex',var='FLEX',mandatory=True) + v=conf.env + v['FLEXFLAGS']='' + diff -Nru showq-0.4.1+git20090622/wafadmin/Tools/gas.py showq-0.4.1+git20090622+dfsg0/wafadmin/Tools/gas.py --- showq-0.4.1+git20090622/wafadmin/Tools/gas.py 1970-01-01 00:00:00.000000000 +0000 +++ showq-0.4.1+git20090622+dfsg0/wafadmin/Tools/gas.py 2009-05-01 23:35:37.000000000 +0000 @@ -0,0 +1,36 @@ +#! /usr/bin/env python +# encoding: utf-8 + +import os,sys +import Task +from TaskGen import extension,taskgen,after,before +EXT_ASM=['.s','.S','.asm','.ASM','.spp','.SPP'] +as_str='${AS} ${ASFLAGS} ${_ASINCFLAGS} ${SRC} -o ${TGT}' +Task.simple_task_type('asm',as_str,'PINK',ext_out='.o',shell=False) +def asm_hook(self,node): + task=self.create_task('asm') + try:obj_ext=self.obj_ext + except AttributeError:obj_ext='_%d.o'%self.idx + task.inputs=[node] + task.outputs=[node.change_ext(obj_ext)] + self.compiled_tasks.append(task) + self.meths.append('asm_incflags') +def asm_incflags(self): + if self.env['ASINCFLAGS']:self.env['_ASINCFLAGS']=self.env['ASINCFLAGS'] + if'cxx'in self.features:self.env['_ASINCFLAGS']=self.env['_CXXINCFLAGS'] + else:self.env['_ASINCFLAGS']=self.env['_CCINCFLAGS'] +def detect(conf): + comp=conf.environ.get('AS','') + if not comp:comp=conf.env['AS'] + if not comp:comp=conf.find_program('as',var='AS') + if not comp:comp=conf.find_program('gas',var='AS') + if not comp:comp=conf.env['CC'] + if not comp:return + v=conf.env + v['ASFLAGS']='' + +extension(EXT_ASM)(asm_hook) +taskgen(asm_incflags) +after('apply_obj_vars_cc')(asm_incflags) +after('apply_obj_vars_cxx')(asm_incflags) +before('apply_link')(asm_incflags) diff -Nru showq-0.4.1+git20090622/wafadmin/Tools/gcc.py showq-0.4.1+git20090622+dfsg0/wafadmin/Tools/gcc.py --- showq-0.4.1+git20090622/wafadmin/Tools/gcc.py 1970-01-01 00:00:00.000000000 +0000 +++ showq-0.4.1+git20090622+dfsg0/wafadmin/Tools/gcc.py 2009-05-01 23:35:37.000000000 +0000 @@ -0,0 +1,88 @@ +#! 
/usr/bin/env python +# encoding: utf-8 + +import os,sys +import Configure,Options,Utils,TaskGen +import ccroot,ar +from Configure import conftest +def find_gcc(conf): + v=conf.env + cc=None + if v['CC']: + cc=v['CC'] + elif'CC'in conf.environ: + cc=conf.environ['CC'] + if not cc:cc=conf.find_program('gcc',var='CC') + if not cc:cc=conf.find_program('cc',var='CC') + if not cc:conf.fatal('gcc was not found') + cc=conf.cmd_to_list(cc) + ccroot.get_cc_version(conf,cc,gcc=True) + v['CC_NAME']='gcc' + v['CC']=cc +def gcc_common_flags(conf): + v=conf.env + v['CC_SRC_F']='' + v['CC_TGT_F']=['-c','-o',''] + v['CPPPATH_ST']='-I%s' + if not v['LINK_CC']:v['LINK_CC']=v['CC'] + v['CCLNK_SRC_F']='' + v['CCLNK_TGT_F']=['-o',''] + v['LIB_ST']='-l%s' + v['LIBPATH_ST']='-L%s' + v['STATICLIB_ST']='-l%s' + v['STATICLIBPATH_ST']='-L%s' + v['RPATH_ST']='-Wl,-rpath,%s' + v['CCDEFINES_ST']='-D%s' + v['SONAME_ST']='-Wl,-h,%s' + v['SHLIB_MARKER']='-Wl,-Bdynamic' + v['STATICLIB_MARKER']='-Wl,-Bstatic' + v['FULLSTATIC_MARKER']='-static' + v['program_PATTERN']='%s' + v['shlib_CCFLAGS']=['-fPIC','-DPIC'] + v['shlib_LINKFLAGS']=['-shared'] + v['shlib_PATTERN']='lib%s.so' + v['staticlib_LINKFLAGS']=['-Wl,-Bstatic'] + v['staticlib_PATTERN']='lib%s.a' + v['LINKFLAGS_MACBUNDLE']=['-bundle','-undefined','dynamic_lookup'] + v['CCFLAGS_MACBUNDLE']=['-fPIC'] + v['macbundle_PATTERN']='%s.bundle' +def gcc_modifier_win32(conf): + v=conf.env + v['program_PATTERN']='%s.exe' + v['shlib_PATTERN']='%s.dll' + v['staticlib_PATTERN']='%s.lib' + v['shlib_CCFLAGS']=[] + v['staticlib_LINKFLAGS']=[] +def gcc_modifier_cygwin(conf): + return conf.gcc_modifier_win32() +def gcc_modifier_darwin(conf): + v=conf.env + v['shlib_CCFLAGS']=['-fPIC','-compatibility_version','1','-current_version','1'] + v['shlib_LINKFLAGS']=['-dynamiclib'] + v['shlib_PATTERN']='lib%s.dylib' + v['staticlib_LINKFLAGS']=[] + v['SHLIB_MARKER']='' + v['STATICLIB_MARKER']='' +def gcc_modifier_aix5(conf): + v=conf.env + v['program_LINKFLAGS']=['-Wl,-brtl'] + v['shlib_LINKFLAGS']=['-shared','-Wl,-brtl,-bexpfull'] + v['SHLIB_MARKER']='' +def detect(conf): + conf.find_gcc() + conf.find_cpp() + conf.find_ar() + conf.gcc_common_flags() + target_platform=conf.env['TARGET_PLATFORM']or sys.platform + gcc_modifier_func=globals().get('gcc_modifier_'+target_platform) + if gcc_modifier_func: + gcc_modifier_func(conf) + conf.cc_load_tools() + conf.cc_add_flags() + +conftest(find_gcc) +conftest(gcc_common_flags) +conftest(gcc_modifier_win32) +conftest(gcc_modifier_cygwin) +conftest(gcc_modifier_darwin) +conftest(gcc_modifier_aix5) diff -Nru showq-0.4.1+git20090622/wafadmin/Tools/gdc.py showq-0.4.1+git20090622+dfsg0/wafadmin/Tools/gdc.py --- showq-0.4.1+git20090622/wafadmin/Tools/gdc.py 1970-01-01 00:00:00.000000000 +0000 +++ showq-0.4.1+git20090622+dfsg0/wafadmin/Tools/gdc.py 2009-05-01 23:35:37.000000000 +0000 @@ -0,0 +1,47 @@ +#! 
/usr/bin/env python +# encoding: utf-8 + +import sys +import ar +def find_gdc(conf): + v=conf.env + d_compiler=None + if v['D_COMPILER']: + d_compiler=v['D_COMPILER'] + if not d_compiler:d_compiler=conf.find_program('gdc',var='D_COMPILER') + if not d_compiler:return 0 + v['D_COMPILER']=d_compiler +def common_flags(conf): + v=conf.env + v['DFLAGS']=[] + v['D_SRC_F']='' + v['D_TGT_F']=['-c','-o',''] + v['DPATH_ST']='-I%s' + v['D_LINKER']=v['D_COMPILER'] + v['DLNK_SRC_F']='' + v['DLNK_TGT_F']=['-o',''] + v['DLIB_ST']='-l%s' + v['DLIBPATH_ST']='-L%s' + v['DLINKFLAGS']=[] + v['DFLAGS_OPTIMIZED']=['-O3'] + v['DFLAGS_DEBUG']=['-O0'] + v['DFLAGS_ULTRADEBUG']=['-O0'] + v['D_shlib_DFLAGS']=[] + v['D_shlib_LINKFLAGS']=['-shared'] + v['DHEADER_ext']='.di' + v['D_HDR_F']='-fintfc -fintfc-file=' + if sys.platform=="win32": + v['D_program_PATTERN']='%s.exe' + v['D_shlib_PATTERN']='lib%s.dll' + v['D_staticlib_PATTERN']='lib%s.a' + else: + v['D_program_PATTERN']='%s' + v['D_shlib_PATTERN']='lib%s.so' + v['D_staticlib_PATTERN']='lib%s.a' +def detect(conf): + v=conf.env + find_gdc(conf) + ar.find_ar(conf) + conf.check_tool('d') + common_flags(conf) + diff -Nru showq-0.4.1+git20090622/wafadmin/Tools/glib2.py showq-0.4.1+git20090622+dfsg0/wafadmin/Tools/glib2.py --- showq-0.4.1+git20090622/wafadmin/Tools/glib2.py 1970-01-01 00:00:00.000000000 +0000 +++ showq-0.4.1+git20090622+dfsg0/wafadmin/Tools/glib2.py 2009-05-01 23:35:37.000000000 +0000 @@ -0,0 +1,84 @@ +#! /usr/bin/env python +# encoding: utf-8 + +import Task,Utils +from TaskGen import taskgen,before,after,feature +def add_marshal_file(self,filename,prefix): + if not hasattr(self,'marshal_list'): + self.marshal_list=[] + self.meths.append('process_marshal') + self.marshal_list.append((filename,prefix)) +def process_marshal(self): + for f,prefix in getattr(self,'marshal_list',[]): + node=self.path.find_resource(f) + if not node: + raise Utils.WafError('file not found %r'%f) + h_node=node.change_ext('.h') + c_node=node.change_ext('.c') + task=self.create_task('glib_genmarshal') + task.set_inputs(node) + task.set_outputs([h_node,c_node]) + task.env['GLIB_GENMARSHAL_PREFIX']=prefix + self.allnodes.append(c_node) +def genmarshal_func(self): + bld=self.inputs[0].__class__.bld + get=self.env.get_flat + cmd1="%s %s --prefix=%s --header > %s"%(get('GLIB_GENMARSHAL'),self.inputs[0].srcpath(self.env),get('GLIB_GENMARSHAL_PREFIX'),self.outputs[0].abspath(self.env)) + ret=bld.exec_command(cmd1) + if ret:return ret + f=open(self.outputs[1].abspath(self.env),'wb') + f.write('''#include "%s"\n'''%self.outputs[0].name) + f.close() + cmd2="%s %s --prefix=%s --body >> %s"%(get('GLIB_GENMARSHAL'),self.inputs[0].srcpath(self.env),get('GLIB_GENMARSHAL_PREFIX'),self.outputs[1].abspath(self.env)) + ret=Utils.exec_command(cmd2) + if ret:return ret +def add_enums_from_template(self,source='',target='',template='',comments=''): + if not hasattr(self,'enums_list'): + self.enums_list=[] + self.meths.append('process_enums') + self.enums_list.append({'source':source,'target':target,'template':template,'file-head':'','file-prod':'','file-tail':'','enum-prod':'','value-head':'','value-prod':'','value-tail':'','comments':comments}) +def add_enums(self,source='',target='',file_head='',file_prod='',file_tail='',enum_prod='',value_head='',value_prod='',value_tail='',comments=''): + if not hasattr(self,'enums_list'): + self.enums_list=[] + self.meths.append('process_enums') + 
self.enums_list.append({'source':source,'template':'','target':target,'file-head':file_head,'file-prod':file_prod,'file-tail':file_tail,'enum-prod':enum_prod,'value-head':value_head,'value-prod':value_prod,'value-tail':value_tail,'comments':comments}) +def process_enums(self): + for enum in getattr(self,'enums_list',[]): + env=self.env.copy() + task=self.create_task('glib_mkenums',env) + inputs=[] + source_list=self.to_list(enum['source']) + if not source_list: + raise Utils.WafError('missing source '+str(enum)) + source_list=[self.path.find_resource(k)for k in source_list] + inputs+=source_list + env['GLIB_MKENUMS_SOURCE']=[k.srcpath(env)for k in source_list] + if not enum['target']: + raise Utils.WafError('missing target '+str(enum)) + tgt_node=self.path.find_or_declare(enum['target']) + if tgt_node.name.endswith('.c'): + self.allnodes.append(tgt_node) + env['GLIB_MKENUMS_TARGET']=tgt_node.abspath(env) + options=[] + if enum['template']: + template_node=self.path.find_resource(enum['template']) + options.append('--template %s'%(template_node.abspath(env))) + inputs.append(template_node) + params={'file-head':'--fhead','file-prod':'--fprod','file-tail':'--ftail','enum-prod':'--eprod','value-head':'--vhead','value-prod':'--vprod','value-tail':'--vtail','comments':'--comments'} + for param,option in params.iteritems(): + if enum[param]: + options.append('%s %r'%(option,enum[param])) + env['GLIB_MKENUMS_OPTIONS']=' '.join(options) + task.set_inputs(inputs) + task.set_outputs(tgt_node) +Task.task_type_from_func('glib_genmarshal',func=genmarshal_func,vars=['GLIB_GENMARSHAL_PREFIX','GLIB_GENMARSHAL'],color='BLUE',before='cc') +Task.simple_task_type('glib_mkenums','${GLIB_MKENUMS} ${GLIB_MKENUMS_OPTIONS} ${GLIB_MKENUMS_SOURCE} > ${GLIB_MKENUMS_TARGET}',color='PINK',before='cc') +def detect(conf): + glib_genmarshal=conf.find_program('glib-genmarshal',var='GLIB_GENMARSHAL') + mk_enums_tool=conf.find_program('glib-mkenums',var='GLIB_MKENUMS') + +taskgen(add_marshal_file) +before('apply_core')(process_marshal) +taskgen(add_enums_from_template) +taskgen(add_enums) +before('apply_core')(process_enums) diff -Nru showq-0.4.1+git20090622/wafadmin/Tools/gnome.py showq-0.4.1+git20090622+dfsg0/wafadmin/Tools/gnome.py --- showq-0.4.1+git20090622/wafadmin/Tools/gnome.py 1970-01-01 00:00:00.000000000 +0000 +++ showq-0.4.1+git20090622+dfsg0/wafadmin/Tools/gnome.py 2009-05-01 23:35:37.000000000 +0000 @@ -0,0 +1,155 @@ +#! /usr/bin/env python +# encoding: utf-8 + +import os,re +import TaskGen,Utils,Runner,Task,Build,Options,Logs +import cc +from Logs import error +from TaskGen import taskgen,before,after,feature +n1_regexp=re.compile('(.*)',re.M) +n2_regexp=re.compile('(.*)',re.M) +def postinstall_schemas(prog_name): + if Build.bld.is_install: + dir=Build.bld.get_install_path('${PREFIX}/etc/gconf/schemas/%s.schemas'%prog_name) + if not Options.options.destdir: + Utils.pprint('YELLOW','Installing GConf schema') + command='gconftool-2 --install-schema-file=%s 1> /dev/null'%dir + ret=Utils.exec_command(command) + else: + Utils.pprint('YELLOW','GConf schema not installed. After install, run this:') + Utils.pprint('YELLOW','gconftool-2 --install-schema-file=%s'%dir) +def postinstall_icons(): + dir=Build.bld.get_install_path('${DATADIR}/icons/hicolor') + if Build.bld.is_install: + if not Options.options.destdir: + Utils.pprint('YELLOW',"Updating Gtk icon cache.") + command='gtk-update-icon-cache -q -f -t %s'%dir + ret=Utils.exec_command(command) + else: + Utils.pprint('YELLOW','Icon cache not updated. 
After install, run this:') + Utils.pprint('YELLOW','gtk-update-icon-cache -q -f -t %s'%dir) +def postinstall_scrollkeeper(prog_name): + if Build.bld.is_install: + if os.access('/var/log/scrollkeeper.log',os.W_OK): + dir1=Build.bld.get_install_path('${PREFIX}/var/scrollkeeper') + dir2=Build.bld.get_install_path('${DATADIR}/omf/%s'%prog_name) + command='scrollkeeper-update -q -p %s -o %s'%(dir1,dir2) + ret=Utils.exec_command(command) +def postinstall(prog_name='myapp',schemas=1,icons=1,scrollkeeper=1): + if schemas:postinstall_schemas(prog_name) + if icons:postinstall_icons() + if scrollkeeper:postinstall_scrollkeeper(prog_name) +class gnome_doc_taskgen(TaskGen.task_gen): + def __init__(self,*k,**kw): + TaskGen.task_gen.__init__(self,*k,**kw) +def init_gnome_doc(self): + self.default_install_path='${PREFIX}/share' +def apply_gnome_doc(self): + self.env['APPNAME']=self.doc_module + lst=self.to_list(self.doc_linguas) + bld=self.bld + for x in lst: + tsk=self.create_task('xml2po') + node=self.path.find_resource(x+'/'+x+'.po') + src=self.path.find_resource('C/%s.xml'%self.doc_module) + out=self.path.find_or_declare('%s/%s.xml'%(x,self.doc_module)) + tsk.set_inputs([node,src]) + tsk.set_outputs(out) + tsk2=self.create_task('xsltproc2po') + out2=self.path.find_or_declare('%s/%s-%s.omf'%(x,self.doc_module,x)) + tsk2.set_outputs(out2) + node=self.path.find_resource(self.doc_module+".omf.in") + tsk2.inputs=[node,out] + tsk2.run_after.append(tsk) + if bld.is_install: + path=self.install_path+'gnome/help/%s/%s'%(self.doc_module,x) + bld.install_files(self.install_path+'omf',out2.abspath(self.env)) + for y in self.to_list(self.doc_figures): + try: + os.stat(self.path.abspath()+'/'+x+'/'+y) + bld.install_as(path+'/'+y,self.path.abspath()+'/'+x+'/'+y) + except: + bld.install_as(path+'/'+y,self.path.abspath()+'/C/'+y) + bld.install_as(path+'/%s.xml'%self.doc_module,out.abspath(self.env)) +class xml_to_taskgen(TaskGen.task_gen): + def __init__(self,*k,**kw): + TaskGen.task_gen.__init__(self,*k,**kw) +def init_xml_to(self): + Utils.def_attrs(self,source='xmlfile',xslt='xlsltfile',target='hey',default_install_path='${PREFIX}',task_created=None) +def apply_xml_to(self): + xmlfile=self.path.find_resource(self.source) + xsltfile=self.path.find_resource(self.xslt) + tsk=self.create_task('xmlto') + tsk.set_inputs([xmlfile,xsltfile]) + tsk.set_outputs(xmlfile.change_ext('html')) + tsk.install_path=self.install_path +def sgml_scan(self): + node=self.inputs[0] + env=self.env + variant=node.variant(env) + fi=open(node.abspath(env),'r') + content=fi.read() + fi.close() + name=n1_regexp.findall(content)[0] + num=n2_regexp.findall(content)[0] + doc_name=name+'.'+num + if not self.outputs: + self.outputs=[self.generator.path.find_or_declare(doc_name)] + return([],[doc_name]) +class gnome_sgml2man_taskgen(TaskGen.task_gen): + def __init__(self,*k,**kw): + TaskGen.task_gen.__init__(self,*k,**kw) +def apply_gnome_sgml2man(self): + assert(getattr(self,'appname',None)) + def install_result(task): + out=task.outputs[0] + name=out.name + ext=name[-1] + env=task.env + self.bld.install_files('${DATADIR}/man/man%s/'%ext,out.abspath(env),env) + self.bld.rescan(self.path) + for name in self.bld.cache_dir_contents[self.path.id]: + base,ext=os.path.splitext(name) + if ext!='.sgml':continue + task=self.create_task('sgml2man') + task.set_inputs(self.path.find_resource(name)) + task.task_generator=self + if self.bld.is_install:task.install=install_result + task.scan() +cls=Task.simple_task_type('sgml2man','${SGML2MAN} -o 
${TGT[0].bld_dir(env)} ${SRC} > /dev/null',color='BLUE') +cls.scan=sgml_scan +cls.quiet=1 +Task.simple_task_type('xmlto','${XMLTO} html -m ${SRC[1].abspath(env)} ${SRC[0].abspath(env)}') +Task.simple_task_type('xml2po','${XML2PO} ${XML2POFLAGS} ${SRC} > ${TGT}',color='BLUE') +xslt_magic="""${XSLTPROC2PO} -o ${TGT[0].abspath(env)} \ +--stringparam db2omf.basename ${APPNAME} \ +--stringparam db2omf.format docbook \ +--stringparam db2omf.lang C \ +--stringparam db2omf.dtd '-//OASIS//DTD DocBook XML V4.3//EN' \ +--stringparam db2omf.omf_dir ${PREFIX}/share/omf \ +--stringparam db2omf.help_dir ${PREFIX}/share/gnome/help \ +--stringparam db2omf.omf_in ${SRC[0].abspath(env)} \ +--stringparam db2omf.scrollkeeper_cl ${SCROLLKEEPER_DATADIR}/Templates/C/scrollkeeper_cl.xml \ +${DB2OMF} ${SRC[1].abspath(env)}""" +Task.simple_task_type('xsltproc2po',xslt_magic,color='BLUE') +def detect(conf): + conf.check_tool('gnu_dirs glib2 dbus') + sgml2man=conf.find_program('docbook2man',var='SGML2MAN') + def getstr(varname): + return getattr(Options.options,varname,'') + conf.define('GNOMELOCALEDIR',os.path.join(conf.env['DATADIR'],'locale')) + xml2po=conf.find_program('xml2po',var='XML2PO') + xsltproc2po=conf.find_program('xsltproc',var='XSLTPROC2PO') + conf.env['XML2POFLAGS']='-e -p' + conf.env['SCROLLKEEPER_DATADIR']=Utils.cmd_output("scrollkeeper-config --pkgdatadir",silent=1).strip() + conf.env['DB2OMF']=Utils.cmd_output("/usr/bin/pkg-config --variable db2omf gnome-doc-utils",silent=1).strip() +def set_options(opt): + opt.add_option('--want-rpath',type='int',default=1,dest='want_rpath',help='set rpath to 1 or 0 [Default 1]') + +feature('gnome_doc')(init_gnome_doc) +feature('gnome_doc')(apply_gnome_doc) +after('init_gnome_doc')(apply_gnome_doc) +feature('xml_to')(init_xml_to) +feature('xml_to')(apply_xml_to) +after('init_xml_to')(apply_xml_to) +feature('gnome_sgml2man')(apply_gnome_sgml2man) diff -Nru showq-0.4.1+git20090622/wafadmin/Tools/gnu_dirs.py showq-0.4.1+git20090622+dfsg0/wafadmin/Tools/gnu_dirs.py --- showq-0.4.1+git20090622/wafadmin/Tools/gnu_dirs.py 1970-01-01 00:00:00.000000000 +0000 +++ showq-0.4.1+git20090622+dfsg0/wafadmin/Tools/gnu_dirs.py 2009-05-01 23:35:37.000000000 +0000 @@ -0,0 +1,63 @@ +#! 
/usr/bin/env python +# encoding: utf-8 + +import Utils,Options +_options=[x.split(', ')for x in''' +bindir, user executables, ${EXEC_PREFIX}/bin +sbindir, system admin executables, ${EXEC_PREFIX}/sbin +libexecdir, program executables, ${EXEC_PREFIX}/libexec +sysconfdir, read-only single-machine data, ${PREFIX}/etc +sharedstatedir, modifiable architecture-independent data, ${PREFIX}/com +localstatedir, modifiable single-machine data, ${PREFIX}/var +libdir, object code libraries, ${EXEC_PREFIX}/lib +includedir, C header files, ${PREFIX}/include +oldincludedir, C header files for non-gcc, /usr/include +datarootdir, read-only arch.-independent data root, ${PREFIX}/share +datadir, read-only architecture-independent data, ${DATAROOTDIR} +infodir, info documentation, ${DATAROOTDIR}/info +localedir, locale-dependent data, ${DATAROOTDIR}/locale +mandir, man documentation, ${DATAROOTDIR}/man +docdir, documentation root, ${DATAROOTDIR}/doc/${PACKAGE} +htmldir, html documentation, ${DOCDIR} +dvidir, dvi documentation, ${DOCDIR} +pdfdir, pdf documentation, ${DOCDIR} +psdir, ps documentation, ${DOCDIR} +'''.split('\n')if x] +def detect(conf): + def get_param(varname,default): + return getattr(Options.options,varname,'')or default + env=conf.env + env['EXEC_PREFIX']=get_param('EXEC_PREFIX',env['PREFIX']) + env['PACKAGE']=Utils.g_module.APPNAME or env['PACKAGE'] + complete=False + iter=0 + while not complete and iter-1 + def prune(node,name): + if name=='.svn':return True + return False + src_nodes=[x for x in source_root_node.find_iter_impl(dir=False,accept_name=acc,is_prune=prune)] + bld_nodes=[x.change_ext('.class')for x in src_nodes] + self.env['OUTDIR']=[source_root_node.abspath(self.env)] + tsk=self.create_task('javac') + tsk.set_inputs(src_nodes) + tsk.set_outputs(bld_nodes) + if self.jarname: + tsk=self.create_task('jar_create') + tsk.set_inputs(bld_nodes) + tsk.set_outputs(self.path.find_or_declare(self.jarname)) + if not self.env['JAROPTS']: + if self.jaropts: + self.env['JAROPTS']=self.jaropts + else: + dirs='.' 
+ self.env['JAROPTS']=['-C',''.join(self.env['OUTDIR']),dirs] +Task.simple_task_type('jar_create','${JAR} ${JARCREATE} ${TGT} ${JAROPTS}',color='GREEN') +cls=Task.simple_task_type('javac','${JAVAC} -classpath ${CLASSPATH} -d ${OUTDIR} ${JAVACFLAGS} ${SRC}') +cls.color='BLUE' +def post_run_javac(self): + par={} + for x in self.inputs: + par[x.parent.id]=x.parent + inner={} + for k in par.values(): + path=k.abspath(self.env) + lst=os.listdir(path) + for u in lst: + if u.find('$')>=0: + inner_class_node=k.find_or_declare(u) + inner[inner_class_node.id]=inner_class_node + to_add=set(inner.keys())-set([x.id for x in self.outputs]) + for x in to_add: + self.outputs.append(inner[x]) + return Task.Task.post_run(self) +cls.post_run=post_run_javac +def detect(conf): + java_path=conf.environ['PATH'].split(os.pathsep) + v=conf.env + if'JAVA_HOME'in conf.environ: + java_path=[os.path.join(conf.environ['JAVA_HOME'],'bin')]+java_path + conf.env['JAVA_HOME']=[conf.environ['JAVA_HOME']] + for x in'javac java jar'.split(): + conf.find_program(x,var=x.upper(),path_list=java_path) + conf.env[x.upper()]=conf.cmd_to_list(conf.env[x.upper()]) + v['JAVA_EXT']=['.java'] + if'CLASSPATH'in conf.environ: + v['CLASSPATH']=conf.environ['CLASSPATH'] + if not v['JAR']:conf.fatal('jar is required for making java packages') + if not v['JAVAC']:conf.fatal('javac is required for compiling java classes') + v['JARCREATE']='cf' +def check_java_class(self,classname,with_classpath=None): + import shutil + javatestdir='.waf-javatest' + classpath=javatestdir + if self.env['CLASSPATH']: + classpath+=os.pathsep+self.env['CLASSPATH'] + if isinstance(with_classpath,str): + classpath+=os.pathsep+with_classpath + shutil.rmtree(javatestdir,True) + os.mkdir(javatestdir) + java_file=open(os.path.join(javatestdir,'Test.java'),'w') + java_file.write(class_check_source) + java_file.close() + Utils.exec_command(self.env['JAVAC']+[os.path.join(javatestdir,'Test.java')],shell=False) + cmd=self.env['JAVA']+['-cp',classpath,'Test',classname] + self.log.write("%s\n"%str(cmd)) + found=Utils.exec_command(cmd,shell=False,log=self.log) + self.check_message('Java class %s'%classname,"",not found) + shutil.rmtree(javatestdir,True) + return found + +feature('jar')(jar_files) +before('apply_core')(jar_files) +feature('javac')(apply_java) +before('apply_core')(apply_java) +conf(check_java_class) diff -Nru showq-0.4.1+git20090622/wafadmin/Tools/kde4.py showq-0.4.1+git20090622+dfsg0/wafadmin/Tools/kde4.py --- showq-0.4.1+git20090622/wafadmin/Tools/kde4.py 1970-01-01 00:00:00.000000000 +0000 +++ showq-0.4.1+git20090622+dfsg0/wafadmin/Tools/kde4.py 2009-05-01 23:35:37.000000000 +0000 @@ -0,0 +1,59 @@ +#! 
/usr/bin/env python +# encoding: utf-8 + +import os,sys,re +import Options,TaskGen,Task,Utils +from TaskGen import taskgen,feature,after +class msgfmt_taskgen(TaskGen.task_gen): + def __init__(self,*k,**kw): + TaskGen.task_gen.__init__(self,*k,**kw) +def init_msgfmt(self): + self.default_install_path='${KDE4_LOCALE_INSTALL_DIR}' +def apply_msgfmt(self): + for lang in self.to_list(self.langs): + node=self.path.find_resource(lang+'.po') + task=self.create_task('msgfmt') + task.set_inputs(node) + task.set_outputs(node.change_ext('.mo')) + if not self.bld.is_install:continue + langname=lang.split('/') + langname=langname[-1] + task.install_path=self.install_path+os.sep+langname+os.sep+'LC_MESSAGES' + task.filename=getattr(self,'appname','set_your_appname')+'.mo' + task.chmod=self.chmod +def detect(conf): + kdeconfig=conf.find_program('kde4-config') + if not kdeconfig: + conf.fatal('we need kde4-config') + prefix=Utils.cmd_output('%s --prefix'%kdeconfig,silent=True).strip() + file='%s/share/apps/cmake/modules/KDELibsDependencies.cmake'%prefix + try:os.stat(file) + except OSError: + file='%s/share/kde4/apps/cmake/modules/KDELibsDependencies.cmake'%prefix + try:os.stat(file) + except OSError:conf.fatal('could not open %s'%file) + try: + txt=Utils.readf(file) + except(OSError,IOError): + conf.fatal('could not read %s'%file) + txt=txt.replace('\\\n','\n') + fu=re.compile('#(.*)\n') + txt=fu.sub('',txt) + setregexp=re.compile('([sS][eE][tT]\s*\()\s*([^\s]+)\s+\"([^"]+)\"\)') + found=setregexp.findall(txt) + for(_,key,val)in found: + conf.env[key]=val + conf.env['LIB_KDECORE']='kdecore' + conf.env['LIB_KDEUI']='kdeui' + conf.env['LIB_KIO']='kio' + conf.env['LIB_KHTML']='khtml' + conf.env['LIB_KPARTS']='kparts' + conf.env['LIBPATH_KDECORE']=conf.env['KDE4_LIB_INSTALL_DIR'] + conf.env['CPPPATH_KDECORE']=conf.env['KDE4_INCLUDE_INSTALL_DIR'] + conf.env.append_value('CPPPATH_KDECORE',conf.env['KDE4_INCLUDE_INSTALL_DIR']+"/KDE") + conf.env['MSGFMT']=conf.find_program('msgfmt') +Task.simple_task_type('msgfmt','${MSGFMT} ${SRC} -o ${TGT}',color='BLUE',shell=False) + +feature('msgfmt')(init_msgfmt) +feature('msgfmt')(apply_msgfmt) +after('init_msgfmt')(apply_msgfmt) diff -Nru showq-0.4.1+git20090622/wafadmin/Tools/libtool.py showq-0.4.1+git20090622+dfsg0/wafadmin/Tools/libtool.py --- showq-0.4.1+git20090622/wafadmin/Tools/libtool.py 1970-01-01 00:00:00.000000000 +0000 +++ showq-0.4.1+git20090622+dfsg0/wafadmin/Tools/libtool.py 2009-05-01 23:35:37.000000000 +0000 @@ -0,0 +1,242 @@ +#! 
/usr/bin/env python +# encoding: utf-8 + +import sys,re,os,optparse +import TaskGen,Task,Utils,preproc +from Logs import error,debug,warn +from TaskGen import taskgen,after,before,feature +REVISION="0.1.3" +fakelibtool_vardeps=['CXX','PREFIX'] +def fakelibtool_build(task): + env=task.env + dest=open(task.outputs[0].abspath(env),'w') + sname=task.inputs[0].name + fu=dest.write + fu("# Generated by ltmain.sh - GNU libtool 1.5.18 - (pwn3d by BKsys II code name WAF)\n") + if env['vnum']: + nums=env['vnum'].split('.') + libname=task.inputs[0].name + name3=libname+'.'+env['vnum'] + name2=libname+'.'+nums[0] + name1=libname + fu("dlname='%s'\n"%name2) + strn=" ".join([name3,name2,name1]) + fu("library_names='%s'\n"%(strn)) + else: + fu("dlname='%s'\n"%sname) + fu("library_names='%s %s %s'\n"%(sname,sname,sname)) + fu("old_library=''\n") + vars=' '.join(env['libtoolvars']+env['LINKFLAGS']) + fu("dependency_libs='%s'\n"%vars) + fu("current=0\n") + fu("age=0\nrevision=0\ninstalled=yes\nshouldnotlink=no\n") + fu("dlopen=''\ndlpreopen=''\n") + fu("libdir='%s/lib'\n"%env['PREFIX']) + dest.close() + return 0 +def read_la_file(path): + sp=re.compile(r'^([^=]+)=\'(.*)\'$') + dc={} + file=open(path,"r") + for line in file.readlines(): + try: + _,left,right,_=sp.split(line.strip()) + dc[left]=right + except ValueError: + pass + file.close() + return dc +def apply_link_libtool(self): + if self.type!='program': + linktask=self.link_task + latask=self.create_task('fakelibtool') + latask.set_inputs(linktask.outputs) + latask.set_outputs(linktask.outputs[0].change_ext('.la')) + self.latask=latask + if self.bld.is_install: + self.bld.install_files('${PREFIX}/lib',linktask.outputs[0].abspath(self.env),self.env) +def apply_libtool(self): + self.env['vnum']=self.vnum + paths=[] + libs=[] + libtool_files=[] + libtool_vars=[] + for l in self.env['LINKFLAGS']: + if l[:2]=='-L': + paths.append(l[2:]) + elif l[:2]=='-l': + libs.append(l[2:]) + for l in libs: + for p in paths: + dict=read_la_file(p+'/lib'+l+'.la') + linkflags2=dict.get('dependency_libs','') + for v in linkflags2.split(): + if v.endswith('.la'): + libtool_files.append(v) + libtool_vars.append(v) + continue + self.env.append_unique('LINKFLAGS',v) + break + self.env['libtoolvars']=libtool_vars + while libtool_files: + file=libtool_files.pop() + dict=read_la_file(file) + for v in dict['dependency_libs'].split(): + if v[-3:]=='.la': + libtool_files.append(v) + continue + self.env.append_unique('LINKFLAGS',v) +Task.task_type_from_func('fakelibtool',vars=fakelibtool_vardeps,func=fakelibtool_build,color='BLUE',after="cc_link cxx_link ar_link_static") +class libtool_la_file: + def __init__(self,la_filename): + self.__la_filename=la_filename + self.linkname=str(os.path.split(la_filename)[-1])[:-3] + if self.linkname.startswith("lib"): + self.linkname=self.linkname[3:] + self.dlname=None + self.library_names=None + self.old_library=None + self.dependency_libs=None + self.current=None + self.age=None + self.revision=None + self.installed=None + self.shouldnotlink=None + self.dlopen=None + self.dlpreopen=None + self.libdir='/usr/lib' + if not self.__parse(): + raise"file %s not found!!"%(la_filename) + def __parse(self): + if not os.path.isfile(self.__la_filename):return 0 + la_file=open(self.__la_filename,'r') + for line in la_file: + ln=line.strip() + if not ln:continue + if ln[0]=='#':continue + (key,value)=str(ln).split('=',1) + key=key.strip() + value=value.strip() + if value=="no":value=False + elif value=="yes":value=True + else: + try:value=int(value) + except 
ValueError:value=value.strip("'") + setattr(self,key,value) + la_file.close() + return 1 + def get_libs(self): + libs=[] + if self.dependency_libs: + libs=str(self.dependency_libs).strip().split() + if libs==None: + libs=[] + libs.insert(0,"-l%s"%self.linkname.strip()) + libs.insert(0,"-L%s"%self.libdir.strip()) + return libs + def __str__(self): + return'''\ +dlname = "%(dlname)s" +library_names = "%(library_names)s" +old_library = "%(old_library)s" +dependency_libs = "%(dependency_libs)s" +version = %(current)s.%(age)s.%(revision)s +installed = "%(installed)s" +shouldnotlink = "%(shouldnotlink)s" +dlopen = "%(dlopen)s" +dlpreopen = "%(dlpreopen)s" +libdir = "%(libdir)s"'''%self.__dict__ +class libtool_config: + def __init__(self,la_filename): + self.__libtool_la_file=libtool_la_file(la_filename) + tmp=self.__libtool_la_file + self.__version=[int(tmp.current),int(tmp.age),int(tmp.revision)] + self.__sub_la_files=[] + self.__sub_la_files.append(la_filename) + self.__libs=None + def __cmp__(self,other): + if not other: + return 1 + othervers=[int(s)for s in str(other).split(".")] + selfvers=self.__version + return cmp(selfvers,othervers) + def __str__(self): + return"\n".join([str(self.__libtool_la_file),' '.join(self.__libtool_la_file.get_libs()),'* New getlibs:',' '.join(self.get_libs())]) + def __get_la_libs(self,la_filename): + return libtool_la_file(la_filename).get_libs() + def get_libs(self): + libs_list=list(self.__libtool_la_file.get_libs()) + libs_map={} + while len(libs_list)>0: + entry=libs_list.pop(0) + if entry: + if str(entry).endswith(".la"): + if entry not in self.__sub_la_files: + self.__sub_la_files.append(entry) + libs_list.extend(self.__get_la_libs(entry)) + else: + libs_map[entry]=1 + self.__libs=libs_map.keys() + return self.__libs + def get_libs_only_L(self): + if not self.__libs:self.get_libs() + libs=self.__libs + libs=[s for s in libs if str(s).startswith('-L')] + return libs + def get_libs_only_l(self): + if not self.__libs:self.get_libs() + libs=self.__libs + libs=[s for s in libs if str(s).startswith('-l')] + return libs + def get_libs_only_other(self): + if not self.__libs:self.get_libs() + libs=self.__libs + libs=[s for s in libs if not(str(s).startswith('-L')or str(s).startswith('-l'))] + return libs +def useCmdLine(): + usage='''Usage: %prog [options] PathToFile.la +example: %prog --atleast-version=2.0.0 /usr/lib/libIlmImf.la +nor: %prog --libs /usr/lib/libamarok.la''' + parser=optparse.OptionParser(usage) + a=parser.add_option + a("--version",dest="versionNumber",action="store_true",default=False,help="output version of libtool-config") + a("--debug",dest="debug",action="store_true",default=False,help="enable debug") + a("--libs",dest="libs",action="store_true",default=False,help="output all linker flags") + a("--libs-only-l",dest="libs_only_l",action="store_true",default=False,help="output -l flags") + a("--libs-only-L",dest="libs_only_L",action="store_true",default=False,help="output -L flags") + a("--libs-only-other",dest="libs_only_other",action="store_true",default=False,help="output other libs (e.g. 
-pthread)") + a("--atleast-version",dest="atleast_version",default=None,help="return 0 if the module is at least version ATLEAST_VERSION") + a("--exact-version",dest="exact_version",default=None,help="return 0 if the module is exactly version EXACT_VERSION") + a("--max-version",dest="max_version",default=None,help="return 0 if the module is at no newer than version MAX_VERSION") + (options,args)=parser.parse_args() + if len(args)!=1 and not options.versionNumber: + parser.error("incorrect number of arguments") + if options.versionNumber: + print("libtool-config version %s"%REVISION) + return 0 + ltf=libtool_config(args[0]) + if options.debug: + print(ltf) + if options.atleast_version: + if ltf>=options.atleast_version:return 0 + sys.exit(1) + if options.exact_version: + if ltf==options.exact_version:return 0 + sys.exit(1) + if options.max_version: + if ltf<=options.max_version:return 0 + sys.exit(1) + def p(x): + print(" ".join(x)) + if options.libs:p(ltf.get_libs()) + elif options.libs_only_l:p(ltf.get_libs_only_l()) + elif options.libs_only_L:p(ltf.get_libs_only_L()) + elif options.libs_only_other:p(ltf.get_libs_only_other()) + return 0 +if __name__=='__main__': + useCmdLine() + +feature("libtool")(apply_link_libtool) +after('apply_link')(apply_link_libtool) +feature("libtool")(apply_libtool) +before('apply_core')(apply_libtool) diff -Nru showq-0.4.1+git20090622/wafadmin/Tools/lua.py showq-0.4.1+git20090622+dfsg0/wafadmin/Tools/lua.py --- showq-0.4.1+git20090622/wafadmin/Tools/lua.py 1970-01-01 00:00:00.000000000 +0000 +++ showq-0.4.1+git20090622+dfsg0/wafadmin/Tools/lua.py 2009-05-01 23:35:37.000000000 +0000 @@ -0,0 +1,13 @@ +#! /usr/bin/env python +# encoding: utf-8 + +import TaskGen +from TaskGen import taskgen,feature +from Constants import* +TaskGen.declare_chain(name='luac',rule='${LUAC} -s -o ${TGT} ${SRC}',ext_in='.lua',ext_out='.luac',reentrant=0,install='LUADIR',) +def init_lua(self): + self.default_chmod=O755 +def detect(conf): + conf.find_program('luac',var='LUAC',mandatory=True) + +feature('lua')(init_lua) diff -Nru showq-0.4.1+git20090622/wafadmin/Tools/misc.py showq-0.4.1+git20090622+dfsg0/wafadmin/Tools/misc.py --- showq-0.4.1+git20090622/wafadmin/Tools/misc.py 1970-01-01 00:00:00.000000000 +0000 +++ showq-0.4.1+git20090622+dfsg0/wafadmin/Tools/misc.py 2009-05-01 23:35:37.000000000 +0000 @@ -0,0 +1,313 @@ +#! 
/usr/bin/env python +# encoding: utf-8 + +import shutil,re,os +import TaskGen,Node,Task,Utils,Build,Constants +from TaskGen import feature,taskgen,after,before +from Logs import debug +def copy_func(tsk): + env=tsk.env + infile=tsk.inputs[0].abspath(env) + outfile=tsk.outputs[0].abspath(env) + try: + shutil.copy2(infile,outfile) + except(OSError,IOError): + return 1 + else: + if tsk.chmod:os.chmod(outfile,tsk.chmod) + return 0 +def action_process_file_func(tsk): + if not tsk.fun:raise Utils.WafError('task must have a function attached to it for copy_func to work!') + return tsk.fun(tsk) +class cmd_taskgen(TaskGen.task_gen): + def __init__(self,*k,**kw): + TaskGen.task_gen.__init__(self,*k,**kw) +def apply_cmd(self): + if not self.fun:raise Utils.WafError('cmdobj needs a function!') + tsk=Task.TaskBase() + tsk.fun=self.fun + tsk.env=self.env + self.tasks.append(tsk) + tsk.install_path=self.install_path +class copy_taskgen(TaskGen.task_gen): + def __init__(self,*k,**kw): + TaskGen.task_gen.__init__(self,*k,**kw) +def apply_copy(self): + Utils.def_attrs(self,fun=copy_func) + self.default_install_path=0 + lst=self.to_list(self.source) + self.meths.remove('apply_core') + for filename in lst: + node=self.path.find_resource(filename) + if not node:raise Utils.WafError('cannot find input file %s for processing'%filename) + target=self.target + if not target or len(lst)>1:target=node.name + newnode=self.path.find_or_declare(target) + tsk=self.create_task('copy') + tsk.set_inputs(node) + tsk.set_outputs(newnode) + tsk.fun=self.fun + tsk.chmod=self.chmod + if not tsk.env: + tsk.debug() + raise Utils.WafError('task without an environment') +def subst_func(tsk): + m4_re=re.compile('@(\w+)@',re.M) + env=tsk.env + infile=tsk.inputs[0].abspath(env) + outfile=tsk.outputs[0].abspath(env) + code=Utils.readf(infile) + code=code.replace('%','%%') + s=m4_re.sub(r'%(\1)s',code) + di=tsk.dict or{} + if not di: + names=m4_re.findall(code) + for i in names: + di[i]=env.get_flat(i)or env.get_flat(i.upper()) + file=open(outfile,'w') + file.write(s%di) + file.close() + if tsk.chmod:os.chmod(outfile,tsk.chmod) +class subst_taskgen(TaskGen.task_gen): + def __init__(self,*k,**kw): + TaskGen.task_gen.__init__(self,*k,**kw) +def apply_subst(self): + Utils.def_attrs(self,fun=subst_func) + self.default_install_path=0 + lst=self.to_list(self.source) + self.meths.remove('apply_core') + self.dict=getattr(self,'dict',{}) + for filename in lst: + node=self.path.find_resource(filename) + if not node:raise Utils.WafError('cannot find input file %s for processing'%filename) + if self.target: + newnode=self.path.find_or_declare(self.target) + else: + newnode=node.change_ext('') + try: + self.dict=self.dict.get_merged_dict() + except AttributeError: + pass + if self.dict and not self.env['DICT_HASH']: + self.env=self.env.copy() + keys=self.dict.keys() + keys.sort() + lst=[self.dict[x]for x in keys] + self.env['DICT_HASH']=str(Utils.h_list(lst)) + tsk=self.create_task('copy') + tsk.set_inputs(node) + tsk.set_outputs(newnode) + tsk.fun=self.fun + tsk.dict=self.dict + tsk.dep_vars=['DICT_HASH'] + tsk.install_path=self.install_path + tsk.chmod=self.chmod + if not tsk.env: + tsk.debug() + raise Utils.WafError('task without an environment') +class cmd_arg(object): + def __init__(self,name,template='%s'): + self.name=name + self.template=template + self.node=None +class input_file(cmd_arg): + def find_node(self,base_path): + assert isinstance(base_path,Node.Node) + self.node=base_path.find_resource(self.name) + if self.node is None: + raise 
Utils.WafError("Input file %s not found in "%(self.name,base_path)) + def get_path(self,env,absolute): + if absolute: + return self.template%self.node.abspath(env) + else: + return self.template%self.node.srcpath(env) +class output_file(cmd_arg): + def find_node(self,base_path): + assert isinstance(base_path,Node.Node) + self.node=base_path.find_or_declare(self.name) + if self.node is None: + raise Utils.WafError("Output file %s not found in "%(self.name,base_path)) + def get_path(self,env,absolute): + if absolute: + return self.template%self.node.abspath(env) + else: + return self.template%self.node.bldpath(env) +class cmd_dir_arg(cmd_arg): + def __init__(self,name,template=None): + cmd_arg.__init__(self) + self.name=name + self.node=None + if template is None: + self.template='%s' + else: + self.template=template + def find_node(self,base_path): + assert isinstance(base_path,Node.Node) + self.node=base_path.find_dir(self.name) + if self.node is None: + raise Utils.WafError("Directory %s not found in "%(self.name,base_path)) +class input_dir(cmd_dir_arg): + def get_path(self,dummy_env,dummy_absolute): + return self.template%self.node.abspath() +class output_dir(cmd_dir_arg): + def get_path(self,env,dummy_absolute): + return self.template%self.node.abspath(env) +class command_output(Task.Task): + color="BLUE" + def __init__(self,env,command,command_node,command_args,stdin,stdout,cwd,os_env,stderr): + Task.Task.__init__(self,env,normal=1) + assert isinstance(command,(str,Node.Node)) + self.command=command + self.command_args=command_args + self.stdin=stdin + self.stdout=stdout + self.cwd=cwd + self.os_env=os_env + self.stderr=stderr + if command_node is not None:self.dep_nodes=[command_node] + self.dep_vars=[] + def run(self): + task=self + def input_path(node,template): + if task.cwd is None: + return template%node.bldpath(task.env) + else: + return template%node.abspath() + def output_path(node,template): + fun=node.abspath + if task.cwd is None:fun=node.bldpath + return template%fun(task.env) + if isinstance(task.command,Node.Node): + argv=[input_path(task.command,'%s')] + else: + argv=[task.command] + for arg in task.command_args: + if isinstance(arg,str): + argv.append(arg) + else: + assert isinstance(arg,cmd_arg) + argv.append(arg.get_path(task.env,(task.cwd is not None))) + if task.stdin: + stdin=open(input_path(task.stdin,'%s')) + else: + stdin=None + if task.stdout: + stdout=open(output_path(task.stdout,'%s'),"w") + else: + stdout=None + if task.stderr: + stderr=open(output_path(task.stderr,'%s'),"w") + else: + stderr=None + if task.cwd is None: + cwd=('None (actually %r)'%os.getcwd()) + else: + cwd=repr(task.cwd) + debug("command-output: cwd=%s, stdin=%r, stdout=%r, argv=%r"%(cwd,stdin,stdout,argv)) + if task.os_env is None: + os_env=os.environ + else: + os_env=task.os_env + command=Utils.pproc.Popen(argv,stdin=stdin,stdout=stdout,stderr=stderr,cwd=task.cwd,env=os_env) + return command.wait() +class cmd_output_taskgen(TaskGen.task_gen): + def __init__(self,*k,**kw): + TaskGen.task_gen.__init__(self,*k,**kw) +def init_cmd_output(self): + Utils.def_attrs(self,stdin=None,stdout=None,stderr=None,command=None,command_is_external=False,argv=[],dependencies=[],dep_vars=[],hidden_inputs=[],hidden_outputs=[],cwd=None,os_env=None) +def apply_cmd_output(self): + if self.command is None: + raise Utils.WafError("command-output missing command") + if self.command_is_external: + cmd=self.command + cmd_node=None + else: + cmd_node=self.path.find_resource(self.command) + assert cmd_node is not 
None,('''Could not find command '%s' in source tree. +Hint: if this is an external command, +use command_is_external=True''')%(self.command,) + cmd=cmd_node + if self.cwd is None: + cwd=None + else: + assert isinstance(cwd,CmdDirArg) + self.cwd.find_node(self.path) + args=[] + inputs=[] + outputs=[] + for arg in self.argv: + if isinstance(arg,cmd_arg): + arg.find_node(self.path) + if isinstance(arg,input_file): + inputs.append(arg.node) + if isinstance(arg,output_file): + outputs.append(arg.node) + if self.stdout is None: + stdout=None + else: + assert isinstance(self.stdout,basestring) + stdout=self.path.find_or_declare(self.stdout) + if stdout is None: + raise Utils.WafError("File %s not found"%(self.stdout,)) + outputs.append(stdout) + if self.stderr is None: + stderr=None + else: + assert isinstance(self.stderr,basestring) + stderr=self.path.find_or_declare(self.stderr) + if stderr is None: + raise Utils.WafError("File %s not found"%(self.stderr,)) + outputs.append(stderr) + if self.stdin is None: + stdin=None + else: + assert isinstance(self.stdin,basestring) + stdin=self.path.find_resource(self.stdin) + if stdin is None: + raise Utils.WafError("File %s not found"%(self.stdin,)) + inputs.append(stdin) + for hidden_input in self.to_list(self.hidden_inputs): + node=self.path.find_resource(hidden_input) + if node is None: + raise Utils.WafError("File %s not found in dir %s"%(hidden_input,self.path)) + inputs.append(node) + for hidden_output in self.to_list(self.hidden_outputs): + node=self.path.find_or_declare(hidden_output) + if node is None: + raise Utils.WafError("File %s not found in dir %s"%(hidden_output,self.path)) + outputs.append(node) + if not(inputs or getattr(self,'no_inputs',None)): + raise Utils.WafError('command-output objects must have at least one input file or give self.no_inputs') + if not(outputs or getattr(self,'no_outputs',None)): + raise Utils.WafError('command-output objects must have at least one output file or give self.no_outputs') + task=command_output(self.env,cmd,cmd_node,self.argv,stdin,stdout,cwd,self.os_env,stderr) + Utils.copy_attrs(self,task,'before after ext_in ext_out',only_if_set=True) + self.tasks.append(task) + task.inputs=inputs + task.outputs=outputs + task.dep_vars=self.to_list(self.dep_vars) + for dep in self.dependencies: + assert dep is not self + dep.post() + for dep_task in dep.tasks: + task.set_run_after(dep_task) + if not task.inputs: + task.runnable_status=type(Task.TaskBase.run)(runnable_status,task,task.__class__) + task.post_run=type(Task.TaskBase.run)(post_run,task,task.__class__) +def post_run(self): + for x in self.outputs: + h=Utils.h_file(x.abspath(self.env)) + self.generator.bld.node_sigs[self.env.variant()][x.id]=h +def runnable_status(self): + return Constants.RUN_ME +Task.task_type_from_func('copy',vars=[],func=action_process_file_func) +TaskGen.task_gen.classes['command-output']=cmd_output_taskgen + +feature('cmd')(apply_cmd) +feature('copy')(apply_copy) +before('apply_core')(apply_copy) +feature('subst')(apply_subst) +before('apply_core')(apply_subst) +feature('command-output')(init_cmd_output) +feature('command-output')(apply_cmd_output) +after('init_cmd_output')(apply_cmd_output) diff -Nru showq-0.4.1+git20090622/wafadmin/Tools/msvc.py showq-0.4.1+git20090622+dfsg0/wafadmin/Tools/msvc.py --- showq-0.4.1+git20090622/wafadmin/Tools/msvc.py 1970-01-01 00:00:00.000000000 +0000 +++ showq-0.4.1+git20090622+dfsg0/wafadmin/Tools/msvc.py 2009-05-01 23:35:37.000000000 +0000 @@ -0,0 +1,513 @@ +#! 
/usr/bin/env python +# encoding: utf-8 + +import os,sys,re,string,optparse +import Utils,TaskGen,Runner,Configure,Task,Options +from Logs import debug,error,warn +from TaskGen import after,before,feature +from Configure import conftest,conf +import ccroot,cc,cxx,ar,winres +from libtool import read_la_file +import _winreg +all_msvc_platforms=[('x64','amd64'),('x86','x86'),('ia64','ia64'),('x86_amd64','amd64'),('x86_ia64','ia64')] +all_icl_platforms=[('Itanium','ia64'),('intel64','amd64'),('em64t','amd64'),('ia32','x86')] +def setup_msvc(conf,versions): + platforms=Utils.to_list(conf.env['MSVC_TARGETS'])or[i for i,j in all_msvc_platforms+all_icl_platforms] + desired_versions=conf.env['MSVC_VERSIONS']or[v for v,_ in versions][::-1] + versiondict=dict(versions) + for version in desired_versions: + try: + targets=dict(versiondict[version]) + for target in platforms: + try: + arch,(p1,p2,p3)=targets[target] + compiler,version=version.split() + return compiler,p1,p2,p3 + except KeyError:continue + except KeyError:continue + conf.fatal('msvc: Impossible to find a valid architecture for building') +def get_msvc_version(conf,version,target,vcvars): + batfile=os.path.join(conf.blddir,"waf-print-msvc.bat") + f=open(batfile,'w') + f.write("""@echo off +set INCLUDE= +set LIB= +call %1 %2 +echo PATH=%PATH% +echo INCLUDE=%INCLUDE% +echo LIB=%LIB% +""") + f.close() + sout=Utils.cmd_output(['cmd','/E:on','/V:on','/C',batfile,vcvars,target]) + lines=sout.splitlines() + if lines[0].find("Setting environment")==-1 and lines[0].find("Setting SDK environment")==-1 and lines[1].find('Intel(R) C++ Compiler')==-1: + conf.fatal('msvc: Impossible to find a valid architecture for building') + for line in lines[1:]: + if line.startswith('PATH='): + MSVC_PATH=line[5:].split(';') + elif line.startswith('INCLUDE='): + MSVC_INCDIR=[i for i in line[8:].split(';')if i] + elif line.startswith('LIB='): + MSVC_LIBDIR=[i for i in line[4:].split(';')if i] + return(MSVC_PATH,MSVC_INCDIR,MSVC_LIBDIR) +def gather_wsdk_versions(conf,versions): + version_pattern=re.compile('^v..?.?\...?.?') + try: + all_versions=_winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Wow6432node\\Microsoft\\Microsoft SDKs\\Windows') + except WindowsError: + try: + all_versions=_winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Microsoft\\Microsoft SDKs\\Windows') + except WindowsError: + return + index=0 + while 1: + try: + version=_winreg.EnumKey(all_versions,index) + except WindowsError: + break + index=index+1 + if not version_pattern.match(version): + continue + try: + msvc_version=_winreg.OpenKey(all_versions,version) + path,type=_winreg.QueryValueEx(msvc_version,'InstallationFolder') + except WindowsError: + continue + if os.path.isfile(os.path.join(path,'bin','SetEnv.cmd')): + targets=[] + for target,arch in all_msvc_platforms: + try: + targets.append((target,(arch,conf.get_msvc_version(version,'/'+target,os.path.join(path,'bin','SetEnv.cmd'))))) + except Configure.ConfigurationError: + pass + versions.append(('wsdk '+version[1:],targets)) +def gather_msvc_versions(conf,versions): + version_pattern=re.compile('^..?\...?') + for vcver,vcvar in[('VCExpress','exp'),('VisualStudio','')]: + try: + all_versions=_winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Wow6432node\\Microsoft\\'+vcver) + except WindowsError: + try: + all_versions=_winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Microsoft\\'+vcver) + except WindowsError: + continue + index=0 + while 1: + try: + version=_winreg.EnumKey(all_versions,index) + except WindowsError: + break 
+ index=index+1 + if not version_pattern.match(version): + continue + try: + msvc_version=_winreg.OpenKey(all_versions,version+"\\Setup\\VS") + path,type=_winreg.QueryValueEx(msvc_version,'ProductDir') + targets=[] + if os.path.isfile(os.path.join(path,'VC','vcvarsall.bat')): + for target,realtarget in all_msvc_platforms[::-1]: + try: + targets.append((target,(realtarget,conf.get_msvc_version(version,target,os.path.join(path,'VC','vcvarsall.bat'))))) + except: + pass + elif os.path.isfile(os.path.join(path,'Common7','Tools','vsvars32.bat')): + try: + targets.append(('x86',('x86',conf.get_msvc_version(version,'x86',os.path.join(path,'Common7','Tools','vsvars32.bat'))))) + except Configure.ConfigurationError: + pass + versions.append(('msvc '+version,targets)) + except WindowsError: + continue +def gather_icl_versions(conf,versions): + version_pattern=re.compile('^...?.?\....?.?') + try: + all_versions=_winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Wow6432node\\Intel\\Compilers\\C++') + except WindowsError: + try: + all_versions=_winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Intel\\Compilers\\C++') + except WindowsError: + return + index=0 + while 1: + try: + version=_winreg.EnumKey(all_versions,index) + except WindowsError: + break + index=index+1 + if not version_pattern.match(version): + continue + targets=[] + for target,arch in all_icl_platforms: + try: + icl_version=_winreg.OpenKey(all_versions,version+'\\'+target) + path,type=_winreg.QueryValueEx(icl_version,'ProductDir') + if os.path.isfile(os.path.join(path,'bin','iclvars.bat')): + try: + targets.append((target,(arch,conf.get_msvc_version(version,target,os.path.join(path,'bin','iclvars.bat'))))) + except Configure.ConfigurationError: + pass + except WindowsError: + continue + major=version[0:2] + versions.append(('intel '+major,targets)) +def get_msvc_versions(conf): + if not conf.env['MSVC_INSTALLED_VERSIONS']: + conf.env['MSVC_INSTALLED_VERSIONS']=[] + conf.gather_msvc_versions(conf.env['MSVC_INSTALLED_VERSIONS']) + conf.gather_wsdk_versions(conf.env['MSVC_INSTALLED_VERSIONS']) + conf.gather_icl_versions(conf.env['MSVC_INSTALLED_VERSIONS']) + return conf.env['MSVC_INSTALLED_VERSIONS'] +def detect_msvc(conf): + versions=get_msvc_versions(conf) + return setup_msvc(conf,versions) +def msvc_linker(task): + static=task.__class__.name.find('static')>0 + e=env=task.env + subsystem=getattr(task.generator,'subsystem','') + if subsystem: + subsystem='/subsystem:%s'%subsystem + outfile=task.outputs[0].bldpath(e) + manifest=outfile+'.manifest' + def to_list(xx): + if isinstance(xx,str):return[xx] + return xx + lst=[] + if static: + lst.extend(to_list(env['STLIBLINK'])) + else: + lst.extend(to_list(env['LINK'])) + lst.extend(to_list(subsystem)) + if static: + lst.extend(to_list(env['STLINKFLAGS'])) + else: + lst.extend(to_list(env['LINKFLAGS'])) + lst.extend([a.srcpath(env)for a in task.inputs]) + lst.extend(to_list('/OUT:%s'%outfile)) + lst=[x for x in lst if x] + lst=[lst] + ret=task.exec_command(*lst) + if ret:return ret + pdbnode=task.outputs[0].change_ext('.pdb') + pdbfile=pdbnode.bldpath(e) + if os.path.exists(pdbfile): + task.outputs.append(pdbnode) + if not static and os.path.exists(manifest): + debug('msvc: manifesttool') + mtool=e['MT'] + if not mtool: + return 0 + mode='' + if'cprogram'in task.generator.features: + mode='1' + elif'cshlib'in task.generator.features: + mode='2' + debug('msvc: embedding manifest') + lst=[] + lst.extend(to_list(e['MT'])) + lst.extend(to_list(e['MTFLAGS'])) + 
lst.extend(to_list("-manifest")) + lst.extend(to_list(manifest)) + lst.extend(to_list("-outputresource:%s;%s"%(outfile,mode))) + lst=[lst] + ret=task.exec_command(*lst) + return ret +g_msvc_systemlibs=""" +aclui activeds ad1 adptif adsiid advapi32 asycfilt authz bhsupp bits bufferoverflowu cabinet +cap certadm certidl ciuuid clusapi comctl32 comdlg32 comsupp comsuppd comsuppw comsuppwd comsvcs +credui crypt32 cryptnet cryptui d3d8thk daouuid dbgeng dbghelp dciman32 ddao35 ddao35d +ddao35u ddao35ud delayimp dhcpcsvc dhcpsapi dlcapi dnsapi dsprop dsuiext dtchelp +faultrep fcachdll fci fdi framedyd framedyn gdi32 gdiplus glauxglu32 gpedit gpmuuid +gtrts32w gtrtst32hlink htmlhelp httpapi icm32 icmui imagehlp imm32 iphlpapi iprop +kernel32 ksguid ksproxy ksuser libcmt libcmtd libcpmt libcpmtd loadperf lz32 mapi +mapi32 mgmtapi minidump mmc mobsync mpr mprapi mqoa mqrt msacm32 mscms mscoree +msdasc msimg32 msrating mstask msvcmrt msvcurt msvcurtd mswsock msxml2 mtx mtxdm +netapi32 nmapinmsupp npptools ntdsapi ntdsbcli ntmsapi ntquery odbc32 odbcbcp +odbccp32 oldnames ole32 oleacc oleaut32 oledb oledlgolepro32 opends60 opengl32 +osptk parser pdh penter pgobootrun pgort powrprof psapi ptrustm ptrustmd ptrustu +ptrustud qosname rasapi32 rasdlg rassapi resutils riched20 rpcndr rpcns4 rpcrt4 rtm +rtutils runtmchk scarddlg scrnsave scrnsavw secur32 sensapi setupapi sfc shell32 +shfolder shlwapi sisbkup snmpapi sporder srclient sti strsafe svcguid tapi32 thunk32 +traffic unicows url urlmon user32 userenv usp10 uuid uxtheme vcomp vcompd vdmdbg +version vfw32 wbemuuid webpost wiaguid wininet winmm winscard winspool winstrm +wintrust wldap32 wmiutils wow32 ws2_32 wsnmp32 wsock32 wst wtsapi32 xaswitch xolehlp +""".split() +def find_lt_names_msvc(self,libname,is_static=False): + lt_names=['lib%s.la'%libname,'%s.la'%libname,] + for path in self.env['LIBPATH']: + for la in lt_names: + laf=os.path.join(path,la) + dll=None + if os.path.exists(laf): + ltdict=read_la_file(laf) + lt_libdir=None + if ltdict.get('libdir',''): + lt_libdir=ltdict['libdir'] + if not is_static and ltdict.get('library_names',''): + dllnames=ltdict['library_names'].split() + dll=dllnames[0].lower() + dll=re.sub('\.dll$','',dll) + return(lt_libdir,dll,False) + elif ltdict.get('old_library',''): + olib=ltdict['old_library'] + if os.path.exists(os.path.join(path,olib)): + return(path,olib,True) + elif lt_libdir!=''and os.path.exists(os.path.join(lt_libdir,olib)): + return(lt_libdir,olib,True) + else: + return(None,olib,True) + else: + raise Utils.WafError('invalid libtool object file: %s'%laf) + return(None,None,None) +def libname_msvc(self,libname,is_static=False,mandatory=False): + lib=libname.lower() + lib=re.sub('\.lib$','',lib) + if lib in g_msvc_systemlibs: + return lib + lib=re.sub('^lib','',lib) + if lib=='m': + return None + (lt_path,lt_libname,lt_static)=self.find_lt_names_msvc(lib,is_static) + if lt_path!=None and lt_libname!=None: + if lt_static==True: + return os.path.join(lt_path,lt_libname) + if lt_path!=None: + _libpaths=[lt_path]+self.env['LIBPATH'] + else: + _libpaths=self.env['LIBPATH'] + static_libs=['%ss.lib'%lib,'lib%ss.lib'%lib,'%s.lib'%lib,'lib%s.lib'%lib,] + dynamic_libs=['lib%s.dll.lib'%lib,'lib%s.dll.a'%lib,'%s.dll.lib'%lib,'%s.dll.a'%lib,'lib%s_d.lib'%lib,'%s_d.lib'%lib,'%s.lib'%lib,] + libnames=static_libs + if not is_static: + libnames=dynamic_libs+static_libs + for path in _libpaths: + for libn in libnames: + if os.path.exists(os.path.join(path,libn)): + debug('msvc: lib found: %s'%os.path.join(path,libn)) + 
return re.sub('\.lib$','',libn) + if mandatory: + self.fatal("The library %r could not be found"%libname) + return re.sub('\.lib$','',libname) +def check_lib_msvc(self,libname,is_static=False,uselib_store=None,mandatory=False): + libn=self.libname_msvc(libname,is_static,mandatory) + if not uselib_store: + uselib_store=libname.upper() + if is_static: + self.env['LIB_'+uselib_store]=[libn] + else: + self.env['STATICLIB_'+uselib_store]=[libn] +def check_libs_msvc(self,libnames,is_static=False,mandatory=False): + for libname in Utils.to_list(libnames): + self.check_lib_msvc(libname,is_static,mandatory=mandatory) +def apply_obj_vars_msvc(self): + if self.env['CC_NAME']!='msvc': + return + try: + self.meths.remove('apply_obj_vars') + except ValueError: + pass + env=self.env + app=env.append_unique + cpppath_st=env['CPPPATH_ST'] + lib_st=env['LIB_ST'] + staticlib_st=env['STATICLIB_ST'] + libpath_st=env['LIBPATH_ST'] + staticlibpath_st=env['STATICLIBPATH_ST'] + for i in env['LIBPATH']: + app('LINKFLAGS',libpath_st%i) + if not self.libpaths.count(i): + self.libpaths.append(i) + for i in env['LIBPATH']: + app('LINKFLAGS',staticlibpath_st%i) + if not self.libpaths.count(i): + self.libpaths.append(i) + if not env['FULLSTATIC']: + if env['STATICLIB']or env['LIB']: + app('LINKFLAGS',env['SHLIB_MARKER']) + for i in env['STATICLIB']: + app('LINKFLAGS',lib_st%i) + for i in env['LIB']: + app('LINKFLAGS',lib_st%i) +def apply_link_msvc(self): + if self.env['CC_NAME']!='msvc': + return + link=getattr(self,'link',None) + if not link: + if'cstaticlib'in self.features:link='msvc_link_static' + elif'cxx'in self.features:link='msvc_cxx_link' + else:link='msvc_cc_link' + self.vnum='' + self.link=link +def init_msvc(self): + try:getattr(self,'libpaths') + except AttributeError:self.libpaths=[] +Task.task_type_from_func('msvc_link_static',vars=['STLIBLINK','STLINKFLAGS'],color='YELLOW',func=msvc_linker,ext_in='.o') +Task.task_type_from_func('msvc_cc_link',vars=['LINK','LINK_SRC_F','LINKFLAGS','MT','MTFLAGS'],color='YELLOW',func=msvc_linker,ext_in='.o') +Task.task_type_from_func('msvc_cxx_link',vars=['LINK','LINK_SRC_F','LINKFLAGS','MT','MTFLAGS'],color='YELLOW',func=msvc_linker,ext_in='.o') +rc_str='${RC} ${RCFLAGS} /fo ${TGT} ${SRC}' +Task.simple_task_type('rc',rc_str,color='GREEN',before='cc cxx',shell=False) +def no_autodetect(conf): + conf.eval_rules(detect.replace('autodetect','')) +detect=''' +autodetect +find_msvc +msvc_common_flags +cc_load_tools +cxx_load_tools +cc_add_flags +cxx_add_flags +''' +def autodetect(conf): + v=conf.env + compiler,path,includes,libdirs=detect_msvc(conf) + v['PATH']=path + v['CPPPATH']=includes + v['LIBPATH']=libdirs + v['MSVC_COMPILER']=compiler +def find_msvc(conf): + if sys.platform!='win32': + conf.fatal('MSVC module only works under native Win32 Python! 
cygwin is not supported yet') + v=conf.env + compiler,path,includes,libdirs=detect_msvc(conf) + v['PATH']=path + v['CPPPATH']=includes + v['LIBPATH']=libdirs + if compiler=='msvc'or compiler=='wsdk': + compiler_name='CL' + linker_name='LINK' + lib_name='LIB' + elif compiler=='intel': + compiler_name='ICL' + linker_name='XILINK' + lib_name='XILIB' + else: + conf.fatal('Unknown compiler type : %s'%compiler) + cxx=None + if v['CXX']:cxx=v['CXX'] + elif'CXX'in conf.environ:cxx=conf.environ['CXX'] + if not cxx:cxx=conf.find_program(compiler_name,var='CXX',path_list=path) + if not cxx:conf.fatal('%s was not found (compiler)'%compiler_name) + cxx=conf.cmd_to_list(cxx) + env=dict(conf.environ) + env.update(PATH=';'.join(path)) + if not Utils.cmd_output([cxx,'/nologo','/?'],silent=True,env=env): + conf.fatal('the msvc compiler could not be identified') + v['CC']=v['CXX']=cxx + v['CC_NAME']=v['CXX_NAME']='msvc' + try:v.prepend_value('CPPPATH',conf.environ['INCLUDE']) + except KeyError:pass + try:v.prepend_value('LIBPATH',conf.environ['LIB']) + except KeyError:pass + if not v['LINK_CXX']: + link=conf.find_program(linker_name,path_list=path) + if link:v['LINK_CXX']=link + else:conf.fatal('%s was not found (linker)'%linker_name) + v['LINK']=link + if not v['LINK_CC']:v['LINK_CC']=v['LINK_CXX'] + if not v['STLIBLINK']: + stliblink=conf.find_program(lib_name,path_list=path) + if not stliblink:return + v['STLIBLINK']=stliblink + v['STLINKFLAGS']=['/NOLOGO'] + manifesttool=conf.find_program('MT',path_list=path) + if manifesttool: + v['MT']=manifesttool + v['MTFLAGS']=['/NOLOGO'] + conf.check_tool('winres') + if not conf.env['WINRC']: + warn('Resource compiler not found. Compiling resource file is disabled') +def exec_command_msvc(self,*k,**kw): + if self.env['CC_NAME']=='msvc': + if isinstance(k[0],list): + lst=[] + carry='' + for a in k[0]: + if(len(a)==3 and(a.startswith('/F')or a.startswith('/Y')))or(a=='/doc'): + carry=a + else: + lst.append(carry+a) + carry='' + k=[lst] + env=dict(os.environ) + env.update(PATH=';'.join(self.env['PATH'])) + kw['env']=env + return self.generator.bld.exec_command(*k,**kw) +for k in'cc cxx msvc_cc_link msvc_cxx_link msvc_link_static winrc'.split(): + cls=Task.TaskBase.classes.get(k,None) + if cls: + cls.exec_command=exec_command_msvc +def msvc_common_flags(conf): + v=conf.env + v['CPPFLAGS']=['/W3','/nologo','/EHsc'] + v['CCDEFINES']=['WIN32'] + v['CXXDEFINES']=['WIN32'] + v['_CCINCFLAGS']=[] + v['_CCDEFFLAGS']=[] + v['_CXXINCFLAGS']=[] + v['_CXXDEFFLAGS']=[] + v['CC_SRC_F']='' + v['CC_TGT_F']=['/c','/Fo'] + v['CXX_SRC_F']='' + v['CXX_TGT_F']=['/c','/Fo'] + v['CPPPATH_ST']='/I%s' + v['CPPFLAGS_CONSOLE']=['/SUBSYSTEM:CONSOLE'] + v['CPPFLAGS_NATIVE']=['/SUBSYSTEM:NATIVE'] + v['CPPFLAGS_POSIX']=['/SUBSYSTEM:POSIX'] + v['CPPFLAGS_WINDOWS']=['/SUBSYSTEM:WINDOWS'] + v['CPPFLAGS_WINDOWSCE']=['/SUBSYSTEM:WINDOWSCE'] + v['CPPFLAGS_CRT_MULTITHREADED']=['/MT'] + v['CPPFLAGS_CRT_MULTITHREADED_DLL']=['/MD'] + v['CPPDEFINES_CRT_MULTITHREADED']=['_MT'] + v['CPPDEFINES_CRT_MULTITHREADED_DLL']=['_MT','_DLL'] + v['CPPFLAGS_CRT_MULTITHREADED_DBG']=['/MTd'] + v['CPPFLAGS_CRT_MULTITHREADED_DLL_DBG']=['/MDd'] + v['CPPDEFINES_CRT_MULTITHREADED_DBG']=['_DEBUG','_MT'] + v['CPPDEFINES_CRT_MULTITHREADED_DLL_DBG']=['_DEBUG','_MT','_DLL'] + v['CCFLAGS']=['/TC'] + v['CCFLAGS_OPTIMIZED']=['/O2','/DNDEBUG'] + v['CCFLAGS_RELEASE']=['/O2','/DNDEBUG'] + v['CCFLAGS_DEBUG']=['/Od','/RTC1','/D_DEBUG','/ZI'] + v['CCFLAGS_ULTRADEBUG']=['/Od','/RTC1','/D_DEBUG','/ZI'] + v['CXXFLAGS']=['/TP'] + 
v['CXXFLAGS_OPTIMIZED']=['/O2','/DNDEBUG'] + v['CXXFLAGS_RELEASE']=['/O2','/DNDEBUG'] + v['CXXFLAGS_DEBUG']=['/Od','/RTC1','/D_DEBUG','/ZI'] + v['CXXFLAGS_ULTRADEBUG']=['/Od','/RTC1','/D_DEBUG','/ZI'] + v['LIB']=[] + v['LINK_TGT_F']='/OUT:' + v['LINK_SRC_F']='' + v['LIB_ST']='%s.lib' + v['LIBPATH_ST']='/LIBPATH:%s' + v['STATICLIB_ST']='%s.lib' + v['STATICLIBPATH_ST']='/LIBPATH:%s' + v['CCDEFINES_ST']='/D%s' + v['CXXDEFINES_ST']='/D%s' + v['LINKFLAGS']=['/NOLOGO','/MANIFEST'] + v['shlib_CCFLAGS']=[''] + v['shlib_CXXFLAGS']=[''] + v['shlib_LINKFLAGS']=['/DLL'] + v['shlib_PATTERN']='%s.dll' + v['staticlib_LINKFLAGS']=[''] + v['staticlib_PATTERN']='%s.lib' + v['program_PATTERN']='%s.exe' + +conf(get_msvc_version) +conf(gather_wsdk_versions) +conf(gather_msvc_versions) +conf(gather_icl_versions) +conf(get_msvc_versions) +conf(find_lt_names_msvc) +conf(libname_msvc) +conf(check_lib_msvc) +conf(check_libs_msvc) +feature('cprogram','cshlib','cstaticlib')(apply_obj_vars_msvc) +after('apply_lib_vars')(apply_obj_vars_msvc) +before('apply_obj_vars')(apply_obj_vars_msvc) +feature('cprogram','cshlib','cstaticlib')(apply_link_msvc) +before('apply_link')(apply_link_msvc) +feature('cc','cxx')(init_msvc) +after('init_cc','init_cxx')(init_msvc) +before('apply_type_vars','apply_core')(init_msvc) +conftest(no_autodetect) +conftest(autodetect) +conftest(find_msvc) +conftest(msvc_common_flags) diff -Nru showq-0.4.1+git20090622/wafadmin/Tools/nasm.py showq-0.4.1+git20090622+dfsg0/wafadmin/Tools/nasm.py --- showq-0.4.1+git20090622/wafadmin/Tools/nasm.py 1970-01-01 00:00:00.000000000 +0000 +++ showq-0.4.1+git20090622+dfsg0/wafadmin/Tools/nasm.py 2009-05-01 23:35:37.000000000 +0000 @@ -0,0 +1,35 @@ +#! /usr/bin/env python +# encoding: utf-8 + +import os +import TaskGen,Task,Utils +from TaskGen import taskgen,before,extension +nasm_str='${NASM} ${NASM_FLAGS} ${NASM_INCLUDES} ${SRC} -o ${TGT}' +EXT_NASM=['.s','.S','.asm','.ASM','.spp','.SPP'] +def apply_nasm_vars(self): + if hasattr(self,'nasm_flags'): + for flag in self.to_list(self.nasm_flags): + self.env.append_value('NASM_FLAGS',flag) + if hasattr(self,'includes'): + for inc in self.to_list(self.includes): + node=self.path.find_dir(inc) + if not node: + raise Utils.WafError('cannot find the dir'+inc) + self.env.append_value('NASM_INCLUDES','-I%s'%node.srcpath(self.env)) + self.env.append_value('NASM_INCLUDES','-I%s'%node.bldpath(self.env)) +def nasm_file(self,node): + try:obj_ext=self.obj_ext + except AttributeError:obj_ext='_%d.o'%self.idx + task=self.create_task('nasm') + task.inputs=[node] + task.outputs=[node.change_ext(obj_ext)] + self.compiled_tasks.append(task) + self.meths.append('apply_nasm_vars') +Task.simple_task_type('nasm',nasm_str,color='BLUE',ext_out='.o',shell=False) +def detect(conf): + nasm=conf.find_program('nasm',var='NASM') + if not nasm:nasm=conf.find_program('yasm',var='NASM') + if not nasm:conf.fatal('could not find nasm (or yasm), install it or set PATH env var') + +before('apply_link')(apply_nasm_vars) +extension(EXT_NASM)(nasm_file) diff -Nru showq-0.4.1+git20090622/wafadmin/Tools/ocaml.py showq-0.4.1+git20090622+dfsg0/wafadmin/Tools/ocaml.py --- showq-0.4.1+git20090622/wafadmin/Tools/ocaml.py 1970-01-01 00:00:00.000000000 +0000 +++ showq-0.4.1+git20090622+dfsg0/wafadmin/Tools/ocaml.py 2009-05-01 23:35:37.000000000 +0000 @@ -0,0 +1,238 @@ +#! 
/usr/bin/env python +# encoding: utf-8 + +import os,re +import TaskGen,Utils,Task,Build +from Logs import error +from TaskGen import taskgen,feature,before,after,extension +EXT_MLL=['.mll'] +EXT_MLY=['.mly'] +EXT_MLI=['.mli'] +EXT_MLC=['.c'] +EXT_ML=['.ml'] +open_re=re.compile('^\s*open\s+([a-zA-Z]+)(;;){0,1}$',re.M) +foo=re.compile(r"""(\(\*)|(\*\))|("(\\.|[^"\\])*"|'(\\.|[^'\\])*'|.[^()*"'\\]*)""",re.M) +def filter_comments(txt): + meh=[0] + def repl(m): + if m.group(1):meh[0]+=1 + elif m.group(2):meh[0]-=1 + elif not meh[0]:return m.group(0) + return'' + return foo.sub(repl,txt) +def scan(self): + node=self.inputs[0] + code=filter_comments(node.read(self.env)) + global open_re + names=[] + import_iterator=open_re.finditer(code) + if import_iterator: + for import_match in import_iterator: + names.append(import_match.group(1)) + found_lst=[] + raw_lst=[] + for name in names: + nd=None + for x in self.incpaths: + nd=x.find_resource(name.lower()+'.ml') + if not nd:nd=x.find_resource(name+'.ml') + if nd: + found_lst.append(nd) + break + else: + raw_lst.append(name) + return(found_lst,raw_lst) +native_lst=['native','all','c_object'] +bytecode_lst=['bytecode','all'] +class ocaml_taskgen(TaskGen.task_gen): + def __init__(self,*k,**kw): + TaskGen.task_gen.__init__(self,*k,**kw) +def init_ml(self): + Utils.def_attrs(self,type='all',incpaths_lst=[],bld_incpaths_lst=[],mlltasks=[],mlytasks=[],mlitasks=[],native_tasks=[],bytecode_tasks=[],linktasks=[],bytecode_env=None,native_env=None,compiled_tasks=[],includes='',uselib='',are_deps_set=0) +def init_envs_ml(self): + self.islibrary=getattr(self,'islibrary',False) + global native_lst,bytecode_lst + self.native_env=None + if self.type in native_lst: + self.native_env=self.env.copy() + if self.islibrary:self.native_env['OCALINKFLAGS']='-a' + self.bytecode_env=None + if self.type in bytecode_lst: + self.bytecode_env=self.env.copy() + if self.islibrary:self.bytecode_env['OCALINKFLAGS']='-a' + if self.type=='c_object': + self.native_env.append_unique('OCALINKFLAGS_OPT','-output-obj') +def apply_incpaths_ml(self): + inc_lst=self.includes.split() + lst=self.incpaths_lst + for dir in inc_lst: + node=self.path.find_dir(dir) + if not node: + error("node not found: "+str(dir)) + continue + self.bld.rescan(node) + if not node in lst:lst.append(node) + self.bld_incpaths_lst.append(node) +def apply_vars_ml(self): + for i in self.incpaths_lst: + if self.bytecode_env: + app=self.bytecode_env.append_value + app('OCAMLPATH','-I') + app('OCAMLPATH',i.srcpath(self.env)) + app('OCAMLPATH','-I') + app('OCAMLPATH',i.bldpath(self.env)) + if self.native_env: + app=self.native_env.append_value + app('OCAMLPATH','-I') + app('OCAMLPATH',i.bldpath(self.env)) + app('OCAMLPATH','-I') + app('OCAMLPATH',i.srcpath(self.env)) + varnames=['INCLUDES','OCAMLFLAGS','OCALINKFLAGS','OCALINKFLAGS_OPT'] + for name in self.uselib.split(): + for vname in varnames: + cnt=self.env[vname+'_'+name] + if cnt: + if self.bytecode_env:self.bytecode_env.append_value(vname,cnt) + if self.native_env:self.native_env.append_value(vname,cnt) +def apply_link_ml(self): + if self.bytecode_env: + ext=self.islibrary and'.cma'or'.run' + linktask=self.create_task('ocalink') + linktask.bytecode=1 + linktask.set_outputs(self.path.find_or_declare(self.target+ext)) + linktask.obj=self + linktask.env=self.bytecode_env + self.linktasks.append(linktask) + if self.native_env: + if self.type=='c_object':ext='.o' + elif self.islibrary:ext='.cmxa' + else:ext='' + linktask=self.create_task('ocalinkx') + 
linktask.set_outputs(self.path.find_or_declare(self.target+ext)) + linktask.obj=self + linktask.env=self.native_env + self.linktasks.append(linktask) + self.compiled_tasks.append(linktask) +def mll_hook(self,node): + mll_task=self.create_task('ocamllex',self.native_env) + mll_task.set_inputs(node) + mll_task.set_outputs(node.change_ext('.ml')) + self.mlltasks.append(mll_task) + self.allnodes.append(mll_task.outputs[0]) +def mly_hook(self,node): + mly_task=self.create_task('ocamlyacc',self.native_env) + mly_task.set_inputs(node) + mly_task.set_outputs([node.change_ext('.ml'),node.change_ext('.mli')]) + self.mlytasks.append(mly_task) + self.allnodes.append(mly_task.outputs[0]) + task=self.create_task('ocamlcmi',self.native_env) + task.set_inputs(mly_task.outputs[1]) + task.set_outputs(mly_task.outputs[1].change_ext('.cmi')) +def mli_hook(self,node): + task=self.create_task('ocamlcmi',self.native_env) + task.set_inputs(node) + task.set_outputs(node.change_ext('.cmi')) + self.mlitasks.append(task) +def mlc_hook(self,node): + task=self.create_task('ocamlcc',self.native_env) + task.set_inputs(node) + task.set_outputs(node.change_ext('.o')) + self.compiled_tasks.append(task) +def ml_hook(self,node): + if self.native_env: + task=self.create_task('ocamlx',self.native_env) + task.set_inputs(node) + task.set_outputs(node.change_ext('.cmx')) + task.obj=self + task.incpaths=self.bld_incpaths_lst + self.native_tasks.append(task) + if self.bytecode_env: + task=self.create_task('ocaml',self.bytecode_env) + task.set_inputs(node) + task.obj=self + task.bytecode=1 + task.incpaths=self.bld_incpaths_lst + task.set_outputs(node.change_ext('.cmo')) + self.bytecode_tasks.append(task) +def compile_may_start(self): + if not getattr(self,'flag_deps',''): + self.flag_deps=1 + if getattr(self,'bytecode',''):alltasks=self.obj.bytecode_tasks + else:alltasks=self.obj.native_tasks + self.signature() + tree=self.generator.bld + env=self.env + for node in self.inputs: + lst=tree.node_deps[self.unique_id()] + for depnode in lst: + for t in alltasks: + if t==self:continue + if depnode in t.inputs: + self.set_run_after(t) + delattr(self,'cache_sig') + self.signature() + return Task.Task.runnable_status(self) +b=Task.simple_task_type +cls=b('ocamlx','${OCAMLOPT} ${OCAMLPATH} ${OCAMLFLAGS} ${INCLUDES} -c -o ${TGT} ${SRC}',color='GREEN',shell=False) +cls.runnable_status=compile_may_start +cls.scan=scan +b=Task.simple_task_type +cls=b('ocaml','${OCAMLC} ${OCAMLPATH} ${OCAMLFLAGS} ${INCLUDES} -c -o ${TGT} ${SRC}',color='GREEN',shell=False) +cls.runnable_status=compile_may_start +cls.scan=scan +b('ocamlcmi','${OCAMLC} ${OCAMLPATH} ${INCLUDES} -o ${TGT} -c ${SRC}',color='BLUE',before="ocaml ocamlcc ocamlx") +b('ocamlcc','cd ${TGT[0].bld_dir(env)} && ${OCAMLOPT} ${OCAMLFLAGS} ${OCAMLPATH} ${INCLUDES} -c ${SRC[0].abspath(env)}',color='GREEN') +b('ocamllex','${OCAMLLEX} ${SRC} -o ${TGT}',color='BLUE',before="ocamlcmi ocaml ocamlcc") +b('ocamlyacc','${OCAMLYACC} -b ${TGT[0].bld_base(env)} ${SRC}',color='BLUE',before="ocamlcmi ocaml ocamlcc") +def link_may_start(self): + if not getattr(self,'order',''): + if getattr(self,'bytecode',0):alltasks=self.obj.bytecode_tasks + else:alltasks=self.obj.native_tasks + seen=[] + pendant=[]+alltasks + while pendant: + task=pendant.pop(0) + if task in seen:continue + for x in task.run_after: + if not x in seen: + pendant.append(task) + break + else: + seen.append(task) + self.inputs=[x.outputs[0]for x in seen] + self.order=1 + return Task.Task.runnable_status(self) +act=b('ocalink','${OCAMLC} -o ${TGT} 
${INCLUDES} ${OCALINKFLAGS} ${SRC}',color='YELLOW',after="ocaml ocamlcc") +act.runnable_status=link_may_start +act=b('ocalinkx','${OCAMLOPT} -o ${TGT} ${INCLUDES} ${OCALINKFLAGS_OPT} ${SRC}',color='YELLOW',after="ocamlx ocamlcc") +act.runnable_status=link_may_start +def detect(conf): + opt=conf.find_program('ocamlopt',var='OCAMLOPT') + occ=conf.find_program('ocamlc',var='OCAMLC') + if(not opt)or(not occ): + conf.fatal('The objective caml compiler was not found:\ninstall it or make it available in your PATH') + conf.env['OCAMLC']=occ + conf.env['OCAMLOPT']=opt + conf.env['OCAMLLEX']=conf.find_program('ocamllex',var='OCAMLLEX') + conf.env['OCAMLYACC']=conf.find_program('ocamlyacc',var='OCAMLYACC') + conf.env['OCAMLFLAGS']='' + conf.env['OCAMLLIB']=Utils.cmd_output(conf.env['OCAMLC']+' -where').strip()+os.sep + conf.env['LIBPATH_OCAML']=Utils.cmd_output(conf.env['OCAMLC']+' -where').strip()+os.sep + conf.env['CPPPATH_OCAML']=Utils.cmd_output(conf.env['OCAMLC']+' -where').strip()+os.sep + conf.env['LIB_OCAML']='camlrun' + +feature('ocaml')(init_ml) +feature('ocaml')(init_envs_ml) +after('init_ml')(init_envs_ml) +feature('ocaml')(apply_incpaths_ml) +before('apply_vars_ml')(apply_incpaths_ml) +after('init_envs_ml')(apply_incpaths_ml) +feature('ocaml')(apply_vars_ml) +before('apply_core')(apply_vars_ml) +feature('ocaml')(apply_link_ml) +after('apply_core')(apply_link_ml) +extension(EXT_MLL)(mll_hook) +extension(EXT_MLY)(mly_hook) +extension(EXT_MLI)(mli_hook) +extension(EXT_MLC)(mlc_hook) +extension(EXT_ML)(ml_hook) diff -Nru showq-0.4.1+git20090622/wafadmin/Tools/osx.py showq-0.4.1+git20090622+dfsg0/wafadmin/Tools/osx.py --- showq-0.4.1+git20090622/wafadmin/Tools/osx.py 1970-01-01 00:00:00.000000000 +0000 +++ showq-0.4.1+git20090622+dfsg0/wafadmin/Tools/osx.py 2009-05-01 23:35:37.000000000 +0000 @@ -0,0 +1,121 @@ +#! 
/usr/bin/env python +# encoding: utf-8 + +import os,shutil,sys,platform +import TaskGen,Task,Build,Options +from TaskGen import taskgen,feature,after,before +from Logs import error,debug +def set_macosx_deployment_target(self): + if self.env['MACOSX_DEPLOYMENT_TARGET']: + os.environ['MACOSX_DEPLOYMENT_TARGET']=self.env['MACOSX_DEPLOYMENT_TARGET'] + elif'MACOSX_DEPLOYMENT_TARGET'not in os.environ: + if sys.platform=='darwin': + os.environ['MACOSX_DEPLOYMENT_TARGET']='.'.join(platform.mac_ver()[0].split('.')[:2]) +def apply_framework(self): + for x in self.to_list(self.env['FRAMEWORKPATH']): + frameworkpath_st='-F%s' + self.env.append_unique('CXXFLAGS',frameworkpath_st%x) + self.env.append_unique('CCFLAGS',frameworkpath_st%x) + self.env.append_unique('LINKFLAGS',frameworkpath_st%x) + for x in self.to_list(self.env['FRAMEWORK']): + self.env.append_value('LINKFLAGS',['-framework',x]) +def create_task_macapp(self): + if'cprogram'in self.features and self.link_task: + apptask=self.create_task('macapp',self.env) + apptask.set_inputs(self.link_task.outputs) + apptask.set_outputs(self.link_task.outputs[0].change_ext('.app')) + self.apptask=apptask +def apply_link_osx(self): + if self.env['MACAPP']or getattr(self,'mac_app',False): + self.create_task_macapp() + name=self.link_task.outputs[0].name + if getattr(self,'vnum',None): + name=name.replace('.dylib','.%s.dylib'%self.vnum) + path=os.path.join(self.env['PREFIX'],'lib',name) + self.env.append_value('LINKFLAGS','-install_name') + self.env.append_value('LINKFLAGS',path) +def apply_bundle(self): + if not('cshlib'in self.features or'shlib'in self.features):return + if self.env['MACBUNDLE']or getattr(self,'mac_bundle',False): + self.env['shlib_PATTERN']=self.env['macbundle_PATTERN'] + uselib=self.uselib=self.to_list(self.uselib) + if not'MACBUNDLE'in uselib:uselib.append('MACBUNDLE') +def apply_bundle_remove_dynamiclib(self): + if self.env['MACBUNDLE']or getattr(self,'mac_bundle',False): + if not getattr(self,'vnum',None): + try: + self.env['LINKFLAGS'].remove('-dynamiclib') + except ValueError: + pass +app_dirs=['Contents',os.path.join('Contents','MacOS'),os.path.join('Contents','Resources')] +app_info=''' + + + + + CFBundlePackageType + APPL + CFBundleGetInfoString + Created by Waf + CFBundleSignature + ???? 
+ NOTE + THIS IS A GENERATED FILE, DO NOT MODIFY + CFBundleExecutable + %s + + +''' +def app_build(task): + global app_dirs + env=task.env + i=0 + for p in task.outputs: + srcfile=p.srcpath(env) + debug('osx: creating directories') + try: + os.mkdir(srcfile) + [os.makedirs(os.path.join(srcfile,d))for d in app_dirs] + except(OSError,IOError): + pass + srcprg=task.inputs[i].srcpath(env) + dst=os.path.join(srcfile,'Contents','MacOS') + debug('osx: copy %s to %s'%(srcprg,dst)) + shutil.copy(srcprg,dst) + debug('osx: generate Info.plist') + f=open(os.path.join(srcfile,"Contents","Info.plist"),"w") + f.write(app_info%os.path.basename(srcprg)) + f.close() + i+=1 + return 0 +def install_shlib(task): + nums=task.vnum.split('.') + path=self.install_path + libname=task.outputs[0].name + name3=libname.replace('.dylib','.%s.dylib'%task.vnum) + name2=libname.replace('.dylib','.%s.dylib'%nums[0]) + name1=libname + filename=task.outputs[0].abspath(task.env) + bld=task.outputs[0].__class__.bld + bld.install_as(path+name3,filename,env=task.env) + bld.symlink_as(path+name2,name3) + bld.symlink_as(path+name1,name3) +def install_target_osx_cshlib(self): + if not self.bld.is_install:return + if getattr(self,'vnum','')and sys.platform!='win32': + self.link_task.install=install_shlib +Task.task_type_from_func('macapp',vars=[],func=app_build,after="cxx_link cc_link ar_link_static") + +feature('cc','cxx')(set_macosx_deployment_target) +before('apply_lib_vars')(set_macosx_deployment_target) +feature('cc','cxx')(apply_framework) +after('apply_lib_vars')(apply_framework) +taskgen(create_task_macapp) +after('apply_link')(apply_link_osx) +feature('cc','cxx')(apply_link_osx) +before('apply_link','apply_lib_vars')(apply_bundle) +feature('cc','cxx')(apply_bundle) +after('apply_link')(apply_bundle_remove_dynamiclib) +feature('cshlib')(apply_bundle_remove_dynamiclib) +feature('osx')(install_target_osx_cshlib) +after('install_target_cshlib')(install_target_osx_cshlib) diff -Nru showq-0.4.1+git20090622/wafadmin/Tools/perl.py showq-0.4.1+git20090622+dfsg0/wafadmin/Tools/perl.py --- showq-0.4.1+git20090622/wafadmin/Tools/perl.py 1970-01-01 00:00:00.000000000 +0000 +++ showq-0.4.1+git20090622+dfsg0/wafadmin/Tools/perl.py 2009-05-01 23:35:37.000000000 +0000 @@ -0,0 +1,76 @@ +#! 
/usr/bin/env python +# encoding: utf-8 + +import os +import Task,Options,Utils +from Configure import conf +from TaskGen import extension,taskgen,feature,before +xsubpp_str='${PERL} ${XSUBPP} -noprototypes -typemap ${EXTUTILS_TYPEMAP} ${SRC} > ${TGT}' +EXT_XS=['.xs'] +def init_perlext(self): + self.uselib=self.to_list(getattr(self,'uselib','')) + if not'PERL'in self.uselib:self.uselib.append('PERL') + if not'PERLEXT'in self.uselib:self.uselib.append('PERLEXT') + self.env['shlib_PATTERN']=self.env['perlext_PATTERN'] +def xsubpp_file(self,node): + gentask=self.create_task('xsubpp') + gentask.set_inputs(node) + outnode=node.change_ext('.c') + gentask.set_outputs(outnode) + self.allnodes.append(outnode) +Task.simple_task_type('xsubpp',xsubpp_str,color='BLUE',before="cc cxx",shell=False) +def check_perl_version(conf,minver=None): + res=True + if not getattr(Options.options,'perlbinary',None): + perl=conf.find_program("perl",var="PERL") + if not perl: + return False + else: + perl=Options.options.perlbinary + conf.env['PERL']=perl + version=Utils.cmd_output(perl+" -e'printf \"%vd\", $^V'") + if not version: + res=False + version="Unknown" + elif not minver is None: + ver=tuple(map(int,version.split("."))) + if ver",r'#~\|^[]{}')] +chr_esc={'0':0,'a':7,'b':8,'t':9,'n':10,'f':11,'v':12,'r':13,'\\':92,"'":39} +NUM='i' +OP='O' +IDENT='T' +STR='s' +CHAR='c' +tok_types=[NUM,STR,IDENT,OP] +exp_types=[r"""0[xX](?P[a-fA-F0-9]+)(?P[uUlL]*)|L*?'(?P(\\.|[^\\'])+)'|(?P\d+)[Ee](?P[+-]*?\d+)(?P[fFlL]*)|(?P\d*\.\d+)([Ee](?P[+-]*?\d+))?(?P[fFlL]*)|(?P\d+\.\d*)([Ee](?P[+-]*?\d+))?(?P[fFlL]*)|(?P0*)(?P\d+)(?P[uUlL]*)""",r'L?"([^"\\]|\\.)*"',r'[a-zA-Z_]\w*',r'%:%:|<<=|>>=|\.\.\.|<<|<%|<:|<=|>>|>=|\+\+|\+=|--|->|-=|\*=|/=|%:|%=|%>|==|&&|&=|\|\||\|=|\^=|:>|!=|##|[\(\)\{\}\[\]<>\?\|\^\*\+&=:!#;,%/\-\?\~\.]',] +re_clexer=re.compile('|'.join(["(?P<%s>%s)"%(name,part)for name,part in zip(tok_types,exp_types)]),re.M) +accepted='a' +ignored='i' +undefined='u' +skipped='s' +def repl(m): + s=m.group(1) + if s is not None:return' ' + s=m.group(3) + if s is None:return'' + return s +def filter_comments(filename): + code=Utils.readf(filename) + if use_trigraphs: + for(a,b)in trig_def:code=code.split(a).join(b) + code=re_nl.sub('',code) + code=re_cpp.sub(repl,code) + return[(m.group(2),m.group(3))for m in re.finditer(re_lines,code)] +prec={} +ops=['* / %','+ -','<< >>','< <= >= >','== !=','& | ^','&& ||',','] +for x in range(len(ops)): + syms=ops[x] + for u in syms.split(): + prec[u]=x +def reduce_nums(val_1,val_2,val_op): + try:a=0+val_1 + except TypeError:a=int(val_1) + try:b=0+val_2 + except TypeError:b=int(val_2) + d=val_op + if d=='%':c=a%b + elif d=='+':c=a+b + elif d=='-':c=a-b + elif d=='*':c=a*b + elif d=='/':c=a/b + elif d=='^':c=a^b + elif d=='|':c=a|b + elif d=='||':c=int(a or b) + elif d=='&':c=a&b + elif d=='&&':c=int(a and b) + elif d=='==':c=int(a==b) + elif d=='!=':c=int(a!=b) + elif d=='<=':c=int(a<=b) + elif d=='<':c=int(a':c=int(a>b) + elif d=='>=':c=int(a>=b) + elif d=='^':c=int(a^b) + elif d=='<<':c=a<>':c=a>>b + else:c=0 + return c +def get_num(lst): + if not lst:raise PreprocError("empty list for get_num") + (p,v)=lst[0] + if p==OP: + if v=='(': + count_par=1 + i=1 + while i=prec[v]: + num2=reduce_nums(num,num2,v) + return get_term([(NUM,num2)]+lst) + else: + num3,lst=get_num(lst[1:]) + num3=reduce_nums(num2,num3,v2) + return get_term([(NUM,num),(p,v),(NUM,num3)]+lst) + raise PreprocError("cannot reduce %r"%lst) +def reduce_eval(lst): + num,lst=get_term(lst) + return(NUM,num) +def stringize(lst): + 
lst=[str(v2)for(p2,v2)in lst] + return"".join(lst) +def paste_tokens(t1,t2): + p1=None + if t1[0]==OP and t2[0]==OP: + p1=OP + elif t1[0]==IDENT and(t2[0]==IDENT or t2[0]==NUM): + p1=IDENT + elif t1[0]==NUM and t2[0]==NUM: + p1=NUM + if not p1: + raise PreprocError('tokens do not make a valid paste %r and %r'%(t1,t2)) + return(p1,t1[1]+t2[1]) +def reduce_tokens(lst,defs,ban=[]): + i=0 + while i=len(lst): + raise PreprocError("expected '(' after %r (got nothing)"%v) + (p2,v2)=lst[i] + if p2!=OP or v2!='(': + raise PreprocError("expected '(' after %r"%v) + del lst[i] + one_param=[] + count_paren=0 + while i1: + (p3,v3)=accu[-1] + (p4,v4)=accu[-2] + if v3=='##': + accu.pop() + if v4==','and pt.*)>|"(?P.*)")') +def extract_include(txt,defs): + m=re_include.search(txt) + if m: + if m.group('a'):return'<',m.group('a') + if m.group('b'):return'"',m.group('b') + toks=tokenize(txt) + reduce_tokens(toks,defs,['waf_include']) + if not toks: + raise PreprocError("could not parse include %s"%txt) + if len(toks)==1: + if toks[0][0]==STR: + return'"',toks[0][1] + else: + if toks[0][1]=='<'and toks[-1][1]=='>': + return stringize(toks).lstrip('<').rstrip('>') + raise PreprocError("could not parse include %s."%txt) +def parse_char(txt): + if not txt:raise PreprocError("attempted to parse a null char") + if txt[0]!='\\': + return ord(txt) + c=txt[1] + if c=='x': + if len(txt)==4 and txt[3]in string.hexdigits:return int(txt[2:],16) + return int(txt[2:],16) + elif c.isdigit(): + if c=='0'and len(txt)==2:return 0 + for i in 3,2,1: + if len(txt)>i and txt[1:1+i].isdigit(): + return(1+i,int(txt[1:1+i],8)) + else: + try:return chr_esc[c] + except KeyError:raise PreprocError("could not parse char literal '%s'"%txt) +def tokenize(s): + ret=[] + for match in re_clexer.finditer(s): + m=match.group + for name in tok_types: + v=m(name) + if v: + if name==IDENT: + try:v=g_optrans[v];name=OP + except KeyError: + if v.lower()=="true": + v=1 + name=NUM + elif v.lower()=="false": + v=0 + name=NUM + elif name==NUM: + if m('oct'):v=int(v,8) + elif m('hex'):v=int(m('hex'),16) + elif m('n0'):v=m('n0') + else: + v=m('char') + if v:v=parse_char(v) + else:v=m('n2')or m('n4') + elif name==OP: + if v=='%:':v='#' + elif v=='%:%:':v='##' + elif name==STR: + v=v[1:-1] + ret.append((name,v)) + break + return ret +class c_parser(object): + def __init__(self,nodepaths=None,defines=None): + self.lines=[] + if defines is None: + self.defs={} + else: + self.defs=dict(defines) + self.state=[] + self.env=None + self.count_files=0 + self.currentnode_stack=[] + self.nodepaths=nodepaths or[] + self.nodes=[] + self.names=[] + self.curfile='' + self.ban_includes=[] + def tryfind(self,filename): + self.curfile=filename + found=self.currentnode_stack[-1].find_resource(filename) + for n in self.nodepaths: + if found: + break + found=n.find_resource(filename) + if not found: + if not filename in self.names: + self.names.append(filename) + else: + self.nodes.append(found) + if filename[-4:]!='.moc': + self.addlines(found) + return found + def addlines(self,node): + self.currentnode_stack.append(node.parent) + filepath=node.abspath(self.env) + self.count_files+=1 + if self.count_files>30000:raise PreprocError("recursion limit exceeded") + pc=self.parse_cache + debug('preproc: reading file %r'%filepath) + try: + lns=pc[filepath] + except KeyError: + pass + else: + self.lines=lns+self.lines + return + try: + lines=filter_comments(filepath) + lines.append((POPFILE,'')) + pc[filepath]=lines + self.lines=lines+self.lines + except IOError: + raise 
PreprocError("could not read the file %s"%filepath) + except Exception: + if Logs.verbose>0: + error("parsing %s failed"%filepath) + traceback.print_exc() + def start(self,node,env): + debug('preproc: scanning %s (in %s)'%(node.name,node.parent.name)) + self.env=env + variant=node.variant(env) + bld=node.__class__.bld + try: + self.parse_cache=bld.parse_cache + except AttributeError: + bld.parse_cache={} + self.parse_cache=bld.parse_cache + self.addlines(node) + if env['DEFLINES']: + self.lines=[('define',x)for x in env['DEFLINES']]+self.lines + while self.lines: + (kind,line)=self.lines.pop(0) + if kind==POPFILE: + self.currentnode_stack.pop() + continue + try: + self.process_line(kind,line) + except Exception,e: + if Logs.verbose: + debug('preproc: line parsing failed (%s): %s %s'%(e,line,Utils.ex_stack())) + def process_line(self,token,line): + ve=Logs.verbose + if ve:debug('preproc: line is %s - %s state is %s'%(token,line,self.state)) + state=self.state + if token in['ifdef','ifndef','if']: + state.append(undefined) + elif token=='endif': + state.pop() + if not token in['else','elif','endif']: + if skipped in self.state or ignored in self.state: + return + if token=='if': + ret=eval_macro(tokenize(line),self.defs) + if ret:state[-1]=accepted + else:state[-1]=ignored + elif token=='ifdef': + m=re_mac.search(line) + if m and m.group(0)in self.defs:state[-1]=accepted + else:state[-1]=ignored + elif token=='ifndef': + m=re_mac.search(line) + if m and m.group(0)in self.defs:state[-1]=ignored + else:state[-1]=accepted + elif token=='include'or token=='import': + (kind,inc)=extract_include(line,self.defs) + if inc in self.ban_includes:return + if token=='import':self.ban_includes.append(inc) + if ve:debug('preproc: include found %s (%s) '%(inc,kind)) + if kind=='"'or not strict_quotes: + self.tryfind(inc) + elif token=='elif': + if state[-1]==accepted: + state[-1]=skipped + elif state[-1]==ignored: + if eval_macro(tokenize(line),self.defs): + state[-1]=accepted + elif token=='else': + if state[-1]==accepted:state[-1]=skipped + elif state[-1]==ignored:state[-1]=accepted + elif token=='define': + m=re_mac.search(line) + if m: + name=m.group(0) + if ve:debug('preproc: define %s %s'%(name,line)) + self.defs[name]=line + else: + raise PreprocError("invalid define line %s"%line) + elif token=='undef': + m=re_mac.search(line) + if m and m.group(0)in self.defs: + self.defs.__delitem__(m.group(0)) + elif token=='pragma': + if re_pragma_once.search(line.lower()): + self.ban_includes.append(self.curfile) +def get_deps(node,env,nodepaths=[]): + gruik=c_parser(nodepaths) + gruik.start(node,env) + return(gruik.nodes,gruik.names) +re_inc=re.compile('^[ \t]*(#|%:)[ \t]*(include)[ \t]*(.*)\r*$',re.IGNORECASE|re.MULTILINE) +def lines_includes(filename): + code=Utils.readf(filename) + if use_trigraphs: + for(a,b)in trig_def:code=code.split(a).join(b) + code=re_nl.sub('',code) + code=re_cpp.sub(repl,code) + return[(m.group(2),m.group(3))for m in re.finditer(re_inc,code)] +def get_deps_simple(node,env,nodepaths=[],defines={}): + nodes=[] + names=[] + def find_deps(node): + lst=lines_includes(node.abspath(env)) + for(_,line)in lst: + (t,filename)=extract_include(line,defines) + if filename in names: + continue + if filename.endswith('.moc'): + names.append(filename) + found=None + for n in nodepaths: + if found: + break + found=n.find_resource(filename) + if not found: + if not filename in names: + names.append(filename) + elif not found in nodes: + nodes.append(found) + find_deps(node) + find_deps(node) + 
return(nodes,names) + diff -Nru showq-0.4.1+git20090622/wafadmin/Tools/python.py showq-0.4.1+git20090622+dfsg0/wafadmin/Tools/python.py --- showq-0.4.1+git20090622/wafadmin/Tools/python.py 1970-01-01 00:00:00.000000000 +0000 +++ showq-0.4.1+git20090622+dfsg0/wafadmin/Tools/python.py 2009-05-01 23:35:37.000000000 +0000 @@ -0,0 +1,288 @@ +#! /usr/bin/env python +# encoding: utf-8 + +import os,sys +import TaskGen,Utils,Utils,Runner,Options,Build +from Logs import debug,warn,info +from TaskGen import extension,taskgen,before,after,feature +from Configure import conf +EXT_PY=['.py'] +FRAG_2=''' +#ifdef __cplusplus +extern "C" { +#endif + void Py_Initialize(void); + void Py_Finalize(void); +#ifdef __cplusplus +} +#endif +int main(int argc, char *argv[]) +{ + argc++; /* avoid unused variable warning */ + argv++; /* avoid unused variable warning */ + Py_Initialize(); + Py_Finalize(); + return 0; +} +''' +def init_pyext(self): + self.default_install_path='${PYTHONDIR}' + self.uselib=self.to_list(getattr(self,'uselib','')) + if not'PYEXT'in self.uselib: + self.uselib.append('PYEXT') + self.env['MACBUNDLE']=True +def pyext_shlib_ext(self): + self.env['shlib_PATTERN']=self.env['pyext_PATTERN'] +def init_pyembed(self): + self.uselib=self.to_list(getattr(self,'uselib','')) + if not'PYEMBED'in self.uselib: + self.uselib.append('PYEMBED') +def process_py(self,node): + if self.bld.is_install and self.install_path: + if not hasattr(self,'_py_installed_files'): + self._py_installed_files=[] + installed_files=self.bld.install_files(self.install_path,node.abspath(self.env),self.env,self.chmod) + self._py_installed_files.extend(installed_files) +def byte_compile_py(self): + if self.bld.is_install and self.install_path: + installed_files=self._py_installed_files + if not installed_files: + return + if self.bld.is_install<0: + info("* removing byte compiled python files") + for fname in installed_files: + try: + os.remove(fname+'c') + except OSError: + pass + try: + os.remove(fname+'o') + except OSError: + pass + if self.bld.is_install>0: + if self.env['PYC']or self.env['PYO']: + info("* byte compiling python files") + if self.env['PYC']: + program=(""" +import sys, py_compile +for pyfile in sys.argv[1:]: + py_compile.compile(pyfile, pyfile + 'c') +""") + argv=[self.env['PYTHON'],"-c",program] + argv.extend(installed_files) + retval=Utils.pproc.Popen(argv).wait() + if retval: + raise Utils.WafError("bytecode compilation failed") + if self.env['PYO']: + program=(""" +import sys, py_compile +for pyfile in sys.argv[1:]: + py_compile.compile(pyfile, pyfile + 'o') +""") + argv=[self.env['PYTHON'],self.env['PYFLAGS_OPT'],"-c",program] + argv.extend(installed_files) + retval=Utils.pproc.Popen(argv).wait() + if retval: + raise Utils.WafError("bytecode compilation failed") +class py_taskgen(TaskGen.task_gen): + def __init__(self,*k,**kw): + TaskGen.task_gen.__init__(self,*k,**kw) +def init_py(self): + self.default_install_path='${PYTHONDIR}' +def _get_python_variables(python_exe,variables,imports=['import sys']): + program=list(imports) + program.append('') + for v in variables: + program.append("print(repr(%s))"%v) + os_env=dict(os.environ) + try: + del os_env['MACOSX_DEPLOYMENT_TARGET'] + except KeyError: + pass + proc=Utils.pproc.Popen([python_exe,"-c",'\n'.join(program)],stdout=Utils.pproc.PIPE,env=os_env) + output=proc.communicate()[0].split("\n") + if proc.returncode: + if Options.options.verbose: + warn("Python program to extract python configuration variables failed:\n%s"%'\n'.join(["line %03i: 
%s"%(lineno+1,line)for lineno,line in enumerate(program)])) + raise RuntimeError + return_values=[] + for s in output: + s=s.strip() + if not s: + continue + if s=='None': + return_values.append(None) + elif s[0]=="'"and s[-1]=="'": + return_values.append(s[1:-1]) + elif s[0].isdigit(): + return_values.append(int(s)) + else:break + return return_values +def check_python_headers(conf): + if not conf.env['CC_NAME']and not conf.env['CXX_NAME']: + conf.fatal('load a compiler first (gcc, g++, ..)') + env=conf.env + python=env['PYTHON'] + assert python,("python is %r !"%(python,)) + if Options.platform=='darwin': + conf.check_tool('osx') + try: + v='prefix SO SYSLIBS LDFLAGS SHLIBS LIBDIR LIBPL INCLUDEPY Py_ENABLE_SHARED MACOSX_DEPLOYMENT_TARGET'.split() + (python_prefix,python_SO,python_SYSLIBS,python_LDFLAGS,python_SHLIBS,python_LIBDIR,python_LIBPL,INCLUDEPY,Py_ENABLE_SHARED,python_MACOSX_DEPLOYMENT_TARGET)=_get_python_variables(python,["get_config_var('%s')"%x for x in v],['from distutils.sysconfig import get_config_var']) + except RuntimeError: + conf.fatal("Python development headers not found (-v for details).") + conf.log.write("""Configuration returned from %r: +python_prefix = %r +python_SO = %r +python_SYSLIBS = %r +python_LDFLAGS = %r +python_SHLIBS = %r +python_LIBDIR = %r +python_LIBPL = %r +INCLUDEPY = %r +Py_ENABLE_SHARED = %r +MACOSX_DEPLOYMENT_TARGET = %r +"""%(python,python_prefix,python_SO,python_SYSLIBS,python_LDFLAGS,python_SHLIBS,python_LIBDIR,python_LIBPL,INCLUDEPY,Py_ENABLE_SHARED,python_MACOSX_DEPLOYMENT_TARGET)) + if python_MACOSX_DEPLOYMENT_TARGET: + conf.env['MACOSX_DEPLOYMENT_TARGET']=python_MACOSX_DEPLOYMENT_TARGET + conf.environ['MACOSX_DEPLOYMENT_TARGET']=python_MACOSX_DEPLOYMENT_TARGET + env['pyext_PATTERN']='%s'+python_SO + if python_SYSLIBS is not None: + for lib in python_SYSLIBS.split(): + if lib.startswith('-l'): + lib=lib[2:] + env.append_value('LIB_PYEMBED',lib) + if python_SHLIBS is not None: + for lib in python_SHLIBS.split(): + if lib.startswith('-l'): + lib=lib[2:] + env.append_value('LIB_PYEMBED',lib) + if Options.platform!='darwin'and python_LDFLAGS: + env.append_value('LINKFLAGS_PYEMBED',python_LDFLAGS.split()) + result=False + name='python'+env['PYTHON_VERSION'] + if python_LIBDIR is not None: + path=[python_LIBDIR] + conf.log.write("\n\n# Trying LIBDIR: %r\n"%path) + result=conf.check(lib=name,uselib='PYEMBED',libpath=path) + if not result and python_LIBPL is not None: + conf.log.write("\n\n# try again with -L$python_LIBPL (some systems don't install the python library in $prefix/lib)\n") + path=[python_LIBPL] + result=conf.check(lib=name,uselib='PYEMBED',libpath=path) + if not result: + conf.log.write("\n\n# try again with -L$prefix/libs, and pythonXY name rather than pythonX.Y (win32)\n") + path=[os.path.join(python_prefix,"libs")] + name='python'+env['PYTHON_VERSION'].replace('.','') + result=conf.check(lib=name,uselib='PYEMBED',libpath=path) + if result: + env['LIBPATH_PYEMBED']=path + env.append_value('LIB_PYEMBED',name) + else: + conf.log.write("\n\n### LIB NOT FOUND\n") + if(sys.platform=='win32'or sys.platform.startswith('os2')or sys.platform=='darwin'or Py_ENABLE_SHARED): + env['LIBPATH_PYEXT']=env['LIBPATH_PYEMBED'] + env['LIB_PYEXT']=env['LIB_PYEMBED'] + python_config=conf.find_program('python%s-config'%('.'.join(env['PYTHON_VERSION'].split('.')[:2])),var='PYTHON_CONFIG') + if not python_config: + python_config=conf.find_program('python-config-%s'%('.'.join(env['PYTHON_VERSION'].split('.')[:2])),var='PYTHON_CONFIG') + includes=[] + if 
python_config: + for incstr in Utils.cmd_output("%s %s --includes"%(python,python_config)).strip().split(): + if(incstr.startswith('-I')or incstr.startswith('/I')): + incstr=incstr[2:] + if incstr not in includes: + includes.append(incstr) + conf.log.write("Include path for Python extensions ""(found via python-config --includes): %r\n"%(includes,)) + env['CPPPATH_PYEXT']=includes + env['CPPPATH_PYEMBED']=includes + else: + conf.log.write("Include path for Python extensions ""(found via distutils module): %r\n"%(INCLUDEPY,)) + env['CPPPATH_PYEXT']=[INCLUDEPY] + env['CPPPATH_PYEMBED']=[INCLUDEPY] + if env['CC_NAME']=='gcc': + env.append_value('CCFLAGS_PYEMBED','-fno-strict-aliasing') + env.append_value('CCFLAGS_PYEXT','-fno-strict-aliasing') + if env['CXX_NAME']=='gcc': + env.append_value('CXXFLAGS_PYEMBED','-fno-strict-aliasing') + env.append_value('CXXFLAGS_PYEXT','-fno-strict-aliasing') + test_env=env.copy() + a=test_env.append_value + a('CPPPATH',env['CPPPATH_PYEMBED']) + a('LIBPATH',env['LIBPATH_PYEMBED']) + a('LIB',env['LIB_PYEMBED']) + a('LINKFLAGS',env['LINKFLAGS_PYEMBED']) + a('CXXFLAGS',env['CXXFLAGS_PYEMBED']) + a('CCFLAGS',env['CCFLAGS_PYEMBED']) + conf.check(header_name='Python.h',define_name='HAVE_PYTHON_H',env=test_env,fragment=FRAG_2,errmsg='Could not find the python development headers',mandatory=1) +def check_python_version(conf,minver=None): + assert minver is None or isinstance(minver,tuple) + python=conf.env['PYTHON'] + assert python,("python is %r !"%(python,)) + cmd=[python,"-c","import sys\nfor x in sys.version_info: print(str(x))"] + debug('python: Running python command %r'%cmd) + proc=Utils.pproc.Popen(cmd,stdout=Utils.pproc.PIPE) + lines=proc.communicate()[0].split() + assert len(lines)==5,"found %i lines, expected 5: %r"%(len(lines),lines) + pyver_tuple=(int(lines[0]),int(lines[1]),int(lines[2]),lines[3],int(lines[4])) + result=(minver is None)or(pyver_tuple>=minver) + if result: + pyver='.'.join([str(x)for x in pyver_tuple[:2]]) + conf.env['PYTHON_VERSION']=pyver + if'PYTHONDIR'in conf.environ: + pydir=conf.environ['PYTHONDIR'] + else: + if sys.platform=='win32': + (python_LIBDEST,)=_get_python_variables(python,["get_config_var('LIBDEST')"],['from distutils.sysconfig import get_config_var']) + else: + python_LIBDEST=None + if python_LIBDEST is None: + if conf.env['LIBDIR']: + python_LIBDEST=os.path.join(conf.env['LIBDIR'],"python"+pyver) + else: + python_LIBDEST=os.path.join(conf.env['PREFIX'],"lib","python"+pyver) + pydir=os.path.join(python_LIBDEST,"site-packages") + if hasattr(conf,'define'): + conf.define('PYTHONDIR',pydir) + conf.env['PYTHONDIR']=pydir + pyver_full='.'.join(map(str,pyver_tuple[:3])) + if minver is None: + conf.check_message_custom('Python version','',pyver_full) + else: + minver_str='.'.join(map(str,minver)) + conf.check_message('Python version',">= %s"%(minver_str,),result,option=pyver_full) + if not result: + conf.fatal("Python too old.") +def check_python_module(conf,module_name): + result=not Utils.pproc.Popen([conf.env['PYTHON'],"-c","import %s"%module_name],stderr=Utils.pproc.PIPE,stdout=Utils.pproc.PIPE).wait() + conf.check_message('Python module',module_name,result) + if not result: + conf.fatal("Python module not found.") +def detect(conf): + python=conf.find_program('python',var='PYTHON') + if not python:return + v=conf.env + v['PYCMD']='"import sys, py_compile;py_compile.compile(sys.argv[1], sys.argv[2])"' + v['PYFLAGS']='' + v['PYFLAGS_OPT']='-O' + v['PYC']=getattr(Options.options,'pyc',1) + 
v['PYO']=getattr(Options.options,'pyo',1) +def set_options(opt): + opt.add_option('--nopyc',action='store_false',default=1,help='Do not install bytecode compiled .pyc files (configuration) [Default:install]',dest='pyc') + opt.add_option('--nopyo',action='store_false',default=1,help='Do not install optimised compiled .pyo files (configuration) [Default:install]',dest='pyo') + +before('apply_incpaths','apply_lib_vars','apply_type_vars')(init_pyext) +feature('pyext')(init_pyext) +before('apply_bundle')(init_pyext) +before('apply_link','apply_lib_vars','apply_type_vars')(pyext_shlib_ext) +after('apply_bundle')(pyext_shlib_ext) +feature('pyext')(pyext_shlib_ext) +before('apply_incpaths','apply_lib_vars','apply_type_vars')(init_pyembed) +feature('pyembed')(init_pyembed) +extension(EXT_PY)(process_py) +feature('py')(byte_compile_py) +before('apply_core')(init_py) +after('vars_target_cprogram','vars_target_cstaticlib')(init_py) +feature('py')(init_py) +conf(check_python_headers) +conf(check_python_version) +conf(check_python_module) diff -Nru showq-0.4.1+git20090622/wafadmin/Tools/qt4.py showq-0.4.1+git20090622+dfsg0/wafadmin/Tools/qt4.py --- showq-0.4.1+git20090622/wafadmin/Tools/qt4.py 1970-01-01 00:00:00.000000000 +0000 +++ showq-0.4.1+git20090622+dfsg0/wafadmin/Tools/qt4.py 2009-05-01 23:35:37.000000000 +0000 @@ -0,0 +1,366 @@ +#! /usr/bin/env python +# encoding: utf-8 + +try: + from xml.sax import make_parser + from xml.sax.handler import ContentHandler +except ImportError: + has_xml=False + ContentHandler=object +else: + has_xml=True +import os,sys +import ccroot,cxx +import TaskGen,Task,Utils,Runner,Options,Node +from TaskGen import taskgen,feature,after,extension +from Logs import error +from Constants import* +MOC_H=['.h','.hpp','.hxx','.hh'] +EXT_RCC=['.qrc'] +EXT_UI=['.ui'] +EXT_QT4=['.cpp','.cc','.cxx','.C'] +class qxx_task(Task.Task): + before=['cxx_link','ar_link_static'] + def __init__(self,*k,**kw): + Task.Task.__init__(self,*k,**kw) + self.moc_done=0 + def scan(self): + (nodes,names)=ccroot.scan(self) + for x in nodes: + if x.name.endswith('.moc'): + nodes.remove(x) + names.append(x.relpath_gen(self.inputs[0].parent)) + return(nodes,names) + def runnable_status(self): + if self.moc_done: + for t in self.run_after: + if not t.hasrun: + return ASK_LATER + self.signature() + return Task.Task.runnable_status(self) + else: + for t in self.run_after: + if not t.hasrun: + return ASK_LATER + self.add_moc_tasks() + return ASK_LATER + def add_moc_tasks(self): + node=self.inputs[0] + tree=node.__class__.bld + try: + self.signature() + except KeyError: + pass + else: + delattr(self,'cache_sig') + moctasks=[] + mocfiles=[] + variant=node.variant(self.env) + try: + tmp_lst=tree.raw_deps[self.unique_id()] + tree.raw_deps[self.unique_id()]=[] + except KeyError: + tmp_lst=[] + for d in tmp_lst: + if not d.endswith('.moc'):continue + if d in mocfiles: + error("paranoia owns") + continue + mocfiles.append(d) + ext='' + try:ext=Options.options.qt_header_ext + except AttributeError:pass + if not ext: + base2=d[:-4] + paths=[node.parent.srcpath(self.env),node.parent.bldpath(self.env)] + poss=[(x,y)for x in MOC_H for y in paths] + for(i,path)in poss: + try: + os.stat(os.path.join(path,base2+i)) + except OSError: + pass + else: + ext=i + break + if not ext:raise Utils.WafError("no header found for %s which is a moc file"%str(d)) + h_node=node.parent.find_resource(base2+i) + m_node=h_node.change_ext('.moc') + tree.node_deps[(self.inputs[0].parent.id,self.env.variant(),m_node.name)]=h_node + 
task=Task.TaskBase.classes['moc'](self.env,normal=0) + task.set_inputs(h_node) + task.set_outputs(m_node) + generator=tree.generator + generator.outstanding.insert(0,task) + generator.total+=1 + moctasks.append(task) + tmp_lst=tree.raw_deps[self.unique_id()]=mocfiles + lst=tree.node_deps.get(self.unique_id(),()) + for d in lst: + name=d.name + if name.endswith('.moc'): + task=Task.TaskBase.classes['moc'](self.env,normal=0) + task.set_inputs(tree.node_deps[(self.inputs[0].parent.id,self.env.variant(),name)]) + task.set_outputs(d) + generator=tree.generator + generator.outstanding.insert(0,task) + generator.total+=1 + moctasks.append(task) + self.run_after=moctasks + self.moc_done=1 + run=Task.TaskBase.classes['cxx'].__dict__['run'] +def translation_update(task): + outs=[a.abspath(task.env)for a in task.outputs] + outs=" ".join(outs) + lupdate=task.env['QT_LUPDATE'] + for x in task.inputs: + file=x.abspath(task.env) + cmd="%s %s -ts %s"%(lupdate,file,outs) + Utils.pprint('BLUE',cmd) + task.generator.bld.exec_command(cmd) +class XMLHandler(ContentHandler): + def __init__(self): + self.buf=[] + self.files=[] + def startElement(self,name,attrs): + if name=='file': + self.buf=[] + def endElement(self,name): + if name=='file': + self.files.append(''.join(self.buf)) + def characters(self,cars): + self.buf.append(cars) +def scan(self): + node=self.inputs[0] + parser=make_parser() + curHandler=XMLHandler() + parser.setContentHandler(curHandler) + fi=open(self.inputs[0].abspath(self.env)) + parser.parse(fi) + fi.close() + nodes=[] + names=[] + root=self.inputs[0].parent + for x in curHandler.files: + x=x.encode('utf8') + nd=root.find_resource(x) + if nd:nodes.append(nd) + else:names.append(x) + return(nodes,names) +def create_rcc_task(self,node): + rcnode=node.change_ext('_rc.cpp') + rcctask=self.create_task('rcc') + rcctask.inputs=[node] + rcctask.outputs=[rcnode] + cpptask=self.create_task('cxx') + cpptask.inputs=[rcnode] + cpptask.outputs=[rcnode.change_ext('.o')] + self.compiled_tasks.append(cpptask) + return cpptask +def create_uic_task(self,node): + uictask=self.create_task('ui4') + uictask.inputs=[node] + uictask.outputs=[self.path.find_or_declare(self.env['ui_PATTERN']%node.name[:-3])] +class qt4_taskgen(cxx.cxx_taskgen): + def __init__(self,*k,**kw): + cxx.cxx_taskgen.__init__(self,*k,**kw) + self.features.append('qt4') +def add_lang(self,node): + self.lang=self.to_list(getattr(self,'lang',[]))+[node] +def apply_qt4(self): + if getattr(self,'lang',None): + update=getattr(self,'update',None) + lst=[] + trans=[] + for l in self.to_list(self.lang): + if not isinstance(l,Node.Node): + l=self.path.find_resource(l+'.ts') + t=self.create_task('ts2qm') + t.set_inputs(l) + t.set_outputs(l.change_ext('.qm')) + lst.append(t.outputs[0]) + if update: + trans.append(t.inputs[0]) + if update and Options.options.trans_qt4: + u=Task.TaskCmd(translation_update,self.env,2) + u.inputs=[a.inputs[0]for a in self.compiled_tasks] + u.outputs=trans + if getattr(self,'langname',None): + t=Task.TaskBase.classes['qm2rcc'](self.env) + t.set_inputs(lst) + t.set_outputs(self.path.find_or_declare(self.langname+'.qrc')) + t.path=self.path + k=create_rcc_task(self,t.outputs[0]) + self.link_task.inputs.append(k.outputs[0]) + lst=[] + for flag in self.to_list(self.env['CXXFLAGS']): + if len(flag)<2:continue + if flag[0:2]=='-D'or flag[0:2]=='-I': + lst.append(flag) + self.env['MOC_FLAGS']=lst +def cxx_hook(self,node): + task=self.create_task('qxx') + self.compiled_tasks.append(task) + try:obj_ext=self.obj_ext + except 
AttributeError:obj_ext='_%d.o'%self.idx + task.inputs=[node] + task.outputs=[node.change_ext(obj_ext)] +def process_qm2rcc(task): + outfile=task.outputs[0].abspath(task.env) + f=open(outfile,'w') + f.write('\n\n') + for k in task.inputs: + f.write(' ') + f.write(k.path_to_parent(task.path)) + f.write('\n') + f.write('\n') + f.close() +b=Task.simple_task_type +b('moc','${QT_MOC} ${MOC_FLAGS} ${SRC} ${MOC_ST} ${TGT}',color='BLUE',vars=['QT_MOC','MOC_FLAGS'],shell=False) +cls=b('rcc','${QT_RCC} -name ${SRC[0].name} ${SRC[0].abspath(env)} ${RCC_ST} -o ${TGT}',color='BLUE',before='cxx moc qxx_task',after="qm2rcc",shell=False) +cls.scan=scan +b('ui4','${QT_UIC} ${SRC} -o ${TGT}',color='BLUE',before='cxx moc qxx_task',shell=False) +b('ts2qm','${QT_LRELEASE} ${QT_LRELEASE_FLAGS} ${SRC} -qm ${TGT}',color='BLUE',before='qm2rcc',shell=False) +Task.task_type_from_func('qm2rcc',vars=[],func=process_qm2rcc,color='BLUE',before='rcc',after='ts2qm') +def detect_qt4(conf): + env=conf.env + opt=Options.options + qtlibs=getattr(opt,'qtlibs','') + qtincludes=getattr(opt,'qtincludes','') + qtbin=getattr(opt,'qtbin','') + useframework=getattr(opt,'use_qt4_osxframework',True) + qtdir=getattr(opt,'qtdir','') + if not qtdir:qtdir=conf.environ.get('QT4_ROOT','') + if not qtdir: + try: + lst=os.listdir('/usr/local/Trolltech/') + lst.sort() + lst.reverse() + qtdir='/usr/local/Trolltech/%s/'%lst[0] + except OSError: + pass + if not qtdir: + try: + path=conf.environ['PATH'].split(':') + for qmk in['qmake-qt4','qmake4','qmake']: + qmake=conf.find_program(qmk,path) + if qmake: + version=Utils.cmd_output([qmake,'-query','QT_VERSION']).strip().split('.') + if version[0]=="4": + qtincludes=Utils.cmd_output([qmake,'-query','QT_INSTALL_HEADERS']).strip() + qtdir=Utils.cmd_output([qmake,'-query','QT_INSTALL_PREFIX']).strip()+"/" + qtbin=Utils.cmd_output([qmake,'-query','QT_INSTALL_BINS']).strip()+"/" + break + except(OSError,ValueError): + pass + if not qtlibs:qtlibs=os.path.join(qtdir,'lib') + vars="QtCore QtGui QtUiTools QtNetwork QtOpenGL QtSql QtSvg QtTest QtXml QtWebKit Qt3Support".split() + framework_ok=False + if sys.platform=="darwin"and useframework: + for i in vars: + e=conf.create_framework_configurator() + e.path=[qtlibs,'/Library/Frameworks'] + e.name=i + e.remove_dot_h=True + e.run() + if not i=='QtCore': + for r in env['CCFLAGS_'+i.upper()]: + if r.startswith('-F'): + env['CCFLAGS_'+i.upper()].remove(r) + break + if conf.is_defined('HAVE_QTOPENGL'): + env.append_unique('FRAMEWORK_QTOPENGL','OpenGL') + if conf.is_defined('HAVE_QTGUI'): + env.append_unique('FRAMEWORK_QTGUI',['AppKit','ApplicationServices']) + framework_ok=True + if not conf.is_defined("HAVE_QTGUI"): + if not qtincludes:qtincludes=os.path.join(qtdir,'include') + env['QTINCLUDEPATH']=qtincludes + lst=[qtincludes,'/usr/share/qt4/include/','/opt/qt4/include'] + conf.check(header_name='QtGui/QFont',define_name='HAVE_QTGUI',mandatory=1,includes=lst) + if not qtbin:qtbin=os.path.join(qtdir,'bin') + binpath=[qtbin,'/usr/share/qt4/bin/']+conf.environ['PATH'].split(':') + def find_bin(lst,var): + for f in lst: + ret=conf.find_program(f,path_list=binpath) + if ret: + env[var]=ret + break + find_bin(['uic-qt3','uic3'],'QT_UIC3') + find_bin(['uic-qt4','uic'],'QT_UIC') + if not env['QT_UIC']: + conf.fatal('connot find uic compiler') + try: + version=Utils.cmd_output(env['QT_UIC']+" -version 2>&1").strip() + except ValueError: + conf.fatal('your uic compiler is for qt3, add uic for qt4 to your path') + version=version.replace('Qt User Interface Compiler ','') + 
version=version.replace('User Interface Compiler for Qt','') + if version.find(" 3.")!=-1: + conf.check_message('uic version','(too old)',0,option='(%s)'%version) + sys.exit(1) + conf.check_message('uic version','',1,option='(%s)'%version) + find_bin(['moc-qt4','moc'],'QT_MOC') + find_bin(['rcc'],'QT_RCC') + find_bin(['lrelease-qt4','lrelease'],'QT_LRELEASE') + find_bin(['lupdate-qt4','lupdate'],'QT_LUPDATE') + env['UIC3_ST']='%s -o %s' + env['UIC_ST']='%s -o %s' + env['MOC_ST']='-o' + env['ui_PATTERN']='ui_%s.h' + env['QT_LRELEASE_FLAGS']=['-silent'] + if not framework_ok: + vars_debug=[a+'_debug'for a in vars] + pkgconfig=env['pkg-config']or'PKG_CONFIG_PATH=%s:%s/pkgconfig:/usr/lib/qt4/lib/pkgconfig:/opt/qt4/lib/pkgconfig:/usr/lib/qt4/lib:/opt/qt4/lib pkg-config --silence-errors'%(qtlibs,qtlibs) + for i in vars_debug+vars: + try: + conf.check_cfg(package=i,args='--cflags --libs',path=pkgconfig) + except ValueError: + pass + def process_lib(vars_,coreval): + for d in vars_: + var=d.upper() + if var=='QTCORE':continue + value=env['LIBPATH_'+var] + if value: + core=env[coreval] + accu=[] + for lib in value: + if lib in core:continue + accu.append(lib) + env['LIBPATH_'+var]=accu + process_lib(vars,'LIBPATH_QTCORE') + process_lib(vars_debug,'LIBPATH_QTCORE_DEBUG') + if Options.options.want_rpath: + def process_rpath(vars_,coreval): + for d in vars_: + var=d.upper() + value=env['LIBPATH_'+var] + if value: + core=env[coreval] + accu=[] + for lib in value: + if var!='QTCORE': + if lib in core: + continue + accu.append('-Wl,--rpath='+lib) + env['RPATH_'+var]=accu + process_rpath(vars,'LIBPATH_QTCORE') + process_rpath(vars_debug,'LIBPATH_QTCORE_DEBUG') + env['QTLOCALE']=str(env['PREFIX'])+'/share/locale' +def detect(conf): + detect_qt4(conf) +def set_options(opt): + opt.add_option('--want-rpath',type='int',default=1,dest='want_rpath',help='set rpath to 1 or 0 [Default 1]') + opt.add_option('--header-ext',type='string',default='',help='header extension for moc files',dest='qt_header_ext') + for i in"qtdir qtincludes qtlibs qtbin".split(): + opt.add_option('--'+i,type='string',default='',dest=i) + if sys.platform=="darwin": + opt.add_option('--no-qt4-framework',action="store_false",help='do not use the framework version of Qt4 in OS X',dest='use_qt4_osxframework',default=True) + opt.add_option('--translate',action="store_true",help="collect translation strings",dest="trans_qt4",default=False) + +extension(EXT_RCC)(create_rcc_task) +extension(EXT_UI)(create_uic_task) +extension('.ts')(add_lang) +feature('qt4')(apply_qt4) +after('apply_link')(apply_qt4) +extension(EXT_QT4)(cxx_hook) diff -Nru showq-0.4.1+git20090622/wafadmin/Tools/suncc.py showq-0.4.1+git20090622+dfsg0/wafadmin/Tools/suncc.py --- showq-0.4.1+git20090622/wafadmin/Tools/suncc.py 1970-01-01 00:00:00.000000000 +0000 +++ showq-0.4.1+git20090622+dfsg0/wafadmin/Tools/suncc.py 2009-05-01 23:35:37.000000000 +0000 @@ -0,0 +1,54 @@ +#! 
/usr/bin/env python +# encoding: utf-8 + +import os,optparse +import Utils,Options,Configure +import ccroot,ar +from Configure import conftest +def find_scc(conf): + v=conf.env + cc=None + if v['CC']:cc=v['CC'] + elif'CC'in conf.environ:cc=conf.environ['CC'] + if not cc:cc=conf.find_program('cc',var='CC') + if not cc:conf.fatal('suncc was not found') + try: + if not Utils.cmd_output('%s -flags'%cc): + conf.fatal('suncc %r was not found'%cc) + except ValueError: + conf.fatal('suncc -flags could not be executed') + v['CC']=cc + v['CC_NAME']='sun' +def scc_common_flags(conf): + v=conf.env + v['CC_SRC_F']='' + v['CC_TGT_F']=['-c','-o',''] + v['CPPPATH_ST']='-I%s' + if not v['LINK_CC']:v['LINK_CC']=v['CC'] + v['CCLNK_SRC_F']='' + v['CCLNK_TGT_F']=['-o',''] + v['LIB_ST']='-l%s' + v['LIBPATH_ST']='-L%s' + v['STATICLIB_ST']='-l%s' + v['STATICLIBPATH_ST']='-L%s' + v['CCDEFINES_ST']='-D%s' + v['SONAME_ST']='-Wl,-h -Wl,%s' + v['SHLIB_MARKER']='-Bdynamic' + v['STATICLIB_MARKER']='-Bstatic' + v['program_PATTERN']='%s' + v['shlib_CCFLAGS']=['-Kpic','-DPIC'] + v['shlib_LINKFLAGS']=['-G'] + v['shlib_PATTERN']='lib%s.so' + v['staticlib_LINKFLAGS']=['-Bstatic'] + v['staticlib_PATTERN']='lib%s.a' +detect=''' +find_scc +find_cpp +find_ar +scc_common_flags +cc_load_tools +cc_add_flags +''' + +conftest(find_scc) +conftest(scc_common_flags) diff -Nru showq-0.4.1+git20090622/wafadmin/Tools/suncxx.py showq-0.4.1+git20090622+dfsg0/wafadmin/Tools/suncxx.py --- showq-0.4.1+git20090622/wafadmin/Tools/suncxx.py 1970-01-01 00:00:00.000000000 +0000 +++ showq-0.4.1+git20090622+dfsg0/wafadmin/Tools/suncxx.py 2009-05-01 23:35:37.000000000 +0000 @@ -0,0 +1,50 @@ +#! /usr/bin/env python +# encoding: utf-8 + +import os,optparse +import Utils,Options,Configure +import ccroot,ar +from Configure import conftest +def find_sxx(conf): + v=conf.env + cc=None + if v['CXX']:cc=v['CXX'] + elif'CXX'in conf.environ:cc=conf.environ['CXX'] + if not cc:cc=conf.find_program('c++',var='CXX') + if not cc:cc=conf.find_program('CC',var='CXX') + if not cc:conf.fatal('sunc++ was not found') + v['CXX']=cc + v['CXX_NAME']='sun' +def sxx_common_flags(conf): + v=conf.env + v['CXX_SRC_F']='' + v['CXX_TGT_F']=['-c','-o',''] + v['CPPPATH_ST']='-I%s' + if not v['LINK_CXX']:v['LINK_CXX']=v['CXX'] + v['CXXLNK_SRC_F']='' + v['CXXLNK_TGT_F']=['-o',''] + v['LIB_ST']='-l%s' + v['LIBPATH_ST']='-L%s' + v['STATICLIB_ST']='-l%s' + v['STATICLIBPATH_ST']='-L%s' + v['CXXDEFINES_ST']='-D%s' + v['SONAME_ST']='-Wl,-h -Wl,%s' + v['SHLIB_MARKER']='-Bdynamic' + v['STATICLIB_MARKER']='-Bstatic' + v['program_PATTERN']='%s' + v['shlib_CXXFLAGS']=['-Kpic','-DPIC'] + v['shlib_LINKFLAGS']=['-G'] + v['shlib_PATTERN']='lib%s.so' + v['staticlib_LINKFLAGS']=['-Bstatic'] + v['staticlib_PATTERN']='lib%s.a' +detect=''' +find_sxx +find_cpp +find_ar +sxx_common_flags +cxx_load_tools +cxx_add_flags +''' + +conftest(find_sxx) +conftest(sxx_common_flags) diff -Nru showq-0.4.1+git20090622/wafadmin/Tools/tex.py showq-0.4.1+git20090622+dfsg0/wafadmin/Tools/tex.py --- showq-0.4.1+git20090622/wafadmin/Tools/tex.py 1970-01-01 00:00:00.000000000 +0000 +++ showq-0.4.1+git20090622+dfsg0/wafadmin/Tools/tex.py 2009-05-01 23:35:37.000000000 +0000 @@ -0,0 +1,180 @@ +#! 
/usr/bin/env python +# encoding: utf-8 + +import os,re +import Utils,TaskGen,Task,Runner,Build +from TaskGen import feature,before +from Logs import error,warn,debug +re_tex=re.compile(r'\\(?P<type>include|input|import|bringin){(?P<file>[^{}]*)}',re.M) +def scan(self): + node=self.inputs[0] + env=self.env + nodes=[] + names=[] + if not node:return(nodes,names) + code=Utils.readf(node.abspath(env)) + curdirnode=self.curdirnode + abs=curdirnode.abspath() + for match in re_tex.finditer(code): + path=match.group('file') + if path: + for k in['','.tex','.ltx']: + debug('tex: trying %s%s'%(path,k)) + try: + os.stat(abs+os.sep+path+k) + except OSError: + continue + found=path+k + node=curdirnode.find_resource(found) + if node: + nodes.append(node) + else: + debug('tex: could not find %s'%path) + names.append(path) + debug("tex: found the following : %s and names %s"%(nodes,names)) + return(nodes,names) +g_bibtex_re=re.compile('bibdata',re.M) +def tex_build(task,command='LATEX'): + env=task.env + bld=task.generator.bld + com='%s %s'%(env[command],env.get_flat(command+'FLAGS')) + if not env['PROMPT_LATEX']:com="%s %s"%(com,'-interaction=batchmode') + node=task.inputs[0] + reldir=node.bld_dir(env) + srcfile=node.srcpath(env) + lst=[] + for c in Utils.split_path(reldir): + if c:lst.append('..') + sr=os.path.join(*(lst+[srcfile])) + sr2=os.path.join(*(lst+[node.parent.srcpath(env)])) + aux_node=node.change_ext('.aux') + idx_node=node.change_ext('.idx') + hash='' + old_hash='' + nm=aux_node.name + docuname=nm[:len(nm)-4] + latex_compile_cmd='cd %s && TEXINPUTS=%s:$TEXINPUTS %s %s'%(reldir,sr2,com,sr) + warn('first pass on %s'%command) + ret=bld.exec_command(latex_compile_cmd) + if ret:return ret + try: + ct=Utils.readf(aux_node.abspath(env)) + except(OSError,IOError): + error('error bibtex scan') + else: + fo=g_bibtex_re.findall(ct) + if fo: + bibtex_compile_cmd='cd %s && BIBINPUTS=%s:$BIBINPUTS %s %s'%(reldir,sr2,env['BIBTEX'],docuname) + warn('calling bibtex') + ret=bld.exec_command(bibtex_compile_cmd) + if ret: + error('error when calling bibtex %s'%bibtex_compile_cmd) + return ret + try: + idx_path=idx_node.abspath(env) + os.stat(idx_path) + except OSError: + error('error file.idx scan') + else: + makeindex_compile_cmd='cd %s && %s %s'%(reldir,env['MAKEINDEX'],idx_path) + warn('calling makeindex') + ret=bld.exec_command(makeindex_compile_cmd) + if ret: + error('error when calling makeindex %s'%makeindex_compile_cmd) + return ret + i=0 + while i<10: + i+=1 + old_hash=hash + try: + hash=Utils.h_file(aux_node.abspath(env)) + except KeyError: + error('could not read aux.h -> %s'%aux_node.abspath(env)) + pass + if hash and hash==old_hash:break + warn('calling %s'%command) + ret=bld.exec_command(latex_compile_cmd) + if ret: + error('error when calling %s %s'%(command,latex_compile_cmd)) + return ret + return 0 +latex_vardeps=['LATEX','LATEXFLAGS'] +def latex_build(task): + return tex_build(task,'LATEX') +pdflatex_vardeps=['PDFLATEX','PDFLATEXFLAGS'] +def pdflatex_build(task): + return tex_build(task,'PDFLATEX') +class tex_taskgen(TaskGen.task_gen): + def __init__(self,*k,**kw): + TaskGen.task_gen.__init__(self,*k,**kw) +def apply_tex(self): + if not getattr(self,'type',None)in['latex','pdflatex']: + self.type='pdflatex' + tree=self.bld + outs=Utils.to_list(getattr(self,'outs',[])) + self.env['PROMPT_LATEX']=getattr(self,'prompt',1) + deps_lst=[] + if getattr(self,'deps',None): + deps=self.to_list(self.deps) + for filename in deps: + n=self.path.find_resource(filename) + if not n in deps_lst:deps_lst.append(n) + 
self.source=self.to_list(self.source) + for filename in self.source: + base,ext=os.path.splitext(filename) + node=self.path.find_resource(filename) + if not node:raise Utils.WafError('cannot find %s'%filename) + if self.type=='latex': + task=self.create_task('latex') + task.set_inputs(node) + task.set_outputs(node.change_ext('.dvi')) + elif self.type=='pdflatex': + task=self.create_task('pdflatex') + task.set_inputs(node) + task.set_outputs(node.change_ext('.pdf')) + task.env=self.env + task.curdirnode=self.path + if deps_lst: + variant=node.variant(self.env) + try: + lst=tree.node_deps[task.unique_id()] + for n in deps_lst: + if not n in lst: + lst.append(n) + except KeyError: + tree.node_deps[task.unique_id()]=deps_lst + if self.type=='latex': + if'ps'in outs: + pstask=self.create_task('dvips') + pstask.set_inputs(task.outputs) + pstask.set_outputs(node.change_ext('.ps')) + if'pdf'in outs: + pdftask=self.create_task('dvipdf') + pdftask.set_inputs(task.outputs) + pdftask.set_outputs(node.change_ext('.pdf')) + elif self.type=='pdflatex': + if'ps'in outs: + pstask=self.create_task('pdf2ps') + pstask.set_inputs(task.outputs) + pstask.set_outputs(node.change_ext('.ps')) + self.source=[] +def detect(conf): + v=conf.env + for p in'tex latex pdflatex bibtex dvips dvipdf ps2pdf makeindex pdf2ps'.split(): + conf.find_program(p,var=p.upper()) + v[p.upper()+'FLAGS']='' + v['DVIPSFLAGS']='-Ppdf' +b=Task.simple_task_type +b('tex','${TEX} ${TEXFLAGS} ${SRC}',color='BLUE',shell=False) +b('bibtex','${BIBTEX} ${BIBTEXFLAGS} ${SRC}',color='BLUE',shell=False) +b('dvips','${DVIPS} ${DVIPSFLAGS} ${SRC} -o ${TGT}',color='BLUE',after="latex pdflatex tex bibtex",shell=False) +b('dvipdf','${DVIPDF} ${DVIPDFFLAGS} ${SRC} ${TGT}',color='BLUE',after="latex pdflatex tex bibtex",shell=False) +b('pdf2ps','${PDF2PS} ${PDF2PSFLAGS} ${SRC} ${TGT}',color='BLUE',after="dvipdf pdflatex",shell=False) +b=Task.task_type_from_func +cls=b('latex',latex_build,vars=latex_vardeps) +cls.scan=scan +cls=b('pdflatex',pdflatex_build,vars=pdflatex_vardeps) +cls.scan=scan + +feature('tex')(apply_tex) +before('apply_core')(apply_tex) diff -Nru showq-0.4.1+git20090622/wafadmin/Tools/UnitTest.py showq-0.4.1+git20090622+dfsg0/wafadmin/Tools/UnitTest.py --- showq-0.4.1+git20090622/wafadmin/Tools/UnitTest.py 1970-01-01 00:00:00.000000000 +0000 +++ showq-0.4.1+git20090622+dfsg0/wafadmin/Tools/UnitTest.py 2009-05-01 23:35:37.000000000 +0000 @@ -0,0 +1,129 @@ +#! 
/usr/bin/env python +# encoding: utf-8 + +import os,sys +import Build,TaskGen,Utils,Options,Logs +class unit_test(object): + def __init__(self): + self.returncode_ok=0 + self.num_tests_ok=0 + self.num_tests_failed=0 + self.num_tests_err=0 + self.total_num_tests=0 + self.max_label_length=0 + self.unit_tests=Utils.ordered_dict() + self.unit_test_results={} + self.unit_test_erroneous={} + self.change_to_testfile_dir=False + self.want_to_see_test_output=False + self.want_to_see_test_error=False + self.run_if_waf_does='check' + def run(self): + self.num_tests_ok=0 + self.num_tests_failed=0 + self.num_tests_err=0 + self.total_num_tests=0 + self.max_label_length=0 + self.unit_tests=Utils.ordered_dict() + self.unit_test_results={} + self.unit_test_erroneous={} + ld_library_path=[] + if not Options.commands[self.run_if_waf_does]:return + for obj in Build.bld.all_task_gen: + try: + link_task=obj.link_task + except AttributeError: + pass + else: + lib_path=link_task.outputs[0].parent.abspath(obj.env) + if lib_path not in ld_library_path: + ld_library_path.append(lib_path) + unit_test=getattr(obj,'unit_test','') + if unit_test and'cprogram'in obj.features: + try: + output=obj.path + filename=os.path.join(output.abspath(obj.env),obj.target) + srcdir=output.abspath() + label=os.path.join(output.bldpath(obj.env),obj.target) + self.max_label_length=max(self.max_label_length,len(label)) + self.unit_tests[label]=(filename,srcdir) + except KeyError: + pass + self.total_num_tests=len(self.unit_tests) + Utils.pprint('GREEN','Running the unit tests') + count=0 + result=1 + for label in self.unit_tests.allkeys: + file_and_src=self.unit_tests[label] + filename=file_and_src[0] + srcdir=file_and_src[1] + count+=1 + line=Build.bld.progress_line(count,self.total_num_tests,Logs.colors.GREEN,Logs.colors.NORMAL) + if Options.options.progress_bar and line: + sys.stderr.write(line) + sys.stderr.flush() + try: + kwargs={} + kwargs['env']=os.environ.copy() + if self.change_to_testfile_dir: + kwargs['cwd']=srcdir + if not self.want_to_see_test_output: + kwargs['stdout']=Utils.pproc.PIPE + if not self.want_to_see_test_error: + kwargs['stderr']=Utils.pproc.PIPE + if ld_library_path: + if sys.platform=='win32': + kwargs['env']['PATH']=';'.join(ld_library_path+[os.environ.get('PATH','')]) + else: + kwargs['env']['LD_LIBRARY_PATH']=':'.join(ld_library_path+[os.environ.get('LD_LIBRARY_PATH','')]) + pp=Utils.pproc.Popen(filename,**kwargs) + pp.wait() + result=int(pp.returncode==self.returncode_ok) + if result: + self.num_tests_ok+=1 + else: + self.num_tests_failed+=1 + self.unit_test_results[label]=result + self.unit_test_erroneous[label]=0 + except OSError: + self.unit_test_erroneous[label]=1 + self.num_tests_err+=1 + except KeyboardInterrupt: + pass + if Options.options.progress_bar:sys.stdout.write(Logs.colors.cursor_on) + def print_results(self): + if not Options.commands[self.run_if_waf_does]:return + p=Utils.pprint + if self.total_num_tests==0: + p('YELLOW','No unit tests present') + return + p('GREEN','Running unit tests') + p('NORMAL','') + for label in self.unit_tests.allkeys: + filename=self.unit_tests[label] + err=0 + result=0 + try:err=self.unit_test_erroneous[label] + except KeyError:pass + try:result=self.unit_test_results[label] + except KeyError:pass + n=self.max_label_length-len(label) + if err:n+=4 + elif result:n+=7 + else:n+=3 + line='%s %s'%(label,'.'*n) + if err:p('RED','%sERROR'%line) + elif result:p('GREEN','%sOK'%line) + else:p('YELLOW','%sFAILED'%line) + 
percentage_ok=float(self.num_tests_ok)/float(self.total_num_tests)*100.0 + percentage_failed=float(self.num_tests_failed)/float(self.total_num_tests)*100.0 + percentage_erroneous=float(self.num_tests_err)/float(self.total_num_tests)*100.0 + p('NORMAL',''' +Successful tests: %i (%.1f%%) +Failed tests: %i (%.1f%%) +Erroneous tests: %i (%.1f%%) + +Total number of tests: %i +'''%(self.num_tests_ok,percentage_ok,self.num_tests_failed,percentage_failed,self.num_tests_err,percentage_erroneous,self.total_num_tests)) + p('GREEN','Unit tests finished') + diff -Nru showq-0.4.1+git20090622/wafadmin/Tools/vala.py showq-0.4.1+git20090622+dfsg0/wafadmin/Tools/vala.py --- showq-0.4.1+git20090622/wafadmin/Tools/vala.py 1970-01-01 00:00:00.000000000 +0000 +++ showq-0.4.1+git20090622+dfsg0/wafadmin/Tools/vala.py 2009-05-01 23:35:37.000000000 +0000 @@ -0,0 +1,194 @@ +#! /usr/bin/env python +# encoding: utf-8 + +import os.path,shutil +import Task,Runner,Utils,Logs,Build,Node +from TaskGen import extension,after,before +EXT_VALA=['.vala','.gs'] +class valac_task(Task.Task): + vars=("VALAC","VALAC_VERSION","VALAFLAGS") + before=("cc","cxx") + def run(self): + env=self.env + inputs=[a.srcpath(env)for a in self.inputs] + valac=env['VALAC'] + vala_flags=env.get_flat('VALAFLAGS') + top_src=self.generator.bld.srcnode.abspath() + top_bld=self.generator.bld.srcnode.abspath(env) + if env['VALAC_VERSION']>(0,1,6): + cmd=[valac,'-C','--quiet',vala_flags] + else: + cmd=[valac,'-C',vala_flags] + if self.threading: + cmd.append('--thread') + if self.target_glib: + cmd.append('--target-glib=%s'%self.target_glib) + features=self.generator.features + if'cshlib'in features or'cstaticlib'in features: + output_dir=self.outputs[0].bld_dir(env) + cmd.append('--library '+self.target) + if env['VALAC_VERSION']>=(0,7,0): + cmd.append('--header '+os.path.join(output_dir,self.target+'.h')) + self.outputs.append(self.generator.path.find_or_declare(self.target+'.h')) + cmd.append('--basedir '+top_src) + cmd.append('-d '+top_bld) + else: + output_dir=self.outputs[0].bld_dir(env) + cmd.append('-d %s'%output_dir) + for vapi_dir in self.vapi_dirs: + cmd.append('--vapidir=%s'%vapi_dir) + for package in self.packages: + cmd.append('--pkg %s'%package) + for package in self.packages_private: + cmd.append('--pkg %s'%package) + cmd.append(" ".join(inputs)) + result=self.generator.bld.exec_command(" ".join(cmd)) + if not'cprogram'in features: + if self.packages: + filename=os.path.join(self.generator.path.abspath(env),"%s.deps"%self.target) + deps=open(filename,'w') + for package in self.packages: + deps.write(package+'\n') + deps.close() + self._fix_output("../%s.vapi"%self.target) + self._fix_output("%s.vapi"%self.target) + self._fix_output("%s.gidl"%self.target) + self._fix_output("%s.gir"%self.target) + return result + def install(self): + bld=self.generator.bld + features=self.generator.features + if self.attr("install_path")and("cshlib"in features or"cstaticlib"in features): + headers_list=[o for o in self.outputs if o.suffix()==".h"] + vapi_list=[o for o in self.outputs if(o.suffix()in(".vapi",".deps"))] + for header in headers_list: + top_src=self.generator.bld.srcnode + package=self.env['PACKAGE'] + try: + api_version=Utils.g_module.API_VERSION + except AttributeError: + version=Utils.g_module.VERSION.split(".") + if version[0]=="0": + api_version="0."+version[1] + else: + api_version=version[0]+".0" + install_path="${INCLUDEDIR}/%s-%s/%s"%(package,api_version,header.relpath_gen(top_src)) + 
bld.install_as(install_path,header.abspath(self.env),self.env) + for vapi in vapi_list: + bld.install_files("${DATAROOTDIR}/vala/vapi",vapi.abspath(self.env),self.env) + def _fix_output(self,output): + top_bld=self.generator.bld.srcnode.abspath(self.env) + try: + src=os.path.join(top_bld,output) + dst=self.generator.path.abspath(self.env) + shutil.move(src,dst) + except: + pass +def vala_file(self,node): + valatask=getattr(self,"valatask",None) + if not valatask: + valatask=self.create_task('valac') + self.valatask=valatask + self.includes=Utils.to_list(getattr(self,'includes',[])) + valatask.packages=[] + valatask.packages_private=Utils.to_list(getattr(self,'packages_private',[])) + valatask.vapi_dirs=[] + valatask.target=self.target + valatask.threading=False + valatask.install_path=self.install_path + valatask.target_glib=None + packages=Utils.to_list(getattr(self,'packages',[])) + vapi_dirs=Utils.to_list(getattr(self,'vapi_dirs',[])) + includes=[] + if hasattr(self,'uselib_local'): + local_packages=Utils.to_list(self.uselib_local) + seen=[] + while len(local_packages)>0: + package=local_packages.pop() + if package in seen: + continue + seen.append(package) + package_obj=self.name_to_obj(package) + if not package_obj: + raise Utils.WafError("object '%s' was not found in uselib_local (required by '%s')"%(package,self.name)) + package_name=package_obj.target + package_node=package_obj.path + package_dir=package_node.relpath_gen(self.path) + for task in package_obj.tasks: + for output in task.outputs: + if output.name==package_name+".vapi": + valatask.set_run_after(task) + if package_name not in packages: + packages.append(package_name) + if package_dir not in vapi_dirs: + vapi_dirs.append(package_dir) + if package_dir not in includes: + includes.append(package_dir) + if hasattr(package_obj,'uselib_local'): + lst=self.to_list(package_obj.uselib_local) + lst.reverse() + local_packages=[pkg for pkg in lst if pkg not in seen]+local_packages + valatask.packages=packages + for vapi_dir in vapi_dirs: + try: + valatask.vapi_dirs.append(self.path.find_dir(vapi_dir).abspath()) + valatask.vapi_dirs.append(self.path.find_dir(vapi_dir).abspath(self.env)) + except AttributeError: + Logs.warn("Unable to locate Vala API directory: '%s'"%vapi_dir) + self.includes.append(node.bld.srcnode.abspath()) + self.includes.append(node.bld.srcnode.abspath(self.env)) + for include in includes: + try: + self.includes.append(self.path.find_dir(include).abspath()) + self.includes.append(self.path.find_dir(include).abspath(self.env)) + except AttributeError: + Logs.warn("Unable to locate include directory: '%s'"%include) + if hasattr(self,'threading'): + valatask.threading=self.threading + self.uselib=self.to_list(self.uselib) + if not'GTHREAD'in self.uselib: + self.uselib.append('GTHREAD') + if hasattr(self,'target_glib'): + valatask.target_glib=self.target_glib + env=valatask.env + output_nodes=[] + c_node=node.change_ext('.c') + output_nodes.append(c_node) + self.allnodes.append(c_node) + if env['VALAC_VERSION']<(0,7,0): + output_nodes.append(node.change_ext('.h')) + else: + if not'cprogram'in self.features: + output_nodes.append(self.path.find_or_declare('%s.h'%self.target)) + if not'cprogram'in self.features: + output_nodes.append(self.path.find_or_declare('%s.vapi'%self.target)) + if env['VALAC_VERSION']>(0,3,5): + output_nodes.append(self.path.find_or_declare('%s.gir'%self.target)) + elif env['VALAC_VERSION']>(0,1,7): + output_nodes.append(self.path.find_or_declare('%s.gidl'%self.target)) + if valatask.packages: + 
output_nodes.append(self.path.find_or_declare('%s.deps'%self.target)) + valatask.inputs.append(node) + valatask.outputs.extend(output_nodes) +def detect(conf): + min_version=(0,1,6) + min_version_str="%d.%d.%d"%min_version + valac=conf.find_program('valac',var='VALAC',mandatory=True) + if not conf.env["HAVE_GTHREAD"]: + conf.check_cfg(package='gthread-2.0',uselib_store='GTHREAD',args='--cflags --libs') + try: + output=Utils.cmd_output(valac+" --version",silent=True) + version=output.split(' ',1)[-1].strip().split(".") + version=[int(x)for x in version] + valac_version=tuple(version) + except Exception: + valac_version=(0,0,0) + conf.check_message('program version','valac >= '+min_version_str,valac_version>=min_version,"%d.%d.%d"%valac_version) + conf.check_tool('gnu_dirs') + if valac_version=0x2060000: + import subprocess as pproc +else: + import pproc +import Logs +from Constants import* +is_win32=sys.platform=='win32' +try: + from collections import defaultdict as DefaultDict +except ImportError: + class DefaultDict(dict): + def __init__(self,default_factory): + super(DefaultDict,self).__init__() + self.default_factory=default_factory + def __getitem__(self,key): + try: + return super(DefaultDict,self).__getitem__(key) + except KeyError: + value=self.default_factory() + self[key]=value + return value +class WafError(Exception): + def __init__(self,*args): + self.args=args + self.stack=traceback.extract_stack() + Exception.__init__(self,*args) + def __str__(self): + return str(len(self.args)==1 and self.args[0]or self.args) +class WscriptError(WafError): + def __init__(self,message,wscript_file=None): + if wscript_file: + self.wscript_file=wscript_file + self.wscript_line=None + else: + (self.wscript_file,self.wscript_line)=self.locate_error() + msg_file_line='' + if self.wscript_file: + msg_file_line="%s:"%self.wscript_file + if self.wscript_line: + msg_file_line+="%s:"%self.wscript_line + err_message="%s error: %s"%(msg_file_line,message) + WafError.__init__(self,err_message) + def locate_error(self): + stack=traceback.extract_stack() + stack.reverse() + for frame in stack: + file_name=os.path.basename(frame[0]) + is_wscript=(file_name==WSCRIPT_FILE or file_name==WSCRIPT_BUILD_FILE) + if is_wscript: + return(frame[0],frame[1]) + return(None,None) +indicator=is_win32 and'\x1b[A\x1b[K%s%s%s\r'or'\x1b[K%s%s%s\r' +try: + from fnv import new as md5 + import Constants + Constants.SIG_NIL='signofnv' + def h_file(filename): + m=md5() + try: + m.hfile(filename) + x=m.digest() + if x is None:raise OSError("not a file") + return x + except SystemError: + raise OSError("not a file"+filename) +except ImportError: + try: + from hashlib import md5 + except ImportError: + from md5 import md5 + def h_file(filename): + f=open(filename,'rb') + m=md5() + readBytes=100000 + while(filename): + filename=f.read(100000) + m.update(filename) + f.close() + return m.digest() +class ordered_dict(UserDict): + def __init__(self,dict=None): + self.allkeys=[] + UserDict.__init__(self,dict) + def __delitem__(self,key): + self.allkeys.remove(key) + UserDict.__delitem__(self,key) + def __setitem__(self,key,item): + if key not in self.allkeys:self.allkeys.append(key) + UserDict.__setitem__(self,key,item) +def exec_command(s,**kw): + if'log'in kw: + kw['stdout']=kw['stderr']=kw['log'] + del(kw['log']) + kw['shell']=isinstance(s,str) + try: + proc=pproc.Popen(s,**kw) + return proc.wait() + except WindowsError: + return-1 +if is_win32: + old_log=exec_command + def exec_command(s,**kw): + if len(s)<2000:return old_log(s,**kw) 
+ if'log'in kw: + kw['stdout']=kw['stderr']=kw['log'] + del(kw['log']) + kw['shell']=isinstance(s,str) + startupinfo=pproc.STARTUPINFO() + startupinfo.dwFlags|=pproc.STARTF_USESHOWWINDOW + kw['startupinfo']=startupinfo + proc=pproc.Popen(s,**kw) + return proc.wait() +listdir=os.listdir +if is_win32: + def listdir_win32(s): + if re.match('^[A-Za-z]:$',s): + s+=os.sep + if not os.path.isdir(s): + e=OSError() + e.errno=errno.ENOENT + raise e + return os.listdir(s) + listdir=listdir_win32 +def waf_version(mini=0x010000,maxi=0x100000): + ver=HEXVERSION + try:min_val=mini+0 + except TypeError:min_val=int(mini.replace('.','0'),16) + if min_val>ver: + Logs.error("waf version should be at least %s (%s found)"%(mini,ver)) + sys.exit(0) + try:max_val=maxi+0 + except TypeError:max_val=int(maxi.replace('.','0'),16) + if max_val<ver: + Logs.error("waf version should be at most %s (%s found)"%(maxi,ver)) + sys.exit(0) +def python_24_guard(): + if sys.hexversion<0x20400f0: + raise ImportError("Waf requires Python >= 2.3 but the raw source requires Python 2.4") +def ex_stack(): + exc_type,exc_value,tb=sys.exc_info() + exc_lines=traceback.format_exception(exc_type,exc_value,tb) + return''.join(exc_lines) +def to_list(sth): + if isinstance(sth,str): + return sth.split() + else: + return sth +g_loaded_modules={} +g_module=None +def load_module(file_path,name=WSCRIPT_FILE): + try: + return g_loaded_modules[file_path] + except KeyError: + pass + module=imp.new_module(name) + try: + code=readf(file_path,m='rU') + except(IOError,OSError): + raise WscriptError('The file %s could not be opened!'%file_path) + module.waf_hash_val=code + module_dir=os.path.dirname(file_path) + sys.path.insert(0,module_dir) + exec(code,module.__dict__) + sys.path.remove(module_dir) + g_loaded_modules[file_path]=module + return module +def set_main_module(file_path): + global g_module + g_module=load_module(file_path,'wscript_main') + g_module.root_path=file_path +def to_hashtable(s): + tbl={} + lst=s.split('\n') + for line in lst: + if not line:continue + mems=line.split('=') + tbl[mems[0]]=mems[1] + return tbl +def get_term_cols(): + return 80 +try: + import struct,fcntl,termios +except ImportError: + pass +else: + if Logs.got_tty: + def myfun(): + dummy_lines,cols=struct.unpack("HHHH",fcntl.ioctl(sys.stderr.fileno(),termios.TIOCGWINSZ,struct.pack("HHHH",0,0,0,0)))[:2] + return cols + try: + myfun() + except IOError: + pass + else: + get_term_cols=myfun +rot_idx=0 +rot_chr=['\\','|','/','-'] +def split_path(path): + return path.split('/') +def split_path_cygwin(path): + if path.startswith('//'): + ret=path.split('/')[2:] + ret[0]='/'+ret[0] + return ret + return path.split('/') +re_sp=re.compile('[/\\\\]') +def split_path_win32(path): + if path.startswith('\\\\'): + ret=re.split(re_sp,path)[2:] + ret[0]='\\'+ret[0] + return ret + return re.split(re_sp,path) +if sys.platform=='cygwin': + split_path=split_path_cygwin +elif is_win32: + split_path=split_path_win32 +def copy_attrs(orig,dest,names,only_if_set=False): + for a in to_list(names): + u=getattr(orig,a,()) + if u or not only_if_set: + setattr(dest,a,u) +def def_attrs(cls,**kw): + ''' + set attributes for class. + @param cls [any class]: the class to update the given attributes in. + @param kw [dictionary]: dictionary of attributes names and values. + + if the given class hasn't one (or more) of these attributes, add the attribute with its value to the class.
+ ''' + for k,v in kw.iteritems(): + if not hasattr(cls,k): + setattr(cls,k,v) +quote_define_name_table=None +def quote_define_name(path): + global quote_define_name_table + if not quote_define_name_table: + invalid_chars=set([chr(x)for x in xrange(256)])-set(string.digits+string.uppercase) + quote_define_name_table=string.maketrans(''.join(invalid_chars),'_'*len(invalid_chars)) + return string.translate(string.upper(path),quote_define_name_table) +def quote_whitespace(path): + return(path.strip().find(' ')>0 and'"%s"'%path or path).replace('""','"') +def trimquotes(s): + if not s:return'' + s=s.rstrip() + if s[0]=="'"and s[-1]=="'":return s[1:-1] + return s +def h_list(lst): + m=md5() + m.update(str(lst)) + return m.digest() +def h_fun(fun): + try: + return fun.code + except AttributeError: + try: + h=inspect.getsource(fun) + except IOError: + h="nocode" + try: + fun.code=h + except AttributeError: + pass + return h +def pprint(col,str,label='',sep=os.linesep): + sys.stderr.write("%s%s%s %s%s"%(Logs.colors(col),str,Logs.colors.NORMAL,label,sep)) +def check_dir(dir): + try: + os.stat(dir) + except OSError: + try: + os.makedirs(dir) + except OSError,e: + raise WafError("Cannot create folder '%s' (original error: %s)"%(dir,e)) +def cmd_output(cmd,**kw): + silent=kw.get('silent',False) + if silent: + del(kw['silent']) + if'e'in kw: + tmp=kw['e'] + del(kw['e']) + kw['env']=tmp + kw['shell']=isinstance(cmd,str) + kw['stdout']=pproc.PIPE + if silent: + kw['stderr']=pproc.PIPE + try: + p=pproc.Popen(cmd,**kw) + output=p.communicate()[0] + except WindowsError,e: + raise ValueError(str(e)) + if p.returncode: + if not silent: + msg="command execution failed: %s -> %r"%(cmd,str(output)) + raise ValueError(msg) + output='' + return output +reg_subst=re.compile(r"(\\\\)|(\$\$)|\$\{([^}]+)\}") +def subst_vars(expr,params): + def repl_var(m): + if m.group(1): + return'\\' + if m.group(2): + return'$' + try: + return params.get_flat(m.group(3)) + except AttributeError: + return params[m.group(3)] + return reg_subst.sub(repl_var,expr) +def detect_platform(): + s=sys.platform + for x in'cygwin linux irix sunos hpux aix darwin'.split(): + if s.find(x)>=0: + return x + if os.name in'posix java os2'.split(): + return os.name + return s +def load_tool(tool,tooldir=None): + if tooldir: + assert isinstance(tooldir,list) + sys.path=tooldir+sys.path + try: + try: + return __import__(tool) + except ImportError,e: + raise WscriptError(e) + finally: + if tooldir: + for d in tooldir: + sys.path.remove(d) +def readf(fname,m='r'): + f=None + try: + f=open(fname,m) + txt=f.read() + finally: + if f:f.close() + return txt +def nada(*k,**kw): + pass +def diff_path(top,subdir): + top=os.path.normpath(top).replace('\\','/').split('/') + subdir=os.path.normpath(subdir).replace('\\','/').split('/') + if len(top)==len(subdir):return'' + diff=subdir[len(top)-len(subdir):] + return os.path.join(*diff) +class Context(object): + def set_curdir(self,dir): + self.curdir_=dir + def get_curdir(self): + try: + return self.curdir_ + except AttributeError: + self.curdir_=os.getcwd() + return self.get_curdir() + curdir=property(get_curdir,set_curdir) + def recurse(self,dirs,name=''): + if not name: + name=inspect.stack()[1][3] + if isinstance(dirs,str): + dirs=to_list(dirs) + for x in dirs: + if os.path.isabs(x): + nexdir=x + else: + nexdir=os.path.join(self.curdir,x) + base=os.path.join(nexdir,WSCRIPT_FILE) + try: + txt=readf(base+'_'+name,m='rU') + except(OSError,IOError): + try: + module=load_module(base) + except OSError: + raise 
WscriptError('No such script %s'%base) + try: + f=module.__dict__[name] + except KeyError: + raise WscriptError('No function %s defined in %s'%(name,base)) + if getattr(self.__class__,'pre_recurse',None): + self.pre_recurse(f,base,nexdir) + old=self.curdir + self.curdir=nexdir + try: + f(self) + finally: + self.curdir=old + if getattr(self.__class__,'post_recurse',None): + self.post_recurse(module,base,nexdir) + else: + dc={'ctx':self} + if getattr(self.__class__,'pre_recurse',None): + dc=self.pre_recurse(txt,base+'_'+name,nexdir) + old=self.curdir + self.curdir=nexdir + try: + exec(txt,dc) + finally: + self.curdir=old + if getattr(self.__class__,'post_recurse',None): + self.post_recurse(txt,base+'_'+name,nexdir) +def jar_regexp(regex): + if regex.endswith('/'): + regex+='**' + regex=(re.escape(regex).replace(r"\*\*\/",".*").replace(r"\*\*",".*").replace(r"\*","[^/]*").replace(r"\?","[^/]")) + if regex.endswith(r'\/.*'): + regex=regex[:-4]+'([/].*)*' + regex+='$' + return re.compile(regex) +if is_win32: + old=shutil.copy2 + def copy2(src,dst): + old(src,dst) + shutil.copystat(src,src) + setattr(shutil,'copy2',copy2) +def get_elapsed_time(start): + delta=datetime.datetime.now()-start + days=int(delta.days) + hours=int(delta.seconds/3600) + minutes=int((delta.seconds-hours*3600)/60) + seconds=delta.seconds-hours*3600-minutes*60+float(delta.microseconds)/1000/1000 + result='' + if days: + result+='%dd'%days + if days or hours: + result+='%dh'%hours + if days or hours or minutes: + result+='%dm'%minutes + return'%s%.3fs'%(result,seconds) +