Description: Upgrade the bundled waf to a Python 3 compatible release (2.0.19) and update wscript to match.
Author: Dimitri John Ledkov <xnox@ubuntu.com>


--- pugl-0~svn32+dfsg0.orig/waf
+++ pugl-0~svn32+dfsg0/waf
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
-# encoding: ISO8859-1
-# Thomas Nagy, 2005-2011
-
+# encoding: latin-1
+# Thomas Nagy, 2005-2018
+#
 """
 Redistribution and use in source and binary forms, with or without
 modification, are permitted provided that the following conditions
@@ -30,13 +30,15 @@ IN ANY WAY OUT OF THE USE OF THIS SOFTWA
 POSSIBILITY OF SUCH DAMAGE.
 """
 
-import os, sys
+import os, sys, inspect
 
-VERSION="1.6.11"
-REVISION="d62e7cf41970faf1f2748922c0a58cdb"
+VERSION="2.0.19"
+REVISION="1f3c580272b15a03d2566843c5fe872a"
+GIT="61ee22b598cf80e260beb64e475966f58b304d0d"
 INSTALL=''
-C1='#-'
-C2='#&'
+C1='#6'
+C2='#.'
+C3='#%'
 cwd = os.getcwd()
 join = os.path.join
 
@@ -53,8 +55,8 @@ def err(m):
 	print(('\033[91mError: %s\033[0m' % m))
 	sys.exit(1)
 
-def unpack_wafdir(dir):
-	f = open(sys.argv[0],'rb')
+def unpack_wafdir(dir, src):
+	f = open(src,'rb')
 	c = 'corrupt archive (%d)'
 	while 1:
 		line = f.readline()
@@ -65,22 +67,22 @@ def unpack_wafdir(dir):
 			if f.readline() != b('#<==\n'): err(c % 2)
 			break
 	if not txt: err(c % 3)
-	txt = txt[1:-1].replace(b(C1), b('\n')).replace(b(C2), b('\r'))
+	txt = txt[1:-1].replace(b(C1), b('\n')).replace(b(C2), b('\r')).replace(b(C3), b('\x00'))
 
 	import shutil, tarfile
 	try: shutil.rmtree(dir)
 	except OSError: pass
 	try:
-		for x in ['Tools', 'extras']:
+		for x in ('Tools', 'extras'):
 			os.makedirs(join(dir, 'waflib', x))
 	except OSError:
-		err("Cannot unpack waf lib into %s\nMove waf into a writeable directory" % dir)
+		err("Cannot unpack waf lib into %s\nMove waf in a writable directory" % dir)
 
 	os.chdir(dir)
 	tmp = 't.bz2'
 	t = open(tmp,'wb')
-	t.write(txt)
-	t.close()
+	try: t.write(txt)
+	finally: t.close()
 
 	try:
 		t = tarfile.open(tmp)
@@ -95,10 +97,12 @@ def unpack_wafdir(dir):
 			except OSError: pass
 			err("Waf cannot be unpacked, check that bzip2 support is present")
 
-	for x in t: t.extract(x)
-	t.close()
+	try:
+		for x in t: t.extract(x)
+	finally:
+		t.close()
 
-	for x in ['Tools', 'extras']:
+	for x in ('Tools', 'extras'):
 		os.chmod(join('waflib',x), 493)
 
 	if sys.hexversion<0x300000f:
@@ -106,7 +110,7 @@ def unpack_wafdir(dir):
 		import fixpy2
 		fixpy2.fixdir(dir)
 
-	os.unlink(tmp)
+	os.remove(tmp)
 	os.chdir(cwd)
 
 	try: dir = unicode(dir, 'mbcs')
@@ -125,8 +129,8 @@ def test(dir):
 		pass
 
 def find_lib():
-	name = sys.argv[0]
-	base = os.path.dirname(os.path.abspath(name))
+	src = os.path.abspath(inspect.getfile(inspect.getmodule(err)))
+	base, name = os.path.split(src)
 
 	#devs use $WAFDIR
 	w=test(os.environ.get('WAFDIR', ''))
@@ -136,10 +140,13 @@ def find_lib():
 	if name.endswith('waf-light'):
 		w = test(base)
 		if w: return w
+		for dir in sys.path:
+			if test(dir):
+				return dir
 		err('waf-light requires waflib -> export WAFDIR=/folder')
 
 	dirname = '%s-%s-%s' % (WAF, VERSION, REVISION)
-	for i in [INSTALL,'/usr','/usr/local','/opt']:
+	for i in (INSTALL,'/usr','/usr/local','/opt'):
 		w = test(i + '/lib/' + dirname)
 		if w: return w
 
@@ -149,7 +156,7 @@ def find_lib():
 	if w: return w
 
 	#unpack
-	unpack_wafdir(dir)
+	unpack_wafdir(dir, src)
 	return dir
 
 wafdir = find_lib()
--- pugl-0~svn32+dfsg0.orig/waflib/Build.py
+++ pugl-0~svn32+dfsg0/waflib/Build.py
@@ -1,21 +1,24 @@
 #! /usr/bin/env python
 # encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
 
-import os,sys,errno,re,shutil
-try:import cPickle
-except:import pickle as cPickle
-from waflib import Runner,TaskGen,Utils,ConfigSet,Task,Logs,Options,Context,Errors
-import waflib.Node
+import os,sys,errno,re,shutil,stat
+try:
+	import cPickle
+except ImportError:
+	import pickle as cPickle
+from waflib import Node,Runner,TaskGen,Utils,ConfigSet,Task,Logs,Options,Context,Errors
 CACHE_DIR='c4che'
 CACHE_SUFFIX='_cache.py'
 INSTALL=1337
 UNINSTALL=-1337
-SAVED_ATTRS='root node_deps raw_deps task_sigs'.split()
+SAVED_ATTRS='root node_sigs task_sigs imp_sigs raw_deps node_deps'.split()
 CFG_FILES='cfg_files'
 POST_AT_ONCE=0
 POST_LAZY=1
-POST_BOTH=2
+PROTOCOL=-1
+if sys.platform=='cli':
+	PROTOCOL=0
 class BuildContext(Context.Context):
 	'''executes the build'''
 	cmd='build'
@@ -24,48 +27,44 @@ class BuildContext(Context.Context):
 		super(BuildContext,self).__init__(**kw)
 		self.is_install=0
 		self.top_dir=kw.get('top_dir',Context.top_dir)
-		self.run_dir=kw.get('run_dir',Context.run_dir)
-		self.post_mode=POST_AT_ONCE
 		self.out_dir=kw.get('out_dir',Context.out_dir)
-		self.cache_dir=kw.get('cache_dir',None)
+		self.run_dir=kw.get('run_dir',Context.run_dir)
+		self.launch_dir=Context.launch_dir
+		self.post_mode=POST_LAZY
+		self.cache_dir=kw.get('cache_dir')
 		if not self.cache_dir:
-			self.cache_dir=self.out_dir+os.sep+CACHE_DIR
+			self.cache_dir=os.path.join(self.out_dir,CACHE_DIR)
 		self.all_envs={}
+		self.node_sigs={}
 		self.task_sigs={}
+		self.imp_sigs={}
 		self.node_deps={}
 		self.raw_deps={}
-		self.cache_dir_contents={}
 		self.task_gen_cache_names={}
-		self.launch_dir=Context.launch_dir
 		self.jobs=Options.options.jobs
 		self.targets=Options.options.targets
 		self.keep=Options.options.keep
-		self.cache_global=Options.cache_global
-		self.nocache=Options.options.nocache
 		self.progress_bar=Options.options.progress_bar
 		self.deps_man=Utils.defaultdict(list)
 		self.current_group=0
 		self.groups=[]
 		self.group_names={}
+		for v in SAVED_ATTRS:
+			if not hasattr(self,v):
+				setattr(self,v,{})
 	def get_variant_dir(self):
 		if not self.variant:
 			return self.out_dir
-		return os.path.join(self.out_dir,self.variant)
+		return os.path.join(self.out_dir,os.path.normpath(self.variant))
 	variant_dir=property(get_variant_dir,None)
 	def __call__(self,*k,**kw):
 		kw['bld']=self
 		ret=TaskGen.task_gen(*k,**kw)
 		self.task_gen_cache_names={}
-		self.add_to_group(ret,group=kw.get('group',None))
+		self.add_to_group(ret,group=kw.get('group'))
 		return ret
 	def __copy__(self):
-		raise Errors.WafError('build contexts are not supposed to be copied')
-	def install_files(self,*k,**kw):
-		pass
-	def install_as(self,*k,**kw):
-		pass
-	def symlink_as(self,*k,**kw):
-		pass
+		raise Errors.WafError('build contexts cannot be copied')
 	def load_envs(self):
 		node=self.root.find_node(self.cache_dir)
 		if not node:
@@ -79,12 +78,8 @@ class BuildContext(Context.Context):
 			self.all_envs[name]=env
 			for f in env[CFG_FILES]:
 				newnode=self.root.find_resource(f)
-				try:
-					h=Utils.h_file(newnode.abspath())
-				except(IOError,AttributeError):
-					Logs.error('cannot find %r'%f)
-					h=Utils.SIG_NIL
-				newnode.sig=h
+				if not newnode or not newnode.exists():
+					raise Errors.WafError('Missing configuration file %r, reconfigure the project!'%f)
 	def init_dirs(self):
 		if not(os.path.isabs(self.top_dir)and os.path.isabs(self.out_dir)):
 			raise Errors.WafError('The project was not configured: run "waf configure" first!')
@@ -97,56 +92,52 @@ class BuildContext(Context.Context):
 			self.load_envs()
 		self.execute_build()
 	def execute_build(self):
-		Logs.info("Waf: Entering directory `%s'"%self.variant_dir)
+		Logs.info("Waf: Entering directory `%s'",self.variant_dir)
 		self.recurse([self.run_dir])
 		self.pre_build()
 		self.timer=Utils.Timer()
-		if self.progress_bar:
-			sys.stderr.write(Logs.colors.cursor_off)
 		try:
 			self.compile()
 		finally:
-			if self.progress_bar==1:
-				c=len(self.returned_tasks)or 1
-				self.to_log(self.progress_line(c,c,Logs.colors.BLUE,Logs.colors.NORMAL))
-				print('')
-				sys.stdout.flush()
-				sys.stderr.write(Logs.colors.cursor_on)
-			Logs.info("Waf: Leaving directory `%s'"%self.variant_dir)
+			if self.progress_bar==1 and sys.stderr.isatty():
+				c=self.producer.processed or 1
+				m=self.progress_line(c,c,Logs.colors.BLUE,Logs.colors.NORMAL)
+				Logs.info(m,extra={'stream':sys.stderr,'c1':Logs.colors.cursor_off,'c2':Logs.colors.cursor_on})
+			Logs.info("Waf: Leaving directory `%s'",self.variant_dir)
+		try:
+			self.producer.bld=None
+			del self.producer
+		except AttributeError:
+			pass
 		self.post_build()
 	def restore(self):
 		try:
 			env=ConfigSet.ConfigSet(os.path.join(self.cache_dir,'build.config.py'))
-		except(IOError,OSError):
+		except EnvironmentError:
 			pass
 		else:
-			if env['version']<Context.HEXVERSION:
-				raise Errors.WafError('Version mismatch! reconfigure the project')
-			for t in env['tools']:
+			if env.version<Context.HEXVERSION:
+				raise Errors.WafError('Project was configured with a different version of Waf, please reconfigure it')
+			for t in env.tools:
 				self.setup(**t)
-		f=None
+		dbfn=os.path.join(self.variant_dir,Context.DBFILE)
 		try:
-			dbfn=os.path.join(self.variant_dir,Context.DBFILE)
+			data=Utils.readf(dbfn,'rb')
+		except(EnvironmentError,EOFError):
+			Logs.debug('build: Could not load the build cache %s (missing)',dbfn)
+		else:
 			try:
-				f=open(dbfn,'rb')
-			except(IOError,EOFError):
-				Logs.debug('build: could not load the build cache %s (missing)'%dbfn)
-			else:
+				Node.pickle_lock.acquire()
+				Node.Nod3=self.node_class
 				try:
-					waflib.Node.pickle_lock.acquire()
-					waflib.Node.Nod3=self.node_class
-					try:
-						data=cPickle.load(f)
-					except Exception ,e:
-						Logs.debug('build: could not pickle the build cache %s: %r'%(dbfn,e))
-					else:
-						for x in SAVED_ATTRS:
-							setattr(self,x,data[x])
-				finally:
-					waflib.Node.pickle_lock.release()
-		finally:
-			if f:
-				f.close()
+					data=cPickle.loads(data)
+				except Exception as e:
+					Logs.debug('build: Could not pickle the build cache %s: %r',dbfn,e)
+				else:
+					for x in SAVED_ATTRS:
+						setattr(self,x,data.get(x,{}))
+			finally:
+				Node.pickle_lock.release()
 		self.init_dirs()
 	def store(self):
 		data={}
@@ -154,20 +145,15 @@ class BuildContext(Context.Context):
 			data[x]=getattr(self,x)
 		db=os.path.join(self.variant_dir,Context.DBFILE)
 		try:
-			waflib.Node.pickle_lock.acquire()
-			waflib.Node.Nod3=self.node_class
-			f=None
-			try:
-				f=open(db+'.tmp','wb')
-				cPickle.dump(data,f)
-			finally:
-				if f:
-					f.close()
+			Node.pickle_lock.acquire()
+			Node.Nod3=self.node_class
+			x=cPickle.dumps(data,PROTOCOL)
 		finally:
-			waflib.Node.pickle_lock.release()
+			Node.pickle_lock.release()
+		Utils.writef(db+'.tmp',x,m='wb')
 		try:
 			st=os.stat(db)
-			os.unlink(db)
+			os.remove(db)
 			if not Utils.is_win32:
 				os.chown(db+'.tmp',st.st_uid,st.st_gid)
 		except(AttributeError,OSError):
@@ -177,23 +163,27 @@ class BuildContext(Context.Context):
 		Logs.debug('build: compile()')
 		self.producer=Runner.Parallel(self,self.jobs)
 		self.producer.biter=self.get_build_iterator()
-		self.returned_tasks=[]
 		try:
 			self.producer.start()
 		except KeyboardInterrupt:
-			self.store()
+			if self.is_dirty():
+				self.store()
 			raise
 		else:
-			if self.producer.dirty:
+			if self.is_dirty():
 				self.store()
 		if self.producer.error:
 			raise Errors.BuildError(self.producer.error)
+	def is_dirty(self):
+		return self.producer.dirty
 	def setup(self,tool,tooldir=None,funs=None):
 		if isinstance(tool,list):
-			for i in tool:self.setup(i,tooldir)
+			for i in tool:
+				self.setup(i,tooldir)
 			return
 		module=Context.load_tool(tool,tooldir)
-		if hasattr(module,"setup"):module.setup(self)
+		if hasattr(module,"setup"):
+			module.setup(self)
 	def get_env(self):
 		try:
 			return self.all_envs[self.variant]
@@ -203,13 +193,20 @@ class BuildContext(Context.Context):
 		self.all_envs[self.variant]=val
 	env=property(get_env,set_env)
 	def add_manual_dependency(self,path,value):
-		if isinstance(path,waflib.Node.Node):
+		if not path:
+			raise ValueError('Invalid input path %r'%path)
+		if isinstance(path,Node.Node):
 			node=path
 		elif os.path.isabs(path):
 			node=self.root.find_resource(path)
 		else:
 			node=self.path.find_resource(path)
-		self.deps_man[id(node)].append(value)
+		if not node:
+			raise ValueError('Could not find the path %r'%path)
+		if isinstance(value,list):
+			self.deps_man[node].extend(value)
+		else:
+			self.deps_man[node].append(value)
 	def launch_node(self):
 		try:
 			return self.p_ln
@@ -232,9 +229,8 @@ class BuildContext(Context.Context):
 			except KeyError:
 				pass
 		lst=[env[a]for a in vars_lst]
-		ret=Utils.h_list(lst)
+		cache[idx]=ret=Utils.h_list(lst)
 		Logs.debug('envhash: %s %r',Utils.to_hex(ret),lst)
-		cache[idx]=ret
 		return ret
 	def get_tgen_by_name(self,name):
 		cache=self.task_gen_cache_names
@@ -249,20 +245,22 @@ class BuildContext(Context.Context):
 			return cache[name]
 		except KeyError:
 			raise Errors.WafError('Could not find a task generator for the name %r'%name)
-	def progress_line(self,state,total,col1,col2):
+	def progress_line(self,idx,total,col1,col2):
+		if not sys.stderr.isatty():
+			return''
 		n=len(str(total))
 		Utils.rot_idx+=1
 		ind=Utils.rot_chr[Utils.rot_idx%4]
-		pc=(100.*state)/total
-		eta=str(self.timer)
-		fs="[%%%dd/%%%dd][%%s%%2d%%%%%%s][%s]["%(n,n,ind)
-		left=fs%(state,total,col1,pc,col2)
-		right='][%s%s%s]'%(col1,eta,col2)
+		pc=(100.*idx)/total
+		fs="[%%%dd/%%d][%%s%%2d%%%%%%s][%s]["%(n,ind)
+		left=fs%(idx,total,col1,pc,col2)
+		right='][%s%s%s]'%(col1,self.timer,col2)
 		cols=Logs.get_term_cols()-len(left)-len(right)+2*len(col1)+2*len(col2)
-		if cols<7:cols=7
-		ratio=((cols*state)//total)-1
+		if cols<7:
+			cols=7
+		ratio=((cols*idx)//total)-1
 		bar=('='*ratio+'>').ljust(cols)
-		msg=Utils.indicator%(left,bar,right)
+		msg=Logs.indicator%(left,bar,right)
 		return msg
 	def declare_chain(self,*k,**kw):
 		return TaskGen.declare_chain(*k,**kw)
@@ -291,7 +289,7 @@ class BuildContext(Context.Context):
 			return self.group_names[x]
 		return self.groups[x]
 	def add_to_group(self,tgen,group=None):
-		assert(isinstance(tgen,TaskGen.task_gen)or isinstance(tgen,Task.TaskBase))
+		assert(isinstance(tgen,TaskGen.task_gen)or isinstance(tgen,Task.Task))
 		tgen.bld=self
 		self.get_group(group).append(tgen)
 	def get_group_name(self,g):
@@ -303,14 +301,14 @@ class BuildContext(Context.Context):
 		return''
 	def get_group_idx(self,tg):
 		se=id(tg)
-		for i in range(len(self.groups)):
-			for t in self.groups[i]:
+		for i,tmp in enumerate(self.groups):
+			for t in tmp:
 				if id(t)==se:
 					return i
 		return None
 	def add_group(self,name=None,move=True):
 		if name and name in self.group_names:
-			Logs.error('add_group: name %s already present'%name)
+			raise Errors.WafError('add_group: name %s already present',name)
 		g=[]
 		self.group_names[name]=g
 		self.groups.append(g)
@@ -319,9 +317,10 @@ class BuildContext(Context.Context):
 	def set_group(self,idx):
 		if isinstance(idx,str):
 			g=self.group_names[idx]
-			for i in range(len(self.groups)):
-				if id(g)==id(self.groups[i]):
+			for i,tmp in enumerate(self.groups):
+				if id(g)==id(tmp):
 					self.current_group=i
+					break
 		else:
 			self.current_group=idx
 	def total(self):
@@ -338,8 +337,6 @@ class BuildContext(Context.Context):
 		min_grp=0
 		for name in self.targets.split(','):
 			tg=self.get_tgen_by_name(name)
-			if not tg:
-				raise Errors.WafError('target %r does not exist'%name)
 			m=self.get_group_idx(tg)
 			if m>min_grp:
 				min_grp=m
@@ -347,136 +344,229 @@ class BuildContext(Context.Context):
 			elif m==min_grp:
 				to_post.append(tg)
 		return(min_grp,to_post)
+	def get_all_task_gen(self):
+		lst=[]
+		for g in self.groups:
+			lst.extend(g)
+		return lst
 	def post_group(self):
+		def tgpost(tg):
+			try:
+				f=tg.post
+			except AttributeError:
+				pass
+			else:
+				f()
 		if self.targets=='*':
-			for tg in self.groups[self.cur]:
-				try:
-					f=tg.post
-				except AttributeError:
-					pass
-				else:
-					f()
+			for tg in self.groups[self.current_group]:
+				tgpost(tg)
 		elif self.targets:
-			if self.cur<self._min_grp:
-				for tg in self.groups[self.cur]:
-					try:
-						f=tg.post
-					except AttributeError:
-						pass
-					else:
-						f()
+			if self.current_group<self._min_grp:
+				for tg in self.groups[self.current_group]:
+					tgpost(tg)
 			else:
 				for tg in self._exact_tg:
 					tg.post()
 		else:
 			ln=self.launch_node()
-			for tg in self.groups[self.cur]:
+			if ln.is_child_of(self.bldnode):
+				Logs.warn('Building from the build directory, forcing --targets=*')
+				ln=self.srcnode
+			elif not ln.is_child_of(self.srcnode):
+				Logs.warn('CWD %s is not under %s, forcing --targets=* (run distclean?)',ln.abspath(),self.srcnode.abspath())
+				ln=self.srcnode
+			def is_post(tg,ln):
 				try:
-					f=tg.post
+					p=tg.path
 				except AttributeError:
 					pass
 				else:
-					if tg.path.is_child_of(ln):
-						f()
+					if p.is_child_of(ln):
+						return True
+			def is_post_group():
+				for i,g in enumerate(self.groups):
+					if i>self.current_group:
+						for tg in g:
+							if is_post(tg,ln):
+								return True
+			if self.post_mode==POST_LAZY and ln!=self.srcnode:
+				if is_post_group():
+					ln=self.srcnode
+			for tg in self.groups[self.current_group]:
+				if is_post(tg,ln):
+					tgpost(tg)
 	def get_tasks_group(self,idx):
 		tasks=[]
 		for tg in self.groups[idx]:
-			if isinstance(tg,Task.TaskBase):
-				tasks.append(tg)
-			else:
+			try:
 				tasks.extend(tg.tasks)
+			except AttributeError:
+				tasks.append(tg)
 		return tasks
 	def get_build_iterator(self):
-		self.cur=0
 		if self.targets and self.targets!='*':
 			(self._min_grp,self._exact_tg)=self.get_targets()
-		global lazy_post
 		if self.post_mode!=POST_LAZY:
-			while self.cur<len(self.groups):
+			for self.current_group,_ in enumerate(self.groups):
 				self.post_group()
-				self.cur+=1
-			self.cur=0
-		while self.cur<len(self.groups):
+		for self.current_group,_ in enumerate(self.groups):
 			if self.post_mode!=POST_AT_ONCE:
 				self.post_group()
-			tasks=self.get_tasks_group(self.cur)
+			tasks=self.get_tasks_group(self.current_group)
 			Task.set_file_constraints(tasks)
 			Task.set_precedence_constraints(tasks)
 			self.cur_tasks=tasks
-			self.cur+=1
-			if not tasks:
-				continue
-			yield tasks
+			if tasks:
+				yield tasks
 		while 1:
 			yield[]
+	def install_files(self,dest,files,**kw):
+		assert(dest)
+		tg=self(features='install_task',install_to=dest,install_from=files,**kw)
+		tg.dest=tg.install_to
+		tg.type='install_files'
+		if not kw.get('postpone',True):
+			tg.post()
+		return tg
+	def install_as(self,dest,srcfile,**kw):
+		assert(dest)
+		tg=self(features='install_task',install_to=dest,install_from=srcfile,**kw)
+		tg.dest=tg.install_to
+		tg.type='install_as'
+		if not kw.get('postpone',True):
+			tg.post()
+		return tg
+	def symlink_as(self,dest,src,**kw):
+		assert(dest)
+		tg=self(features='install_task',install_to=dest,install_from=src,**kw)
+		tg.dest=tg.install_to
+		tg.type='symlink_as'
+		tg.link=src
+		if not kw.get('postpone',True):
+			tg.post()
+		return tg
+@TaskGen.feature('install_task')
+@TaskGen.before_method('process_rule','process_source')
+def process_install_task(self):
+	self.add_install_task(**self.__dict__)
+@TaskGen.taskgen_method
+def add_install_task(self,**kw):
+	if not self.bld.is_install:
+		return
+	if not kw['install_to']:
+		return
+	if kw['type']=='symlink_as'and Utils.is_win32:
+		if kw.get('win32_install'):
+			kw['type']='install_as'
+		else:
+			return
+	tsk=self.install_task=self.create_task('inst')
+	tsk.chmod=kw.get('chmod',Utils.O644)
+	tsk.link=kw.get('link','')or kw.get('install_from','')
+	tsk.relative_trick=kw.get('relative_trick',False)
+	tsk.type=kw['type']
+	tsk.install_to=tsk.dest=kw['install_to']
+	tsk.install_from=kw['install_from']
+	tsk.relative_base=kw.get('cwd')or kw.get('relative_base',self.path)
+	tsk.install_user=kw.get('install_user')
+	tsk.install_group=kw.get('install_group')
+	tsk.init_files()
+	if not kw.get('postpone',True):
+		tsk.run_now()
+	return tsk
+@TaskGen.taskgen_method
+def add_install_files(self,**kw):
+	kw['type']='install_files'
+	return self.add_install_task(**kw)
+@TaskGen.taskgen_method
+def add_install_as(self,**kw):
+	kw['type']='install_as'
+	return self.add_install_task(**kw)
+@TaskGen.taskgen_method
+def add_symlink_as(self,**kw):
+	kw['type']='symlink_as'
+	return self.add_install_task(**kw)
 class inst(Task.Task):
-	color='CYAN'
-	def post(self):
-		buf=[]
-		for x in self.source:
-			if isinstance(x,waflib.Node.Node):
-				y=x
-			else:
-				y=self.path.find_resource(x)
-				if not y:
-					if Logs.verbose:
-						Logs.warn('Could not find %s immediately (may cause broken builds)'%x)
-					idx=self.generator.bld.get_group_idx(self)
-					for tg in self.generator.bld.groups[idx]:
-						if not isinstance(tg,inst)and id(tg)!=id(self):
-							tg.post()
-						y=self.path.find_resource(x)
-						if y:
-							break
-					else:
-						raise Errors.WafError('could not find %r in %r'%(x,self.path))
-			buf.append(y)
-		self.inputs=buf
+	def __str__(self):
+		return''
+	def uid(self):
+		lst=self.inputs+self.outputs+[self.link,self.generator.path.abspath()]
+		return Utils.h_list(lst)
+	def init_files(self):
+		if self.type=='symlink_as':
+			inputs=[]
+		else:
+			inputs=self.generator.to_nodes(self.install_from)
+			if self.type=='install_as':
+				assert len(inputs)==1
+		self.set_inputs(inputs)
+		dest=self.get_install_path()
+		outputs=[]
+		if self.type=='symlink_as':
+			if self.relative_trick:
+				self.link=os.path.relpath(self.link,os.path.dirname(dest))
+			outputs.append(self.generator.bld.root.make_node(dest))
+		elif self.type=='install_as':
+			outputs.append(self.generator.bld.root.make_node(dest))
+		else:
+			for y in inputs:
+				if self.relative_trick:
+					destfile=os.path.join(dest,y.path_from(self.relative_base))
+				else:
+					destfile=os.path.join(dest,y.name)
+				outputs.append(self.generator.bld.root.make_node(destfile))
+		self.set_outputs(outputs)
 	def runnable_status(self):
 		ret=super(inst,self).runnable_status()
-		if ret==Task.SKIP_ME:
+		if ret==Task.SKIP_ME and self.generator.bld.is_install:
 			return Task.RUN_ME
 		return ret
-	def __str__(self):
-		return''
-	def run(self):
-		return self.generator.exec_task()
+	def post_run(self):
+		pass
 	def get_install_path(self,destdir=True):
-		dest=Utils.subst_vars(self.dest,self.env)
-		dest=dest.replace('/',os.sep)
+		if isinstance(self.install_to,Node.Node):
+			dest=self.install_to.abspath()
+		else:
+			dest=os.path.normpath(Utils.subst_vars(self.install_to,self.env))
+		if not os.path.isabs(dest):
+			dest=os.path.join(self.env.PREFIX,dest)
 		if destdir and Options.options.destdir:
 			dest=os.path.join(Options.options.destdir,os.path.splitdrive(dest)[1].lstrip(os.sep))
 		return dest
-	def exec_install_files(self):
-		destpath=self.get_install_path()
-		if not destpath:
-			raise Errors.WafError('unknown installation path %r'%self.generator)
-		for x,y in zip(self.source,self.inputs):
-			if self.relative_trick:
-				destfile=os.path.join(destpath,y.path_from(self.path))
-				Utils.check_dir(os.path.dirname(destfile))
-			else:
-				destfile=os.path.join(destpath,y.name)
-			self.generator.bld.do_install(y.abspath(),destfile,self.chmod)
-	def exec_install_as(self):
-		destfile=self.get_install_path()
-		self.generator.bld.do_install(self.inputs[0].abspath(),destfile,self.chmod)
-	def exec_symlink_as(self):
-		destfile=self.get_install_path()
-		self.generator.bld.do_link(self.link,destfile)
-class InstallContext(BuildContext):
-	'''installs the targets on the system'''
-	cmd='install'
-	def __init__(self,**kw):
-		super(InstallContext,self).__init__(**kw)
-		self.uninstall=[]
-		self.is_install=INSTALL
-	def do_install(self,src,tgt,chmod=Utils.O644):
-		d,_=os.path.split(tgt)
-		if not d:
-			raise Errors.WafError('Invalid installation given %r->%r'%(src,tgt))
-		Utils.check_dir(d)
-		srclbl=src.replace(self.srcnode.abspath()+os.sep,'')
+	def copy_fun(self,src,tgt):
+		if Utils.is_win32 and len(tgt)>259 and not tgt.startswith('\\\\?\\'):
+			tgt='\\\\?\\'+tgt
+		shutil.copy2(src,tgt)
+		self.fix_perms(tgt)
+	def rm_empty_dirs(self,tgt):
+		while tgt:
+			tgt=os.path.dirname(tgt)
+			try:
+				os.rmdir(tgt)
+			except OSError:
+				break
+	def run(self):
+		is_install=self.generator.bld.is_install
+		if not is_install:
+			return
+		for x in self.outputs:
+			if is_install==INSTALL:
+				x.parent.mkdir()
+		if self.type=='symlink_as':
+			fun=is_install==INSTALL and self.do_link or self.do_unlink
+			fun(self.link,self.outputs[0].abspath())
+		else:
+			fun=is_install==INSTALL and self.do_install or self.do_uninstall
+			launch_node=self.generator.bld.launch_node()
+			for x,y in zip(self.inputs,self.outputs):
+				fun(x.abspath(),y.abspath(),x.path_from(launch_node))
+	def run_now(self):
+		status=self.runnable_status()
+		if status not in(Task.RUN_ME,Task.SKIP_ME):
+			raise Errors.TaskNotReady('Could not process %r: status %r'%(self,status))
+		self.run()
+		self.hasrun=Task.SUCCESS
+	def do_install(self,src,tgt,lbl,**kw):
 		if not Options.options.force:
 			try:
 				st1=os.stat(tgt)
@@ -485,133 +575,93 @@ class InstallContext(BuildContext):
 				pass
 			else:
 				if st1.st_mtime+2>=st2.st_mtime and st1.st_size==st2.st_size:
-					if not self.progress_bar:
-						Logs.info('- install %s (from %s)'%(tgt,srclbl))
+					if not self.generator.bld.progress_bar:
+						c1=Logs.colors.NORMAL
+						c2=Logs.colors.BLUE
+						Logs.info('%s- install %s%s%s (from %s)',c1,c2,tgt,c1,lbl)
 					return False
-		if not self.progress_bar:
-			Logs.info('+ install %s (from %s)'%(tgt,srclbl))
+		if not self.generator.bld.progress_bar:
+			c1=Logs.colors.NORMAL
+			c2=Logs.colors.BLUE
+			Logs.info('%s+ install %s%s%s (from %s)',c1,c2,tgt,c1,lbl)
+		try:
+			os.chmod(tgt,Utils.O644|stat.S_IMODE(os.stat(tgt).st_mode))
+		except EnvironmentError:
+			pass
 		try:
 			os.remove(tgt)
 		except OSError:
 			pass
 		try:
-			shutil.copy2(src,tgt)
-			os.chmod(tgt,chmod)
-		except IOError:
-			try:
-				os.stat(src)
-			except(OSError,IOError):
-				Logs.error('File %r does not exist'%src)
-			raise Errors.WafError('Could not install the file %r'%tgt)
-	def do_link(self,src,tgt):
-		d,_=os.path.split(tgt)
-		Utils.check_dir(d)
-		link=False
+			self.copy_fun(src,tgt)
+		except EnvironmentError as e:
+			if not os.path.exists(src):
+				Logs.error('File %r does not exist',src)
+			elif not os.path.isfile(src):
+				Logs.error('Input %r is not a file',src)
+			raise Errors.WafError('Could not install the file %r'%tgt,e)
+	def fix_perms(self,tgt):
+		if not Utils.is_win32:
+			user=getattr(self,'install_user',None)or getattr(self.generator,'install_user',None)
+			group=getattr(self,'install_group',None)or getattr(self.generator,'install_group',None)
+			if user or group:
+				Utils.lchown(tgt,user or-1,group or-1)
 		if not os.path.islink(tgt):
-			link=True
-		elif os.readlink(tgt)!=src:
-			link=True
-		if link:
-			try:os.remove(tgt)
-			except OSError:pass
-			if not self.progress_bar:
-				Logs.info('+ symlink %s (to %s)'%(tgt,src))
-			os.symlink(src,tgt)
+			os.chmod(tgt,self.chmod)
+	def do_link(self,src,tgt,**kw):
+		if os.path.islink(tgt)and os.readlink(tgt)==src:
+			if not self.generator.bld.progress_bar:
+				c1=Logs.colors.NORMAL
+				c2=Logs.colors.BLUE
+				Logs.info('%s- symlink %s%s%s (to %s)',c1,c2,tgt,c1,src)
 		else:
-			if not self.progress_bar:
-				Logs.info('- symlink %s (to %s)'%(tgt,src))
-	def run_task_now(self,tsk,postpone):
-		tsk.post()
-		if not postpone:
-			if tsk.runnable_status()==Task.ASK_LATER:
-				raise self.WafError('cannot post the task %r'%tsk)
-			tsk.run()
-	def install_files(self,dest,files,env=None,chmod=Utils.O644,relative_trick=False,cwd=None,add=True,postpone=True):
-		tsk=inst(env=env or self.env)
-		tsk.bld=self
-		tsk.path=cwd or self.path
-		tsk.chmod=chmod
-		if isinstance(files,waflib.Node.Node):
-			tsk.source=[files]
-		else:
-			tsk.source=Utils.to_list(files)
-		tsk.dest=dest
-		tsk.exec_task=tsk.exec_install_files
-		tsk.relative_trick=relative_trick
-		if add:self.add_to_group(tsk)
-		self.run_task_now(tsk,postpone)
-		return tsk
-	def install_as(self,dest,srcfile,env=None,chmod=Utils.O644,cwd=None,add=True,postpone=True):
-		tsk=inst(env=env or self.env)
-		tsk.bld=self
-		tsk.path=cwd or self.path
-		tsk.chmod=chmod
-		tsk.source=[srcfile]
-		tsk.dest=dest
-		tsk.exec_task=tsk.exec_install_as
-		if add:self.add_to_group(tsk)
-		self.run_task_now(tsk,postpone)
-		return tsk
-	def symlink_as(self,dest,src,env=None,cwd=None,add=True,postpone=True):
-		if Utils.is_win32:
-			return
-		tsk=inst(env=env or self.env)
-		tsk.bld=self
-		tsk.dest=dest
-		tsk.path=cwd or self.path
-		tsk.source=[]
-		tsk.link=src
-		tsk.exec_task=tsk.exec_symlink_as
-		if add:self.add_to_group(tsk)
-		self.run_task_now(tsk,postpone)
-		return tsk
-class UninstallContext(InstallContext):
-	'''removes the targets installed'''
-	cmd='uninstall'
-	def __init__(self,**kw):
-		super(UninstallContext,self).__init__(**kw)
-		self.is_install=UNINSTALL
-	def do_install(self,src,tgt,chmod=Utils.O644):
-		if not self.progress_bar:
-			Logs.info('- remove %s'%tgt)
-		self.uninstall.append(tgt)
+			try:
+				os.remove(tgt)
+			except OSError:
+				pass
+			if not self.generator.bld.progress_bar:
+				c1=Logs.colors.NORMAL
+				c2=Logs.colors.BLUE
+				Logs.info('%s+ symlink %s%s%s (to %s)',c1,c2,tgt,c1,src)
+			os.symlink(src,tgt)
+			self.fix_perms(tgt)
+	def do_uninstall(self,src,tgt,lbl,**kw):
+		if not self.generator.bld.progress_bar:
+			c1=Logs.colors.NORMAL
+			c2=Logs.colors.BLUE
+			Logs.info('%s- remove %s%s%s',c1,c2,tgt,c1)
 		try:
 			os.remove(tgt)
-		except OSError ,e:
+		except OSError as e:
 			if e.errno!=errno.ENOENT:
 				if not getattr(self,'uninstall_error',None):
 					self.uninstall_error=True
 					Logs.warn('build: some files could not be uninstalled (retry with -vv to list them)')
 				if Logs.verbose>1:
-					Logs.warn('could not remove %s (error code %r)'%(e.filename,e.errno))
-		while tgt:
-			tgt=os.path.dirname(tgt)
-			try:
-				os.rmdir(tgt)
-			except OSError:
-				break
-	def do_link(self,src,tgt):
-		try:
-			if not self.progress_bar:
-				Logs.info('- unlink %s'%tgt)
+					Logs.warn('Could not remove %s (error code %r)',e.filename,e.errno)
+		self.rm_empty_dirs(tgt)
+	def do_unlink(self,src,tgt,**kw):
+		try:
+			if not self.generator.bld.progress_bar:
+				c1=Logs.colors.NORMAL
+				c2=Logs.colors.BLUE
+				Logs.info('%s- remove %s%s%s',c1,c2,tgt,c1)
 			os.remove(tgt)
 		except OSError:
 			pass
-		while tgt:
-			tgt=os.path.dirname(tgt)
-			try:
-				os.rmdir(tgt)
-			except OSError:
-				break
-	def execute(self):
-		try:
-			def runnable_status(self):
-				return Task.SKIP_ME
-			setattr(Task.Task,'runnable_status_back',Task.Task.runnable_status)
-			setattr(Task.Task,'runnable_status',runnable_status)
-			super(UninstallContext,self).execute()
-		finally:
-			setattr(Task.Task,'runnable_status',Task.Task.runnable_status_back)
+		self.rm_empty_dirs(tgt)
+class InstallContext(BuildContext):
+	'''installs the targets on the system'''
+	cmd='install'
+	def __init__(self,**kw):
+		super(InstallContext,self).__init__(**kw)
+		self.is_install=INSTALL
+class UninstallContext(InstallContext):
+	'''removes the targets installed'''
+	cmd='uninstall'
+	def __init__(self,**kw):
+		super(UninstallContext,self).__init__(**kw)
+		self.is_install=UNINSTALL
 class CleanContext(BuildContext):
 	'''cleans the project'''
 	cmd='clean'
@@ -626,14 +676,22 @@ class CleanContext(BuildContext):
 			self.store()
 	def clean(self):
 		Logs.debug('build: clean called')
-		if self.bldnode!=self.srcnode:
-			lst=[self.root.find_or_declare(f)for f in self.env[CFG_FILES]]
-			for n in self.bldnode.ant_glob('**/*',excl='lock* *conf_check_*/** config.log c4che/*',quiet=True):
+		if hasattr(self,'clean_files'):
+			for n in self.clean_files:
+				n.delete()
+		elif self.bldnode!=self.srcnode:
+			lst=[]
+			for env in self.all_envs.values():
+				lst.extend(self.root.find_or_declare(f)for f in env[CFG_FILES])
+			excluded_dirs='.lock* *conf_check_*/** config.log %s/*'%CACHE_DIR
+			for n in self.bldnode.ant_glob('**/*',excl=excluded_dirs,quiet=True):
 				if n in lst:
 					continue
 				n.delete()
 		self.root.children={}
-		for v in'node_deps task_sigs raw_deps'.split():
+		for v in SAVED_ATTRS:
+			if v=='root':
+				continue
 			setattr(self,v,{})
 class ListContext(BuildContext):
 	'''lists the targets to execute'''
@@ -655,12 +713,17 @@ class ListContext(BuildContext):
 					f()
 		try:
 			self.get_tgen_by_name('')
-		except:
+		except Errors.WafError:
 			pass
-		lst=list(self.task_gen_cache_names.keys())
-		lst.sort()
-		for k in lst:
-			Logs.pprint('GREEN',k)
+		targets=sorted(self.task_gen_cache_names)
+		line_just=max(len(t)for t in targets)if targets else 0
+		for target in targets:
+			tgen=self.task_gen_cache_names[target]
+			descript=getattr(tgen,'description','')
+			if descript:
+				target=target.ljust(line_just)
+				descript=': %s'%descript
+			Logs.pprint('GREEN',target,label=descript)
 class StepContext(BuildContext):
 	'''executes tasks in a step-by-step fashion, for debugging'''
 	cmd='step'
@@ -672,8 +735,13 @@ class StepContext(BuildContext):
 			Logs.warn('Add a pattern for the debug build, for example "waf step --files=main.c,app"')
 			BuildContext.compile(self)
 			return
+		targets=[]
+		if self.targets and self.targets!='*':
+			targets=self.targets.split(',')
 		for g in self.groups:
 			for tg in g:
+				if targets and tg.name not in targets:
+					continue
 				try:
 					f=tg.post
 				except AttributeError:
@@ -683,23 +751,23 @@ class StepContext(BuildContext):
 			for pat in self.files.split(','):
 				matcher=self.get_matcher(pat)
 				for tg in g:
-					if isinstance(tg,Task.TaskBase):
+					if isinstance(tg,Task.Task):
 						lst=[tg]
 					else:
 						lst=tg.tasks
 					for tsk in lst:
 						do_exec=False
-						for node in getattr(tsk,'inputs',[]):
+						for node in tsk.inputs:
 							if matcher(node,output=False):
 								do_exec=True
 								break
-						for node in getattr(tsk,'outputs',[]):
+						for node in tsk.outputs:
 							if matcher(node,output=True):
 								do_exec=True
 								break
 						if do_exec:
 							ret=tsk.run()
-							Logs.info('%s -> exit %r'%(str(tsk),ret))
+							Logs.info('%s -> exit %r',tsk,ret)
 	def get_matcher(self,pat):
 		inn=True
 		out=True
@@ -718,14 +786,19 @@ class StepContext(BuildContext):
 				pat='%s$'%pat
 			pattern=re.compile(pat)
 		def match(node,output):
-			if output==True and not out:
+			if output and not out:
 				return False
-			if output==False and not inn:
+			if not output and not inn:
 				return False
 			if anode:
 				return anode==node
 			else:
 				return pattern.match(node.abspath())
 		return match
-BuildContext.store=Utils.nogc(BuildContext.store)
-BuildContext.restore=Utils.nogc(BuildContext.restore)
+class EnvContext(BuildContext):
+	fun=cmd=None
+	def execute(self):
+		self.restore()
+		if not self.all_envs:
+			self.load_envs()
+		self.recurse([self.run_dir])
--- pugl-0~svn32+dfsg0.orig/waflib/ConfigSet.py
+++ pugl-0~svn32+dfsg0/waflib/ConfigSet.py
@@ -1,12 +1,10 @@
 #! /usr/bin/env python
 # encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
 
-import sys
-if sys.hexversion < 0x020400f0: from sets import Set as set
 import copy,re,os
 from waflib import Logs,Utils
-re_imp=re.compile('^(#)*?([^#=]*?)\ =\ (.*?)$',re.M)
+re_imp=re.compile(r'^(#)*?([^#=]*?)\ =\ (.*?)$',re.M)
 class ConfigSet(object):
 	__slots__=('table','parent')
 	def __init__(self,filename=None):
@@ -14,9 +12,12 @@ class ConfigSet(object):
 		if filename:
 			self.load(filename)
 	def __contains__(self,key):
-		if key in self.table:return True
-		try:return self.parent.__contains__(key)
-		except AttributeError:return False
+		if key in self.table:
+			return True
+		try:
+			return self.parent.__contains__(key)
+		except AttributeError:
+			return False
 	def keys(self):
 		keys=set()
 		cur=self
@@ -26,12 +27,14 @@ class ConfigSet(object):
 		keys=list(keys)
 		keys.sort()
 		return keys
+	def __iter__(self):
+		return iter(self.keys())
 	def __str__(self):
 		return"\n".join(["%r %r"%(x,self.__getitem__(x))for x in self.keys()])
 	def __getitem__(self,key):
 		try:
 			while 1:
-				x=self.table.get(key,None)
+				x=self.table.get(key)
 				if not x is None:
 					return x
 				self=self.parent
@@ -43,7 +46,7 @@ class ConfigSet(object):
 		self[key]=[]
 	def __getattr__(self,name):
 		if name in self.__slots__:
-			return object.__getattr__(self,name)
+			return object.__getattribute__(self,name)
 		else:
 			return self[name]
 	def __setattr__(self,name,value):
@@ -71,29 +74,34 @@ class ConfigSet(object):
 			for x in keys:
 				tbl[x]=copy.deepcopy(tbl[x])
 			self.table=tbl
+		return self
 	def get_flat(self,key):
 		s=self[key]
-		if isinstance(s,str):return s
+		if isinstance(s,str):
+			return s
 		return' '.join(s)
 	def _get_list_value_for_modification(self,key):
 		try:
 			value=self.table[key]
 		except KeyError:
-			try:value=self.parent[key]
-			except AttributeError:value=[]
-			if isinstance(value,list):
-				value=value[:]
+			try:
+				value=self.parent[key]
+			except AttributeError:
+				value=[]
 			else:
-				value=[value]
+				if isinstance(value,list):
+					value=value[:]
+				else:
+					value=[value]
+			self.table[key]=value
 		else:
 			if not isinstance(value,list):
-				value=[value]
-		self.table[key]=value
+				self.table[key]=value=[value]
 		return value
 	def append_value(self,var,val):
-		current_value=self._get_list_value_for_modification(var)
 		if isinstance(val,str):
 			val=[val]
+		current_value=self._get_list_value_for_modification(var)
 		current_value.extend(val)
 	def prepend_value(self,var,val):
 		if isinstance(val,str):
@@ -111,8 +119,10 @@ class ConfigSet(object):
 		env=self
 		while 1:
 			table_list.insert(0,env.table)
-			try:env=env.parent
-			except AttributeError:break
+			try:
+				env=env.parent
+			except AttributeError:
+				break
 		merged_table={}
 		for table in table_list:
 			merged_table.update(table)
@@ -122,30 +132,34 @@ class ConfigSet(object):
 			os.makedirs(os.path.split(filename)[0])
 		except OSError:
 			pass
-		f=None
+		buf=[]
+		merged_table=self.get_merged_dict()
+		keys=list(merged_table.keys())
+		keys.sort()
 		try:
-			f=open(filename,'w')
-			merged_table=self.get_merged_dict()
-			keys=list(merged_table.keys())
-			keys.sort()
-			for k in keys:
-				if k!='undo_stack':
-					f.write('%s = %r\n'%(k,merged_table[k]))
-		finally:
-			if f:
-				f.close()
+			fun=ascii
+		except NameError:
+			fun=repr
+		for k in keys:
+			if k!='undo_stack':
+				buf.append('%s = %s\n'%(k,fun(merged_table[k])))
+		Utils.writef(filename,''.join(buf))
 	def load(self,filename):
 		tbl=self.table
-		code=Utils.readf(filename)
+		code=Utils.readf(filename,m='r')
 		for m in re_imp.finditer(code):
 			g=m.group
 			tbl[g(2)]=eval(g(3))
-		Logs.debug('env: %s'%str(self.table))
+		Logs.debug('env: %s',self.table)
 	def update(self,d):
-		for k,v in d.items():
-			self[k]=v
+		self.table.update(d)
 	def stash(self):
-		self.undo_stack=self.undo_stack+[self.table]
-		self.table=self.table.copy()
+		orig=self.table
+		tbl=self.table=self.table.copy()
+		for x in tbl.keys():
+			tbl[x]=copy.deepcopy(tbl[x])
+		self.undo_stack=self.undo_stack+[orig]
+	def commit(self):
+		self.undo_stack.pop(-1)
 	def revert(self):
 		self.table=self.undo_stack.pop(-1)
--- pugl-0~svn32+dfsg0.orig/waflib/Configure.py
+++ pugl-0~svn32+dfsg0/waflib/Configure.py
@@ -1,54 +1,15 @@
 #! /usr/bin/env python
 # encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
 
-import os,shlex,sys,time
+import os,re,shlex,shutil,sys,time,traceback
 from waflib import ConfigSet,Utils,Options,Logs,Context,Build,Errors
-try:
-	from urllib import request
-except:
-	from urllib import urlopen
-else:
-	urlopen=request.urlopen
-BREAK='break'
-CONTINUE='continue'
 WAF_CONFIG_LOG='config.log'
 autoconfig=False
 conf_template='''# project %(app)s configured on %(now)s by
 # waf %(wafver)s (abi %(abi)s, python %(pyver)x on %(systype)s)
 # using %(args)s
 #'''
-def download_check(node):
-	pass
-def download_tool(tool,force=False,ctx=None):
-	for x in Utils.to_list(Context.remote_repo):
-		for sub in Utils.to_list(Context.remote_locs):
-			url='/'.join((x,sub,tool+'.py'))
-			try:
-				web=urlopen(url)
-				try:
-					if web.getcode()!=200:
-						continue
-				except AttributeError:
-					pass
-			except Exception:
-				continue
-			else:
-				tmp=ctx.root.make_node(os.sep.join((Context.waf_dir,'waflib','extras',tool+'.py')))
-				tmp.write(web.read())
-				Logs.warn('Downloaded %s from %s'%(tool,url))
-				download_check(tmp)
-				try:
-					module=Context.load_tool(tool)
-				except:
-					Logs.warn('The tool %s from %s is unusable'%(tool,url))
-					try:
-						tmp.delete()
-					except:
-						pass
-					continue
-				return module
-	raise Errors.WafError('Could not load the Waf tool')
 class ConfigurationContext(Context.Context):
 	'''configures the project'''
 	cmd='configure'
@@ -96,10 +57,11 @@ class ConfigurationContext(Context.Conte
 			out=getattr(Context.g_module,Context.OUT,None)
 		if not out:
 			out=Options.lockfile.replace('.lock-waf_%s_'%sys.platform,'').replace('.lock-waf','')
+		out=os.path.realpath(out)
 		self.bldnode=(os.path.isabs(out)and self.root or self.path).make_node(out)
 		self.bldnode.mkdir()
 		if not os.path.isdir(self.bldnode.abspath()):
-			conf.fatal('could not create the build directory %s'%self.bldnode.abspath())
+			self.fatal('Could not create the build directory %s'%self.bldnode.abspath())
 	def execute(self):
 		self.init_dirs()
 		self.cachedir=self.bldnode.make_node(Build.CACHE_DIR)
@@ -111,17 +73,12 @@ class ConfigurationContext(Context.Conte
 			ver=getattr(Context.g_module,'VERSION','')
 			if ver:
 				app="%s (%s)"%(app,ver)
-		now=time.ctime()
-		pyver=sys.hexversion
-		systype=sys.platform
-		args=" ".join(sys.argv)
-		wafver=Context.WAFVERSION
-		abi=Context.ABI
-		self.to_log(conf_template%vars())
+		params={'now':time.ctime(),'pyver':sys.hexversion,'systype':sys.platform,'args':" ".join(sys.argv),'wafver':Context.WAFVERSION,'abi':Context.ABI,'app':app}
+		self.to_log(conf_template%params)
 		self.msg('Setting top to',self.srcnode.abspath())
 		self.msg('Setting out to',self.bldnode.abspath())
 		if id(self.srcnode)==id(self.bldnode):
-			Logs.warn('Setting top == out (remember to use "update_outputs")')
+			Logs.warn('Setting top == out')
 		elif id(self.path)!=id(self.srcnode):
 			if self.srcnode.is_child_of(self.path):
 				Logs.warn('Are you certain that you do not want to set top="." ?')
@@ -130,27 +87,38 @@ class ConfigurationContext(Context.Conte
 		Context.top_dir=self.srcnode.abspath()
 		Context.out_dir=self.bldnode.abspath()
 		env=ConfigSet.ConfigSet()
-		env['argv']=sys.argv
-		env['options']=Options.options.__dict__
+		env.argv=sys.argv
+		env.options=Options.options.__dict__
+		env.config_cmd=self.cmd
 		env.run_dir=Context.run_dir
 		env.top_dir=Context.top_dir
 		env.out_dir=Context.out_dir
-		env['hash']=self.hash
-		env['files']=self.files
-		env['environ']=dict(self.environ)
-		if not self.env.NO_LOCK_IN_RUN:
-			env.store(Context.run_dir+os.sep+Options.lockfile)
-		if not self.env.NO_LOCK_IN_TOP:
-			env.store(Context.top_dir+os.sep+Options.lockfile)
-		if not self.env.NO_LOCK_IN_OUT:
-			env.store(Context.out_dir+os.sep+Options.lockfile)
+		env.hash=self.hash
+		env.files=self.files
+		env.environ=dict(self.environ)
+		env.launch_dir=Context.launch_dir
+		if not(self.env.NO_LOCK_IN_RUN or env.environ.get('NO_LOCK_IN_RUN')or getattr(Options.options,'no_lock_in_run')):
+			env.store(os.path.join(Context.run_dir,Options.lockfile))
+		if not(self.env.NO_LOCK_IN_TOP or env.environ.get('NO_LOCK_IN_TOP')or getattr(Options.options,'no_lock_in_top')):
+			env.store(os.path.join(Context.top_dir,Options.lockfile))
+		if not(self.env.NO_LOCK_IN_OUT or env.environ.get('NO_LOCK_IN_OUT')or getattr(Options.options,'no_lock_in_out')):
+			env.store(os.path.join(Context.out_dir,Options.lockfile))
 	def prepare_env(self,env):
 		if not env.PREFIX:
-			env.PREFIX=os.path.abspath(os.path.expanduser(Options.options.prefix))
+			if Options.options.prefix or Utils.is_win32:
+				env.PREFIX=Options.options.prefix
+			else:
+				env.PREFIX='/'
 		if not env.BINDIR:
-			env.BINDIR=Utils.subst_vars('${PREFIX}/bin',env)
+			if Options.options.bindir:
+				env.BINDIR=Options.options.bindir
+			else:
+				env.BINDIR=Utils.subst_vars('${PREFIX}/bin',env)
 		if not env.LIBDIR:
-			env.LIBDIR=Utils.subst_vars('${PREFIX}/lib',env)
+			if Options.options.libdir:
+				env.LIBDIR=Options.options.libdir
+			else:
+				env.LIBDIR=Utils.subst_vars('${PREFIX}/lib%s'%Utils.lib64(),env)
 	def store(self):
 		n=self.cachedir.make_node('build.config.py')
 		n.write('version = 0x%x\ntools = %r\n'%(Context.HEXVERSION,self.tools))
@@ -159,157 +127,251 @@ class ConfigurationContext(Context.Conte
 		for key in self.all_envs:
 			tmpenv=self.all_envs[key]
 			tmpenv.store(os.path.join(self.cachedir.abspath(),key+Build.CACHE_SUFFIX))
-	def load(self,input,tooldir=None,funs=None,download=True):
-		tools=Utils.to_list(input)
-		if tooldir:tooldir=Utils.to_list(tooldir)
+	def load(self,tool_list,tooldir=None,funs=None,with_sys_path=True,cache=False):
+		tools=Utils.to_list(tool_list)
+		if tooldir:
+			tooldir=Utils.to_list(tooldir)
 		for tool in tools:
-			mag=(tool,id(self.env),funs)
-			if mag in self.tool_cache:
-				self.to_log('(tool %s is already loaded, skipping)'%tool)
-				continue
-			self.tool_cache.append(mag)
+			if cache:
+				mag=(tool,id(self.env),tooldir,funs)
+				if mag in self.tool_cache:
+					self.to_log('(tool %s is already loaded, skipping)'%tool)
+					continue
+				self.tool_cache.append(mag)
 			module=None
 			try:
-				module=Context.load_tool(tool,tooldir)
-			except ImportError ,e:
-				if Options.options.download:
-					module=download_tool(tool,ctx=self)
-					if not module:
-						self.fatal('Could not load the Waf tool %r or download a suitable replacement from the repository (sys.path %r)\n%s'%(tool,sys.path,e))
-				else:
-					self.fatal('Could not load the Waf tool %r from %r (try the --download option?):\n%s'%(tool,sys.path,e))
-			except Exception ,e:
+				module=Context.load_tool(tool,tooldir,ctx=self,with_sys_path=with_sys_path)
+			except ImportError as e:
+				self.fatal('Could not load the Waf tool %r from %r\n%s'%(tool,getattr(e,'waf_sys_path',sys.path),e))
+			except Exception as e:
 				self.to_log('imp %r (%r & %r)'%(tool,tooldir,funs))
-				self.to_log(Utils.ex_stack())
+				self.to_log(traceback.format_exc())
 				raise
 			if funs is not None:
 				self.eval_rules(funs)
 			else:
 				func=getattr(module,'configure',None)
 				if func:
-					if type(func)is type(Utils.readf):func(self)
-					else:self.eval_rules(func)
+					if type(func)is type(Utils.readf):
+						func(self)
+					else:
+						self.eval_rules(func)
 			self.tools.append({'tool':tool,'tooldir':tooldir,'funs':funs})
 	def post_recurse(self,node):
 		super(ConfigurationContext,self).post_recurse(node)
-		self.hash=hash((self.hash,node.read('rb')))
+		self.hash=Utils.h_list((self.hash,node.read('rb')))
 		self.files.append(node.abspath())
 	def eval_rules(self,rules):
 		self.rules=Utils.to_list(rules)
 		for x in self.rules:
 			f=getattr(self,x)
-			if not f:self.fatal("No such method '%s'."%x)
-			try:
-				f()
-			except Exception ,e:
-				ret=self.err_handler(x,e)
-				if ret==BREAK:
-					break
-				elif ret==CONTINUE:
-					continue
-				else:
-					raise
-	def err_handler(self,fun,error):
-		pass
+			if not f:
+				self.fatal('No such configuration function %r'%x)
+			f()
 def conf(f):
 	def fun(*k,**kw):
-		mandatory=True
-		if'mandatory'in kw:
-			mandatory=kw['mandatory']
-			del kw['mandatory']
+		mandatory=kw.pop('mandatory',True)
 		try:
 			return f(*k,**kw)
-		except Errors.ConfigurationError ,e:
+		except Errors.ConfigurationError:
 			if mandatory:
-				raise e
+				raise
+	fun.__name__=f.__name__
 	setattr(ConfigurationContext,f.__name__,fun)
 	setattr(Build.BuildContext,f.__name__,fun)
 	return f
-def add_os_flags(self,var,dest=None):
-	try:self.env.append_value(dest or var,shlex.split(self.environ[var]))
-	except KeyError:pass
+@conf
+def add_os_flags(self,var,dest=None,dup=False):
+	try:
+		flags=shlex.split(self.environ[var])
+	except KeyError:
+		return
+	if dup or''.join(flags)not in''.join(Utils.to_list(self.env[dest or var])):
+		self.env.append_value(dest or var,flags)
+@conf
 def cmd_to_list(self,cmd):
-	if isinstance(cmd,str)and cmd.find(' '):
-		try:
-			os.stat(cmd)
-		except OSError:
+	if isinstance(cmd,str):
+		if os.path.isfile(cmd):
+			return[cmd]
+		if os.sep=='/':
 			return shlex.split(cmd)
 		else:
-			return[cmd]
+			try:
+				return shlex.split(cmd,posix=False)
+			except TypeError:
+				return shlex.split(cmd)
 	return cmd
-def check_waf_version(self,mini='1.6.0',maxi='1.7.0'):
-	self.start_msg('Checking for waf version in %s-%s'%(str(mini),str(maxi)))
+@conf
+def check_waf_version(self,mini='1.9.99',maxi='2.1.0',**kw):
+	self.start_msg('Checking for waf version in %s-%s'%(str(mini),str(maxi)),**kw)
 	ver=Context.HEXVERSION
 	if Utils.num2ver(mini)>ver:
 		self.fatal('waf version should be at least %r (%r found)'%(Utils.num2ver(mini),ver))
 	if Utils.num2ver(maxi)<ver:
 		self.fatal('waf version should be at most %r (%r found)'%(Utils.num2ver(maxi),ver))
-	self.end_msg('ok')
+	self.end_msg('ok',**kw)
+@conf
 def find_file(self,filename,path_list=[]):
 	for n in Utils.to_list(filename):
 		for d in Utils.to_list(path_list):
-			p=os.path.join(d,n)
+			p=os.path.expanduser(os.path.join(d,n))
 			if os.path.exists(p):
 				return p
 	self.fatal('Could not find %r'%filename)
+@conf
 def find_program(self,filename,**kw):
 	exts=kw.get('exts',Utils.is_win32 and'.exe,.com,.bat,.cmd'or',.sh,.pl,.py')
-	environ=kw.get('environ',os.environ)
+	environ=kw.get('environ',getattr(self,'environ',os.environ))
 	ret=''
 	filename=Utils.to_list(filename)
+	msg=kw.get('msg',', '.join(filename))
 	var=kw.get('var','')
 	if not var:
-		var=filename[0].upper()
-	if self.env[var]:
-		ret=self.env[var]
-	elif var in environ:
-		ret=environ[var]
+		var=re.sub(r'[-.]','_',filename[0].upper())
 	path_list=kw.get('path_list','')
-	if not ret:
-		if path_list:
-			path_list=Utils.to_list(path_list)
+	if path_list:
+		path_list=Utils.to_list(path_list)
+	else:
+		path_list=environ.get('PATH','').split(os.pathsep)
+	if kw.get('value'):
+		ret=self.cmd_to_list(kw['value'])
+	elif environ.get(var):
+		ret=self.cmd_to_list(environ[var])
+	elif self.env[var]:
+		ret=self.cmd_to_list(self.env[var])
+	else:
+		if not ret:
+			ret=self.find_binary(filename,exts.split(','),path_list)
+		if not ret and Utils.winreg:
+			ret=Utils.get_registry_app_path(Utils.winreg.HKEY_CURRENT_USER,filename)
+		if not ret and Utils.winreg:
+			ret=Utils.get_registry_app_path(Utils.winreg.HKEY_LOCAL_MACHINE,filename)
+		ret=self.cmd_to_list(ret)
+	if ret:
+		if len(ret)==1:
+			retmsg=ret[0]
 		else:
-			path_list=environ.get('PATH','').split(os.pathsep)
-		if not isinstance(filename,list):
-			filename=[filename]
-		for a in exts.split(','):
-			if ret:
-				break
-			for b in filename:
-				if ret:
-					break
-				for c in path_list:
-					if ret:
-						break
-					x=os.path.expanduser(os.path.join(c,b+a))
-					if os.path.isfile(x):
-						ret=x
-	if not ret and Utils.winreg:
-		ret=Utils.get_registry_app_path(Utils.winreg.HKEY_CURRENT_USER,filename)
-	if not ret and Utils.winreg:
-		ret=Utils.get_registry_app_path(Utils.winreg.HKEY_LOCAL_MACHINE,filename)
-	self.msg('Checking for program '+','.join(filename),ret or False)
-	self.to_log('find program=%r paths=%r var=%r -> %r'%(filename,path_list,var,ret))
+			retmsg=ret
+	else:
+		retmsg=False
+	self.msg('Checking for program %r'%msg,retmsg,**kw)
+	if not kw.get('quiet'):
+		self.to_log('find program=%r paths=%r var=%r -> %r'%(filename,path_list,var,ret))
 	if not ret:
-		self.fatal(kw.get('errmsg','')or'Could not find the program %s'%','.join(filename))
-	if var:
+		self.fatal(kw.get('errmsg','')or'Could not find the program %r'%filename)
+	interpreter=kw.get('interpreter')
+	if interpreter is None:
+		if not Utils.check_exe(ret[0],env=environ):
+			self.fatal('Program %r is not executable'%ret)
 		self.env[var]=ret
+	else:
+		self.env[var]=self.env[interpreter]+ret
 	return ret
-def find_perl_program(self,filename,path_list=[],var=None,environ=None,exts=''):
+@conf
+def find_binary(self,filenames,exts,paths):
+	for f in filenames:
+		for ext in exts:
+			exe_name=f+ext
+			if os.path.isabs(exe_name):
+				if os.path.isfile(exe_name):
+					return exe_name
+			else:
+				for path in paths:
+					x=os.path.expanduser(os.path.join(path,exe_name))
+					if os.path.isfile(x):
+						return x
+	return None
+@conf
+def run_build(self,*k,**kw):
+	buf=[]
+	for key in sorted(kw.keys()):
+		v=kw[key]
+		if hasattr(v,'__call__'):
+			buf.append(Utils.h_fun(v))
+		else:
+			buf.append(str(v))
+	h=Utils.h_list(buf)
+	dir=self.bldnode.abspath()+os.sep+(not Utils.is_win32 and'.'or'')+'conf_check_'+Utils.to_hex(h)
+	cachemode=kw.get('confcache',getattr(Options.options,'confcache',None))
+	if not cachemode and os.path.exists(dir):
+		shutil.rmtree(dir)
+	try:
+		os.makedirs(dir)
+	except OSError:
+		pass
+	try:
+		os.stat(dir)
+	except OSError:
+		self.fatal('cannot use the configuration test folder %r'%dir)
+	if cachemode==1:
+		try:
+			proj=ConfigSet.ConfigSet(os.path.join(dir,'cache_run_build'))
+		except EnvironmentError:
+			pass
+		else:
+			ret=proj['cache_run_build']
+			if isinstance(ret,str)and ret.startswith('Test does not build'):
+				self.fatal(ret)
+			return ret
+	bdir=os.path.join(dir,'testbuild')
+	if not os.path.exists(bdir):
+		os.makedirs(bdir)
+	cls_name=kw.get('run_build_cls')or getattr(self,'run_build_cls','build')
+	self.test_bld=bld=Context.create_context(cls_name,top_dir=dir,out_dir=bdir)
+	bld.init_dirs()
+	bld.progress_bar=0
+	bld.targets='*'
+	bld.logger=self.logger
+	bld.all_envs.update(self.all_envs)
+	bld.env=kw['env']
+	bld.kw=kw
+	bld.conf=self
+	kw['build_fun'](bld)
+	ret=-1
 	try:
-		app=self.find_program(filename,path_list=path_list,var=var,environ=environ,exts=exts)
-	except:
-		self.find_program('perl',var='PERL')
-		app=self.find_file(filename,os.environ['PATH'].split(os.pathsep))
-		if not app:
+		try:
+			bld.compile()
+		except Errors.WafError:
+			ret='Test does not build: %s'%traceback.format_exc()
+			self.fatal(ret)
+		else:
+			ret=getattr(bld,'retval',0)
+	finally:
+		if cachemode:
+			proj=ConfigSet.ConfigSet()
+			proj['cache_run_build']=ret
+			proj.store(os.path.join(dir,'cache_run_build'))
+		else:
+			shutil.rmtree(dir)
+	return ret
+@conf
+def ret_msg(self,msg,args):
+	if isinstance(msg,str):
+		return msg
+	return msg(args)
+@conf
+def test(self,*k,**kw):
+	if not'env'in kw:
+		kw['env']=self.env.derive()
+	if kw.get('validate'):
+		kw['validate'](kw)
+	self.start_msg(kw['msg'],**kw)
+	ret=None
+	try:
+		ret=self.run_build(*k,**kw)
+	except self.errors.ConfigurationError:
+		self.end_msg(kw['errmsg'],'YELLOW',**kw)
+		if Logs.verbose>1:
 			raise
-		if var:
-			self.env[var]=Utils.to_list(self.env['PERL'])+[app]
-	self.msg('Checking for %r'%filename,app)
-
-conf(add_os_flags)
-conf(cmd_to_list)
-conf(check_waf_version)
-conf(find_file)
-conf(find_program)
-conf(find_perl_program)
\ No newline at end of file
+		else:
+			self.fatal('The configuration failed')
+	else:
+		kw['success']=ret
+	if kw.get('post_check'):
+		ret=kw['post_check'](kw)
+	if ret:
+		self.end_msg(kw['errmsg'],'YELLOW',**kw)
+		self.fatal('The configuration failed %r'%ret)
+	else:
+		self.end_msg(self.ret_msg(kw['okmsg'],kw),**kw)
+	return ret
--- pugl-0~svn32+dfsg0.orig/waflib/Context.py
+++ pugl-0~svn32+dfsg0/waflib/Context.py
@@ -1,15 +1,22 @@
 #! /usr/bin/env python
 # encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
 
-import os,imp,sys
+import os,re,sys
 from waflib import Utils,Errors,Logs
 import waflib.Node
-HEXVERSION=0x1060b00
-WAFVERSION="1.6.11"
-WAFREVISION="a7e69d6b81b04729804754c4d5214da063779a65"
-ABI=98
-DBFILE='.wafpickle-%d'%ABI
+if sys.hexversion>0x3040000:
+	import types
+	class imp(object):
+		new_module=lambda x:types.ModuleType(x)
+else:
+	import imp
+HEXVERSION=0x2001300
+WAFVERSION="2.0.19"
+WAFREVISION="e83405712e95b47c040763fdfa468c04dfe72e4b"
+WAFNAME="waf"
+ABI=20
+DBFILE='.wafpickle-%s-%d-%d'%(sys.platform,sys.hexversion,ABI)
 APPNAME='APPNAME'
 VERSION='VERSION'
 TOP='top'
@@ -20,16 +27,13 @@ run_dir=''
 top_dir=''
 out_dir=''
 waf_dir=''
-local_repo=''
-remote_repo='http://waf.googlecode.com/git/'
-remote_locs=['waflib/extras','waflib/Tools']
+default_encoding=Utils.console_encoding()
 g_module=None
 STDOUT=1
 STDERR=-1
 BOTH=0
 classes=[]
 def create_context(cmd_name,*k,**kw):
-	global classes
 	for x in classes:
 		if x.cmd==cmd_name:
 			return x(*k,**kw)
@@ -37,10 +41,10 @@ def create_context(cmd_name,*k,**kw):
 	ctx.fun=cmd_name
 	return ctx
 class store_context(type):
-	def __init__(cls,name,bases,dict):
-		super(store_context,cls).__init__(name,bases,dict)
+	def __init__(cls,name,bases,dct):
+		super(store_context,cls).__init__(name,bases,dct)
 		name=cls.__name__
-		if name=='ctx'or name=='Context':
+		if name in('ctx','Context'):
 			return
 		try:
 			cls.cmd
@@ -48,7 +52,6 @@ class store_context(type):
 			raise Errors.WafError('Missing command for the context class %r (cmd)'%name)
 		if not getattr(cls,'fun',None):
 			cls.fun=cls.cmd
-		global classes
 		classes.insert(0,cls)
 ctx=store_context('ctx',(object,),{})
 class Context(ctx):
@@ -58,13 +61,9 @@ class Context(ctx):
 		try:
 			rd=kw['run_dir']
 		except KeyError:
-			global run_dir
 			rd=run_dir
-		class node_class(waflib.Node.Node):
-			pass
-		self.node_class=node_class
-		self.node_class.__module__="waflib.Node"
-		self.node_class.__name__="Nod3"
+		self.node_class=type('Nod3',(waflib.Node.Node,),{})
+		self.node_class.__module__='waflib.Node'
 		self.node_class.ctx=self
 		self.root=self.node_class('',None)
 		self.cur_script=None
@@ -72,18 +71,24 @@ class Context(ctx):
 		self.stack_path=[]
 		self.exec_dict={'ctx':self,'conf':self,'bld':self,'opt':self}
 		self.logger=None
-	def __hash__(self):
-		return id(self)
+	def finalize(self):
+		try:
+			logger=self.logger
+		except AttributeError:
+			pass
+		else:
+			Logs.free_logger(logger)
+			delattr(self,'logger')
 	def load(self,tool_list,*k,**kw):
 		tools=Utils.to_list(tool_list)
 		path=Utils.to_list(kw.get('tooldir',''))
+		with_sys_path=kw.get('with_sys_path',True)
 		for t in tools:
-			module=load_tool(t,path)
+			module=load_tool(t,path,with_sys_path=with_sys_path)
 			fun=getattr(module,kw.get('name',self.fun),None)
 			if fun:
 				fun(self)
 	def execute(self):
-		global g_module
 		self.recurse([os.path.dirname(g_module.root_path)])
 	def pre_recurse(self,node):
 		self.stack_path.append(self.cur_script)
@@ -93,10 +98,10 @@ class Context(ctx):
 		self.cur_script=self.stack_path.pop()
 		if self.cur_script:
 			self.path=self.cur_script.parent
-	def recurse(self,dirs,name=None,mandatory=True,once=True):
+	def recurse(self,dirs,name=None,mandatory=True,once=True,encoding=None):
 		try:
 			cache=self.recurse_cache
-		except:
+		except AttributeError:
 			cache=self.recurse_cache={}
 		for d in Utils.to_list(dirs):
 			if not os.path.isabs(d):
@@ -108,7 +113,7 @@ class Context(ctx):
 				cache[node]=True
 				self.pre_recurse(node)
 				try:
-					function_code=node.read('rU')
+					function_code=node.read('r',encoding)
 					exec(compile(function_code,node.abspath(),'exec'),self.exec_dict)
 				finally:
 					self.post_recurse(node)
@@ -119,73 +124,120 @@ class Context(ctx):
 					cache[tup]=True
 					self.pre_recurse(node)
 					try:
-						wscript_module=load_module(node.abspath())
+						wscript_module=load_module(node.abspath(),encoding=encoding)
 						user_function=getattr(wscript_module,(name or self.fun),None)
 						if not user_function:
 							if not mandatory:
 								continue
-							raise Errors.WafError('No function %s defined in %s'%(name or self.fun,node.abspath()))
+							raise Errors.WafError('No function %r defined in %s'%(name or self.fun,node.abspath()))
 						user_function(self)
 					finally:
 						self.post_recurse(node)
 				elif not node:
 					if not mandatory:
 						continue
+					try:
+						os.listdir(d)
+					except OSError:
+						raise Errors.WafError('Cannot read the folder %r'%d)
 					raise Errors.WafError('No wscript file in directory %s'%d)
+	def log_command(self,cmd,kw):
+		if Logs.verbose:
+			fmt=os.environ.get('WAF_CMD_FORMAT')
+			if fmt=='string':
+				if not isinstance(cmd,str):
+					cmd=Utils.shell_escape(cmd)
+			Logs.debug('runner: %r',cmd)
+			Logs.debug('runner_env: kw=%s',kw)
 	def exec_command(self,cmd,**kw):
 		subprocess=Utils.subprocess
 		kw['shell']=isinstance(cmd,str)
-		Logs.debug('runner: %r'%cmd)
-		Logs.debug('runner_env: kw=%s'%kw)
+		self.log_command(cmd,kw)
+		if self.logger:
+			self.logger.info(cmd)
+		if'stdout'not in kw:
+			kw['stdout']=subprocess.PIPE
+		if'stderr'not in kw:
+			kw['stderr']=subprocess.PIPE
+		if Logs.verbose and not kw['shell']and not Utils.check_exe(cmd[0]):
+			raise Errors.WafError('Program %s not found!'%cmd[0])
+		cargs={}
+		if'timeout'in kw:
+			if sys.hexversion>=0x3030000:
+				cargs['timeout']=kw['timeout']
+				if not'start_new_session'in kw:
+					kw['start_new_session']=True
+			del kw['timeout']
+		if'input'in kw:
+			if kw['input']:
+				cargs['input']=kw['input']
+				kw['stdin']=subprocess.PIPE
+			del kw['input']
+		if'cwd'in kw:
+			if not isinstance(kw['cwd'],str):
+				kw['cwd']=kw['cwd'].abspath()
+		encoding=kw.pop('decode_as',default_encoding)
 		try:
+			ret,out,err=Utils.run_process(cmd,kw,cargs)
+		except Exception as e:
+			raise Errors.WafError('Execution failure: %s'%str(e),ex=e)
+		if out:
+			if not isinstance(out,str):
+				out=out.decode(encoding,errors='replace')
+			if self.logger:
+				self.logger.debug('out: %s',out)
+			else:
+				Logs.info(out,extra={'stream':sys.stdout,'c1':''})
+		if err:
+			if not isinstance(err,str):
+				err=err.decode(encoding,errors='replace')
 			if self.logger:
-				self.logger.info(cmd)
-				kw['stdout']=kw['stderr']=subprocess.PIPE
-				p=subprocess.Popen(cmd,**kw)
-				(out,err)=p.communicate()
-				if out:
-					self.logger.debug('out: %s'%out.decode(sys.stdout.encoding or'iso8859-1'))
-				if err:
-					self.logger.error('err: %s'%err.decode(sys.stdout.encoding or'iso8859-1'))
-				return p.returncode
+				self.logger.error('err: %s'%err)
 			else:
-				p=subprocess.Popen(cmd,**kw)
-				return p.wait()
-		except OSError:
-			return-1
+				Logs.info(err,extra={'stream':sys.stderr,'c1':''})
+		return ret
 	def cmd_and_log(self,cmd,**kw):
 		subprocess=Utils.subprocess
 		kw['shell']=isinstance(cmd,str)
-		Logs.debug('runner: %r'%cmd)
-		if'quiet'in kw:
-			quiet=kw['quiet']
-			del kw['quiet']
-		else:
-			quiet=None
-		if'output'in kw:
-			to_ret=kw['output']
-			del kw['output']
-		else:
-			to_ret=STDOUT
+		self.log_command(cmd,kw)
+		quiet=kw.pop('quiet',None)
+		to_ret=kw.pop('output',STDOUT)
+		if Logs.verbose and not kw['shell']and not Utils.check_exe(cmd[0]):
+			raise Errors.WafError('Program %r not found!'%cmd[0])
 		kw['stdout']=kw['stderr']=subprocess.PIPE
 		if quiet is None:
 			self.to_log(cmd)
+		cargs={}
+		if'timeout'in kw:
+			if sys.hexversion>=0x3030000:
+				cargs['timeout']=kw['timeout']
+				if not'start_new_session'in kw:
+					kw['start_new_session']=True
+			del kw['timeout']
+		if'input'in kw:
+			if kw['input']:
+				cargs['input']=kw['input']
+				kw['stdin']=subprocess.PIPE
+			del kw['input']
+		if'cwd'in kw:
+			if not isinstance(kw['cwd'],str):
+				kw['cwd']=kw['cwd'].abspath()
+		encoding=kw.pop('decode_as',default_encoding)
 		try:
-			p=subprocess.Popen(cmd,**kw)
-			(out,err)=p.communicate()
-		except Exception ,e:
+			ret,out,err=Utils.run_process(cmd,kw,cargs)
+		except Exception as e:
 			raise Errors.WafError('Execution failure: %s'%str(e),ex=e)
 		if not isinstance(out,str):
-			out=out.decode(sys.stdout.encoding or'iso8859-1')
+			out=out.decode(encoding,errors='replace')
 		if not isinstance(err,str):
-			err=err.decode(sys.stdout.encoding or'iso8859-1')
+			err=err.decode(encoding,errors='replace')
 		if out and quiet!=STDOUT and quiet!=BOTH:
 			self.to_log('out: %s'%out)
 		if err and quiet!=STDERR and quiet!=BOTH:
 			self.to_log('err: %s'%err)
-		if p.returncode:
-			e=Errors.WafError('Command %r returned %r'%(cmd,p.returncode))
-			e.returncode=p.returncode
+		if ret:
+			e=Errors.WafError('Command %r returned %r'%(cmd,ret))
+			e.returncode=ret
 			e.stderr=err
 			e.stdout=out
 			raise e
@@ -198,9 +250,14 @@ class Context(ctx):
 		if self.logger:
 			self.logger.info('from %s: %s'%(self.path.abspath(),msg))
 		try:
-			msg='%s\n(complete log in %s)'%(msg,self.logger.handlers[0].baseFilename)
-		except:
+			logfile=self.logger.handlers[0].baseFilename
+		except AttributeError:
 			pass
+		else:
+			if os.environ.get('WAF_PRINT_FAILURE_LOG'):
+				msg='Log from (%s):\n%s\n'%(logfile,Utils.readf(logfile))
+			else:
+				msg='%s\n(complete log in %s)'%(msg,logfile)
 		raise self.errors.ConfigurationError(msg,ex=ex)
 	def to_log(self,msg):
 		if not msg:
@@ -210,17 +267,29 @@ class Context(ctx):
 		else:
 			sys.stderr.write(str(msg))
 			sys.stderr.flush()
-	def msg(self,msg,result,color=None):
-		self.start_msg(msg)
+	def msg(self,*k,**kw):
+		try:
+			msg=kw['msg']
+		except KeyError:
+			msg=k[0]
+		self.start_msg(msg,**kw)
+		try:
+			result=kw['result']
+		except KeyError:
+			result=k[1]
+		color=kw.get('color')
 		if not isinstance(color,str):
 			color=result and'GREEN'or'YELLOW'
-		self.end_msg(result,color)
-	def start_msg(self,msg):
+		self.end_msg(result,color,**kw)
+	def start_msg(self,*k,**kw):
+		if kw.get('quiet'):
+			return
+		msg=kw.get('msg')or k[0]
 		try:
 			if self.in_msg:
 				self.in_msg+=1
 				return
-		except:
+		except AttributeError:
 			self.in_msg=0
 		self.in_msg+=1
 		try:
@@ -230,70 +299,115 @@ class Context(ctx):
 		for x in(self.line_just*'-',msg):
 			self.to_log(x)
 		Logs.pprint('NORMAL',"%s :"%msg.ljust(self.line_just),sep='')
-	def end_msg(self,result,color=None):
+	def end_msg(self,*k,**kw):
+		if kw.get('quiet'):
+			return
 		self.in_msg-=1
 		if self.in_msg:
 			return
+		result=kw.get('result')or k[0]
 		defcolor='GREEN'
-		if result==True:
+		if result is True:
 			msg='ok'
-		elif result==False:
+		elif not result:
 			msg='not found'
 			defcolor='YELLOW'
 		else:
 			msg=str(result)
 		self.to_log(msg)
-		Logs.pprint(color or defcolor,msg)
+		try:
+			color=kw['color']
+		except KeyError:
+			if len(k)>1 and k[1]in Logs.colors_lst:
+				color=k[1]
+			else:
+				color=defcolor
+		Logs.pprint(color,msg)
 	def load_special_tools(self,var,ban=[]):
-		global waf_dir
-		lst=self.root.find_node(waf_dir).find_node('waflib/extras').ant_glob(var)
-		for x in lst:
-			if not x.name in ban:
-				load_tool(x.name.replace('.py',''))
+		if os.path.isdir(waf_dir):
+			lst=self.root.find_node(waf_dir).find_node('waflib/extras').ant_glob(var)
+			for x in lst:
+				if not x.name in ban:
+					load_tool(x.name.replace('.py',''))
+		else:
+			from zipfile import PyZipFile
+			waflibs=PyZipFile(waf_dir)
+			lst=waflibs.namelist()
+			for x in lst:
+				if not re.match('waflib/extras/%s'%var.replace('*','.*'),var):
+					continue
+				f=os.path.basename(x)
+				doban=False
+				for b in ban:
+					r=b.replace('*','.*')
+					if re.match(r,f):
+						doban=True
+				if not doban:
+					f=f.replace('.py','')
+					load_tool(f)
 cache_modules={}
-def load_module(path):
+def load_module(path,encoding=None):
 	try:
 		return cache_modules[path]
 	except KeyError:
 		pass
 	module=imp.new_module(WSCRIPT_FILE)
 	try:
-		code=Utils.readf(path,m='rU')
-	except(IOError,OSError):
+		code=Utils.readf(path,m='r',encoding=encoding)
+	except EnvironmentError:
 		raise Errors.WafError('Could not read the file %r'%path)
 	module_dir=os.path.dirname(path)
 	sys.path.insert(0,module_dir)
-	exec(compile(code,path,'exec'),module.__dict__)
-	sys.path.remove(module_dir)
+	try:
+		exec(compile(code,path,'exec'),module.__dict__)
+	finally:
+		sys.path.remove(module_dir)
 	cache_modules[path]=module
 	return module
-def load_tool(tool,tooldir=None):
-	tool=tool.replace('++','xx')
-	tool=tool.replace('java','javaw')
-	tool=tool.replace('compiler_cc','compiler_c')
-	if tooldir:
-		assert isinstance(tooldir,list)
-		sys.path=tooldir+sys.path
-		try:
-			__import__(tool)
+def load_tool(tool,tooldir=None,ctx=None,with_sys_path=True):
+	if tool=='java':
+		tool='javaw'
+	else:
+		tool=tool.replace('++','xx')
+	if not with_sys_path:
+		back_path=sys.path
+		sys.path=[]
+	try:
+		if tooldir:
+			assert isinstance(tooldir,list)
+			sys.path=tooldir+sys.path
+			try:
+				__import__(tool)
+			except ImportError as e:
+				e.waf_sys_path=list(sys.path)
+				raise
+			finally:
+				for d in tooldir:
+					sys.path.remove(d)
 			ret=sys.modules[tool]
 			Context.tools[tool]=ret
 			return ret
-		finally:
-			for d in tooldir:
-				sys.path.remove(d)
-	else:
-		global waf_dir
-		try:
-			os.stat(os.path.join(waf_dir,'waflib','extras',tool+'.py'))
-			d='waflib.extras.%s'%tool
-		except:
+		else:
+			if not with_sys_path:
+				sys.path.insert(0,waf_dir)
 			try:
-				os.stat(os.path.join(waf_dir,'waflib','Tools',tool+'.py'))
-				d='waflib.Tools.%s'%tool
-			except:
-				d=tool
-		__import__(d)
-		ret=sys.modules[d]
-		Context.tools[tool]=ret
-		return ret
+				for x in('waflib.Tools.%s','waflib.extras.%s','waflib.%s','%s'):
+					try:
+						__import__(x%tool)
+						break
+					except ImportError:
+						x=None
+				else:
+					__import__(tool)
+			except ImportError as e:
+				e.waf_sys_path=list(sys.path)
+				raise
+			finally:
+				if not with_sys_path:
+					sys.path.remove(waf_dir)
+			ret=sys.modules[x%tool]
+			Context.tools[tool]=ret
+			return ret
+	finally:
+		if not with_sys_path:
+			sys.path+=back_path
--- pugl-0~svn32+dfsg0.orig/waflib/Errors.py
+++ pugl-0~svn32+dfsg0/waflib/Errors.py
@@ -1,10 +1,11 @@
 #! /usr/bin/env python
 # encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
 
 import traceback,sys
 class WafError(Exception):
 	def __init__(self,msg='',ex=None):
+		Exception.__init__(self)
 		self.msg=msg
 		assert not isinstance(msg,Exception)
 		self.stack=[]
@@ -27,7 +28,8 @@ class BuildError(WafError):
 		lst=['Build failed']
 		for tsk in self.tasks:
 			txt=tsk.format_error()
-			if txt:lst.append(txt)
+			if txt:
+				lst.append(txt)
 		return'\n'.join(lst)
 class ConfigurationError(WafError):
 	pass
--- pugl-0~svn32+dfsg0.orig/waflib/Logs.py
+++ pugl-0~svn32+dfsg0/waflib/Logs.py
@@ -1,54 +1,53 @@
 #! /usr/bin/env python
 # encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
 
 import os,re,traceback,sys
-_nocolor=os.environ.get('NOCOLOR','no')not in('no','0','false')
-try:
-	if not _nocolor:
-		import waflib.ansiterm
-except:
-	pass
+from waflib import Utils,ansiterm
+if not os.environ.get('NOSYNC',False):
+	if sys.stdout.isatty()and id(sys.stdout)==id(sys.__stdout__):
+		sys.stdout=ansiterm.AnsiTerm(sys.stdout)
+	if sys.stderr.isatty()and id(sys.stderr)==id(sys.__stderr__):
+		sys.stderr=ansiterm.AnsiTerm(sys.stderr)
 import logging
-LOG_FORMAT="%(asctime)s %(c1)s%(zone)s%(c2)s %(message)s"
-HOUR_FORMAT="%H:%M:%S"
-zones=''
+LOG_FORMAT=os.environ.get('WAF_LOG_FORMAT','%(asctime)s %(c1)s%(zone)s%(c2)s %(message)s')
+HOUR_FORMAT=os.environ.get('WAF_HOUR_FORMAT','%H:%M:%S')
+zones=[]
 verbose=0
-colors_lst={'USE':True,'BOLD':'\x1b[01;1m','RED':'\x1b[01;31m','GREEN':'\x1b[32m','YELLOW':'\x1b[33m','PINK':'\x1b[35m','BLUE':'\x1b[01;34m','CYAN':'\x1b[36m','NORMAL':'\x1b[0m','cursor_on':'\x1b[?25h','cursor_off':'\x1b[?25l',}
-got_tty=not os.environ.get('TERM','dumb')in['dumb','emacs']
-if got_tty:
-	try:
-		got_tty=sys.stderr.isatty()
-	except AttributeError:
-		got_tty=False
-if(not got_tty and os.environ.get('TERM','dumb')!='msys')or _nocolor:
-	colors_lst['USE']=False
-def get_term_cols():
-	return 80
+colors_lst={'USE':True,'BOLD':'\x1b[01;1m','RED':'\x1b[01;31m','GREEN':'\x1b[32m','YELLOW':'\x1b[33m','PINK':'\x1b[35m','BLUE':'\x1b[01;34m','CYAN':'\x1b[36m','GREY':'\x1b[37m','NORMAL':'\x1b[0m','cursor_on':'\x1b[?25h','cursor_off':'\x1b[?25l',}
+indicator='\r\x1b[K%s%s%s'
 try:
-	import struct,fcntl,termios
-except ImportError:
-	pass
-else:
-	if got_tty:
-		def get_term_cols_real():
-			dummy_lines,cols=struct.unpack("HHHH",fcntl.ioctl(sys.stderr.fileno(),termios.TIOCGWINSZ,struct.pack("HHHH",0,0,0,0)))[:2]
-			return cols
-		try:
-			get_term_cols_real()
-		except:
-			pass
+	unicode
+except NameError:
+	unicode=None
+def enable_colors(use):
+	if use==1:
+		if not(sys.stderr.isatty()or sys.stdout.isatty()):
+			use=0
+		if Utils.is_win32 and os.name!='java':
+			term=os.environ.get('TERM','')
 		else:
-			get_term_cols=get_term_cols_real
+			term=os.environ.get('TERM','dumb')
+		if term in('dumb','emacs'):
+			use=0
+	if use>=1:
+		os.environ['TERM']='vt100'
+	colors_lst['USE']=use
+try:
+	get_term_cols=ansiterm.get_term_cols
+except AttributeError:
+	def get_term_cols():
+		return 80
 get_term_cols.__doc__="""
-	Get the console width in characters.
+	Returns the console width in characters.
 
 	:return: the number of characters per line
 	:rtype: int
 	"""
 def get_color(cl):
-	if not colors_lst['USE']:return''
-	return colors_lst.get(cl,'')
+	if colors_lst['USE']:
+		return colors_lst.get(cl,'')
+	return''
 class color_dict(object):
 	def __getattr__(self,a):
 		return get_color(a)
@@ -57,19 +56,11 @@ class color_dict(object):
 colors=color_dict()
 re_log=re.compile(r'(\w+): (.*)',re.M)
 class log_filter(logging.Filter):
-	def __init__(self,name=None):
-		pass
+	def __init__(self,name=''):
+		logging.Filter.__init__(self,name)
 	def filter(self,rec):
-		rec.c1=colors.PINK
-		rec.c2=colors.NORMAL
 		rec.zone=rec.module
 		if rec.levelno>=logging.INFO:
-			if rec.levelno>=logging.ERROR:
-				rec.c1=colors.RED
-			elif rec.levelno>=logging.WARNING:
-				rec.c1=colors.YELLOW
-			else:
-				rec.c1=colors.GREEN
 			return True
 		m=re_log.match(rec.msg)
 		if m:
@@ -80,26 +71,82 @@ class log_filter(logging.Filter):
 		elif not verbose>2:
 			return False
 		return True
+class log_handler(logging.StreamHandler):
+	def emit(self,record):
+		try:
+			try:
+				self.stream=record.stream
+			except AttributeError:
+				if record.levelno>=logging.WARNING:
+					record.stream=self.stream=sys.stderr
+				else:
+					record.stream=self.stream=sys.stdout
+			self.emit_override(record)
+			self.flush()
+		except(KeyboardInterrupt,SystemExit):
+			raise
+		except:
+			self.handleError(record)
+	def emit_override(self,record,**kw):
+		self.terminator=getattr(record,'terminator','\n')
+		stream=self.stream
+		if unicode:
+			msg=self.formatter.format(record)
+			fs='%s'+self.terminator
+			try:
+				if(isinstance(msg,unicode)and getattr(stream,'encoding',None)):
+					fs=fs.decode(stream.encoding)
+					try:
+						stream.write(fs%msg)
+					except UnicodeEncodeError:
+						stream.write((fs%msg).encode(stream.encoding))
+				else:
+					stream.write(fs%msg)
+			except UnicodeError:
+				stream.write((fs%msg).encode('utf-8'))
+		else:
+			logging.StreamHandler.emit(self,record)
 class formatter(logging.Formatter):
 	def __init__(self):
 		logging.Formatter.__init__(self,LOG_FORMAT,HOUR_FORMAT)
 	def format(self,rec):
-		if rec.levelno>=logging.WARNING or rec.levelno==logging.INFO:
-			try:
-				msg=rec.msg.decode('utf-8')
-			except:
-				msg=rec.msg
-			return'%s%s%s'%(rec.c1,msg,rec.c2)
+		try:
+			msg=rec.msg.decode('utf-8')
+		except Exception:
+			msg=rec.msg
+		use=colors_lst['USE']
+		if(use==1 and rec.stream.isatty())or use==2:
+			c1=getattr(rec,'c1',None)
+			if c1 is None:
+				c1=''
+				if rec.levelno>=logging.ERROR:
+					c1=colors.RED
+				elif rec.levelno>=logging.WARNING:
+					c1=colors.YELLOW
+				elif rec.levelno>=logging.INFO:
+					c1=colors.GREEN
+			c2=getattr(rec,'c2',colors.NORMAL)
+			msg='%s%s%s'%(c1,msg,c2)
+		else:
+			msg=re.sub(r'\r(?!\n)|\x1B\[(K|.*?(m|h|l))','',msg)
+		if rec.levelno>=logging.INFO:
+			if rec.args:
+				try:
+					return msg%rec.args
+				except UnicodeDecodeError:
+					return msg.encode('utf-8')%rec.args
+			return msg
+		rec.msg=msg
+		rec.c1=colors.PINK
+		rec.c2=colors.NORMAL
 		return logging.Formatter.format(self,rec)
 log=None
 def debug(*k,**kw):
 	if verbose:
 		k=list(k)
 		k[0]=k[0].replace('\n',' ')
-		global log
 		log.debug(*k,**kw)
 def error(*k,**kw):
-	global log
 	log.error(*k,**kw)
 	if verbose>2:
 		st=traceback.extract_stack()
@@ -107,35 +154,38 @@ def error(*k,**kw):
 			st=st[:-1]
 			buf=[]
 			for filename,lineno,name,line in st:
-				buf.append('  File "%s", line %d, in %s'%(filename,lineno,name))
+				buf.append('  File %r, line %d, in %s'%(filename,lineno,name))
 				if line:
 					buf.append('	%s'%line.strip())
-			if buf:log.error("\n".join(buf))
+			if buf:
+				log.error('\n'.join(buf))
 def warn(*k,**kw):
-	global log
-	log.warn(*k,**kw)
+	log.warning(*k,**kw)
 def info(*k,**kw):
-	global log
 	log.info(*k,**kw)
 def init_log():
 	global log
 	log=logging.getLogger('waflib')
 	log.handlers=[]
 	log.filters=[]
-	hdlr=logging.StreamHandler()
+	hdlr=log_handler()
 	hdlr.setFormatter(formatter())
 	log.addHandler(hdlr)
 	log.addFilter(log_filter())
 	log.setLevel(logging.DEBUG)
 def make_logger(path,name):
 	logger=logging.getLogger(name)
-	hdlr=logging.FileHandler(path,'w')
+	if sys.hexversion>0x3000000:
+		encoding=sys.stdout.encoding
+	else:
+		encoding=None
+	hdlr=logging.FileHandler(path,'w',encoding=encoding)
 	formatter=logging.Formatter('%(message)s')
 	hdlr.setFormatter(formatter)
 	logger.addHandler(hdlr)
 	logger.setLevel(logging.DEBUG)
 	return logger
-def make_mem_logger(name,to_log,size=10000):
+def make_mem_logger(name,to_log,size=8192):
 	from logging.handlers import MemoryHandler
 	logger=logging.getLogger(name)
 	hdlr=MemoryHandler(size,target=to_log)
@@ -145,5 +195,12 @@ def make_mem_logger(name,to_log,size=100
 	logger.memhandler=hdlr
 	logger.setLevel(logging.DEBUG)
 	return logger
-def pprint(col,str,label='',sep='\n'):
-	sys.stderr.write("%s%s%s %s%s"%(colors(col),str,colors.NORMAL,label,sep))
+def free_logger(logger):
+	try:
+		for x in logger.handlers:
+			x.close()
+			logger.removeHandler(x)
+	except Exception:
+		pass
+def pprint(col,msg,label='',sep='\n'):
+	info('%s%s%s %s',colors(col),msg,colors.NORMAL,label,extra={'terminator':sep})
--- pugl-0~svn32+dfsg0.orig/waflib/Node.py
+++ pugl-0~svn32+dfsg0/waflib/Node.py
@@ -1,9 +1,7 @@
 #! /usr/bin/env python
 # encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
 
-import sys
-if sys.hexversion < 0x020400f0: from sets import Set as set
 import os,re,sys,shutil
 from waflib import Utils,Errors
 exclude_regs='''
@@ -12,6 +10,7 @@ exclude_regs='''
 **/.#*
 **/%*%
 **/._*
+**/*.swp
 **/CVS
 **/CVS/**
 **/.cvsignore
@@ -35,28 +34,54 @@ exclude_regs='''
 **/{arch}
 **/_darcs
 **/_darcs/**
+**/.intlcache
 **/.DS_Store'''
-def split_path(path):
-	return path.split('/')
-def split_path_cygwin(path):
-	if path.startswith('//'):
-		ret=path.split('/')[2:]
-		ret[0]='/'+ret[0]
-		return ret
-	return path.split('/')
-re_sp=re.compile('[/\\\\]')
-def split_path_win32(path):
-	if path.startswith('\\\\'):
-		ret=re.split(re_sp,path)[2:]
-		ret[0]='\\'+ret[0]
-		return ret
-	return re.split(re_sp,path)
-if sys.platform=='cygwin':
-	split_path=split_path_cygwin
-elif Utils.is_win32:
-	split_path=split_path_win32
+def ant_matcher(s,ignorecase):
+	reflags=re.I if ignorecase else 0
+	ret=[]
+	for x in Utils.to_list(s):
+		x=x.replace('\\','/').replace('//','/')
+		if x.endswith('/'):
+			x+='**'
+		accu=[]
+		for k in x.split('/'):
+			if k=='**':
+				accu.append(k)
+			else:
+				k=k.replace('.','[.]').replace('*','.*').replace('?','.').replace('+','\\+')
+				k='^%s$'%k
+				try:
+					exp=re.compile(k,flags=reflags)
+				except Exception as e:
+					raise Errors.WafError('Invalid pattern: %s'%k,e)
+				else:
+					accu.append(exp)
+		ret.append(accu)
+	return ret
+def ant_sub_filter(name,nn):
+	ret=[]
+	for lst in nn:
+		if not lst:
+			pass
+		elif lst[0]=='**':
+			ret.append(lst)
+			if len(lst)>1:
+				if lst[1].match(name):
+					ret.append(lst[2:])
+			else:
+				ret.append([])
+		elif lst[0].match(name):
+			ret.append(lst[1:])
+	return ret
+def ant_sub_matcher(name,pats):
+	nacc=ant_sub_filter(name,pats[0])
+	nrej=ant_sub_filter(name,pats[1])
+	if[]in nrej:
+		nacc=[]
+	return[nacc,nrej]
 class Node(object):
-	__slots__=('name','sig','children','parent','cache_abspath','cache_isdir')
+	dict_class=dict
+	__slots__=('name','parent','children','cache_abspath','cache_isdir')
 	def __init__(self,name,parent):
 		self.name=name
 		self.parent=parent
@@ -68,45 +93,71 @@ class Node(object):
 		self.name=data[0]
 		self.parent=data[1]
 		if data[2]is not None:
-			self.children=data[2]
-		if data[3]is not None:
-			self.sig=data[3]
+			self.children=self.dict_class(data[2])
 	def __getstate__(self):
-		return(self.name,self.parent,getattr(self,'children',None),getattr(self,'sig',None))
+		return(self.name,self.parent,getattr(self,'children',None))
 	def __str__(self):
-		return self.name
+		return self.abspath()
 	def __repr__(self):
 		return self.abspath()
-	def __hash__(self):
-		return id(self)
-	def __eq__(self,node):
-		return id(self)==id(node)
 	def __copy__(self):
 		raise Errors.WafError('nodes are not supposed to be copied')
-	def read(self,flags='r'):
-		return Utils.readf(self.abspath(),flags)
-	def write(self,data,flags='w'):
-		f=None
-		try:
-			f=open(self.abspath(),flags)
-			f.write(data)
-		finally:
-			if f:
-				f.close()
+	def read(self,flags='r',encoding='latin-1'):
+		return Utils.readf(self.abspath(),flags,encoding)
+	def write(self,data,flags='w',encoding='latin-1'):
+		Utils.writef(self.abspath(),data,flags,encoding)
+	def read_json(self,convert=True,encoding='utf-8'):
+		import json
+		object_pairs_hook=None
+		if convert and sys.hexversion<0x3000000:
+			try:
+				_type=unicode
+			except NameError:
+				_type=str
+			def convert(value):
+				if isinstance(value,list):
+					return[convert(element)for element in value]
+				elif isinstance(value,_type):
+					return str(value)
+				else:
+					return value
+			def object_pairs(pairs):
+				return dict((str(pair[0]),convert(pair[1]))for pair in pairs)
+			object_pairs_hook=object_pairs
+		return json.loads(self.read(encoding=encoding),object_pairs_hook=object_pairs_hook)
+	def write_json(self,data,pretty=True):
+		import json
+		indent=2
+		separators=(',',': ')
+		sort_keys=pretty
+		newline=os.linesep
+		if not pretty:
+			indent=None
+			separators=(',',':')
+			newline=''
+		output=json.dumps(data,indent=indent,separators=separators,sort_keys=sort_keys)+newline
+		self.write(output,encoding='utf-8')
+	def exists(self):
+		return os.path.exists(self.abspath())
+	def isdir(self):
+		return os.path.isdir(self.abspath())
 	def chmod(self,val):
 		os.chmod(self.abspath(),val)
-	def delete(self):
+	def delete(self,evict=True):
 		try:
-			if getattr(self,'children',None):
-				shutil.rmtree(self.abspath())
-			else:
-				os.unlink(self.abspath())
-		except:
-			pass
-		try:
-			delattr(self,'children')
-		except:
-			pass
+			try:
+				if os.path.isdir(self.abspath()):
+					shutil.rmtree(self.abspath())
+				else:
+					os.remove(self.abspath())
+			except OSError:
+				if os.path.exists(self.abspath()):
+					raise
+		finally:
+			if evict:
+				self.evict()
+	def evict(self):
+		del self.parent.children[self.name]
 	def suffix(self):
 		k=max(0,self.name.rfind('.'))
 		return self.name[k:]
@@ -122,86 +173,84 @@ class Node(object):
 		lst.sort()
 		return lst
 	def mkdir(self):
-		if getattr(self,'cache_isdir',None):
+		if self.isdir():
 			return
 		try:
 			self.parent.mkdir()
-		except:
+		except OSError:
 			pass
 		if self.name:
 			try:
 				os.makedirs(self.abspath())
 			except OSError:
 				pass
-			if not os.path.isdir(self.abspath()):
-				raise Errors.WafError('Could not create the directory %s'%self.abspath())
+			if not self.isdir():
+				raise Errors.WafError('Could not create the directory %r'%self)
 			try:
 				self.children
-			except:
-				self.children={}
-		self.cache_isdir=True
+			except AttributeError:
+				self.children=self.dict_class()
 	def find_node(self,lst):
 		if isinstance(lst,str):
-			lst=[x for x in split_path(lst)if x and x!='.']
+			lst=[x for x in Utils.split_path(lst)if x and x!='.']
+		if lst and lst[0].startswith('\\\\')and not self.parent:
+			node=self.ctx.root.make_node(lst[0])
+			node.cache_isdir=True
+			return node.find_node(lst[1:])
 		cur=self
 		for x in lst:
 			if x=='..':
 				cur=cur.parent or cur
 				continue
 			try:
-				if x in cur.children:
-					cur=cur.children[x]
+				ch=cur.children
+			except AttributeError:
+				cur.children=self.dict_class()
+			else:
+				try:
+					cur=ch[x]
 					continue
-			except:
-				cur.children={}
+				except KeyError:
+					pass
 			cur=self.__class__(x,cur)
-			try:
-				os.stat(cur.abspath())
-			except:
-				del cur.parent.children[x]
+			if not cur.exists():
+				cur.evict()
 				return None
-		ret=cur
-		try:
-			os.stat(ret.abspath())
-		except:
-			del ret.parent.children[ret.name]
+		if not cur.exists():
+			cur.evict()
 			return None
-		try:
-			while not getattr(cur.parent,'cache_isdir',None):
-				cur=cur.parent
-				cur.cache_isdir=True
-		except AttributeError:
-			pass
-		return ret
+		return cur
 	def make_node(self,lst):
 		if isinstance(lst,str):
-			lst=[x for x in split_path(lst)if x and x!='.']
+			lst=[x for x in Utils.split_path(lst)if x and x!='.']
 		cur=self
 		for x in lst:
 			if x=='..':
 				cur=cur.parent or cur
 				continue
-			if getattr(cur,'children',{}):
-				if x in cur.children:
-					cur=cur.children[x]
-					continue
+			try:
+				cur=cur.children[x]
+			except AttributeError:
+				cur.children=self.dict_class()
+			except KeyError:
+				pass
 			else:
-				cur.children={}
+				continue
 			cur=self.__class__(x,cur)
 		return cur
-	def search(self,lst):
+	def search_node(self,lst):
 		if isinstance(lst,str):
-			lst=[x for x in split_path(lst)if x and x!='.']
+			lst=[x for x in Utils.split_path(lst)if x and x!='.']
 		cur=self
-		try:
-			for x in lst:
-				if x=='..':
-					cur=cur.parent or cur
-				else:
+		for x in lst:
+			if x=='..':
+				cur=cur.parent or cur
+			else:
+				try:
 					cur=cur.children[x]
-			return cur
-		except:
-			pass
+				except(AttributeError,KeyError):
+					return None
+		return cur
 	def path_from(self,node):
 		c1=self
 		c2=node
@@ -217,190 +266,136 @@ class Node(object):
 			up+=1
 			c2=c2.parent
 			c2h-=1
-		while id(c1)!=id(c2):
+		while not c1 is c2:
 			lst.append(c1.name)
 			up+=1
 			c1=c1.parent
 			c2=c2.parent
-		for i in range(up):
-			lst.append('..')
-		lst.reverse()
-		return os.sep.join(lst)or'.'
+		if c1.parent:
+			lst.extend(['..']*up)
+			lst.reverse()
+			return os.sep.join(lst)or'.'
+		else:
+			return self.abspath()
 	def abspath(self):
 		try:
 			return self.cache_abspath
-		except:
+		except AttributeError:
 			pass
-		if os.sep=='/':
-			if not self.parent:
-				val=os.sep
-			elif not self.parent.name:
-				val=os.sep+self.name
-			else:
-				val=self.parent.abspath()+os.sep+self.name
+		if not self.parent:
+			val=os.sep
+		elif not self.parent.name:
+			val=os.sep+self.name
 		else:
+			val=self.parent.abspath()+os.sep+self.name
+		self.cache_abspath=val
+		return val
+	if Utils.is_win32:
+		def abspath(self):
+			try:
+				return self.cache_abspath
+			except AttributeError:
+				pass
 			if not self.parent:
 				val=''
 			elif not self.parent.name:
 				val=self.name+os.sep
 			else:
 				val=self.parent.abspath().rstrip(os.sep)+os.sep+self.name
-		self.cache_abspath=val
-		return val
+			self.cache_abspath=val
+			return val
 	def is_child_of(self,node):
 		p=self
 		diff=self.height()-node.height()
 		while diff>0:
 			diff-=1
 			p=p.parent
-		return id(p)==id(node)
-	def ant_iter(self,accept=None,maxdepth=25,pats=[],dir=False,src=True,remove=True):
+		return p is node
+	def ant_iter(self,accept=None,maxdepth=25,pats=[],dir=False,src=True,remove=True,quiet=False):
 		dircont=self.listdir()
-		dircont.sort()
 		try:
 			lst=set(self.children.keys())
+		except AttributeError:
+			self.children=self.dict_class()
+		else:
 			if remove:
 				for x in lst-set(dircont):
-					del self.children[x]
-		except:
-			self.children={}
+					self.children[x].evict()
 		for name in dircont:
 			npats=accept(name,pats)
 			if npats and npats[0]:
 				accepted=[]in npats[0]
 				node=self.make_node([name])
-				isdir=os.path.isdir(node.abspath())
+				isdir=node.isdir()
 				if accepted:
 					if isdir:
 						if dir:
 							yield node
-					else:
-						if src:
-							yield node
-				if getattr(node,'cache_isdir',None)or isdir:
+					elif src:
+						yield node
+				if isdir:
 					node.cache_isdir=True
 					if maxdepth:
-						for k in node.ant_iter(accept=accept,maxdepth=maxdepth-1,pats=npats,dir=dir,src=src,remove=remove):
+						for k in node.ant_iter(accept=accept,maxdepth=maxdepth-1,pats=npats,dir=dir,src=src,remove=remove,quiet=quiet):
 							yield k
-		raise StopIteration
 	def ant_glob(self,*k,**kw):
 		src=kw.get('src',True)
-		dir=kw.get('dir',False)
+		dir=kw.get('dir')
 		excl=kw.get('excl',exclude_regs)
 		incl=k and k[0]or kw.get('incl','**')
-		def to_pat(s):
-			lst=Utils.to_list(s)
-			ret=[]
-			for x in lst:
-				x=x.replace('\\','/').replace('//','/')
-				if x.endswith('/'):
-					x+='**'
-				lst2=x.split('/')
-				accu=[]
-				for k in lst2:
-					if k=='**':
-						accu.append(k)
-					else:
-						k=k.replace('.','[.]').replace('*','.*').replace('?','.').replace('+','\\+')
-						k='^%s$'%k
-						try:
-							accu.append(re.compile(k))
-						except Exception ,e:
-							raise Errors.WafError("Invalid pattern: %s"%k,e)
-				ret.append(accu)
-			return ret
-		def filtre(name,nn):
-			ret=[]
-			for lst in nn:
-				if not lst:
-					pass
-				elif lst[0]=='**':
-					ret.append(lst)
-					if len(lst)>1:
-						if lst[1].match(name):
-							ret.append(lst[2:])
-					else:
-						ret.append([])
-				elif lst[0].match(name):
-					ret.append(lst[1:])
-			return ret
-		def accept(name,pats):
-			nacc=filtre(name,pats[0])
-			nrej=filtre(name,pats[1])
-			if[]in nrej:
-				nacc=[]
-			return[nacc,nrej]
-		ret=[x for x in self.ant_iter(accept=accept,pats=[to_pat(incl),to_pat(excl)],maxdepth=25,dir=dir,src=src,remove=kw.get('remove',True))]
-		if kw.get('flat',False):
-			return' '.join([x.path_from(self)for x in ret])
-		return ret
-	def find_nodes(self,find_dirs=True,find_files=True,match_fun=lambda x:True):
-		x="""
-		Recursively finds nodes::
-
-			def configure(cnf):
-				cnf.find_nodes()
-
-		:param find_dirs: whether to return directories
-		:param find_files: whether to return files
-		:param match_fun: matching function, taking a node as parameter
-		:rtype generator
-		:return: a generator that iterates over all the requested files
-		"""
-		files=self.listdir()
-		for f in files:
-			node=self.make_node([f])
-			if os.path.isdir(node.abspath()):
-				if find_dirs and match_fun(node):
-					yield node
-				gen=node.find_nodes(find_dirs,find_files,match_fun)
-				for g in gen:
-					yield g
-			else:
-				if find_files and match_fun(node):
-					yield node
+		remove=kw.get('remove',True)
+		maxdepth=kw.get('maxdepth',25)
+		ignorecase=kw.get('ignorecase',False)
+		quiet=kw.get('quiet',False)
+		pats=(ant_matcher(incl,ignorecase),ant_matcher(excl,ignorecase))
+		if kw.get('generator'):
+			return Utils.lazy_generator(self.ant_iter,(ant_sub_matcher,maxdepth,pats,dir,src,remove,quiet))
+		it=self.ant_iter(ant_sub_matcher,maxdepth,pats,dir,src,remove,quiet)
+		if kw.get('flat'):
+			return' '.join(x.path_from(self)for x in it)
+		return list(it)
 	def is_src(self):
 		cur=self
-		x=id(self.ctx.srcnode)
-		y=id(self.ctx.bldnode)
+		x=self.ctx.srcnode
+		y=self.ctx.bldnode
 		while cur.parent:
-			if id(cur)==y:
+			if cur is y:
 				return False
-			if id(cur)==x:
+			if cur is x:
 				return True
 			cur=cur.parent
 		return False
 	def is_bld(self):
 		cur=self
-		y=id(self.ctx.bldnode)
+		y=self.ctx.bldnode
 		while cur.parent:
-			if id(cur)==y:
+			if cur is y:
 				return True
 			cur=cur.parent
 		return False
 	def get_src(self):
 		cur=self
-		x=id(self.ctx.srcnode)
-		y=id(self.ctx.bldnode)
+		x=self.ctx.srcnode
+		y=self.ctx.bldnode
 		lst=[]
 		while cur.parent:
-			if id(cur)==y:
+			if cur is y:
 				lst.reverse()
-				return self.ctx.srcnode.make_node(lst)
-			if id(cur)==x:
+				return x.make_node(lst)
+			if cur is x:
 				return self
 			lst.append(cur.name)
 			cur=cur.parent
 		return self
 	def get_bld(self):
 		cur=self
-		x=id(self.ctx.srcnode)
-		y=id(self.ctx.bldnode)
+		x=self.ctx.srcnode
+		y=self.ctx.bldnode
 		lst=[]
 		while cur.parent:
-			if id(cur)==y:
+			if cur is y:
 				return self
-			if id(cur)==x:
+			if cur is x:
 				lst.reverse()
 				return self.ctx.bldnode.make_node(lst)
 			lst.append(cur.name)
@@ -411,51 +406,25 @@ class Node(object):
 		return self.ctx.bldnode.make_node(['__root__']+lst)
 	def find_resource(self,lst):
 		if isinstance(lst,str):
-			lst=[x for x in split_path(lst)if x and x!='.']
-		node=self.get_bld().search(lst)
+			lst=[x for x in Utils.split_path(lst)if x and x!='.']
+		node=self.get_bld().search_node(lst)
 		if not node:
-			self=self.get_src()
-			node=self.find_node(lst)
-		try:
-			pat=node.abspath()
-			if os.path.isdir(pat):
-				return None
-		except:
-			pass
+			node=self.get_src().find_node(lst)
+		if node and node.isdir():
+			return None
 		return node
 	def find_or_declare(self,lst):
-		if isinstance(lst,str):
-			lst=[x for x in split_path(lst)if x and x!='.']
-		node=self.get_bld().search(lst)
-		if node:
-			if not os.path.isfile(node.abspath()):
-				node.sig=None
-				try:
-					node.parent.mkdir()
-				except:
-					pass
-			return node
-		self=self.get_src()
-		node=self.find_node(lst)
-		if node:
-			if not os.path.isfile(node.abspath()):
-				node.sig=None
-				try:
-					node.parent.mkdir()
-				except:
-					pass
-			return node
-		node=self.get_bld().make_node(lst)
+		if isinstance(lst,str)and os.path.isabs(lst):
+			node=self.ctx.root.make_node(lst)
+		else:
+			node=self.get_bld().make_node(lst)
 		node.parent.mkdir()
 		return node
 	def find_dir(self,lst):
 		if isinstance(lst,str):
-			lst=[x for x in split_path(lst)if x and x!='.']
+			lst=[x for x in Utils.split_path(lst)if x and x!='.']
 		node=self.find_node(lst)
-		try:
-			if not os.path.isdir(node.abspath()):
-				return None
-		except(OSError,AttributeError):
+		if node and not node.isdir():
 			return None
 		return node
 	def change_ext(self,ext,ext_in=None):
@@ -469,37 +438,39 @@ class Node(object):
 		else:
 			name=name[:-len(ext_in)]+ext
 		return self.parent.find_or_declare([name])
-	def nice_path(self,env=None):
-		return self.path_from(self.ctx.launch_node())
 	def bldpath(self):
 		return self.path_from(self.ctx.bldnode)
 	def srcpath(self):
 		return self.path_from(self.ctx.srcnode)
 	def relpath(self):
 		cur=self
-		x=id(self.ctx.bldnode)
+		x=self.ctx.bldnode
 		while cur.parent:
-			if id(cur)==x:
+			if cur is x:
 				return self.bldpath()
 			cur=cur.parent
 		return self.srcpath()
 	def bld_dir(self):
 		return self.parent.bldpath()
-	def bld_base(self):
-		s=os.path.splitext(self.name)[0]
-		return self.bld_dir()+os.sep+s
+	def h_file(self):
+		return Utils.h_file(self.abspath())
 	def get_bld_sig(self):
 		try:
-			ret=self.ctx.hash_cache[id(self)]
-		except KeyError:
-			pass
+			cache=self.ctx.cache_sig
 		except AttributeError:
-			self.ctx.hash_cache={}
-		else:
-			return ret
-		if not self.is_bld()or self.ctx.bldnode is self.ctx.srcnode:
-			self.sig=Utils.h_file(self.abspath())
-		self.ctx.hash_cache[id(self)]=ret=self.sig
+			cache=self.ctx.cache_sig={}
+		try:
+			ret=cache[self]
+		except KeyError:
+			p=self.abspath()
+			try:
+				ret=cache[self]=self.h_file()
+			except EnvironmentError:
+				if self.isdir():
+					st=os.stat(p)
+					ret=cache[self]=Utils.h_list([p,st.st_ino,st.st_mode])
+					return ret
+				raise
 		return ret
 pickle_lock=Utils.threading.Lock()
 class Nod3(Node):
--- pugl-0~svn32+dfsg0.orig/waflib/Options.py
+++ pugl-0~svn32+dfsg0/waflib/Options.py
@@ -1,63 +1,40 @@
 #! /usr/bin/env python
 # encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
 
 import os,tempfile,optparse,sys,re
-from waflib import Logs,Utils,Context
-cmds='distclean configure build install clean uninstall check dist distcheck'.split()
-options={}
+from waflib import Logs,Utils,Context,Errors
+options=optparse.Values()
 commands=[]
+envvars=[]
 lockfile=os.environ.get('WAFLOCK','.lock-waf_%s_build'%sys.platform)
-try:cache_global=os.path.abspath(os.environ['WAFCACHE'])
-except KeyError:cache_global=''
-platform=Utils.unversioned_sys_platform()
 class opt_parser(optparse.OptionParser):
-	def __init__(self,ctx):
-		optparse.OptionParser.__init__(self,conflict_handler="resolve",version='waf %s (%s)'%(Context.WAFVERSION,Context.WAFREVISION))
+	def __init__(self,ctx,allow_unknown=False):
+		optparse.OptionParser.__init__(self,conflict_handler='resolve',add_help_option=False,version='%s %s (%s)'%(Context.WAFNAME,Context.WAFVERSION,Context.WAFREVISION))
 		self.formatter.width=Logs.get_term_cols()
-		p=self.add_option
 		self.ctx=ctx
-		jobs=ctx.jobs()
-		p('-j','--jobs',dest='jobs',default=jobs,type='int',help='amount of parallel jobs (%r)'%jobs)
-		p('-k','--keep',dest='keep',default=0,action='count',help='keep running happily even if errors are found')
-		p('-v','--verbose',dest='verbose',default=0,action='count',help='verbosity level -v -vv or -vvv [default: 0]')
-		p('--nocache',dest='nocache',default=False,action='store_true',help='ignore the WAFCACHE (if set)')
-		p('--zones',dest='zones',default='',action='store',help='debugging zones (task_gen, deps, tasks, etc)')
-		gr=optparse.OptionGroup(self,'configure options')
-		self.add_option_group(gr)
-		gr.add_option('-o','--out',action='store',default='',help='build dir for the project',dest='out')
-		gr.add_option('-t','--top',action='store',default='',help='src dir for the project',dest='top')
-		default_prefix=os.environ.get('PREFIX')
-		if not default_prefix:
-			if platform=='win32':
-				d=tempfile.gettempdir()
-				default_prefix=d[0].upper()+d[1:]
-			else:
-				default_prefix='/usr/local/'
-		gr.add_option('--prefix',dest='prefix',default=default_prefix,help='installation prefix [default: %r]'%default_prefix)
-		gr.add_option('--download',dest='download',default=False,action='store_true',help='try to download the tools if missing')
-		gr=optparse.OptionGroup(self,'build and install options')
-		self.add_option_group(gr)
-		gr.add_option('-p','--progress',dest='progress_bar',default=0,action='count',help='-p: progress bar; -pp: ide output')
-		gr.add_option('--targets',dest='targets',default='',action='store',help='task generators, e.g. "target1,target2"')
-		gr=optparse.OptionGroup(self,'step options')
-		self.add_option_group(gr)
-		gr.add_option('--files',dest='files',default='',action='store',help='files to process, by regexp, e.g. "*/main.c,*/test/main.o"')
-		default_destdir=os.environ.get('DESTDIR','')
-		gr=optparse.OptionGroup(self,'install/uninstall options')
-		self.add_option_group(gr)
-		gr.add_option('--destdir',help='installation root [default: %r]'%default_destdir,default=default_destdir,dest='destdir')
-		gr.add_option('-f','--force',dest='force',default=False,action='store_true',help='force file installation')
+		self.allow_unknown=allow_unknown
+	def _process_args(self,largs,rargs,values):
+		while rargs:
+			try:
+				optparse.OptionParser._process_args(self,largs,rargs,values)
+			except(optparse.BadOptionError,optparse.AmbiguousOptionError)as e:
+				if self.allow_unknown:
+					largs.append(e.opt_str)
+				else:
+					self.error(str(e))
+	def print_usage(self,file=None):
+		return self.print_help(file)
 	def get_usage(self):
 		cmds_str={}
 		for cls in Context.classes:
-			if not cls.cmd or cls.cmd=='options':
+			if not cls.cmd or cls.cmd=='options'or cls.cmd.startswith('_'):
 				continue
 			s=cls.__doc__ or''
 			cmds_str[cls.cmd]=s
 		if Context.g_module:
 			for(k,v)in Context.g_module.__dict__.items():
-				if k in['options','init','shutdown']:
+				if k in('options','init','shutdown'):
 					continue
 				if type(v)is type(Context.create_context):
 					if v.__doc__ and not k.startswith('_'):
@@ -68,11 +45,11 @@ class opt_parser(optparse.OptionParser):
 		lst=['  %s: %s'%(k.ljust(just),v)for(k,v)in cmds_str.items()]
 		lst.sort()
 		ret='\n'.join(lst)
-		return'''waf [commands] [options]
+		return'''%s [commands] [options]
 
-Main commands (example: ./waf build -j4)
+Main commands (example: ./%s build -j4)
 %s
-'''%ret
+'''%(Context.WAFNAME,Context.WAFNAME,ret)
 class OptionsContext(Context.Context):
 	cmd='options'
 	fun='options'
@@ -80,6 +57,51 @@ class OptionsContext(Context.Context):
 		super(OptionsContext,self).__init__(**kw)
 		self.parser=opt_parser(self)
 		self.option_groups={}
+		jobs=self.jobs()
+		p=self.add_option
+		color=os.environ.get('NOCOLOR','')and'no'or'auto'
+		if os.environ.get('CLICOLOR','')=='0':
+			color='no'
+		elif os.environ.get('CLICOLOR_FORCE','')=='1':
+			color='yes'
+		p('-c','--color',dest='colors',default=color,action='store',help='whether to use colors (yes/no/auto) [default: auto]',choices=('yes','no','auto'))
+		p('-j','--jobs',dest='jobs',default=jobs,type='int',help='amount of parallel jobs (%r)'%jobs)
+		p('-k','--keep',dest='keep',default=0,action='count',help='continue despite errors (-kk to try harder)')
+		p('-v','--verbose',dest='verbose',default=0,action='count',help='verbosity level -v -vv or -vvv [default: 0]')
+		p('--zones',dest='zones',default='',action='store',help='debugging zones (task_gen, deps, tasks, etc)')
+		p('--profile',dest='profile',default=0,action='store_true',help=optparse.SUPPRESS_HELP)
+		p('--pdb',dest='pdb',default=0,action='store_true',help=optparse.SUPPRESS_HELP)
+		p('-h','--help',dest='whelp',default=0,action='store_true',help="show this help message and exit")
+		gr=self.add_option_group('Configuration options')
+		self.option_groups['configure options']=gr
+		gr.add_option('-o','--out',action='store',default='',help='build dir for the project',dest='out')
+		gr.add_option('-t','--top',action='store',default='',help='src dir for the project',dest='top')
+		gr.add_option('--no-lock-in-run',action='store_true',default='',help=optparse.SUPPRESS_HELP,dest='no_lock_in_run')
+		gr.add_option('--no-lock-in-out',action='store_true',default='',help=optparse.SUPPRESS_HELP,dest='no_lock_in_out')
+		gr.add_option('--no-lock-in-top',action='store_true',default='',help=optparse.SUPPRESS_HELP,dest='no_lock_in_top')
+		default_prefix=getattr(Context.g_module,'default_prefix',os.environ.get('PREFIX'))
+		if not default_prefix:
+			if Utils.unversioned_sys_platform()=='win32':
+				d=tempfile.gettempdir()
+				default_prefix=d[0].upper()+d[1:]
+			else:
+				default_prefix='/usr/local/'
+		gr.add_option('--prefix',dest='prefix',default=default_prefix,help='installation prefix [default: %r]'%default_prefix)
+		gr.add_option('--bindir',dest='bindir',help='bindir')
+		gr.add_option('--libdir',dest='libdir',help='libdir')
+		gr=self.add_option_group('Build and installation options')
+		self.option_groups['build and install options']=gr
+		gr.add_option('-p','--progress',dest='progress_bar',default=0,action='count',help='-p: progress bar; -pp: ide output')
+		gr.add_option('--targets',dest='targets',default='',action='store',help='task generators, e.g. "target1,target2"')
+		gr=self.add_option_group('Step options')
+		self.option_groups['step options']=gr
+		gr.add_option('--files',dest='files',default='',action='store',help='files to process, by regexp, e.g. "*/main.c,*/test/main.o"')
+		default_destdir=os.environ.get('DESTDIR','')
+		gr=self.add_option_group('Installation and uninstallation options')
+		self.option_groups['install/uninstall options']=gr
+		gr.add_option('--destdir',help='installation root [default: %r]'%default_destdir,default=default_destdir,dest='destdir')
+		gr.add_option('-f','--force',dest='force',default=False,action='store_true',help='force file installation')
+		gr.add_option('--distcheck-args',metavar='ARGS',help='arguments to pass to distcheck',default=None,action='store')
 	def jobs(self):
 		count=int(os.environ.get('JOBS',0))
 		if count<1:
@@ -94,7 +116,7 @@ class OptionsContext(Context.Context):
 				if not count and os.name not in('nt','java'):
 					try:
 						tmp=self.cmd_and_log(['sysctl','-n','hw.ncpu'],quiet=0)
-					except Exception:
+					except Errors.WafError:
 						pass
 					else:
 						if re.match('^[0-9]+$',tmp):
@@ -105,11 +127,11 @@ class OptionsContext(Context.Context):
 			count=1024
 		return count
 	def add_option(self,*k,**kw):
-		self.parser.add_option(*k,**kw)
+		return self.parser.add_option(*k,**kw)
 	def add_option_group(self,*k,**kw):
 		try:
 			gr=self.option_groups[k[0]]
-		except:
+		except KeyError:
 			gr=self.parser.add_option_group(*k,**kw)
 		self.option_groups[k[0]]=gr
 		return gr
@@ -121,14 +143,60 @@ class OptionsContext(Context.Context):
 				if group.title==opt_str:
 					return group
 			return None
-	def parse_args(self,_args=None):
-		global options,commands
+	def sanitize_path(self,path,cwd=None):
+		if not cwd:
+			cwd=Context.launch_dir
+		p=os.path.expanduser(path)
+		p=os.path.join(cwd,p)
+		p=os.path.normpath(p)
+		p=os.path.abspath(p)
+		return p
+	def parse_cmd_args(self,_args=None,cwd=None,allow_unknown=False):
+		self.parser.allow_unknown=allow_unknown
 		(options,leftover_args)=self.parser.parse_args(args=_args)
-		commands=leftover_args
-		if options.destdir:
-			options.destdir=os.path.abspath(os.path.expanduser(options.destdir))
+		envvars=[]
+		commands=[]
+		for arg in leftover_args:
+			if'='in arg:
+				envvars.append(arg)
+			elif arg!='options':
+				commands.append(arg)
+		if options.jobs<1:
+			options.jobs=1
+		for name in'top out destdir prefix bindir libdir'.split():
+			if getattr(options,name,None):
+				path=self.sanitize_path(getattr(options,name),cwd)
+				setattr(options,name,path)
+		return options,commands,envvars
+	def init_module_vars(self,arg_options,arg_commands,arg_envvars):
+		options.__dict__.clear()
+		del commands[:]
+		del envvars[:]
+		options.__dict__.update(arg_options.__dict__)
+		commands.extend(arg_commands)
+		envvars.extend(arg_envvars)
+		for var in envvars:
+			(name,value)=var.split('=',1)
+			os.environ[name.strip()]=value
+	def init_logs(self,options,commands,envvars):
+		Logs.verbose=options.verbose
 		if options.verbose>=1:
 			self.load('errcheck')
+		colors={'yes':2,'auto':1,'no':0}[options.colors]
+		Logs.enable_colors(colors)
+		if options.zones:
+			Logs.zones=options.zones.split(',')
+			if not Logs.verbose:
+				Logs.verbose=1
+		elif Logs.verbose>0:
+			Logs.zones=['runner']
+		if Logs.verbose>2:
+			Logs.zones=['*']
+	def parse_args(self,_args=None):
+		options,commands,envvars=self.parse_cmd_args()
+		self.init_logs(options,commands,envvars)
+		self.init_module_vars(options,commands,envvars)
 	def execute(self):
 		super(OptionsContext,self).execute()
 		self.parse_args()
+		Utils.alloc_process_pool(options.jobs)
--- pugl-0~svn32+dfsg0.orig/waflib/Runner.py
+++ pugl-0~svn32+dfsg0/waflib/Runner.py
@@ -1,148 +1,274 @@
 #! /usr/bin/env python
 # encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
 
-import random,atexit
+import heapq,traceback
 try:
-	from queue import Queue
-except:
+	from queue import Queue,PriorityQueue
+except ImportError:
 	from Queue import Queue
+	try:
+		from Queue import PriorityQueue
+	except ImportError:
+		class PriorityQueue(Queue):
+			def _init(self,maxsize):
+				self.maxsize=maxsize
+				self.queue=[]
+			def _put(self,item):
+				heapq.heappush(self.queue,item)
+			def _get(self):
+				return heapq.heappop(self.queue)
 from waflib import Utils,Task,Errors,Logs
-GAP=10
-class TaskConsumer(Utils.threading.Thread):
+GAP=5
+class PriorityTasks(object):
 	def __init__(self):
+		self.lst=[]
+	def __len__(self):
+		return len(self.lst)
+	def __iter__(self):
+		return iter(self.lst)
+	def __str__(self):
+		return'PriorityTasks: [%s]'%'\n  '.join(str(x)for x in self.lst)
+	def clear(self):
+		self.lst=[]
+	def append(self,task):
+		heapq.heappush(self.lst,task)
+	def appendleft(self,task):
+		heapq.heappush(self.lst,task)
+	def pop(self):
+		return heapq.heappop(self.lst)
+	def extend(self,lst):
+		if self.lst:
+			for x in lst:
+				self.append(x)
+		else:
+			if isinstance(lst,list):
+				self.lst=lst
+				heapq.heapify(lst)
+			else:
+				self.lst=lst.lst
+class Consumer(Utils.threading.Thread):
+	def __init__(self,spawner,task):
+		Utils.threading.Thread.__init__(self)
+		self.task=task
+		self.spawner=spawner
+		self.setDaemon(1)
+		self.start()
+	def run(self):
+		try:
+			if not self.spawner.master.stop:
+				self.spawner.master.process_task(self.task)
+		finally:
+			self.spawner.sem.release()
+			self.spawner.master.out.put(self.task)
+			self.task=None
+			self.spawner=None
+class Spawner(Utils.threading.Thread):
+	def __init__(self,master):
 		Utils.threading.Thread.__init__(self)
-		self.ready=Queue()
+		self.master=master
+		self.sem=Utils.threading.Semaphore(master.numjobs)
 		self.setDaemon(1)
 		self.start()
 	def run(self):
 		try:
 			self.loop()
-		except:
+		except Exception:
 			pass
 	def loop(self):
+		master=self.master
 		while 1:
-			tsk=self.ready.get()
-			if not isinstance(tsk,Task.TaskBase):
-				tsk(self)
-			else:
-				tsk.process()
-pool=Queue()
-def get_pool():
-	try:
-		return pool.get(False)
-	except:
-		return TaskConsumer()
-def put_pool(x):
-	pool.put(x)
-def _free_resources():
-	global pool
-	lst=[]
-	while pool.qsize():
-		lst.append(pool.get())
-	for x in lst:
-		x.ready.put(None)
-	for x in lst:
-		x.join()
-	pool=None
-atexit.register(_free_resources)
+			task=master.ready.get()
+			self.sem.acquire()
+			if not master.stop:
+				task.log_display(task.generator.bld)
+			Consumer(self,task)
 class Parallel(object):
 	def __init__(self,bld,j=2):
 		self.numjobs=j
 		self.bld=bld
-		self.outstanding=[]
-		self.frozen=[]
+		self.outstanding=PriorityTasks()
+		self.postponed=PriorityTasks()
+		self.incomplete=set()
+		self.ready=PriorityQueue(0)
 		self.out=Queue(0)
 		self.count=0
-		self.processed=1
+		self.processed=0
 		self.stop=False
 		self.error=[]
 		self.biter=None
 		self.dirty=False
+		self.revdeps=Utils.defaultdict(set)
+		self.spawner=None
+		if self.numjobs>1:
+			self.spawner=Spawner(self)
 	def get_next_task(self):
 		if not self.outstanding:
 			return None
-		return self.outstanding.pop(0)
+		return self.outstanding.pop()
 	def postpone(self,tsk):
-		if random.randint(0,1):
-			self.frozen.insert(0,tsk)
-		else:
-			self.frozen.append(tsk)
+		self.postponed.append(tsk)
 	def refill_task_list(self):
 		while self.count>self.numjobs*GAP:
 			self.get_out()
 		while not self.outstanding:
 			if self.count:
 				self.get_out()
-			elif self.frozen:
+				if self.outstanding:
+					break
+			elif self.postponed:
 				try:
 					cond=self.deadlock==self.processed
-				except:
+				except AttributeError:
 					pass
 				else:
 					if cond:
-						msg='check the build order for the tasks'
-						for tsk in self.frozen:
-							if not tsk.run_after:
-								msg='check the methods runnable_status'
-								break
 						lst=[]
-						for tsk in self.frozen:
-							lst.append('%s\t-> %r'%(repr(tsk),[id(x)for x in tsk.run_after]))
-						raise Errors.WafError('Deadlock detected: %s%s'%(msg,''.join(lst)))
+						for tsk in self.postponed:
+							deps=[id(x)for x in tsk.run_after if not x.hasrun]
+							lst.append('%s\t-> %r'%(repr(tsk),deps))
+							if not deps:
+								lst.append('\n  task %r dependencies are done, check its *runnable_status*?'%id(tsk))
+						raise Errors.WafError('Deadlock detected: check the task build order%s'%''.join(lst))
 				self.deadlock=self.processed
-			if self.frozen:
-				self.outstanding+=self.frozen
-				self.frozen=[]
+			if self.postponed:
+				self.outstanding.extend(self.postponed)
+				self.postponed.clear()
 			elif not self.count:
-				self.outstanding.extend(self.biter.next())
-				self.total=self.bld.total()
-				break
+				if self.incomplete:
+					for x in self.incomplete:
+						for k in x.run_after:
+							if not k.hasrun:
+								break
+						else:
+							self.incomplete.remove(x)
+							self.outstanding.append(x)
+							break
+					else:
+						if self.stop or self.error:
+							break
+						raise Errors.WafError('Broken revdeps detected on %r'%self.incomplete)
+				else:
+					tasks=next(self.biter)
+					ready,waiting=self.prio_and_split(tasks)
+					self.outstanding.extend(ready)
+					self.incomplete.update(waiting)
+					self.total=self.bld.total()
+					break
 	def add_more_tasks(self,tsk):
 		if getattr(tsk,'more_tasks',None):
-			self.outstanding+=tsk.more_tasks
+			more=set(tsk.more_tasks)
+			groups_done=set()
+			def iteri(a,b):
+				for x in a:
+					yield x
+				for x in b:
+					yield x
+			for x in iteri(self.outstanding,self.incomplete):
+				for k in x.run_after:
+					if isinstance(k,Task.TaskGroup):
+						if k not in groups_done:
+							groups_done.add(k)
+							for j in k.prev&more:
+								self.revdeps[j].add(k)
+					elif k in more:
+						self.revdeps[k].add(x)
+			ready,waiting=self.prio_and_split(tsk.more_tasks)
+			self.outstanding.extend(ready)
+			self.incomplete.update(waiting)
 			self.total+=len(tsk.more_tasks)
+	def mark_finished(self,tsk):
+		def try_unfreeze(x):
+			if x in self.incomplete:
+				for k in x.run_after:
+					if not k.hasrun:
+						break
+				else:
+					self.incomplete.remove(x)
+					self.outstanding.append(x)
+		if tsk in self.revdeps:
+			for x in self.revdeps[tsk]:
+				if isinstance(x,Task.TaskGroup):
+					x.prev.remove(tsk)
+					if not x.prev:
+						for k in x.next:
+							k.run_after.remove(x)
+							try_unfreeze(k)
+						x.next=[]
+				else:
+					try_unfreeze(x)
+			del self.revdeps[tsk]
+		if hasattr(tsk,'semaphore'):
+			sem=tsk.semaphore
+			try:
+				sem.release(tsk)
+			except KeyError:
+				pass
+			else:
+				while sem.waiting and not sem.is_locked():
+					x=sem.waiting.pop()
+					self._add_task(x)
 	def get_out(self):
 		tsk=self.out.get()
 		if not self.stop:
 			self.add_more_tasks(tsk)
+		self.mark_finished(tsk)
 		self.count-=1
 		self.dirty=True
 		return tsk
+	def add_task(self,tsk):
+		self.ready.put(tsk)
+	def _add_task(self,tsk):
+		if hasattr(tsk,'semaphore'):
+			sem=tsk.semaphore
+			try:
+				sem.acquire(tsk)
+			except IndexError:
+				sem.waiting.add(tsk)
+				return
+		self.count+=1
+		self.processed+=1
+		if self.numjobs==1:
+			tsk.log_display(tsk.generator.bld)
+			try:
+				self.process_task(tsk)
+			finally:
+				self.out.put(tsk)
+		else:
+			self.add_task(tsk)
+	def process_task(self,tsk):
+		tsk.process()
+		if tsk.hasrun!=Task.SUCCESS:
+			self.error_handler(tsk)
+	def skip(self,tsk):
+		tsk.hasrun=Task.SKIPPED
+		self.mark_finished(tsk)
+	def cancel(self,tsk):
+		tsk.hasrun=Task.CANCELED
+		self.mark_finished(tsk)
 	def error_handler(self,tsk):
 		if not self.bld.keep:
 			self.stop=True
 		self.error.append(tsk)
-	def add_task(self,tsk):
-		try:
-			self.pool
-		except AttributeError:
-			self.init_task_pool()
-		self.ready.put(tsk)
-	def init_task_pool(self):
-		pool=self.pool=[get_pool()for i in range(self.numjobs)]
-		self.ready=Queue(0)
-		def setq(consumer):
-			consumer.ready=self.ready
-		for x in pool:
-			x.ready.put(setq)
-		return pool
-	def free_task_pool(self):
-		def setq(consumer):
-			consumer.ready=Queue(0)
-			self.out.put(self)
+	def task_status(self,tsk):
 		try:
-			pool=self.pool
-		except:
-			pass
-		else:
-			for x in pool:
-				self.ready.put(setq)
-			for x in pool:
-				self.get_out()
-			for x in pool:
-				put_pool(x)
-			self.pool=[]
+			return tsk.runnable_status()
+		except Exception:
+			self.processed+=1
+			tsk.err_msg=traceback.format_exc()
+			if not self.stop and self.bld.keep:
+				self.skip(tsk)
+				if self.bld.keep==1:
+					if Logs.verbose>1 or not self.error:
+						self.error.append(tsk)
+					self.stop=True
+				else:
+					if Logs.verbose>1:
+						self.error.append(tsk)
+				return Task.EXCEPTION
+			tsk.hasrun=Task.EXCEPTION
+			self.error_handler(tsk)
+			return Task.EXCEPTION
 	def start(self):
 		self.total=self.bld.total()
 		while not self.stop:
@@ -158,40 +284,92 @@ class Parallel(object):
 				continue
 			if self.stop:
 				break
-			try:
-				st=tsk.runnable_status()
-			except Exception:
-				self.processed+=1
-				tsk.err_msg=Utils.ex_stack()
-				if not self.stop and self.bld.keep:
-					tsk.hasrun=Task.SKIPPED
-					if self.bld.keep==1:
-						if Logs.verbose>1 or not self.error:
-							self.error.append(tsk)
-						self.stop=True
-					else:
-						if Logs.verbose>1:
-							self.error.append(tsk)
-					continue
-				tsk.hasrun=Task.EXCEPTION
-				self.error_handler(tsk)
-				continue
-			if st==Task.ASK_LATER:
+			st=self.task_status(tsk)
+			if st==Task.RUN_ME:
+				self._add_task(tsk)
+			elif st==Task.ASK_LATER:
 				self.postpone(tsk)
 			elif st==Task.SKIP_ME:
 				self.processed+=1
-				tsk.hasrun=Task.SKIPPED
+				self.skip(tsk)
 				self.add_more_tasks(tsk)
-			else:
-				tsk.position=(self.processed,self.total)
-				self.count+=1
-				tsk.master=self
+			elif st==Task.CANCEL_ME:
+				if Logs.verbose>1:
+					self.error.append(tsk)
 				self.processed+=1
-				if self.numjobs==1:
-					tsk.process()
-				else:
-					self.add_task(tsk)
+				self.cancel(tsk)
 		while self.error and self.count:
 			self.get_out()
-		assert(self.count==0 or self.stop)
-		self.free_task_pool()
+		self.ready.put(None)
+		if not self.stop:
+			assert not self.count
+			assert not self.postponed
+			assert not self.incomplete
+	def prio_and_split(self,tasks):
+		for x in tasks:
+			x.visited=0
+		reverse=self.revdeps
+		groups_done=set()
+		for x in tasks:
+			for k in x.run_after:
+				if isinstance(k,Task.TaskGroup):
+					if k not in groups_done:
+						groups_done.add(k)
+						for j in k.prev:
+							reverse[j].add(k)
+				else:
+					reverse[k].add(x)
+		def visit(n):
+			if isinstance(n,Task.TaskGroup):
+				return sum(visit(k)for k in n.next)
+			if n.visited==0:
+				n.visited=1
+				if n in reverse:
+					rev=reverse[n]
+					n.prio_order=n.tree_weight+len(rev)+sum(visit(k)for k in rev)
+				else:
+					n.prio_order=n.tree_weight
+				n.visited=2
+			elif n.visited==1:
+				raise Errors.WafError('Dependency cycle found!')
+			return n.prio_order
+		for x in tasks:
+			if x.visited!=0:
+				continue
+			try:
+				visit(x)
+			except Errors.WafError:
+				self.debug_cycles(tasks,reverse)
+		ready=[]
+		waiting=[]
+		for x in tasks:
+			for k in x.run_after:
+				if not k.hasrun:
+					waiting.append(x)
+					break
+			else:
+				ready.append(x)
+		return(ready,waiting)
+	def debug_cycles(self,tasks,reverse):
+		tmp={}
+		for x in tasks:
+			tmp[x]=0
+		def visit(n,acc):
+			if isinstance(n,Task.TaskGroup):
+				for k in n.next:
+					visit(k,acc)
+				return
+			if tmp[n]==0:
+				tmp[n]=1
+				for k in reverse.get(n,[]):
+					visit(k,[n]+acc)
+				tmp[n]=2
+			elif tmp[n]==1:
+				lst=[]
+				for tsk in acc:
+					lst.append(repr(tsk))
+					if tsk is n:
+						break
+				raise Errors.WafError('Task dependency cycle in "run_after" constraints: %s'%''.join(lst))
+		for x in tasks:
+			visit(x,[])
--- pugl-0~svn32+dfsg0.orig/waflib/Scripting.py
+++ pugl-0~svn32+dfsg0/waflib/Scripting.py
@@ -1,8 +1,9 @@
 #! /usr/bin/env python
 # encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
 
-import os,shutil,traceback,errno,sys,stat
+from __future__ import with_statement
+import os,shlex,shutil,traceback,errno,sys,stat
 from waflib import Utils,Configure,Logs,Options,ConfigSet,Context,Errors,Build,Node
 build_dir_override=None
 no_climb_commands=['configure']
@@ -10,34 +11,50 @@ default_cmd="build"
 def waf_entry_point(current_directory,version,wafdir):
 	Logs.init_log()
 	if Context.WAFVERSION!=version:
-		Logs.error('Waf script %r and library %r do not match (directory %r)'%(version,Context.WAFVERSION,wafdir))
+		Logs.error('Waf script %r and library %r do not match (directory %r)',version,Context.WAFVERSION,wafdir)
 		sys.exit(1)
-	if'--version'in sys.argv:
-		Context.run_dir=current_directory
-		ctx=Context.create_context('options')
-		ctx.curdir=current_directory
-		ctx.parse_args()
-		sys.exit(0)
 	Context.waf_dir=wafdir
-	Context.launch_dir=current_directory
-	no_climb=os.environ.get('NOCLIMB',None)
+	Context.run_dir=Context.launch_dir=current_directory
+	start_dir=current_directory
+	no_climb=os.environ.get('NOCLIMB')
+	if len(sys.argv)>1:
+		potential_wscript=os.path.join(current_directory,sys.argv[1])
+		if os.path.basename(potential_wscript)==Context.WSCRIPT_FILE and os.path.isfile(potential_wscript):
+			path=os.path.normpath(os.path.dirname(potential_wscript))
+			start_dir=os.path.abspath(path)
+			no_climb=True
+			sys.argv.pop(1)
+	ctx=Context.create_context('options')
+	(options,commands,env)=ctx.parse_cmd_args(allow_unknown=True)
+	if options.top:
+		start_dir=Context.run_dir=Context.top_dir=options.top
+		no_climb=True
+	if options.out:
+		Context.out_dir=options.out
 	if not no_climb:
 		for k in no_climb_commands:
-			if k in sys.argv:
-				no_climb=True
-				break
-	cur=current_directory
+			for y in commands:
+				if y.startswith(k):
+					no_climb=True
+					break
+	cur=start_dir
 	while cur:
-		lst=os.listdir(cur)
+		try:
+			lst=os.listdir(cur)
+		except OSError:
+			lst=[]
+			Logs.error('Directory %r is unreadable!',cur)
 		if Options.lockfile in lst:
 			env=ConfigSet.ConfigSet()
 			try:
 				env.load(os.path.join(cur,Options.lockfile))
 				ino=os.stat(cur)[stat.ST_INO]
-			except Exception:
+			except EnvironmentError:
 				pass
 			else:
-				for x in[env.run_dir,env.top_dir,env.out_dir]:
+				for x in(env.run_dir,env.top_dir,env.out_dir):
+					if not x:
+						continue
 					if Utils.is_win32:
 						if cur==x:
 							load=True
@@ -45,14 +62,14 @@ def waf_entry_point(current_directory,ve
 					else:
 						try:
 							ino2=os.stat(x)[stat.ST_INO]
-						except:
+						except OSError:
 							pass
 						else:
 							if ino==ino2:
 								load=True
 								break
 				else:
-					Logs.warn('invalid lock file in %s'%cur)
+					Logs.warn('invalid lock file in %s',cur)
 					load=False
 				if load:
 					Context.run_dir=env.run_dir
@@ -68,44 +85,59 @@ def waf_entry_point(current_directory,ve
 		cur=next
 		if no_climb:
 			break
-	if not Context.run_dir:
-		if'-h'in sys.argv or'--help'in sys.argv:
-			Logs.warn('No wscript file found: the help message may be incomplete')
-			Context.run_dir=current_directory
-			ctx=Context.create_context('options')
-			ctx.curdir=current_directory
-			ctx.parse_args()
+	wscript=os.path.normpath(os.path.join(Context.run_dir,Context.WSCRIPT_FILE))
+	if not os.path.exists(wscript):
+		if options.whelp:
+			Logs.warn('These are the generic options (no wscript/project found)')
+			ctx.parser.print_help()
 			sys.exit(0)
-		Logs.error('Waf: Run from a directory containing a file named %r'%Context.WSCRIPT_FILE)
+		Logs.error('Waf: Run from a folder containing a %r file (or try -h for the generic options)',Context.WSCRIPT_FILE)
 		sys.exit(1)
 	try:
 		os.chdir(Context.run_dir)
 	except OSError:
-		Logs.error('Waf: The folder %r is unreadable'%Context.run_dir)
+		Logs.error('Waf: The folder %r is unreadable',Context.run_dir)
 		sys.exit(1)
 	try:
-		set_main_module(Context.run_dir+os.sep+Context.WSCRIPT_FILE)
-	except Errors.WafError ,e:
+		set_main_module(wscript)
+	except Errors.WafError as e:
 		Logs.pprint('RED',e.verbose_msg)
 		Logs.error(str(e))
 		sys.exit(1)
-	except Exception ,e:
-		Logs.error('Waf: The wscript in %r is unreadable'%Context.run_dir,e)
+	except Exception as e:
+		Logs.error('Waf: The wscript in %r is unreadable',Context.run_dir)
 		traceback.print_exc(file=sys.stdout)
 		sys.exit(2)
-	try:
-		run_commands()
-	except Errors.WafError ,e:
-		if Logs.verbose>1:
-			Logs.pprint('RED',e.verbose_msg)
-		Logs.error(e.msg)
-		sys.exit(1)
-	except Exception ,e:
-		traceback.print_exc(file=sys.stdout)
-		sys.exit(2)
-	except KeyboardInterrupt:
-		Logs.pprint('RED','Interrupted')
-		sys.exit(68)
+	if options.profile:
+		import cProfile,pstats
+		cProfile.runctx('from waflib import Scripting; Scripting.run_commands()',{},{},'profi.txt')
+		p=pstats.Stats('profi.txt')
+		p.sort_stats('time').print_stats(75)
+	else:
+		try:
+			try:
+				run_commands()
+			except:
+				if options.pdb:
+					import pdb
+					type,value,tb=sys.exc_info()
+					traceback.print_exc()
+					pdb.post_mortem(tb)
+				else:
+					raise
+		except Errors.WafError as e:
+			if Logs.verbose>1:
+				Logs.pprint('RED',e.verbose_msg)
+			Logs.error(e.msg)
+			sys.exit(1)
+		except SystemExit:
+			raise
+		except Exception as e:
+			traceback.print_exc(file=sys.stdout)
+			sys.exit(2)
+		except KeyboardInterrupt:
+			Logs.pprint('RED','Interrupted')
+			sys.exit(68)
 def set_main_module(file_path):
 	Context.g_module=Context.load_module(file_path)
 	Context.g_module.root_path=file_path
@@ -113,7 +145,7 @@ def set_main_module(file_path):
 		name=obj.__name__
 		if not name in Context.g_module.__dict__:
 			setattr(Context.g_module,name,obj)
-	for k in[update,dist,distclean,distcheck,update]:
+	for k in(dist,distclean,distcheck):
 		set_def(k)
 	if not'init'in Context.g_module.__dict__:
 		Context.g_module.init=Utils.nada
@@ -122,89 +154,97 @@ def set_main_module(file_path):
 	if not'options'in Context.g_module.__dict__:
 		Context.g_module.options=Utils.nada
 def parse_options():
-	Context.create_context('options').execute()
+	ctx=Context.create_context('options')
+	ctx.execute()
 	if not Options.commands:
-		Options.commands=[default_cmd]
-	Options.commands=[x for x in Options.commands if x!='options']
-	Logs.verbose=Options.options.verbose
-	Logs.init_log()
-	if Options.options.zones:
-		Logs.zones=Options.options.zones.split(',')
-		if not Logs.verbose:
-			Logs.verbose=1
-	elif Logs.verbose>0:
-		Logs.zones=['runner']
-	if Logs.verbose>2:
-		Logs.zones=['*']
+		if isinstance(default_cmd,list):
+			Options.commands.extend(default_cmd)
+		else:
+			Options.commands.append(default_cmd)
+	if Options.options.whelp:
+		ctx.parser.print_help()
+		sys.exit(0)
 def run_command(cmd_name):
 	ctx=Context.create_context(cmd_name)
+	ctx.log_timer=Utils.Timer()
 	ctx.options=Options.options
 	ctx.cmd=cmd_name
-	ctx.execute()
+	try:
+		ctx.execute()
+	finally:
+		ctx.finalize()
 	return ctx
 def run_commands():
 	parse_options()
 	run_command('init')
 	while Options.commands:
 		cmd_name=Options.commands.pop(0)
-		timer=Utils.Timer()
-		run_command(cmd_name)
-		if not Options.options.progress_bar:
-			elapsed=' (%s)'%str(timer)
-			Logs.info('%r finished successfully%s'%(cmd_name,elapsed))
+		ctx=run_command(cmd_name)
+		Logs.info('%r finished successfully (%s)',cmd_name,ctx.log_timer)
 	run_command('shutdown')
-def _can_distclean(name):
-	for k in'.o .moc .exe'.split():
-		if name.endswith(k):
-			return True
-	return False
 def distclean_dir(dirname):
 	for(root,dirs,files)in os.walk(dirname):
 		for f in files:
-			if _can_distclean(f):
-				fname=root+os.sep+f
+			if f.endswith(('.o','.moc','.exe')):
+				fname=os.path.join(root,f)
 				try:
-					os.unlink(fname)
-				except:
-					Logs.warn('could not remove %r'%fname)
-	for x in[Context.DBFILE,'config.log']:
+					os.remove(fname)
+				except OSError:
+					Logs.warn('Could not remove %r',fname)
+	for x in(Context.DBFILE,'config.log'):
 		try:
-			os.unlink(x)
-		except:
+			os.remove(x)
+		except OSError:
 			pass
 	try:
-		shutil.rmtree('c4che')
-	except:
+		shutil.rmtree(Build.CACHE_DIR)
+	except OSError:
 		pass
 def distclean(ctx):
-	'''removes the build directory'''
-	lst=os.listdir('.')
-	for f in lst:
-		if f==Options.lockfile:
-			try:
-				proj=ConfigSet.ConfigSet(f)
-			except:
-				Logs.warn('could not read %r'%f)
-				continue
-			if proj['out_dir']!=proj['top_dir']:
-				try:
-					shutil.rmtree(proj['out_dir'])
-				except IOError:
-					pass
-				except OSError ,e:
-					if e.errno!=errno.ENOENT:
-						Logs.warn('project %r cannot be removed'%proj[Context.OUT])
-			else:
-				distclean_dir(proj['out_dir'])
-			for k in(proj['out_dir'],proj['top_dir'],proj['run_dir']):
-				try:
-					os.remove(os.path.join(k,Options.lockfile))
-				except OSError ,e:
-					if e.errno!=errno.ENOENT:
-						Logs.warn('file %r cannot be removed'%f)
-		if f.startswith('.waf')and not Options.commands:
-			shutil.rmtree(f,ignore_errors=True)
+	'''removes build folders and data'''
+	def remove_and_log(k,fun):
+		try:
+			fun(k)
+		except EnvironmentError as e:
+			if e.errno!=errno.ENOENT:
+				Logs.warn('Could not remove %r',k)
+	if not Options.commands:
+		for k in os.listdir('.'):
+			for x in'.waf-2 waf-2 .waf3-2 waf3-2'.split():
+				if k.startswith(x):
+					remove_and_log(k,shutil.rmtree)
+	cur='.'
+	if ctx.options.no_lock_in_top:
+		cur=ctx.options.out
+	try:
+		lst=os.listdir(cur)
+	except OSError:
+		Logs.warn('Could not read %r',cur)
+		return
+	if Options.lockfile in lst:
+		f=os.path.join(cur,Options.lockfile)
+		try:
+			env=ConfigSet.ConfigSet(f)
+		except EnvironmentError:
+			Logs.warn('Could not read %r',f)
+			return
+		if not env.out_dir or not env.top_dir:
+			Logs.warn('Invalid lock file %r',f)
+			return
+		if env.out_dir==env.top_dir:
+			distclean_dir(env.out_dir)
+		else:
+			remove_and_log(env.out_dir,shutil.rmtree)
+		env_dirs=[env.out_dir]
+		if not ctx.options.no_lock_in_top:
+			env_dirs.append(env.top_dir)
+		if not ctx.options.no_lock_in_run:
+			env_dirs.append(env.run_dir)
+		for k in env_dirs:
+			p=os.path.join(k,Options.lockfile)
+			remove_and_log(p,os.remove)
 class Dist(Context.Context):
+	'''creates an archive containing the project source code'''
 	cmd='dist'
 	fun='dist'
 	algo='tar.bz2'
@@ -217,37 +257,35 @@ class Dist(Context.Context):
 		arch_name=self.get_arch_name()
 		try:
 			self.base_path
-		except:
+		except AttributeError:
 			self.base_path=self.path
 		node=self.base_path.make_node(arch_name)
 		try:
 			node.delete()
-		except:
+		except OSError:
 			pass
 		files=self.get_files()
 		if self.algo.startswith('tar.'):
-			tar=tarfile.open(arch_name,'w:'+self.algo.replace('tar.',''))
+			tar=tarfile.open(node.abspath(),'w:'+self.algo.replace('tar.',''))
 			for x in files:
 				self.add_tar_file(x,tar)
 			tar.close()
 		elif self.algo=='zip':
 			import zipfile
-			zip=zipfile.ZipFile(arch_name,'w',compression=zipfile.ZIP_DEFLATED)
+			zip=zipfile.ZipFile(node.abspath(),'w',compression=zipfile.ZIP_DEFLATED)
 			for x in files:
 				archive_name=self.get_base_name()+'/'+x.path_from(self.base_path)
 				zip.write(x.abspath(),archive_name,zipfile.ZIP_DEFLATED)
 			zip.close()
 		else:
-			self.fatal('Valid algo types are tar.bz2, tar.gz or zip')
+			self.fatal('Valid algo types are tar.bz2, tar.gz, tar.xz or zip')
 		try:
-			from hashlib import sha1 as sha
+			from hashlib import sha256
 		except ImportError:
-			from sha import sha
-		try:
-			digest=" (sha=%r)"%sha(node.read()).hexdigest()
-		except:
 			digest=''
-		Logs.info('New archive created: %s%s'%(self.arch_name,digest))
+		else:
+			digest=' (sha256=%r)'%sha256(node.read(flags='rb')).hexdigest()
+		Logs.info('New archive created: %s%s',self.arch_name,digest)
 	def get_tar_path(self,node):
 		return node.abspath()
 	def add_tar_file(self,x,tar):
@@ -257,28 +295,26 @@ class Dist(Context.Context):
 		tinfo.gid=0
 		tinfo.uname='root'
 		tinfo.gname='root'
-		fu=None
-		try:
-			fu=open(p,'rb')
-			tar.addfile(tinfo,fileobj=fu)
-		finally:
-			if fu:
-				fu.close()
+		if os.path.isfile(p):
+			with open(p,'rb')as f:
+				tar.addfile(tinfo,fileobj=f)
+		else:
+			tar.addfile(tinfo)
 	def get_tar_prefix(self):
 		try:
 			return self.tar_prefix
-		except:
+		except AttributeError:
 			return self.get_base_name()
 	def get_arch_name(self):
 		try:
 			self.arch_name
-		except:
+		except AttributeError:
 			self.arch_name=self.get_base_name()+'.'+self.ext_algo.get(self.algo,self.algo)
 		return self.arch_name
 	def get_base_name(self):
 		try:
 			self.base_name
-		except:
+		except AttributeError:
 			appname=getattr(Context.g_module,Context.APPNAME,'noname')
 			version=getattr(Context.g_module,Context.VERSION,'1.0')
 			self.base_name=appname+'-'+version
@@ -286,16 +322,17 @@ class Dist(Context.Context):
 	def get_excl(self):
 		try:
 			return self.excl
-		except:
-			self.excl=Node.exclude_regs+' **/waf-1.6.* **/.waf-1.6* **/waf3-1.6.* **/.waf3-1.6* **/*~ **/*.rej **/*.orig **/*.pyc **/*.pyo **/*.bak **/*.swp **/.lock-w*'
-			nd=self.root.find_node(Context.out_dir)
-			if nd:
-				self.excl+=' '+nd.path_from(self.base_path)
+		except AttributeError:
+			self.excl=Node.exclude_regs+' **/waf-2.* **/.waf-2.* **/waf3-2.* **/.waf3-2.* **/*~ **/*.rej **/*.orig **/*.pyc **/*.pyo **/*.bak **/*.swp **/.lock-w*'
+			if Context.out_dir:
+				nd=self.root.find_node(Context.out_dir)
+				if nd:
+					self.excl+=' '+nd.path_from(self.base_path)
 			return self.excl
 	def get_files(self):
 		try:
 			files=self.files
-		except:
+		except AttributeError:
 			files=self.base_path.ant_glob('**/*',excl=self.get_excl())
 		return files
 def dist(ctx):
@@ -308,37 +345,30 @@ class DistCheck(Dist):
 		self.recurse([os.path.dirname(Context.g_module.root_path)])
 		self.archive()
 		self.check()
+	def make_distcheck_cmd(self,tmpdir):
+		cfg=[]
+		if Options.options.distcheck_args:
+			cfg=shlex.split(Options.options.distcheck_args)
+		else:
+			cfg=[x for x in sys.argv if x.startswith('-')]
+		cmd=[sys.executable,sys.argv[0],'configure','build','install','uninstall','--destdir='+tmpdir]+cfg
+		return cmd
 	def check(self):
 		import tempfile,tarfile
-		t=None
-		try:
-			t=tarfile.open(self.get_arch_name())
+		with tarfile.open(self.get_arch_name())as t:
 			for x in t:
 				t.extract(x)
-		finally:
-			if t:
-				t.close()
 		instdir=tempfile.mkdtemp('.inst',self.get_base_name())
-		ret=Utils.subprocess.Popen([sys.argv[0],'configure','install','uninstall','--destdir='+instdir],cwd=self.get_base_name()).wait()
+		cmd=self.make_distcheck_cmd(instdir)
+		ret=Utils.subprocess.Popen(cmd,cwd=self.get_base_name()).wait()
 		if ret:
-			raise Errors.WafError('distcheck failed with code %i'%ret)
+			raise Errors.WafError('distcheck failed with code %r'%ret)
 		if os.path.exists(instdir):
 			raise Errors.WafError('distcheck succeeded, but files were left in %s'%instdir)
 		shutil.rmtree(self.get_base_name())
 def distcheck(ctx):
 	'''checks if the project compiles (tarball from 'dist')'''
 	pass
-def update(ctx):
-	'''updates the plugins from the *waflib/extras* directory'''
-	lst=Options.options.files.split(',')
-	if not lst:
-		lst=[x for x in Utils.listdir(Context.waf_dir+'/waflib/extras')if x.endswith('.py')]
-	for x in lst:
-		tool=x.replace('.py','')
-		try:
-			Configure.download_tool(tool,force=True,ctx=ctx)
-		except Errors.WafError:
-			Logs.error('Could not find the tool %s in the remote repository'%x)
 def autoconfigure(execute_method):
 	def execute(self):
 		if not Configure.autoconfig:
@@ -347,7 +377,7 @@ def autoconfigure(execute_method):
 		do_config=False
 		try:
 			env.load(os.path.join(Context.top_dir,Options.lockfile))
-		except Exception:
+		except EnvironmentError:
 			Logs.warn('Configuring the project')
 			do_config=True
 		else:
@@ -355,13 +385,31 @@ def autoconfigure(execute_method):
 				do_config=True
 			else:
 				h=0
-				for f in env['files']:
-					h=hash((h,Utils.readf(f,'rb')))
-				do_config=h!=env.hash
+				for f in env.files:
+					try:
+						h=Utils.h_list((h,Utils.readf(f,'rb')))
+					except EnvironmentError:
+						do_config=True
+						break
+				else:
+					do_config=h!=env.hash
 		if do_config:
-			Options.commands.insert(0,self.cmd)
-			Options.commands.insert(0,'configure')
-			return
-		return execute_method(self)
+			cmd=env.config_cmd or'configure'
+			if Configure.autoconfig=='clobber':
+				tmp=Options.options.__dict__
+				launch_dir_tmp=Context.launch_dir
+				if env.options:
+					Options.options.__dict__=env.options
+				Context.launch_dir=env.launch_dir
+				try:
+					run_command(cmd)
+				finally:
+					Options.options.__dict__=tmp
+					Context.launch_dir=launch_dir_tmp
+			else:
+				run_command(cmd)
+			run_command(self.cmd)
+		else:
+			return execute_method(self)
 	return execute
 Build.BuildContext.execute=autoconfigure(Build.BuildContext.execute)
--- pugl-0~svn32+dfsg0.orig/waflib/Task.py
+++ pugl-0~svn32+dfsg0/waflib/Task.py
@@ -1,167 +1,226 @@
 #! /usr/bin/env python
 # encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
 
-import sys
-if sys.hexversion < 0x020400f0: from sets import Set as set
-import os,shutil,re,tempfile
+import os,re,sys,tempfile,traceback
 from waflib import Utils,Logs,Errors
 NOT_RUN=0
 MISSING=1
 CRASHED=2
 EXCEPTION=3
+CANCELED=4
 SKIPPED=8
 SUCCESS=9
 ASK_LATER=-1
 SKIP_ME=-2
 RUN_ME=-3
+CANCEL_ME=-4
 COMPILE_TEMPLATE_SHELL='''
 def f(tsk):
 	env = tsk.env
 	gen = tsk.generator
 	bld = gen.bld
-	wd = getattr(tsk, 'cwd', None)
+	cwdx = tsk.get_cwd()
 	p = env.get_flat
+	def to_list(xx):
+		if isinstance(xx, str): return [xx]
+		return xx
 	tsk.last_cmd = cmd = \'\'\' %s \'\'\' % s
-	return tsk.exec_command(cmd, cwd=wd, env=env.env or None)
+	return tsk.exec_command(cmd, cwd=cwdx, env=env.env or None)
 '''
 COMPILE_TEMPLATE_NOSHELL='''
 def f(tsk):
 	env = tsk.env
 	gen = tsk.generator
 	bld = gen.bld
-	wd = getattr(tsk, 'cwd', None)
+	cwdx = tsk.get_cwd()
 	def to_list(xx):
 		if isinstance(xx, str): return [xx]
 		return xx
-	tsk.last_cmd = lst = []
+	def merge(lst1, lst2):
+		if lst1 and lst2:
+			return lst1[:-1] + [lst1[-1] + lst2[0]] + lst2[1:]
+		return lst1 + lst2
+	lst = []
 	%s
-	lst = [x for x in lst if x]
-	return tsk.exec_command(lst, cwd=wd, env=env.env or None)
+	if '' in lst:
+		lst = [x for x in lst if x]
+	tsk.last_cmd = lst
+	return tsk.exec_command(lst, cwd=cwdx, env=env.env or None)
+'''
+COMPILE_TEMPLATE_SIG_VARS='''
+def f(tsk):
+	sig = tsk.generator.bld.hash_env_vars(tsk.env, tsk.vars)
+	tsk.m.update(sig)
+	env = tsk.env
+	gen = tsk.generator
+	bld = gen.bld
+	cwdx = tsk.get_cwd()
+	p = env.get_flat
+	buf = []
+	%s
+	tsk.m.update(repr(buf).encode())
 '''
-def cache_outputs(cls):
-	m1=cls.run
-	def run(self):
-		bld=self.generator.bld
-		if bld.cache_global and not bld.nocache:
-			if self.can_retrieve_cache():
-				return 0
-		return m1(self)
-	cls.run=run
-	m2=cls.post_run
-	def post_run(self):
-		bld=self.generator.bld
-		ret=m2(self)
-		if bld.cache_global and not bld.nocache:
-			self.put_files_cache()
-		return ret
-	cls.post_run=post_run
-	return cls
 classes={}
 class store_task_type(type):
 	def __init__(cls,name,bases,dict):
 		super(store_task_type,cls).__init__(name,bases,dict)
 		name=cls.__name__
-		if name.endswith('_task'):
-			name=name.replace('_task','')
-		if name!='evil'and name!='TaskBase':
-			global classes
+		if name!='evil'and name!='Task':
 			if getattr(cls,'run_str',None):
 				(f,dvars)=compile_fun(cls.run_str,cls.shell)
-				cls.hcode=cls.run_str
+				cls.hcode=Utils.h_cmd(cls.run_str)
+				cls.orig_run_str=cls.run_str
 				cls.run_str=None
 				cls.run=f
 				cls.vars=list(set(cls.vars+dvars))
 				cls.vars.sort()
+				if cls.vars:
+					fun=compile_sig_vars(cls.vars)
+					if fun:
+						cls.sig_vars=fun
 			elif getattr(cls,'run',None)and not'hcode'in cls.__dict__:
-				cls.hcode=Utils.h_fun(cls.run)
-			if not getattr(cls,'nocache',None):
-				cls=cache_outputs(cls)
-			classes[name]=cls
+				cls.hcode=Utils.h_cmd(cls.run)
+			getattr(cls,'register',classes)[name]=cls
 evil=store_task_type('evil',(object,),{})
-class TaskBase(evil):
+class Task(evil):
+	vars=[]
+	always_run=False
+	shell=False
 	color='GREEN'
 	ext_in=[]
 	ext_out=[]
 	before=[]
 	after=[]
-	hcode=''
+	hcode=Utils.SIG_NIL
+	keep_last_cmd=False
+	weight=0
+	tree_weight=0
+	prio_order=0
+	__slots__=('hasrun','generator','env','inputs','outputs','dep_nodes','run_after')
 	def __init__(self,*k,**kw):
 		self.hasrun=NOT_RUN
 		try:
 			self.generator=kw['generator']
 		except KeyError:
 			self.generator=self
-	def __repr__(self):
-		return'\n\t{task %r: %s %s}'%(self.__class__.__name__,id(self),str(getattr(self,'fun','')))
-	def __str__(self):
-		if hasattr(self,'fun'):
-			return'executing: %s\n'%self.fun.__name__
-		return self.__class__.__name__+'\n'
-	def __hash__(self):
-		return id(self)
-	def exec_command(self,cmd,**kw):
+		self.env=kw['env']
+		self.inputs=[]
+		self.outputs=[]
+		self.dep_nodes=[]
+		self.run_after=set()
+	def __lt__(self,other):
+		return self.priority()>other.priority()
+	def __le__(self,other):
+		return self.priority()>=other.priority()
+	def __gt__(self,other):
+		return self.priority()<other.priority()
+	def __ge__(self,other):
+		return self.priority()<=other.priority()
+	def get_cwd(self):
 		bld=self.generator.bld
-		try:
-			if not kw.get('cwd',None):
-				kw['cwd']=bld.cwd
-		except AttributeError:
-			bld.cwd=kw['cwd']=bld.variant_dir
-		return bld.exec_command(cmd,**kw)
-	def runnable_status(self):
-		return RUN_ME
+		ret=getattr(self,'cwd',None)or getattr(bld,'cwd',bld.bldnode)
+		if isinstance(ret,str):
+			if os.path.isabs(ret):
+				ret=bld.root.make_node(ret)
+			else:
+				ret=self.generator.path.make_node(ret)
+		return ret
+	def quote_flag(self,x):
+		old=x
+		if'\\'in x:
+			x=x.replace('\\','\\\\')
+		if'"'in x:
+			x=x.replace('"','\\"')
+		if old!=x or' 'in x or'\t'in x or"'"in x:
+			x='"%s"'%x
+		return x
+	def priority(self):
+		return(self.weight+self.prio_order,-getattr(self.generator,'tg_idx_count',0))
+	def split_argfile(self,cmd):
+		return([cmd[0]],[self.quote_flag(x)for x in cmd[1:]])
+	def exec_command(self,cmd,**kw):
+		if not'cwd'in kw:
+			kw['cwd']=self.get_cwd()
+		if hasattr(self,'timeout'):
+			kw['timeout']=self.timeout
+		if self.env.PATH:
+			env=kw['env']=dict(kw.get('env')or self.env.env or os.environ)
+			env['PATH']=self.env.PATH if isinstance(self.env.PATH,str)else os.pathsep.join(self.env.PATH)
+		if hasattr(self,'stdout'):
+			kw['stdout']=self.stdout
+		if hasattr(self,'stderr'):
+			kw['stderr']=self.stderr
+		if not isinstance(cmd,str):
+			if Utils.is_win32:
+				too_long=sum([len(arg)for arg in cmd])+len(cmd)>8192
+			else:
+				too_long=len(cmd)>200000
+			if too_long and getattr(self,'allow_argsfile',True):
+				cmd,args=self.split_argfile(cmd)
+				try:
+					(fd,tmp)=tempfile.mkstemp()
+					os.write(fd,'\r\n'.join(args).encode())
+					os.close(fd)
+					if Logs.verbose:
+						Logs.debug('argfile: @%r -> %r',tmp,args)
+					return self.generator.bld.exec_command(cmd+['@'+tmp],**kw)
+				finally:
+					try:
+						os.remove(tmp)
+					except OSError:
+						pass
+		return self.generator.bld.exec_command(cmd,**kw)
 	def process(self):
-		m=self.master
-		if m.stop:
-			m.out.put(self)
-			return
 		try:
 			del self.generator.bld.task_sigs[self.uid()]
-		except:
+		except KeyError:
 			pass
 		try:
-			self.generator.bld.returned_tasks.append(self)
-			self.log_display(self.generator.bld)
 			ret=self.run()
 		except Exception:
-			self.err_msg=Utils.ex_stack()
+			self.err_msg=traceback.format_exc()
 			self.hasrun=EXCEPTION
-			m.error_handler(self)
-			m.out.put(self)
-			return
-		if ret:
-			self.err_code=ret
-			self.hasrun=CRASHED
 		else:
+			if ret:
+				self.err_code=ret
+				self.hasrun=CRASHED
+			else:
+				try:
+					self.post_run()
+				except Errors.WafError:
+					pass
+				except Exception:
+					self.err_msg=traceback.format_exc()
+					self.hasrun=EXCEPTION
+				else:
+					self.hasrun=SUCCESS
+		if self.hasrun!=SUCCESS and self.scan:
 			try:
-				self.post_run()
-			except Errors.WafError:
+				del self.generator.bld.imp_sigs[self.uid()]
+			except KeyError:
 				pass
-			except Exception:
-				self.err_msg=Utils.ex_stack()
-				self.hasrun=EXCEPTION
-			else:
-				self.hasrun=SUCCESS
-		if self.hasrun!=SUCCESS:
-			m.error_handler(self)
-		m.out.put(self)
-	def run(self):
-		if hasattr(self,'fun'):
-			return self.fun(self)
-		return 0
-	def post_run(self):
-		pass
 	def log_display(self,bld):
-		bld.to_log(self.display())
+		if self.generator.bld.progress_bar==3:
+			return
+		s=self.display()
+		if s:
+			if bld.logger:
+				logger=bld.logger
+			else:
+				logger=Logs
+			if self.generator.bld.progress_bar==1:
+				c1=Logs.colors.cursor_off
+				c2=Logs.colors.cursor_on
+				logger.info(s,extra={'stream':sys.stderr,'terminator':'','c1':c1,'c2':c2})
+			else:
+				logger.info(s,extra={'terminator':'','c1':'','c2':''})
 	def display(self):
 		col1=Logs.colors(self.color)
 		col2=Logs.colors.NORMAL
-		master=self.master
+		master=self.generator.bld.producer
 		def cur():
-			tmp=-1
-			if hasattr(master,'ready'):
-				tmp-=master.ready.qsize()
-			return master.processed+tmp
+			return master.processed-master.ready.qsize()
 		if self.generator.bld.progress_bar==1:
 			return self.generator.bld.progress_line(cur(),master.total,col1,col2)
 		if self.generator.bld.progress_bar==2:
@@ -180,19 +239,18 @@ class TaskBase(evil):
 			return None
 		total=master.total
 		n=len(str(total))
-		fs='[%%%dd/%%%dd] %%s%%s%%s'%(n,n)
-		return fs%(cur(),total,col1,s,col2)
-	def attr(self,att,default=None):
-		ret=getattr(self,att,self)
-		if ret is self:return getattr(self.__class__,att,default)
-		return ret
+		fs='[%%%dd/%%%dd] %%s%%s%%s%%s\n'%(n,n)
+		kw=self.keyword()
+		if kw:
+			kw+=' '
+		return fs%(cur(),total,kw,col1,s,col2)
 	def hash_constraints(self):
-		cls=self.__class__
-		tup=(str(cls.before),str(cls.after),str(cls.ext_in),str(cls.ext_out),cls.__name__,cls.hcode)
-		h=hash(tup)
-		return h
+		return(tuple(self.before),tuple(self.after),tuple(self.ext_in),tuple(self.ext_out),self.__class__.__name__,self.hcode)
 	def format_error(self):
-		msg=getattr(self,'last_cmd','')
+		if Logs.verbose:
+			msg=': %r\n%r'%(self,getattr(self,'last_cmd',''))
+		else:
+			msg=' (run with -v to display more information)'
 		name=getattr(self.generator,'name','')
 		if getattr(self,"err_msg",None):
 			return self.err_msg
@@ -200,15 +258,19 @@ class TaskBase(evil):
 			return'task in %r was not executed for some reason: %r'%(name,self)
 		elif self.hasrun==CRASHED:
 			try:
-				return' -> task in %r failed (exit status %r): %r\n%r'%(name,self.err_code,self,msg)
+				return' -> task in %r failed with exit status %r%s'%(name,self.err_code,msg)
 			except AttributeError:
-				return' -> task in %r failed: %r\n%r'%(name,self,msg)
+				return' -> task in %r failed%s'%(name,msg)
 		elif self.hasrun==MISSING:
-			return' -> missing files in %r: %r\n%r'%(name,self,msg)
+			return' -> missing files in %r%s'%(name,msg)
+		elif self.hasrun==CANCELED:
+			return' -> %r canceled because of missing dependencies'%name
 		else:
 			return'invalid status for task in %r: %r'%(name,self.hasrun)
 	def colon(self,var1,var2):
 		tmp=self.env[var1]
+		if not tmp:
+			return[]
 		if isinstance(var2,str):
 			it=self.env[var2]
 		else:
@@ -216,57 +278,78 @@ class TaskBase(evil):
 		if isinstance(tmp,str):
 			return[tmp%x for x in it]
 		else:
-			if Logs.verbose and not tmp and it:
-				Logs.warn('Missing env variable %r for task %r (generator %r)'%(var1,self,self.generator))
 			lst=[]
 			for y in it:
 				lst.extend(tmp)
 				lst.append(y)
 			return lst
-class Task(TaskBase):
-	vars=[]
-	shell=False
-	def __init__(self,*k,**kw):
-		TaskBase.__init__(self,*k,**kw)
-		self.env=kw['env']
-		self.inputs=[]
-		self.outputs=[]
-		self.dep_nodes=[]
-		self.run_after=set([])
 	def __str__(self):
-		env=self.env
-		src_str=' '.join([a.nice_path(env)for a in self.inputs])
-		tgt_str=' '.join([a.nice_path(env)for a in self.outputs])
-		if self.outputs:sep=' -> '
-		else:sep=''
-		return'%s: %s%s%s\n'%(self.__class__.__name__.replace('_task',''),src_str,sep,tgt_str)
+		name=self.__class__.__name__
+		if self.outputs:
+			if name.endswith(('lib','program'))or not self.inputs:
+				node=self.outputs[0]
+				return node.path_from(node.ctx.launch_node())
+		if not(self.inputs or self.outputs):
+			return self.__class__.__name__
+		if len(self.inputs)==1:
+			node=self.inputs[0]
+			return node.path_from(node.ctx.launch_node())
+		src_str=' '.join([a.path_from(a.ctx.launch_node())for a in self.inputs])
+		tgt_str=' '.join([a.path_from(a.ctx.launch_node())for a in self.outputs])
+		if self.outputs:
+			sep=' -> '
+		else:
+			sep=''
+		return'%s: %s%s%s'%(self.__class__.__name__,src_str,sep,tgt_str)
+	def keyword(self):
+		name=self.__class__.__name__
+		if name.endswith(('lib','program')):
+			return'Linking'
+		if len(self.inputs)==1 and len(self.outputs)==1:
+			return'Compiling'
+		if not self.inputs:
+			if self.outputs:
+				return'Creating'
+			else:
+				return'Running'
+		return'Processing'
 	def __repr__(self):
-		return"".join(['\n\t{task %r: '%id(self),self.__class__.__name__," ",",".join([x.name for x in self.inputs])," -> ",",".join([x.name for x in self.outputs]),'}'])
+		try:
+			ins=",".join([x.name for x in self.inputs])
+			outs=",".join([x.name for x in self.outputs])
+		except AttributeError:
+			ins=",".join([str(x)for x in self.inputs])
+			outs=",".join([str(x)for x in self.outputs])
+		return"".join(['\n\t{task %r: '%id(self),self.__class__.__name__," ",ins," -> ",outs,'}'])
 	def uid(self):
 		try:
 			return self.uid_
 		except AttributeError:
-			m=Utils.md5()
+			m=Utils.md5(self.__class__.__name__)
 			up=m.update
-			up(self.__class__.__name__)
 			for x in self.inputs+self.outputs:
 				up(x.abspath())
 			self.uid_=m.digest()
 			return self.uid_
 	def set_inputs(self,inp):
-		if isinstance(inp,list):self.inputs+=inp
-		else:self.inputs.append(inp)
+		if isinstance(inp,list):
+			self.inputs+=inp
+		else:
+			self.inputs.append(inp)
 	def set_outputs(self,out):
-		if isinstance(out,list):self.outputs+=out
-		else:self.outputs.append(out)
+		if isinstance(out,list):
+			self.outputs+=out
+		else:
+			self.outputs.append(out)
 	def set_run_after(self,task):
-		assert isinstance(task,TaskBase)
+		assert isinstance(task,Task)
 		self.run_after.add(task)
 	def signature(self):
-		try:return self.cache_sig
-		except AttributeError:pass
-		self.m=Utils.md5()
-		self.m.update(self.hcode)
+		try:
+			return self.cache_sig
+		except AttributeError:
+			pass
+		self.m=Utils.md5(self.hcode)
 		self.sig_explicit_deps()
 		self.sig_vars()
 		if self.scan:
@@ -277,10 +360,14 @@ class Task(TaskBase):
 		ret=self.cache_sig=self.m.digest()
 		return ret
 	def runnable_status(self):
+		bld=self.generator.bld
+		if bld.is_install<0:
+			return SKIP_ME
 		for t in self.run_after:
 			if not t.hasrun:
 				return ASK_LATER
-		bld=self.generator.bld
+			elif t.hasrun<SKIPPED:
+				return CANCEL_ME
 		try:
 			new_sig=self.signature()
 		except Errors.TaskNotReady:
@@ -289,120 +376,109 @@ class Task(TaskBase):
 		try:
 			prev_sig=bld.task_sigs[key]
 		except KeyError:
-			Logs.debug("task: task %r must run as it was never run before or the task code changed"%self)
+			Logs.debug('task: task %r must run: it was never run before or the task code changed',self)
 			return RUN_ME
-		for node in self.outputs:
-			try:
-				if node.sig!=new_sig:
-					return RUN_ME
-			except AttributeError:
-				Logs.debug("task: task %r must run as the output nodes do not exist"%self)
-				return RUN_ME
 		if new_sig!=prev_sig:
+			Logs.debug('task: task %r must run: the task signature changed',self)
 			return RUN_ME
-		return SKIP_ME
+		for node in self.outputs:
+			sig=bld.node_sigs.get(node)
+			if not sig:
+				Logs.debug('task: task %r must run: an output node has no signature',self)
+				return RUN_ME
+			if sig!=key:
+				Logs.debug('task: task %r must run: an output node was produced by another task',self)
+				return RUN_ME
+			if not node.exists():
+				Logs.debug('task: task %r must run: an output node does not exist',self)
+				return RUN_ME
+		return(self.always_run and RUN_ME)or SKIP_ME
 	def post_run(self):
 		bld=self.generator.bld
-		sig=self.signature()
 		for node in self.outputs:
-			try:
-				os.stat(node.abspath())
-			except OSError:
+			if not node.exists():
 				self.hasrun=MISSING
 				self.err_msg='-> missing file: %r'%node.abspath()
 				raise Errors.WafError(self.err_msg)
-			node.sig=sig
-		bld.task_sigs[self.uid()]=self.cache_sig
+			bld.node_sigs[node]=self.uid()
+		bld.task_sigs[self.uid()]=self.signature()
+		if not self.keep_last_cmd:
+			try:
+				del self.last_cmd
+			except AttributeError:
+				pass
 	def sig_explicit_deps(self):
 		bld=self.generator.bld
 		upd=self.m.update
 		for x in self.inputs+self.dep_nodes:
-			try:
-				upd(x.get_bld_sig())
-			except(AttributeError,TypeError):
-				raise Errors.WafError('Missing node signature for %r (required by %r)'%(x,self))
+			upd(x.get_bld_sig())
 		if bld.deps_man:
 			additional_deps=bld.deps_man
 			for x in self.inputs+self.outputs:
 				try:
-					d=additional_deps[id(x)]
+					d=additional_deps[x]
 				except KeyError:
 					continue
 				for v in d:
-					if isinstance(v,bld.root.__class__):
-						try:
-							v=v.get_bld_sig()
-						except AttributeError:
-							raise Errors.WafError('Missing node signature for %r (required by %r)'%(v,self))
-					elif hasattr(v,'__call__'):
-						v=v()
+					try:
+						v=v.get_bld_sig()
+					except AttributeError:
+						if hasattr(v,'__call__'):
+							v=v()
 					upd(v)
-		return self.m.digest()
-	def sig_vars(self):
+	def sig_deep_inputs(self):
 		bld=self.generator.bld
-		env=self.env
-		upd=self.m.update
-		act_sig=bld.hash_env_vars(env,self.__class__.vars)
-		upd(act_sig)
-		dep_vars=getattr(self,'dep_vars',None)
-		if dep_vars:
-			upd(bld.hash_env_vars(env,dep_vars))
-		return self.m.digest()
+		lst=[bld.task_sigs[bld.node_sigs[node]]for node in(self.inputs+self.dep_nodes)if node.is_bld()]
+		self.m.update(Utils.h_list(lst))
+	def sig_vars(self):
+		sig=self.generator.bld.hash_env_vars(self.env,self.vars)
+		self.m.update(sig)
 	scan=None
 	def sig_implicit_deps(self):
 		bld=self.generator.bld
 		key=self.uid()
-		prev=bld.task_sigs.get((key,'imp'),[])
+		prev=bld.imp_sigs.get(key,[])
 		if prev:
 			try:
 				if prev==self.compute_sig_implicit_deps():
 					return prev
-			except:
+			except Errors.TaskNotReady:
+				raise
+			except EnvironmentError:
 				for x in bld.node_deps.get(self.uid(),[]):
-					if x.is_child_of(bld.srcnode):
+					if not x.is_bld()and not x.exists():
 						try:
-							os.stat(x.abspath())
-						except:
-							try:
-								del x.parent.children[x.name]
-							except:
-								pass
-			del bld.task_sigs[(key,'imp')]
+							del x.parent.children[x.name]
+						except KeyError:
+							pass
+			del bld.imp_sigs[key]
 			raise Errors.TaskRescan('rescan')
-		(nodes,names)=self.scan()
+		(bld.node_deps[key],bld.raw_deps[key])=self.scan()
 		if Logs.verbose:
-			Logs.debug('deps: scanner for %s returned %s %s'%(str(self),str(nodes),str(names)))
-		bld.node_deps[key]=nodes
-		bld.raw_deps[key]=names
-		self.are_implicit_nodes_ready()
+			Logs.debug('deps: scanner for %s: %r; unresolved: %r',self,bld.node_deps[key],bld.raw_deps[key])
 		try:
-			bld.task_sigs[(key,'imp')]=sig=self.compute_sig_implicit_deps()
-		except:
-			if Logs.verbose:
-				for k in bld.node_deps.get(self.uid(),[]):
-					try:
-						k.get_bld_sig()
-					except:
-						Logs.warn('Missing signature for node %r (may cause rebuilds)'%k)
-		else:
-			return sig
+			bld.imp_sigs[key]=self.compute_sig_implicit_deps()
+		except EnvironmentError:
+			for k in bld.node_deps.get(self.uid(),[]):
+				if not k.exists():
+					Logs.warn('Dependency %r for %r is missing: check the task declaration and the build order!',k,self)
+			raise
 	def compute_sig_implicit_deps(self):
 		upd=self.m.update
-		bld=self.generator.bld
 		self.are_implicit_nodes_ready()
-		for k in bld.node_deps.get(self.uid(),[]):
+		for k in self.generator.bld.node_deps.get(self.uid(),[]):
 			upd(k.get_bld_sig())
 		return self.m.digest()
 	def are_implicit_nodes_ready(self):
 		bld=self.generator.bld
 		try:
 			cache=bld.dct_implicit_nodes
-		except:
+		except AttributeError:
 			bld.dct_implicit_nodes=cache={}
 		try:
-			dct=cache[bld.cur]
+			dct=cache[bld.current_group]
 		except KeyError:
-			dct=cache[bld.cur]={}
+			dct=cache[bld.current_group]={}
 			for tsk in bld.cur_tasks:
 				for x in tsk.outputs:
 					dct[x]=tsk
@@ -415,71 +491,19 @@ class Task(TaskBase):
 			for tsk in self.run_after:
 				if not tsk.hasrun:
 					raise Errors.TaskNotReady('not ready')
-	def can_retrieve_cache(self):
-		if not getattr(self,'outputs',None):
-			return None
-		sig=self.signature()
-		ssig=Utils.to_hex(self.uid())+Utils.to_hex(sig)
-		dname=os.path.join(self.generator.bld.cache_global,ssig)
-		try:
-			t1=os.stat(dname).st_mtime
-		except OSError:
-			return None
-		for node in self.outputs:
-			orig=os.path.join(dname,node.name)
-			try:
-				shutil.copy2(orig,node.abspath())
-				os.utime(orig,None)
-			except(OSError,IOError):
-				Logs.debug('task: failed retrieving file')
-				return None
-		try:
-			t2=os.stat(dname).st_mtime
-		except OSError:
-			return None
-		if t1!=t2:
-			return None
-		for node in self.outputs:
-			node.sig=sig
-			if self.generator.bld.progress_bar<1:
-				self.generator.bld.to_log('restoring from cache %r\n'%node.abspath())
-		self.cached=True
-		return True
-	def put_files_cache(self):
-		if getattr(self,'cached',None):
-			return None
-		if not getattr(self,'outputs',None):
-			return None
-		sig=self.signature()
-		ssig=Utils.to_hex(self.uid())+Utils.to_hex(sig)
-		dname=os.path.join(self.generator.bld.cache_global,ssig)
-		tmpdir=tempfile.mkdtemp(prefix=self.generator.bld.cache_global+os.sep+'waf')
-		try:
-			shutil.rmtree(dname)
-		except:
-			pass
+if sys.hexversion>0x3000000:
+	def uid(self):
 		try:
-			for node in self.outputs:
-				dest=os.path.join(tmpdir,node.name)
-				shutil.copy2(node.abspath(),dest)
-		except(OSError,IOError):
-			try:
-				shutil.rmtree(tmpdir)
-			except:
-				pass
-		else:
-			try:
-				os.rename(tmpdir,dname)
-			except OSError:
-				try:
-					shutil.rmtree(tmpdir)
-				except:
-					pass
-			else:
-				try:
-					os.chmod(dname,Utils.O755)
-				except:
-					pass
+			return self.uid_
+		except AttributeError:
+			m=Utils.md5(self.__class__.__name__.encode('latin-1','xmlcharrefreplace'))
+			up=m.update
+			for x in self.inputs+self.outputs:
+				up(x.abspath().encode('latin-1','xmlcharrefreplace'))
+			self.uid_=m.digest()
+			return self.uid_
+	uid.__doc__=Task.uid.__doc__
+	Task.uid=uid
 def is_before(t1,t2):
 	to_list=Utils.to_list
 	for k in to_list(t2.ext_in):
@@ -494,14 +518,27 @@ def set_file_constraints(tasks):
 	ins=Utils.defaultdict(set)
 	outs=Utils.defaultdict(set)
 	for x in tasks:
-		for a in getattr(x,'inputs',[])+getattr(x,'dep_nodes',[]):
-			ins[id(a)].add(x)
-		for a in getattr(x,'outputs',[]):
-			outs[id(a)].add(x)
+		for a in x.inputs:
+			ins[a].add(x)
+		for a in x.dep_nodes:
+			ins[a].add(x)
+		for a in x.outputs:
+			outs[a].add(x)
 	links=set(ins.keys()).intersection(outs.keys())
 	for k in links:
 		for a in ins[k]:
 			a.run_after.update(outs[k])
+class TaskGroup(object):
+	def __init__(self,prev,next):
+		self.prev=prev
+		self.next=next
+		self.done=False
+	def get_hasrun(self):
+		for k in self.prev:
+			if not k.hasrun:
+				return NOT_RUN
+		return SUCCESS
+	hasrun=property(get_hasrun,None)
 def set_precedence_constraints(tasks):
 	cstr_groups=Utils.defaultdict(list)
 	for x in tasks:
@@ -521,152 +558,235 @@ def set_precedence_constraints(tasks):
 				b=i
 			else:
 				continue
-			for x in cstr_groups[keys[b]]:
-				x.run_after.update(cstr_groups[keys[a]])
+			a=cstr_groups[keys[a]]
+			b=cstr_groups[keys[b]]
+			if len(a)<2 or len(b)<2:
+				for x in b:
+					x.run_after.update(a)
+			else:
+				group=TaskGroup(set(a),set(b))
+				for x in b:
+					x.run_after.add(group)
 def funex(c):
 	dc={}
 	exec(c,dc)
 	return dc['f']
-reg_act=re.compile(r"(?P<backslash>\\)|(?P<dollar>\$\$)|(?P<subst>\$\{(?P<var>\w+)(?P<code>.*?)\})",re.M)
+re_cond=re.compile(r'(?P<var>\w+)|(?P<or>\|)|(?P<and>&)')
+re_novar=re.compile(r'^(SRC|TGT)\W+.*?$')
+reg_act=re.compile(r'(?P<backslash>\\)|(?P<dollar>\$\$)|(?P<subst>\$\{(?P<var>\w+)(?P<code>.*?)\})',re.M)
 def compile_fun_shell(line):
 	extr=[]
 	def repl(match):
 		g=match.group
-		if g('dollar'):return"$"
-		elif g('backslash'):return'\\\\'
-		elif g('subst'):extr.append((g('var'),g('code')));return"%s"
+		if g('dollar'):
+			return"$"
+		elif g('backslash'):
+			return'\\\\'
+		elif g('subst'):
+			extr.append((g('var'),g('code')))
+			return"%s"
 		return None
 	line=reg_act.sub(repl,line)or line
-	parm=[]
 	dvars=[]
+	def add_dvar(x):
+		if x not in dvars:
+			dvars.append(x)
+	def replc(m):
+		if m.group('and'):
+			return' and '
+		elif m.group('or'):
+			return' or '
+		else:
+			x=m.group('var')
+			add_dvar(x)
+			return'env[%r]'%x
+	parm=[]
 	app=parm.append
 	for(var,meth)in extr:
 		if var=='SRC':
-			if meth:app('tsk.inputs%s'%meth)
-			else:app('" ".join([a.path_from(bld.bldnode) for a in tsk.inputs])')
+			if meth:
+				app('tsk.inputs%s'%meth)
+			else:
+				app('" ".join([a.path_from(cwdx) for a in tsk.inputs])')
 		elif var=='TGT':
-			if meth:app('tsk.outputs%s'%meth)
-			else:app('" ".join([a.path_from(bld.bldnode) for a in tsk.outputs])')
+			if meth:
+				app('tsk.outputs%s'%meth)
+			else:
+				app('" ".join([a.path_from(cwdx) for a in tsk.outputs])')
 		elif meth:
 			if meth.startswith(':'):
+				add_dvar(var)
 				m=meth[1:]
 				if m=='SRC':
-					m='[a.path_from(bld.bldnode) for a in tsk.inputs]'
+					m='[a.path_from(cwdx) for a in tsk.inputs]'
 				elif m=='TGT':
-					m='[a.path_from(bld.bldnode) for a in tsk.outputs]'
-				elif m[:3]not in('tsk','gen','bld'):
-					dvars.extend([var,meth[1:]])
-					m='%r'%m
+					m='[a.path_from(cwdx) for a in tsk.outputs]'
+				elif re_novar.match(m):
+					m='[tsk.inputs%s]'%m[3:]
+				elif re_novar.match(m):
+					m='[tsk.outputs%s]'%m[3:]
+				else:
+					add_dvar(m)
+					if m[:3]not in('tsk','gen','bld'):
+						m='%r'%m
 				app('" ".join(tsk.colon(%r, %s))'%(var,m))
+			elif meth.startswith('?'):
+				expr=re_cond.sub(replc,meth[1:])
+				app('p(%r) if (%s) else ""'%(var,expr))
 			else:
-				app('%s%s'%(var,meth))
+				call='%s%s'%(var,meth)
+				add_dvar(call)
+				app(call)
 		else:
-			if not var in dvars:dvars.append(var)
+			add_dvar(var)
 			app("p('%s')"%var)
-	if parm:parm="%% (%s) "%(',\n\t\t'.join(parm))
-	else:parm=''
+	if parm:
+		parm="%% (%s) "%(',\n\t\t'.join(parm))
+	else:
+		parm=''
 	c=COMPILE_TEMPLATE_SHELL%(line,parm)
-	Logs.debug('action: %s'%c)
+	Logs.debug('action: %s',c.strip().splitlines())
 	return(funex(c),dvars)
+reg_act_noshell=re.compile(r"(?P<space>\s+)|(?P<subst>\$\{(?P<var>\w+)(?P<code>.*?)\})|(?P<text>([^$ \t\n\r\f\v]|\$\$)+)",re.M)
 def compile_fun_noshell(line):
-	extr=[]
-	def repl(match):
-		g=match.group
-		if g('dollar'):return"$"
-		elif g('subst'):extr.append((g('var'),g('code')));return"<<|@|>>"
-		return None
-	line2=reg_act.sub(repl,line)
-	params=line2.split('<<|@|>>')
-	assert(extr)
 	buf=[]
 	dvars=[]
+	merge=False
 	app=buf.append
-	for x in range(len(extr)):
-		params[x]=params[x].strip()
-		if params[x]:
-			app("lst.extend(%r)"%params[x].split())
-		(var,meth)=extr[x]
-		if var=='SRC':
-			if meth:app('lst.append(tsk.inputs%s)'%meth)
-			else:app("lst.extend([a.path_from(bld.bldnode) for a in tsk.inputs])")
-		elif var=='TGT':
-			if meth:app('lst.append(tsk.outputs%s)'%meth)
-			else:app("lst.extend([a.path_from(bld.bldnode) for a in tsk.outputs])")
-		elif meth:
-			if meth.startswith(':'):
-				m=meth[1:]
-				if m=='SRC':
-					m='[a.path_from(bld.bldnode) for a in tsk.inputs]'
-				elif m=='TGT':
-					m='[a.path_from(bld.bldnode) for a in tsk.outputs]'
-				elif m[:3]not in('tsk','gen','bld'):
-					dvars.extend([var,m])
-					m='%r'%m
-				app('lst.extend(tsk.colon(%r, %s))'%(var,m))
-			else:
-				app('lst.extend(gen.to_list(%s%s))'%(var,meth))
-		else:
-			app('lst.extend(to_list(env[%r]))'%var)
-			if not var in dvars:dvars.append(var)
-	if extr:
-		if params[-1]:
-			app("lst.extend(%r)"%params[-1].split())
+	def add_dvar(x):
+		if x not in dvars:
+			dvars.append(x)
+	def replc(m):
+		if m.group('and'):
+			return' and '
+		elif m.group('or'):
+			return' or '
+		else:
+			x=m.group('var')
+			add_dvar(x)
+			return'env[%r]'%x
+	for m in reg_act_noshell.finditer(line):
+		if m.group('space'):
+			merge=False
+			continue
+		elif m.group('text'):
+			app('[%r]'%m.group('text').replace('$$','$'))
+		elif m.group('subst'):
+			var=m.group('var')
+			code=m.group('code')
+			if var=='SRC':
+				if code:
+					app('[tsk.inputs%s]'%code)
+				else:
+					app('[a.path_from(cwdx) for a in tsk.inputs]')
+			elif var=='TGT':
+				if code:
+					app('[tsk.outputs%s]'%code)
+				else:
+					app('[a.path_from(cwdx) for a in tsk.outputs]')
+			elif code:
+				if code.startswith(':'):
+					add_dvar(var)
+					m=code[1:]
+					if m=='SRC':
+						m='[a.path_from(cwdx) for a in tsk.inputs]'
+					elif m=='TGT':
+						m='[a.path_from(cwdx) for a in tsk.outputs]'
+					elif re_novar.match(m):
+						m='[tsk.inputs%s]'%m[3:]
+					elif re_novar.match(m):
+						m='[tsk.outputs%s]'%m[3:]
+					else:
+						add_dvar(m)
+						if m[:3]not in('tsk','gen','bld'):
+							m='%r'%m
+					app('tsk.colon(%r, %s)'%(var,m))
+				elif code.startswith('?'):
+					expr=re_cond.sub(replc,code[1:])
+					app('to_list(env[%r] if (%s) else [])'%(var,expr))
+				else:
+					call='%s%s'%(var,code)
+					add_dvar(call)
+					app('to_list(%s)'%call)
+			else:
+				app('to_list(env[%r])'%var)
+				add_dvar(var)
+		if merge:
+			tmp='merge(%s, %s)'%(buf[-2],buf[-1])
+			del buf[-1]
+			buf[-1]=tmp
+		merge=True
+	buf=['lst.extend(%s)'%x for x in buf]
 	fun=COMPILE_TEMPLATE_NOSHELL%"\n\t".join(buf)
-	Logs.debug('action: %s'%fun)
+	Logs.debug('action: %s',fun.strip().splitlines())
 	return(funex(fun),dvars)
 def compile_fun(line,shell=False):
-	if line.find('<')>0 or line.find('>')>0 or line.find('&&')>0:
-		shell=True
+	if isinstance(line,str):
+		if line.find('<')>0 or line.find('>')>0 or line.find('&&')>0:
+			shell=True
+	else:
+		dvars_lst=[]
+		funs_lst=[]
+		for x in line:
+			if isinstance(x,str):
+				fun,dvars=compile_fun(x,shell)
+				dvars_lst+=dvars
+				funs_lst.append(fun)
+			else:
+				funs_lst.append(x)
+		def composed_fun(task):
+			for x in funs_lst:
+				ret=x(task)
+				if ret:
+					return ret
+			return None
+		return composed_fun,dvars_lst
 	if shell:
 		return compile_fun_shell(line)
 	else:
 		return compile_fun_noshell(line)
+def compile_sig_vars(vars):
+	buf=[]
+	for x in sorted(vars):
+		if x[:3]in('tsk','gen','bld'):
+			buf.append('buf.append(%s)'%x)
+	if buf:
+		return funex(COMPILE_TEMPLATE_SIG_VARS%'\n\t'.join(buf))
+	return None
 def task_factory(name,func=None,vars=None,color='GREEN',ext_in=[],ext_out=[],before=[],after=[],shell=False,scan=None):
-	params={'vars':vars or[],'color':color,'name':name,'ext_in':Utils.to_list(ext_in),'ext_out':Utils.to_list(ext_out),'before':Utils.to_list(before),'after':Utils.to_list(after),'shell':shell,'scan':scan,}
-	if isinstance(func,str):
+	params={'vars':vars or[],'color':color,'name':name,'shell':shell,'scan':scan,}
+	if isinstance(func,str)or isinstance(func,tuple):
 		params['run_str']=func
 	else:
 		params['run']=func
 	cls=type(Task)(name,(Task,),params)
-	global classes
 	classes[name]=cls
+	if ext_in:
+		cls.ext_in=Utils.to_list(ext_in)
+	if ext_out:
+		cls.ext_out=Utils.to_list(ext_out)
+	if before:
+		cls.before=Utils.to_list(before)
+	if after:
+		cls.after=Utils.to_list(after)
 	return cls
-def always_run(cls):
-	old=cls.runnable_status
-	def always(self):
-		ret=old(self)
-		if ret==SKIP_ME:
-			ret=RUN_ME
-		return ret
-	cls.runnable_status=always
-	return cls
-def update_outputs(cls):
-	old_post_run=cls.post_run
-	def post_run(self):
-		old_post_run(self)
-		for node in self.outputs:
-			node.sig=Utils.h_file(node.abspath())
-			self.generator.bld.task_sigs[node.abspath()]=self.uid()
-	cls.post_run=post_run
-	old_runnable_status=cls.runnable_status
-	def runnable_status(self):
-		status=old_runnable_status(self)
-		if status!=RUN_ME:
-			return status
-		try:
-			bld=self.generator.bld
-			prev_sig=bld.task_sigs[self.uid()]
-			if prev_sig==self.signature():
-				for x in self.outputs:
-					if not x.sig or bld.task_sigs[x.abspath()]!=self.uid():
-						return RUN_ME
-				return SKIP_ME
-		except KeyError:
-			pass
-		except IndexError:
-			pass
-		except AttributeError:
-			pass
-		return RUN_ME
-	cls.runnable_status=runnable_status
+def deep_inputs(cls):
+	def sig_explicit_deps(self):
+		Task.sig_explicit_deps(self)
+		Task.sig_deep_inputs(self)
+	cls.sig_explicit_deps=sig_explicit_deps
 	return cls
+TaskBase=Task
+class TaskSemaphore(object):
+	def __init__(self,num):
+		self.num=num
+		self.locking=set()
+		self.waiting=set()
+	def is_locked(self):
+		return len(self.locking)>=self.num
+	def acquire(self,tsk):
+		if self.is_locked():
+			raise IndexError('Cannot lock more %r'%self.locking)
+		self.locking.add(tsk)
+	def release(self,tsk):
+		self.locking.remove(tsk)
--- pugl-0~svn32+dfsg0.orig/waflib/TaskGen.py
+++ pugl-0~svn32+dfsg0/waflib/TaskGen.py
@@ -1,21 +1,18 @@
 #! /usr/bin/env python
 # encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
 
-import sys
-if sys.hexversion < 0x020400f0: from sets import Set as set
-import copy,re,os
-from waflib import Task,Utils,Logs,Errors,ConfigSet
+import copy,re,os,functools
+from waflib import Task,Utils,Logs,Errors,ConfigSet,Node
 feats=Utils.defaultdict(set)
+HEADER_EXTS=['.h','.hpp','.hxx','.hh']
 class task_gen(object):
-	mappings={}
-	prec=Utils.defaultdict(list)
+	mappings=Utils.ordered_iter_dict()
+	prec=Utils.defaultdict(set)
 	def __init__(self,*k,**kw):
-		self.source=''
+		self.source=[]
 		self.target=''
 		self.meths=[]
-		self.prec=Utils.defaultdict(list)
-		self.mappings={}
 		self.features=[]
 		self.tasks=[]
 		if not'bld'in kw:
@@ -25,22 +22,29 @@ class task_gen(object):
 		else:
 			self.bld=kw['bld']
 			self.env=self.bld.env.derive()
-			self.path=self.bld.path
+			self.path=kw.get('path',self.bld.path)
+			path=self.path.abspath()
 			try:
-				self.idx=self.bld.idx[id(self.path)]=self.bld.idx.get(id(self.path),0)+1
+				self.idx=self.bld.idx[path]=self.bld.idx.get(path,0)+1
 			except AttributeError:
 				self.bld.idx={}
-				self.idx=self.bld.idx[id(self.path)]=1
+				self.idx=self.bld.idx[path]=1
+			try:
+				self.tg_idx_count=self.bld.tg_idx_count=self.bld.tg_idx_count+1
+			except AttributeError:
+				self.tg_idx_count=self.bld.tg_idx_count=1
 		for key,val in kw.items():
 			setattr(self,key,val)
 	def __str__(self):
 		return"<task_gen %r declared in %s>"%(self.name,self.path.abspath())
 	def __repr__(self):
 		lst=[]
-		for x in self.__dict__.keys():
-			if x not in['env','bld','compiled_tasks','tasks']:
+		for x in self.__dict__:
+			if x not in('env','bld','compiled_tasks','tasks'):
 				lst.append("%s=%s"%(x,repr(getattr(self,x))))
 		return"bld(%s) in %s"%(", ".join(lst),self.path.abspath())
+	def get_cwd(self):
+		return self.bld.bldnode
 	def get_name(self):
 		try:
 			return self._name
@@ -55,35 +59,41 @@ class task_gen(object):
 		self._name=name
 	name=property(get_name,set_name)
 	def to_list(self,val):
-		if isinstance(val,str):return val.split()
-		else:return val
+		if isinstance(val,str):
+			return val.split()
+		else:
+			return val
 	def post(self):
 		if getattr(self,'posted',None):
 			return False
 		self.posted=True
 		keys=set(self.meths)
+		keys.update(feats['*'])
 		self.features=Utils.to_list(self.features)
-		for x in self.features+['*']:
+		for x in self.features:
 			st=feats[x]
-			if not st:
-				if not x in Task.classes:
-					Logs.warn('feature %r does not exist - bind at least one method to it'%x)
-			keys.update(list(st))
+			if st:
+				keys.update(st)
+			elif not x in Task.classes:
+				Logs.warn('feature %r does not exist - bind at least one method to it?',x)
 		prec={}
-		prec_tbl=self.prec or task_gen.prec
+		prec_tbl=self.prec
 		for x in prec_tbl:
 			if x in keys:
 				prec[x]=prec_tbl[x]
 		tmp=[]
 		for a in keys:
 			for x in prec.values():
-				if a in x:break
+				if a in x:
+					break
 			else:
 				tmp.append(a)
+		tmp.sort(reverse=True)
 		out=[]
 		while tmp:
 			e=tmp.pop()
-			if e in keys:out.append(e)
+			if e in keys:
+				out.append(e)
 			try:
 				nlst=prec[e]
 			except KeyError:
@@ -96,43 +106,49 @@ class task_gen(object):
 							break
 					else:
 						tmp.append(x)
+						tmp.sort(reverse=True)
 		if prec:
-			raise Errors.WafError('Cycle detected in the method execution %r'%prec)
-		out.reverse()
+			buf=['Cycle detected in the method execution:']
+			for k,v in prec.items():
+				buf.append('- %s after %s'%(k,[x for x in v if x in prec]))
+			raise Errors.WafError('\n'.join(buf))
 		self.meths=out
-		Logs.debug('task_gen: posting %s %d'%(self,id(self)))
+		Logs.debug('task_gen: posting %s %d',self,id(self))
 		for x in out:
 			try:
 				v=getattr(self,x)
 			except AttributeError:
 				raise Errors.WafError('%r is not a valid task generator method'%x)
-			Logs.debug('task_gen: -> %s (%d)'%(x,id(self)))
+			Logs.debug('task_gen: -> %s (%d)',x,id(self))
 			v()
-		Logs.debug('task_gen: posted %s'%self.name)
+		Logs.debug('task_gen: posted %s',self.name)
 		return True
 	def get_hook(self,node):
 		name=node.name
 		for k in self.mappings:
-			if name.endswith(k):
-				return self.mappings[k]
-		for k in task_gen.mappings:
-			if name.endswith(k):
-				return task_gen.mappings[k]
-		raise Errors.WafError("File %r has no mapping in %r (did you forget to load a waf tool?)"%(node,task_gen.mappings.keys()))
-	def create_task(self,name,src=None,tgt=None):
+			try:
+				if name.endswith(k):
+					return self.mappings[k]
+			except TypeError:
+				if k.match(name):
+					return self.mappings[k]
+		keys=list(self.mappings.keys())
+		raise Errors.WafError("File %r has no mapping in %r (load a waf tool?)"%(node,keys))
+	def create_task(self,name,src=None,tgt=None,**kw):
 		task=Task.classes[name](env=self.env.derive(),generator=self)
 		if src:
 			task.set_inputs(src)
 		if tgt:
 			task.set_outputs(tgt)
+		task.__dict__.update(kw)
 		self.tasks.append(task)
 		return task
 	def clone(self,env):
 		newobj=self.bld()
 		for x in self.__dict__:
-			if x in['env','bld']:
+			if x in('env','bld'):
 				continue
-			elif x in['path','features']:
+			elif x in('path','features'):
 				setattr(newobj,x,getattr(self,x))
 			else:
 				setattr(newobj,x,copy.copy(getattr(self,x)))
@@ -149,12 +165,11 @@ def declare_chain(name='',rule=None,reen
 		name=rule
 	cls=Task.task_factory(name,rule,color=color,ext_in=ext_in,ext_out=ext_out,before=before,after=after,scan=scan,shell=shell)
 	def x_file(self,node):
-		ext=decider and decider(self,node)or cls.ext_out
 		if ext_in:
 			_ext_in=ext_in[0]
 		tsk=self.create_task(name,node)
 		cnt=0
-		keys=self.mappings.keys()+self.__class__.mappings.keys()
+		ext=decider(self,node)if decider else cls.ext_out
 		for x in ext:
 			k=node.change_ext(x,ext_in=_ext_in)
 			tsk.outputs.append(k)
@@ -162,13 +177,13 @@ def declare_chain(name='',rule=None,reen
 				if cnt<int(reentrant):
 					self.source.append(k)
 			else:
-				for y in keys:
+				for y in self.mappings:
 					if k.name.endswith(y):
 						self.source.append(k)
 						break
 			cnt+=1
 		if install_path:
-			self.bld.install_files(install_path,tsk.outputs)
+			self.install_task=self.add_install_files(install_to=install_path,install_from=tsk.outputs)
 		return tsk
 	for x in cls.ext_in:
 		task_gen.mappings[x]=x_file
@@ -187,8 +202,7 @@ def before_method(*k):
 	def deco(func):
 		setattr(task_gen,func.__name__,func)
 		for fun_name in k:
-			if not func.__name__ in task_gen.prec[fun_name]:
-				task_gen.prec[fun_name].append(func.__name__)
+			task_gen.prec[func.__name__].add(fun_name)
 		return func
 	return deco
 before=before_method
@@ -196,8 +210,7 @@ def after_method(*k):
 	def deco(func):
 		setattr(task_gen,func.__name__,func)
 		for fun_name in k:
-			if not fun_name in task_gen.prec[func.__name__]:
-				task_gen.prec[func.__name__].append(fun_name)
+			task_gen.prec[fun_name].add(func.__name__)
 		return func
 	return deco
 after=after_method
@@ -208,31 +221,103 @@ def extension(*k):
 			task_gen.mappings[x]=func
 		return func
 	return deco
+@taskgen_method
 def to_nodes(self,lst,path=None):
 	tmp=[]
 	path=path or self.path
 	find=path.find_resource
-	if isinstance(lst,self.path.__class__):
+	if isinstance(lst,Node.Node):
 		lst=[lst]
 	for x in Utils.to_list(lst):
 		if isinstance(x,str):
 			node=find(x)
-		else:
+		elif hasattr(x,'name'):
 			node=x
+		else:
+			tmp.extend(self.to_nodes(x))
+			continue
 		if not node:
-			raise Errors.WafError("source not found: %r in %r"%(x,self))
+			raise Errors.WafError('source not found: %r in %r'%(x,self))
 		tmp.append(node)
 	return tmp
+@feature('*')
 def process_source(self):
 	self.source=self.to_nodes(getattr(self,'source',[]))
 	for node in self.source:
 		self.get_hook(node)(self,node)
+@feature('*')
+@before_method('process_source')
 def process_rule(self):
 	if not getattr(self,'rule',None):
 		return
-	name=str(getattr(self,'name',None)or self.target or self.rule)
-	cls=Task.task_factory(name,self.rule,getattr(self,'vars',[]),shell=getattr(self,'shell',True),color=getattr(self,'color','BLUE'))
+	name=str(getattr(self,'name',None)or self.target or getattr(self.rule,'__name__',self.rule))
+	try:
+		cache=self.bld.cache_rule_attr
+	except AttributeError:
+		cache=self.bld.cache_rule_attr={}
+	chmod=getattr(self,'chmod',None)
+	shell=getattr(self,'shell',True)
+	color=getattr(self,'color','BLUE')
+	scan=getattr(self,'scan',None)
+	_vars=getattr(self,'vars',[])
+	cls_str=getattr(self,'cls_str',None)
+	cls_keyword=getattr(self,'cls_keyword',None)
+	use_cache=getattr(self,'cache_rule','True')
+	deep_inputs=getattr(self,'deep_inputs',False)
+	scan_val=has_deps=hasattr(self,'deps')
+	if scan:
+		scan_val=id(scan)
+	key=Utils.h_list((name,self.rule,chmod,shell,color,cls_str,cls_keyword,scan_val,_vars,deep_inputs))
+	cls=None
+	if use_cache:
+		try:
+			cls=cache[key]
+		except KeyError:
+			pass
+	if not cls:
+		rule=self.rule
+		if chmod is not None:
+			def chmod_fun(tsk):
+				for x in tsk.outputs:
+					os.chmod(x.abspath(),tsk.generator.chmod)
+			if isinstance(rule,tuple):
+				rule=list(rule)
+				rule.append(chmod_fun)
+				rule=tuple(rule)
+			else:
+				rule=(rule,chmod_fun)
+		cls=Task.task_factory(name,rule,_vars,shell=shell,color=color)
+		if cls_str:
+			setattr(cls,'__str__',self.cls_str)
+		if cls_keyword:
+			setattr(cls,'keyword',self.cls_keyword)
+		if deep_inputs:
+			Task.deep_inputs(cls)
+		if scan:
+			cls.scan=self.scan
+		elif has_deps:
+			def scan(self):
+				nodes=[]
+				for x in self.generator.to_list(getattr(self.generator,'deps',None)):
+					node=self.generator.path.find_resource(x)
+					if not node:
+						self.generator.bld.fatal('Could not find %r (was it declared?)'%x)
+					nodes.append(node)
+				return[nodes,[]]
+			cls.scan=scan
+		if use_cache:
+			cache[key]=cls
 	tsk=self.create_task(name)
+	for x in('after','before','ext_in','ext_out'):
+		setattr(tsk,x,getattr(self,x,[]))
+	if hasattr(self,'stdout'):
+		tsk.stdout=self.stdout
+	if hasattr(self,'stderr'):
+		tsk.stderr=self.stderr
+	if getattr(self,'timeout',None):
+		tsk.timeout=self.timeout
+	if getattr(self,'always',None):
+		tsk.always_run=True
 	if getattr(self,'target',None):
 		if isinstance(self.target,str):
 			self.target=self.target.split()
@@ -245,30 +330,15 @@ def process_rule(self):
 				x.parent.mkdir()
 				tsk.outputs.append(x)
 		if getattr(self,'install_path',None):
-			self.bld.install_files(self.install_path,tsk.outputs)
+			self.install_task=self.add_install_files(install_to=self.install_path,install_from=tsk.outputs,chmod=getattr(self,'chmod',Utils.O644))
 	if getattr(self,'source',None):
 		tsk.inputs=self.to_nodes(self.source)
 		self.source=[]
-	if getattr(self,'scan',None):
-		cls.scan=self.scan
-	elif getattr(self,'deps',None):
-		def scan(self):
-			nodes=[]
-			for x in self.generator.to_list(self.generator.deps):
-				node=self.generator.path.find_resource(x)
-				if not node:
-					self.generator.bld.fatal('Could not find %r (was it declared?)'%x)
-				nodes.append(node)
-			return[nodes,[]]
-		cls.scan=scan
 	if getattr(self,'cwd',None):
 		tsk.cwd=self.cwd
-	if getattr(self,'update_outputs',None)or getattr(self,'on_results',None):
-		Task.update_outputs(cls)
-	if getattr(self,'always',None):
-		Task.always_run(cls)
-	for x in['after','before','ext_in','ext_out']:
-		setattr(cls,x,getattr(self,x,[]))
+	if isinstance(tsk.run,functools.partial):
+		tsk.run=functools.partial(tsk.run,tsk)
+@feature('seq')
 def sequence_order(self):
 	if self.meths and self.meths[-1]!='sequence_order':
 		self.meths.append('sequence_order')
@@ -281,10 +351,32 @@ def sequence_order(self):
 			for y in self.tasks:
 				y.set_run_after(x)
 	self.bld.prev=self
-re_m4=re.compile('@(\w+)@',re.M)
+re_m4=re.compile(r'@(\w+)@',re.M)
 class subst_pc(Task.Task):
+	def force_permissions(self):
+		if getattr(self.generator,'chmod',None):
+			for x in self.outputs:
+				os.chmod(x.abspath(),self.generator.chmod)
 	def run(self):
-		code=self.inputs[0].read()
+		if getattr(self.generator,'is_copy',None):
+			for i,x in enumerate(self.outputs):
+				x.write(self.inputs[i].read('rb'),'wb')
+				stat=os.stat(self.inputs[i].abspath())
+				os.utime(self.outputs[i].abspath(),(stat.st_atime,stat.st_mtime))
+			self.force_permissions()
+			return None
+		if getattr(self.generator,'fun',None):
+			ret=self.generator.fun(self)
+			if not ret:
+				self.force_permissions()
+			return ret
+		code=self.inputs[0].read(encoding=getattr(self.generator,'encoding','latin-1'))
+		if getattr(self.generator,'subst_fun',None):
+			code=self.generator.subst_fun(self,code)
+			if code is not None:
+				self.outputs[0].write(code,encoding=getattr(self.generator,'encoding','latin-1'))
+			self.force_permissions()
+			return None
 		code=code.replace('%','%%')
 		lst=[]
 		def repl(match):
@@ -293,61 +385,87 @@ class subst_pc(Task.Task):
 				lst.append(g(1))
 				return"%%(%s)s"%g(1)
 			return''
-		code=re_m4.sub(repl,code)
+		code=getattr(self.generator,'re_m4',re_m4).sub(repl,code)
 		try:
 			d=self.generator.dct
 		except AttributeError:
 			d={}
 			for x in lst:
-				tmp=getattr(self.generator,x,'')or self.env.get_flat(x)or self.env.get_flat(x.upper())
-				d[x]=str(tmp)
-		self.outputs[0].write(code%d)
-		self.generator.bld.raw_deps[self.uid()]=self.dep_vars=lst
-		try:delattr(self,'cache_sig')
-		except AttributeError:pass
-		if getattr(self.generator,'chmod',None):
-			os.chmod(self.outputs[0].abspath(),self.generator.chmod)
+				tmp=getattr(self.generator,x,'')or self.env[x]or self.env[x.upper()]
+				try:
+					tmp=''.join(tmp)
+				except TypeError:
+					tmp=str(tmp)
+				d[x]=tmp
+		code=code%d
+		self.outputs[0].write(code,encoding=getattr(self.generator,'encoding','latin-1'))
+		self.generator.bld.raw_deps[self.uid()]=lst
+		try:
+			delattr(self,'cache_sig')
+		except AttributeError:
+			pass
+		self.force_permissions()
 	def sig_vars(self):
 		bld=self.generator.bld
 		env=self.env
 		upd=self.m.update
+		if getattr(self.generator,'fun',None):
+			upd(Utils.h_fun(self.generator.fun).encode())
+		if getattr(self.generator,'subst_fun',None):
+			upd(Utils.h_fun(self.generator.subst_fun).encode())
 		vars=self.generator.bld.raw_deps.get(self.uid(),[])
 		act_sig=bld.hash_env_vars(env,vars)
 		upd(act_sig)
 		lst=[getattr(self.generator,x,'')for x in vars]
 		upd(Utils.h_list(lst))
 		return self.m.digest()
+@extension('.pc.in')
 def add_pcfile(self,node):
 	tsk=self.create_task('subst_pc',node,node.change_ext('.pc','.pc.in'))
-	self.bld.install_files(getattr(self,'install_path','${LIBDIR}/pkgconfig/'),tsk.outputs)
+	self.install_task=self.add_install_files(install_to=getattr(self,'install_path','${LIBDIR}/pkgconfig/'),install_from=tsk.outputs)
 class subst(subst_pc):
 	pass
+@feature('subst')
+@before_method('process_source','process_rule')
 def process_subst(self):
-	src=self.to_nodes(getattr(self,'source',[]))
-	tgt=getattr(self,'target',[])
-	if isinstance(tgt,self.path.__class__):
+	src=Utils.to_list(getattr(self,'source',[]))
+	if isinstance(src,Node.Node):
+		src=[src]
+	tgt=Utils.to_list(getattr(self,'target',[]))
+	if isinstance(tgt,Node.Node):
 		tgt=[tgt]
-	tgt=[isinstance(x,self.path.__class__)and x or self.path.find_or_declare(x)for x in Utils.to_list(tgt)]
 	if len(src)!=len(tgt):
-		raise Errors.WafError('invalid source or target for %r'%self)
+		raise Errors.WafError('invalid number of source/target for %r'%self)
 	for x,y in zip(src,tgt):
-		if not(x and y):
-			raise Errors.WafError('invalid source or target for %r'%self)
-		tsk=self.create_task('subst',x,y)
-		for a in('after','before','ext_in','ext_out'):
-			val=getattr(self,a,None)
+		if not x or not y:
+			raise Errors.WafError('null source or target for %r'%self)
+		a,b=None,None
+		if isinstance(x,str)and isinstance(y,str)and x==y:
+			a=self.path.find_node(x)
+			b=self.path.get_bld().make_node(y)
+			if not os.path.isfile(b.abspath()):
+				b.parent.mkdir()
+		else:
+			if isinstance(x,str):
+				a=self.path.find_resource(x)
+			elif isinstance(x,Node.Node):
+				a=x
+			if isinstance(y,str):
+				b=self.path.find_or_declare(y)
+			elif isinstance(y,Node.Node):
+				b=y
+		if not a:
+			raise Errors.WafError('could not find %r for %r'%(x,self))
+		tsk=self.create_task('subst',a,b)
+		for k in('after','before','ext_in','ext_out'):
+			val=getattr(self,k,None)
 			if val:
-				setattr(tsk,a,val)
-	inst_to=getattr(self,'install_path',None)
-	if inst_to:
-		self.bld.install_files(inst_to,tgt,chmod=getattr(self,'chmod',Utils.O644))
+				setattr(tsk,k,val)
+		for xt in HEADER_EXTS:
+			if b.name.endswith(xt):
+				tsk.ext_out=tsk.ext_out+['.h']
+				break
+		inst_to=getattr(self,'install_path',None)
+		if inst_to:
+			self.install_task=self.add_install_files(install_to=inst_to,install_from=b,chmod=getattr(self,'chmod',Utils.O644))
 	self.source=[]
-
-taskgen_method(to_nodes)
-feature('*')(process_source)
-feature('*')(process_rule)
-before_method('process_source')(process_rule)
-feature('seq')(sequence_order)
-extension('.pc.in')(add_pcfile)
-feature('subst')(process_subst)
-before_method('process_source','process_rule')(process_subst)
\ No newline at end of file
--- pugl-0~svn32+dfsg0.orig/waflib/Tools/__init__.py
+++ pugl-0~svn32+dfsg0/waflib/Tools/__init__.py
@@ -1,4 +1,4 @@
 #! /usr/bin/env python
 # encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
 
--- pugl-0~svn32+dfsg0.orig/waflib/Tools/ar.py
+++ pugl-0~svn32+dfsg0/waflib/Tools/ar.py
@@ -1,12 +1,13 @@
 #! /usr/bin/env python
 # encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
 
 from waflib.Configure import conf
+@conf
 def find_ar(conf):
 	conf.load('ar')
 def configure(conf):
 	conf.find_program('ar',var='AR')
-	conf.env.ARFLAGS='rcs'
-
-conf(find_ar)
\ No newline at end of file
+	conf.add_os_flags('ARFLAGS')
+	if not conf.env.ARFLAGS:
+		conf.env.ARFLAGS=['rcs']
--- pugl-0~svn32+dfsg0.orig/waflib/Tools/asm.py
+++ pugl-0~svn32+dfsg0/waflib/Tools/asm.py
@@ -1,25 +1,50 @@
 #! /usr/bin/env python
 # encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
 
-import os,sys
-from waflib import Task,Utils
-import waflib.Task
+import re
+from waflib import Errors,Logs,Task
 from waflib.Tools.ccroot import link_task,stlink_task
-from waflib.TaskGen import extension,feature
+from waflib.TaskGen import extension
+from waflib.Tools import c_preproc
+re_lines=re.compile('^[ \t]*(?:%)[ \t]*(ifdef|ifndef|if|else|elif|endif|include|import|define|undef)[ \t]*(.*)\r*$',re.IGNORECASE|re.MULTILINE)
+class asm_parser(c_preproc.c_parser):
+	def filter_comments(self,node):
+		code=node.read()
+		code=c_preproc.re_nl.sub('',code)
+		code=c_preproc.re_cpp.sub(c_preproc.repl,code)
+		return re_lines.findall(code)
 class asm(Task.Task):
 	color='BLUE'
-	run_str='${AS} ${ASFLAGS} ${CPPPATH_ST:INCPATHS} ${AS_SRC_F}${SRC} ${AS_TGT_F}${TGT}'
+	run_str='${AS} ${ASFLAGS} ${ASMPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${AS_SRC_F}${SRC} ${AS_TGT_F}${TGT}'
+	def scan(self):
+		if self.env.ASM_NAME=='gas':
+			return c_preproc.scan(self)
+			Logs.warn('There is no dependency scanner for Nasm!')
+			return[[],[]]
+		elif self.env.ASM_NAME=='nasm':
+			Logs.warn('The Nasm dependency scanner is incomplete!')
+		try:
+			incn=self.generator.includes_nodes
+		except AttributeError:
+			raise Errors.WafError('%r is missing the "asm" feature'%self.generator)
+		if c_preproc.go_absolute:
+			nodepaths=incn
+		else:
+			nodepaths=[x for x in incn if x.is_child_of(x.ctx.srcnode)or x.is_child_of(x.ctx.bldnode)]
+		tmp=asm_parser(nodepaths)
+		tmp.start(self.inputs[0],self.env)
+		return(tmp.nodes,tmp.names)
+@extension('.s','.S','.asm','.ASM','.spp','.SPP')
 def asm_hook(self,node):
 	return self.create_compiled_task('asm',node)
 class asmprogram(link_task):
 	run_str='${ASLINK} ${ASLINKFLAGS} ${ASLNK_TGT_F}${TGT} ${ASLNK_SRC_F}${SRC}'
 	ext_out=['.bin']
 	inst_to='${BINDIR}'
-	chmod=Utils.O755
 class asmshlib(asmprogram):
 	inst_to='${LIBDIR}'
 class asmstlib(stlink_task):
 	pass
-
-extension('.s','.S','.asm','.ASM','.spp','.SPP')(asm_hook)
\ No newline at end of file
+def configure(conf):
+	conf.env.ASMPATH_ST='-I%s'
--- pugl-0~svn32+dfsg0.orig/waflib/Tools/bison.py
+++ pugl-0~svn32+dfsg0/waflib/Tools/bison.py
@@ -1,6 +1,6 @@
 #! /usr/bin/env python
 # encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
 
 from waflib import Task
 from waflib.TaskGen import extension
@@ -8,8 +8,9 @@ class bison(Task.Task):
 	color='BLUE'
 	run_str='${BISON} ${BISONFLAGS} ${SRC[0].abspath()} -o ${TGT[0].name}'
 	ext_out=['.h']
+@extension('.y','.yc','.yy')
 def big_bison(self,node):
-	has_h='-d'in self.env['BISONFLAGS']
+	has_h='-d'in self.env.BISONFLAGS
 	outs=[]
 	if node.name.endswith('.yc'):
 		outs.append(node.change_ext('.tab.cc'))
@@ -20,10 +21,8 @@ def big_bison(self,node):
 		if has_h:
 			outs.append(node.change_ext('.tab.h'))
 	tsk=self.create_task('bison',node,outs)
-	tsk.cwd=node.parent.get_bld().abspath()
+	tsk.cwd=node.parent.get_bld()
 	self.source.append(outs[0])
 def configure(conf):
 	conf.find_program('bison',var='BISON')
 	conf.env.BISONFLAGS=['-d']
-
-extension('.y','.yc','.yy')(big_bison)
\ No newline at end of file
--- pugl-0~svn32+dfsg0.orig/waflib/Tools/c.py
+++ pugl-0~svn32+dfsg0/waflib/Tools/c.py
@@ -1,27 +1,26 @@
 #! /usr/bin/env python
 # encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
 
-from waflib import TaskGen,Task,Utils
+from waflib import TaskGen,Task
 from waflib.Tools import c_preproc
 from waflib.Tools.ccroot import link_task,stlink_task
+@TaskGen.extension('.c')
 def c_hook(self,node):
+	if not self.env.CC and self.env.CXX:
+		return self.create_compiled_task('cxx',node)
 	return self.create_compiled_task('c',node)
 class c(Task.Task):
-	run_str='${CC} ${ARCH_ST:ARCH} ${CFLAGS} ${CPPFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${CC_SRC_F}${SRC} ${CC_TGT_F}${TGT}'
+	run_str='${CC} ${ARCH_ST:ARCH} ${CFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${CC_SRC_F}${SRC} ${CC_TGT_F}${TGT[0].abspath()} ${CPPFLAGS}'
 	vars=['CCDEPS']
 	ext_in=['.h']
 	scan=c_preproc.scan
-Task.classes['cc']=cc=c
 class cprogram(link_task):
-	run_str='${LINK_CC} ${LINKFLAGS} ${CCLNK_SRC_F}${SRC} ${CCLNK_TGT_F}${TGT[0].abspath()} ${RPATH_ST:RPATH} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${FRAMEWORK_ST:FRAMEWORK} ${ARCH_ST:ARCH} ${STLIB_MARKER} ${STLIBPATH_ST:STLIBPATH} ${STLIB_ST:STLIB} ${SHLIB_MARKER} ${LIBPATH_ST:LIBPATH} ${LIB_ST:LIB}'
+	run_str='${LINK_CC} ${LINKFLAGS} ${CCLNK_SRC_F}${SRC} ${CCLNK_TGT_F}${TGT[0].abspath()} ${RPATH_ST:RPATH} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${FRAMEWORK_ST:FRAMEWORK} ${ARCH_ST:ARCH} ${STLIB_MARKER} ${STLIBPATH_ST:STLIBPATH} ${STLIB_ST:STLIB} ${SHLIB_MARKER} ${LIBPATH_ST:LIBPATH} ${LIB_ST:LIB} ${LDFLAGS}'
 	ext_out=['.bin']
 	vars=['LINKDEPS']
 	inst_to='${BINDIR}'
-	chmod=Utils.O755
 class cshlib(cprogram):
 	inst_to='${LIBDIR}'
 class cstlib(stlink_task):
 	pass
-
-TaskGen.extension('.c')(c_hook)
\ No newline at end of file
--- pugl-0~svn32+dfsg0.orig/waflib/Tools/c_aliases.py
+++ pugl-0~svn32+dfsg0/waflib/Tools/c_aliases.py
@@ -1,56 +1,62 @@
 #! /usr/bin/env python
 # encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
 
-import os,sys,re
-from waflib import Utils,Build
+from waflib import Utils,Errors
 from waflib.Configure import conf
 def get_extensions(lst):
 	ret=[]
 	for x in Utils.to_list(lst):
-		try:
-			if not isinstance(x,str):
-				x=x.name
-			ret.append(x[x.rfind('.')+1:])
-		except:
-			pass
+		if not isinstance(x,str):
+			x=x.name
+		ret.append(x[x.rfind('.')+1:])
 	return ret
 def sniff_features(**kw):
-	exts=get_extensions(kw['source'])
-	type=kw['_type']
+	exts=get_extensions(kw.get('source',[]))
+	typ=kw['typ']
 	feats=[]
-	if'cxx'in exts or'cpp'in exts or'c++'in exts or'cc'in exts or'C'in exts:
-		feats.append('cxx')
-	if'c'in exts or'vala'in exts:
+	for x in'cxx cpp c++ cc C'.split():
+		if x in exts:
+			feats.append('cxx')
+			break
+	if'c'in exts or'vala'in exts or'gs'in exts:
 		feats.append('c')
+	if's'in exts or'S'in exts:
+		feats.append('asm')
+	for x in'f f90 F F90 for FOR'.split():
+		if x in exts:
+			feats.append('fc')
+			break
 	if'd'in exts:
 		feats.append('d')
 	if'java'in exts:
 		feats.append('java')
-	if'java'in exts:
 		return'java'
-	if type in['program','shlib','stlib']:
+	if typ in('program','shlib','stlib'):
+		will_link=False
 		for x in feats:
-			if x in['cxx','d','c']:
-				feats.append(x+type)
+			if x in('cxx','d','fc','c','asm'):
+				feats.append(x+typ)
+				will_link=True
+		if not will_link and not kw.get('features',[]):
+			raise Errors.WafError('Unable to determine how to link %r, try adding eg: features="c cshlib"?'%kw)
 	return feats
-def set_features(kw,_type):
-	kw['_type']=_type
+def set_features(kw,typ):
+	kw['typ']=typ
 	kw['features']=Utils.to_list(kw.get('features',[]))+Utils.to_list(sniff_features(**kw))
+@conf
 def program(bld,*k,**kw):
 	set_features(kw,'program')
 	return bld(*k,**kw)
+@conf
 def shlib(bld,*k,**kw):
 	set_features(kw,'shlib')
 	return bld(*k,**kw)
+@conf
 def stlib(bld,*k,**kw):
 	set_features(kw,'stlib')
 	return bld(*k,**kw)
+@conf
 def objects(bld,*k,**kw):
 	set_features(kw,'objects')
 	return bld(*k,**kw)
-
-conf(program)
-conf(shlib)
-conf(stlib)
-conf(objects)
\ No newline at end of file
--- pugl-0~svn32+dfsg0.orig/waflib/Tools/c_config.py
+++ pugl-0~svn32+dfsg0/waflib/Tools/c_config.py
@@ -1,229 +1,256 @@
 #! /usr/bin/env python
 # encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
 
-import sys
-if sys.hexversion < 0x020400f0: from sets import Set as set
-import os,imp,sys,re,shlex,shutil
-from waflib import Build,Utils,Configure,Task,Options,Logs,TaskGen,Errors,ConfigSet,Runner
-from waflib.TaskGen import before_method,after_method,feature
+from __future__ import with_statement
+import os,re,shlex
+from waflib import Build,Utils,Task,Options,Logs,Errors,Runner
+from waflib.TaskGen import after_method,feature
 from waflib.Configure import conf
 WAF_CONFIG_H='config.h'
 DEFKEYS='define_key'
 INCKEYS='include_key'
-cfg_ver={'atleast-version':'>=','exact-version':'==','max-version':'<=',}
-SNIP_FUNCTION='''
-	int main() {
-	void *p;
-	p=(void*)(%s);
-	return 0;
-}
-'''
-SNIP_TYPE='''
-int main() {
-	if ((%(type_name)s *) 0) return 0;
-	if (sizeof (%(type_name)s)) return 0;
-}
-'''
-SNIP_CLASS='''
-int main() {
-	if (
-}
-'''
 SNIP_EMPTY_PROGRAM='''
-int main() {
+int main(int argc, char **argv) {
+	(void)argc; (void)argv;
 	return 0;
 }
 '''
-SNIP_FIELD='''
-int main() {
-	char *off;
-	off = (char*) &((%(type_name)s*)0)->%(field_name)s;
-	return (size_t) off < sizeof(%(type_name)s);
-}
-'''
-MACRO_TO_DESTOS={'__linux__':'linux','__GNU__':'gnu','__FreeBSD__':'freebsd','__NetBSD__':'netbsd','__OpenBSD__':'openbsd','__sun':'sunos','__hpux':'hpux','__sgi':'irix','_AIX':'aix','__CYGWIN__':'cygwin','__MSYS__':'msys','_UWIN':'uwin','_WIN64':'win32','_WIN32':'win32','__ENVIRONMENT_MAC_OS_X_VERSION_MIN_REQUIRED__':'darwin','__ENVIRONMENT_IPHONE_OS_VERSION_MIN_REQUIRED__':'darwin','__QNX__':'qnx','__native_client__':'nacl'}
-MACRO_TO_DEST_CPU={'__x86_64__':'x86_64','__i386__':'x86','__ia64__':'ia','__mips__':'mips','__sparc__':'sparc','__alpha__':'alpha','__arm__':'arm','__hppa__':'hppa','__powerpc__':'powerpc',}
-def parse_flags(self,line,uselib,env=None,force_static=False):
+MACRO_TO_DESTOS={'__linux__':'linux','__GNU__':'gnu','__FreeBSD__':'freebsd','__NetBSD__':'netbsd','__OpenBSD__':'openbsd','__sun':'sunos','__hpux':'hpux','__sgi':'irix','_AIX':'aix','__CYGWIN__':'cygwin','__MSYS__':'cygwin','_UWIN':'uwin','_WIN64':'win32','_WIN32':'win32','__ENVIRONMENT_MAC_OS_X_VERSION_MIN_REQUIRED__':'darwin','__ENVIRONMENT_IPHONE_OS_VERSION_MIN_REQUIRED__':'darwin','__QNX__':'qnx','__native_client__':'nacl'}
+MACRO_TO_DEST_CPU={'__x86_64__':'x86_64','__amd64__':'x86_64','__i386__':'x86','__ia64__':'ia','__mips__':'mips','__sparc__':'sparc','__alpha__':'alpha','__aarch64__':'aarch64','__thumb__':'thumb','__arm__':'arm','__hppa__':'hppa','__powerpc__':'powerpc','__ppc__':'powerpc','__convex__':'convex','__m68k__':'m68k','__s390x__':'s390x','__s390__':'s390','__sh__':'sh','__xtensa__':'xtensa',}
+@conf
+def parse_flags(self,line,uselib_store,env=None,force_static=False,posix=None):
 	assert(isinstance(line,str))
 	env=env or self.env
-	app=env.append_value
-	appu=env.append_unique
-	lex=shlex.shlex(line,posix=False)
+	if posix is None:
+		posix=True
+		if'\\'in line:
+			posix=('\\ 'in line)or('\\\\'in line)
+	lex=shlex.shlex(line,posix=posix)
 	lex.whitespace_split=True
 	lex.commenters=''
 	lst=list(lex)
+	so_re=re.compile(r"\.so(?:\.[0-9]+)*$")
+	uselib=uselib_store
+	def app(var,val):
+		env.append_value('%s_%s'%(var,uselib),val)
+	def appu(var,val):
+		env.append_unique('%s_%s'%(var,uselib),val)
+	static=False
 	while lst:
 		x=lst.pop(0)
 		st=x[:2]
 		ot=x[2:]
 		if st=='-I'or st=='/I':
-			if not ot:ot=lst.pop(0)
-			appu('INCLUDES_'+uselib,[ot])
-		elif st=='-include':
+			if not ot:
+				ot=lst.pop(0)
+			appu('INCLUDES',ot)
+		elif st=='-i':
 			tmp=[x,lst.pop(0)]
 			app('CFLAGS',tmp)
 			app('CXXFLAGS',tmp)
-		elif st=='-D'or(self.env.CXX_NAME=='msvc'and st=='/D'):
-			if not ot:ot=lst.pop(0)
-			app('DEFINES_'+uselib,[ot])
+		elif st=='-D'or(env.CXX_NAME=='msvc'and st=='/D'):
+			if not ot:
+				ot=lst.pop(0)
+			app('DEFINES',ot)
 		elif st=='-l':
-			if not ot:ot=lst.pop(0)
-			prefix=force_static and'STLIB_'or'LIB_'
-			appu(prefix+uselib,[ot])
+			if not ot:
+				ot=lst.pop(0)
+			prefix='STLIB'if(force_static or static)else'LIB'
+			app(prefix,ot)
 		elif st=='-L':
-			if not ot:ot=lst.pop(0)
-			appu('LIBPATH_'+uselib,[ot])
-		elif x=='-pthread'or x.startswith('+')or x.startswith('-std'):
-			app('CFLAGS_'+uselib,[x])
-			app('CXXFLAGS_'+uselib,[x])
-			app('LINKFLAGS_'+uselib,[x])
+			if not ot:
+				ot=lst.pop(0)
+			prefix='STLIBPATH'if(force_static or static)else'LIBPATH'
+			appu(prefix,ot)
+		elif x.startswith('/LIBPATH:'):
+			prefix='STLIBPATH'if(force_static or static)else'LIBPATH'
+			appu(prefix,x.replace('/LIBPATH:',''))
+		elif x.startswith('-std='):
+			prefix='CXXFLAGS'if'++'in x else'CFLAGS'
+			app(prefix,x)
+		elif x.startswith('+')or x in('-pthread','-fPIC','-fpic','-fPIE','-fpie'):
+			app('CFLAGS',x)
+			app('CXXFLAGS',x)
+			app('LINKFLAGS',x)
 		elif x=='-framework':
-			appu('FRAMEWORK_'+uselib,[lst.pop(0)])
+			appu('FRAMEWORK',lst.pop(0))
 		elif x.startswith('-F'):
-			appu('FRAMEWORKPATH_'+uselib,[x[2:]])
-		elif x.startswith('-Wl'):
-			app('LINKFLAGS_'+uselib,[x])
-		elif x.startswith('-m')or x.startswith('-f')or x.startswith('-dynamic'):
-			app('CFLAGS_'+uselib,[x])
-			app('CXXFLAGS_'+uselib,[x])
+			appu('FRAMEWORKPATH',x[2:])
+		elif x=='-Wl,-rpath'or x=='-Wl,-R':
+			app('RPATH',lst.pop(0).lstrip('-Wl,'))
+		elif x.startswith('-Wl,-R,'):
+			app('RPATH',x[7:])
+		elif x.startswith('-Wl,-R'):
+			app('RPATH',x[6:])
+		elif x.startswith('-Wl,-rpath,'):
+			app('RPATH',x[11:])
+		elif x=='-Wl,-Bstatic'or x=='-Bstatic':
+			static=True
+		elif x=='-Wl,-Bdynamic'or x=='-Bdynamic':
+			static=False
+		elif x.startswith('-Wl')or x in('-rdynamic','-pie'):
+			app('LINKFLAGS',x)
+		elif x.startswith(('-m','-f','-dynamic','-O','-g')):
+			app('CFLAGS',x)
+			app('CXXFLAGS',x)
 		elif x.startswith('-bundle'):
-			app('LINKFLAGS_'+uselib,[x])
-		elif x.startswith('-undefined'):
+			app('LINKFLAGS',x)
+		elif x.startswith(('-undefined','-Xlinker')):
 			arg=lst.pop(0)
-			app('LINKFLAGS_'+uselib,[x,arg])
-		elif x.startswith('-arch')or x.startswith('-isysroot'):
+			app('LINKFLAGS',[x,arg])
+		elif x.startswith(('-arch','-isysroot')):
 			tmp=[x,lst.pop(0)]
-			app('CFLAGS_'+uselib,tmp)
-			app('CXXFLAGS_'+uselib,tmp)
-			app('LINKFLAGS_'+uselib,tmp)
-		elif x.endswith('.a')or x.endswith('.so')or x.endswith('.dylib'):
-			appu('LINKFLAGS_'+uselib,[x])
-def ret_msg(self,f,kw):
-	if isinstance(f,str):
-		return f
-	return f(kw)
+			app('CFLAGS',tmp)
+			app('CXXFLAGS',tmp)
+			app('LINKFLAGS',tmp)
+		elif x.endswith(('.a','.dylib','.lib'))or so_re.search(x):
+			appu('LINKFLAGS',x)
+		else:
+			self.to_log('Unhandled flag %r'%x)
+@conf
 def validate_cfg(self,kw):
 	if not'path'in kw:
 		if not self.env.PKGCONFIG:
 			self.find_program('pkg-config',var='PKGCONFIG')
 		kw['path']=self.env.PKGCONFIG
-	if'atleast_pkgconfig_version'in kw:
-		if not'msg'in kw:
+	s=('atleast_pkgconfig_version'in kw)+('modversion'in kw)+('package'in kw)
+	if s!=1:
+		raise ValueError('exactly one of atleast_pkgconfig_version, modversion and package must be set')
+	if not'msg'in kw:
+		if'atleast_pkgconfig_version'in kw:
 			kw['msg']='Checking for pkg-config version >= %r'%kw['atleast_pkgconfig_version']
-		return
-	if not'okmsg'in kw:
+		elif'modversion'in kw:
+			kw['msg']='Checking for %r version'%kw['modversion']
+		else:
+			kw['msg']='Checking for %r'%(kw['package'])
+	if not'okmsg'in kw and not'modversion'in kw:
 		kw['okmsg']='yes'
 	if not'errmsg'in kw:
 		kw['errmsg']='not found'
-	if'modversion'in kw:
-		if not'msg'in kw:
-			kw['msg']='Checking for %r version'%kw['modversion']
-		return
-	for x in cfg_ver.keys():
-		y=x.replace('-','_')
-		if y in kw:
-			if not'package'in kw:
-				raise ValueError('%s requires a package'%x)
-			if not'msg'in kw:
-				kw['msg']='Checking for %r %s %s'%(kw['package'],cfg_ver[x],kw[y])
-			return
-	if not'msg'in kw:
-		kw['msg']='Checking for %r'%(kw['package']or kw['path'])
+	if'atleast_pkgconfig_version'in kw:
+		pass
+	elif'modversion'in kw:
+		if not'uselib_store'in kw:
+			kw['uselib_store']=kw['modversion']
+		if not'define_name'in kw:
+			kw['define_name']='%s_VERSION'%Utils.quote_define_name(kw['uselib_store'])
+	else:
+		if not'uselib_store'in kw:
+			kw['uselib_store']=Utils.to_list(kw['package'])[0].upper()
+		if not'define_name'in kw:
+			kw['define_name']=self.have_define(kw['uselib_store'])
+@conf
 def exec_cfg(self,kw):
+	path=Utils.to_list(kw['path'])
+	env=self.env.env or None
+	if kw.get('pkg_config_path'):
+		if not env:
+			env=dict(self.environ)
+		env['PKG_CONFIG_PATH']=kw['pkg_config_path']
+	def define_it():
+		define_name=kw['define_name']
+		if kw.get('global_define',1):
+			self.define(define_name,1,False)
+		else:
+			self.env.append_unique('DEFINES_%s'%kw['uselib_store'],"%s=1"%define_name)
+		if kw.get('add_have_to_env',1):
+			self.env[define_name]=1
 	if'atleast_pkgconfig_version'in kw:
-		cmd=[kw['path'],'--atleast-pkgconfig-version=%s'%kw['atleast_pkgconfig_version']]
-		self.cmd_and_log(cmd)
-		if not'okmsg'in kw:
-			kw['okmsg']='yes'
+		cmd=path+['--atleast-pkgconfig-version=%s'%kw['atleast_pkgconfig_version']]
+		self.cmd_and_log(cmd,env=env)
 		return
-	for x in cfg_ver:
-		y=x.replace('-','_')
-		if y in kw:
-			self.cmd_and_log([kw['path'],'--%s=%s'%(x,kw[y]),kw['package']])
-			if not'okmsg'in kw:
-				kw['okmsg']='yes'
-			self.define(self.have_define(kw.get('uselib_store',kw['package'])),1,0)
-			break
 	if'modversion'in kw:
-		version=self.cmd_and_log([kw['path'],'--modversion',kw['modversion']]).strip()
-		self.define('%s_VERSION'%Utils.quote_define_name(kw.get('uselib_store',kw['modversion'])),version)
+		version=self.cmd_and_log(path+['--modversion',kw['modversion']],env=env).strip()
+		if not'okmsg'in kw:
+			kw['okmsg']=version
+		self.define(kw['define_name'],version)
 		return version
-	lst=[kw['path']]
-	defi=kw.get('define_variable',None)
+	lst=[]+path
+	defi=kw.get('define_variable')
 	if not defi:
 		defi=self.env.PKG_CONFIG_DEFINES or{}
 	for key,val in defi.items():
 		lst.append('--define-variable=%s=%s'%(key,val))
-	if kw['package']:
-		lst.extend(Utils.to_list(kw['package']))
-	if'variables'in kw:
-		env=kw.get('env',self.env)
-		uselib=kw.get('uselib_store',kw['package'].upper())
-		vars=Utils.to_list(kw['variables'])
-		for v in vars:
-			val=self.cmd_and_log(lst+['--variable='+v]).strip()
-			var='%s_%s'%(uselib,v)
-			env[var]=val
-		if not'okmsg'in kw:
-			kw['okmsg']='yes'
-		return
-	static=False
+	static=kw.get('force_static',False)
 	if'args'in kw:
 		args=Utils.to_list(kw['args'])
 		if'--static'in args or'--static-libs'in args:
 			static=True
 		lst+=args
-	ret=self.cmd_and_log(lst)
-	if not'okmsg'in kw:
-		kw['okmsg']='yes'
-	self.define(self.have_define(kw.get('uselib_store',kw['package'])),1,0)
-	self.parse_flags(ret,kw.get('uselib_store',kw['package'].upper()),kw.get('env',self.env),force_static=static)
+	lst.extend(Utils.to_list(kw['package']))
+	if'variables'in kw:
+		v_env=kw.get('env',self.env)
+		vars=Utils.to_list(kw['variables'])
+		for v in vars:
+			val=self.cmd_and_log(lst+['--variable='+v],env=env).strip()
+			var='%s_%s'%(kw['uselib_store'],v)
+			v_env[var]=val
+		return
+	ret=self.cmd_and_log(lst,env=env)
+	define_it()
+	self.parse_flags(ret,kw['uselib_store'],kw.get('env',self.env),force_static=static,posix=kw.get('posix'))
 	return ret
+@conf
 def check_cfg(self,*k,**kw):
-	if k:
-		lst=k[0].split()
-		kw['package']=lst[0]
-		kw['args']=' '.join(lst[1:])
 	self.validate_cfg(kw)
 	if'msg'in kw:
-		self.start_msg(kw['msg'])
+		self.start_msg(kw['msg'],**kw)
 	ret=None
 	try:
 		ret=self.exec_cfg(kw)
-	except self.errors.WafError ,e:
+	except self.errors.WafError as e:
 		if'errmsg'in kw:
-			self.end_msg(kw['errmsg'],'YELLOW')
+			self.end_msg(kw['errmsg'],'YELLOW',**kw)
 		if Logs.verbose>1:
-			raise
-		else:
-			self.fatal('The configuration failed')
+			self.to_log('Command failure: %s'%e)
+		self.fatal('The configuration failed')
 	else:
+		if not ret:
+			ret=True
 		kw['success']=ret
 		if'okmsg'in kw:
-			self.end_msg(self.ret_msg(kw['okmsg'],kw))
+			self.end_msg(self.ret_msg(kw['okmsg'],kw),**kw)
 	return ret
+def build_fun(bld):
+	if bld.kw['compile_filename']:
+		node=bld.srcnode.make_node(bld.kw['compile_filename'])
+		node.write(bld.kw['code'])
+	o=bld(features=bld.kw['features'],source=bld.kw['compile_filename'],target='testprog')
+	for k,v in bld.kw.items():
+		setattr(o,k,v)
+	if not bld.kw.get('quiet'):
+		bld.conf.to_log("==>\n%s\n<=="%bld.kw['code'])
+@conf
 def validate_c(self,kw):
+	for x in('type_name','field_name','function_name'):
+		if x in kw:
+			Logs.warn('Invalid argument %r in test'%x)
+	if not'build_fun'in kw:
+		kw['build_fun']=build_fun
 	if not'env'in kw:
 		kw['env']=self.env.derive()
 	env=kw['env']
 	if not'compiler'in kw and not'features'in kw:
 		kw['compiler']='c'
-		if env['CXX_NAME']and Task.classes.get('cxx',None):
+		if env.CXX_NAME and Task.classes.get('cxx'):
 			kw['compiler']='cxx'
-			if not self.env['CXX']:
+			if not self.env.CXX:
 				self.fatal('a c++ compiler is required')
 		else:
-			if not self.env['CC']:
+			if not self.env.CC:
 				self.fatal('a c compiler is required')
 	if not'compile_mode'in kw:
 		kw['compile_mode']='c'
-		if'cxx'in Utils.to_list(kw.get('features',[]))or kw.get('compiler','')=='cxx':
+		if'cxx'in Utils.to_list(kw.get('features',[]))or kw.get('compiler')=='cxx':
 			kw['compile_mode']='cxx'
 	if not'type'in kw:
 		kw['type']='cprogram'
 	if not'features'in kw:
-		kw['features']=[kw['compile_mode'],kw['type']]
+		if not'header_name'in kw or kw.get('link_header_test',True):
+			kw['features']=[kw['compile_mode'],kw['type']]
+		else:
+			kw['features']=[kw['compile_mode']]
 	else:
 		kw['features']=Utils.to_list(kw['features'])
 	if not'compile_filename'in kw:
@@ -237,46 +264,19 @@ def validate_c(self,kw):
 		fwkname=kw['framework_name']
 		if not'uselib_store'in kw:
 			kw['uselib_store']=fwkname.upper()
-		if not kw.get('no_header',False):
-			if not'header_name'in kw:
-				kw['header_name']=[]
+		if not kw.get('no_header'):
 			fwk='%s/%s.h'%(fwkname,fwkname)
-			if kw.get('remove_dot_h',None):
+			if kw.get('remove_dot_h'):
 				fwk=fwk[:-2]
-			kw['header_name']=Utils.to_list(kw['header_name'])+[fwk]
+			val=kw.get('header_name',[])
+			kw['header_name']=Utils.to_list(val)+[fwk]
 		kw['msg']='Checking for framework %s'%fwkname
 		kw['framework']=fwkname
-	if'function_name'in kw:
-		fu=kw['function_name']
-		if not'msg'in kw:
-			kw['msg']='Checking for function %s'%fu
-		kw['code']=to_header(kw)+SNIP_FUNCTION%fu
-		if not'uselib_store'in kw:
-			kw['uselib_store']=fu.upper()
-		if not'define_name'in kw:
-			kw['define_name']=self.have_define(fu)
-	elif'type_name'in kw:
-		tu=kw['type_name']
-		if not'header_name'in kw:
-			kw['header_name']='stdint.h'
-		if'field_name'in kw:
-			field=kw['field_name']
-			kw['code']=to_header(kw)+SNIP_FIELD%{'type_name':tu,'field_name':field}
-			if not'msg'in kw:
-				kw['msg']='Checking for field %s in %s'%(field,tu)
-			if not'define_name'in kw:
-				kw['define_name']=self.have_define((tu+'_'+field).upper())
-		else:
-			kw['code']=to_header(kw)+SNIP_TYPE%{'type_name':tu}
-			if not'msg'in kw:
-				kw['msg']='Checking for type %s'%tu
-			if not'define_name'in kw:
-				kw['define_name']=self.have_define(tu.upper())
 	elif'header_name'in kw:
 		if not'msg'in kw:
 			kw['msg']='Checking for header %s'%kw['header_name']
 		l=Utils.to_list(kw['header_name'])
-		assert len(l)>0,'list of headers in header_name is empty'
+		assert len(l),'list of headers in header_name is empty'
 		kw['code']=to_header(kw)+SNIP_EMPTY_PROGRAM
 		if not'uselib_store'in kw:
 			kw['uselib_store']=l[0].upper()
@@ -298,7 +298,7 @@ def validate_c(self,kw):
 			kw['msg']='Checking for code snippet'
 		if not'errmsg'in kw:
 			kw['errmsg']='no'
-	for(flagsname,flagstype)in[('cxxflags','compiler'),('cflags','compiler'),('linkflags','linker')]:
+	for(flagsname,flagstype)in(('cxxflags','compiler'),('cflags','compiler'),('linkflags','linker')):
 		if flagsname in kw:
 			if not'msg'in kw:
 				kw['msg']='Checking for %s flags %s'%(flagstype,kw[flagsname])
@@ -308,6 +308,7 @@ def validate_c(self,kw):
 		kw['execute']=False
 	if kw['execute']:
 		kw['features'].append('test_exec')
+		kw['chmod']=Utils.O755
 	if not'errmsg'in kw:
 		kw['errmsg']='not found'
 	if not'okmsg'in kw:
@@ -316,158 +317,145 @@ def validate_c(self,kw):
 		kw['code']=SNIP_EMPTY_PROGRAM
 	if self.env[INCKEYS]:
 		kw['code']='\n'.join(['#include <%s>'%x for x in self.env[INCKEYS]])+'\n'+kw['code']
-	if not kw.get('success'):kw['success']=None
+	if kw.get('merge_config_header')or env.merge_config_header:
+		kw['code']='%s\n\n%s'%(self.get_config_header(),kw['code'])
+		env.DEFINES=[]
+	if not kw.get('success'):
+		kw['success']=None
 	if'define_name'in kw:
 		self.undefine(kw['define_name'])
-	assert'msg'in kw,'invalid parameters, read http://freehackers.org/~tnagy/wafbook/single.html#config_helpers_c'
+	if not'msg'in kw:
+		self.fatal('missing "msg" in conf.check(...)')
+@conf
 def post_check(self,*k,**kw):
 	is_success=0
 	if kw['execute']:
 		if kw['success']is not None:
-			if kw.get('define_ret',False):
+			if kw.get('define_ret'):
 				is_success=kw['success']
 			else:
 				is_success=(kw['success']==0)
 	else:
 		is_success=(kw['success']==0)
-	if'define_name'in kw:
-		if'header_name'in kw or'function_name'in kw or'type_name'in kw or'fragment'in kw:
-			nm=kw['define_name']
-			if kw['execute']and kw.get('define_ret',None)and isinstance(is_success,str):
-				self.define(kw['define_name'],is_success,quote=kw.get('quote',1))
+	if kw.get('define_name'):
+		comment=kw.get('comment','')
+		define_name=kw['define_name']
+		if kw['execute']and kw.get('define_ret')and isinstance(is_success,str):
+			if kw.get('global_define',1):
+				self.define(define_name,is_success,quote=kw.get('quote',1),comment=comment)
 			else:
-				self.define_cond(kw['define_name'],is_success)
+				if kw.get('quote',1):
+					succ='"%s"'%is_success
+				else:
+					succ=int(is_success)
+				val='%s=%s'%(define_name,succ)
+				var='DEFINES_%s'%kw['uselib_store']
+				self.env.append_value(var,val)
 		else:
-			self.define_cond(kw['define_name'],is_success)
+			if kw.get('global_define',1):
+				self.define_cond(define_name,is_success,comment=comment)
+			else:
+				var='DEFINES_%s'%kw['uselib_store']
+				self.env.append_value(var,'%s=%s'%(define_name,int(is_success)))
+		if kw.get('add_have_to_env',1):
+			if kw.get('uselib_store'):
+				self.env[self.have_define(kw['uselib_store'])]=1
+			elif kw['execute']and kw.get('define_ret'):
+				self.env[define_name]=is_success
+			else:
+				self.env[define_name]=int(is_success)
 	if'header_name'in kw:
-		if kw.get('auto_add_header_name',False):
+		if kw.get('auto_add_header_name'):
 			self.env.append_value(INCKEYS,Utils.to_list(kw['header_name']))
 	if is_success and'uselib_store'in kw:
 		from waflib.Tools import ccroot
-		_vars=set([])
+		_vars=set()
 		for x in kw['features']:
 			if x in ccroot.USELIB_VARS:
 				_vars|=ccroot.USELIB_VARS[x]
 		for k in _vars:
-			lk=k.lower()
-			if k=='INCLUDES':lk='includes'
-			if k=='DEFINES':lk='defines'
-			if lk in kw:
-				val=kw[lk]
-				if isinstance(val,str):
-					val=val.rstrip(os.path.sep)
-				self.env.append_unique(k+'_'+kw['uselib_store'],val)
+			x=k.lower()
+			if x in kw:
+				self.env.append_value(k+'_'+kw['uselib_store'],kw[x])
 	return is_success
+@conf
 def check(self,*k,**kw):
 	self.validate_c(kw)
-	self.start_msg(kw['msg'])
+	self.start_msg(kw['msg'],**kw)
 	ret=None
 	try:
-		ret=self.run_c_code(*k,**kw)
-	except self.errors.ConfigurationError ,e:
-		self.end_msg(kw['errmsg'],'YELLOW')
+		ret=self.run_build(*k,**kw)
+	except self.errors.ConfigurationError:
+		self.end_msg(kw['errmsg'],'YELLOW',**kw)
 		if Logs.verbose>1:
 			raise
 		else:
 			self.fatal('The configuration failed')
 	else:
 		kw['success']=ret
-		self.end_msg(self.ret_msg(kw['okmsg'],kw))
 	ret=self.post_check(*k,**kw)
 	if not ret:
+		self.end_msg(kw['errmsg'],'YELLOW',**kw)
 		self.fatal('The configuration failed %r'%ret)
+	else:
+		self.end_msg(self.ret_msg(kw['okmsg'],kw),**kw)
 	return ret
 class test_exec(Task.Task):
 	color='PINK'
 	def run(self):
+		cmd=[self.inputs[0].abspath()]+getattr(self.generator,'test_args',[])
 		if getattr(self.generator,'rpath',None):
 			if getattr(self.generator,'define_ret',False):
-				self.generator.bld.retval=self.generator.bld.cmd_and_log([self.inputs[0].abspath()])
+				self.generator.bld.retval=self.generator.bld.cmd_and_log(cmd)
 			else:
-				self.generator.bld.retval=self.generator.bld.exec_command([self.inputs[0].abspath()])
+				self.generator.bld.retval=self.generator.bld.exec_command(cmd)
 		else:
 			env=self.env.env or{}
 			env.update(dict(os.environ))
 			for var in('LD_LIBRARY_PATH','DYLD_LIBRARY_PATH','PATH'):
 				env[var]=self.inputs[0].parent.abspath()+os.path.pathsep+env.get(var,'')
 			if getattr(self.generator,'define_ret',False):
-				self.generator.bld.retval=self.generator.bld.cmd_and_log([self.inputs[0].abspath()],env=env)
+				self.generator.bld.retval=self.generator.bld.cmd_and_log(cmd,env=env)
 			else:
-				self.generator.bld.retval=self.generator.bld.exec_command([self.inputs[0].abspath()],env=env)
+				self.generator.bld.retval=self.generator.bld.exec_command(cmd,env=env)
+@feature('test_exec')
+@after_method('apply_link')
 def test_exec_fun(self):
 	self.create_task('test_exec',self.link_task.outputs[0])
-CACHE_RESULTS=1
-COMPILE_ERRORS=2
-def run_c_code(self,*k,**kw):
-	lst=[str(v)for(p,v)in kw.items()if p!='env']
-	h=Utils.h_list(lst)
-	dir=self.bldnode.abspath()+os.sep+(not Utils.is_win32 and'.'or'')+'conf_check_'+Utils.to_hex(h)
-	try:
-		os.makedirs(dir)
-	except:
-		pass
-	try:
-		os.stat(dir)
-	except:
-		self.fatal('cannot use the configuration test folder %r'%dir)
-	cachemode=getattr(Options.options,'confcache',None)
-	if cachemode==CACHE_RESULTS:
-		try:
-			proj=ConfigSet.ConfigSet(os.path.join(dir,'cache_run_c_code'))
-			ret=proj['cache_run_c_code']
-		except:
-			pass
-		else:
-			if isinstance(ret,str)and ret.startswith('Test does not build'):
-				self.fatal(ret)
-			return ret
-	bdir=os.path.join(dir,'testbuild')
-	if not os.path.exists(bdir):
-		os.makedirs(bdir)
-	self.test_bld=bld=Build.BuildContext(top_dir=dir,out_dir=bdir)
-	bld.init_dirs()
-	bld.progress_bar=0
-	bld.targets='*'
-	if kw['compile_filename']:
-		node=bld.srcnode.make_node(kw['compile_filename'])
-		node.write(kw['code'])
-	bld.logger=self.logger
-	bld.all_envs.update(self.all_envs)
-	bld.env=kw['env']
-	o=bld(features=kw['features'],source=kw['compile_filename'],target='testprog')
-	for k,v in kw.items():
-		setattr(o,k,v)
-	self.to_log("==>\n%s\n<=="%kw['code'])
-	bld.targets='*'
-	ret=-1
-	try:
-		try:
-			bld.compile()
-		except Errors.WafError:
-			ret='Test does not build: %s'%Utils.ex_stack()
-			self.fatal(ret)
-		else:
-			ret=getattr(bld,'retval',0)
-	finally:
-		proj=ConfigSet.ConfigSet()
-		proj['cache_run_c_code']=ret
-		proj.store(os.path.join(dir,'cache_run_c_code'))
-	return ret
+@conf
 def check_cxx(self,*k,**kw):
 	kw['compiler']='cxx'
 	return self.check(*k,**kw)
+@conf
 def check_cc(self,*k,**kw):
 	kw['compiler']='c'
 	return self.check(*k,**kw)
-def define(self,key,val,quote=True):
-	assert key and isinstance(key,str)
+@conf
+def set_define_comment(self,key,comment):
+	coms=self.env.DEFINE_COMMENTS
+	if not coms:
+		coms=self.env.DEFINE_COMMENTS={}
+	coms[key]=comment or''
+@conf
+def get_define_comment(self,key):
+	coms=self.env.DEFINE_COMMENTS or{}
+	return coms.get(key,'')
+@conf
+def define(self,key,val,quote=True,comment=''):
+	assert isinstance(key,str)
+	if not key:
+		return
+	if val is True:
+		val=1
+	elif val in(False,None):
+		val=0
 	if isinstance(val,int)or isinstance(val,float):
 		s='%s=%s'
 	else:
 		s=quote and'%s="%s"'or'%s=%s'
 	app=s%(key,str(val))
 	ban=key+'='
-	lst=self.env['DEFINES']
+	lst=self.env.DEFINES
 	for x in lst:
 		if x.startswith(ban):
 			lst[lst.index(x)]=app
@@ -475,102 +463,130 @@ def define(self,key,val,quote=True):
 	else:
 		self.env.append_value('DEFINES',app)
 	self.env.append_unique(DEFKEYS,key)
-def undefine(self,key):
-	assert key and isinstance(key,str)
+	self.set_define_comment(key,comment)
+@conf
+def undefine(self,key,comment=''):
+	assert isinstance(key,str)
+	if not key:
+		return
 	ban=key+'='
-	lst=[x for x in self.env['DEFINES']if not x.startswith(ban)]
-	self.env['DEFINES']=lst
+	lst=[x for x in self.env.DEFINES if not x.startswith(ban)]
+	self.env.DEFINES=lst
 	self.env.append_unique(DEFKEYS,key)
-def define_cond(self,key,val):
-	assert key and isinstance(key,str)
+	self.set_define_comment(key,comment)
+@conf
+def define_cond(self,key,val,comment=''):
+	assert isinstance(key,str)
+	if not key:
+		return
 	if val:
-		self.define(key,1)
+		self.define(key,1,comment=comment)
 	else:
-		self.undefine(key)
+		self.undefine(key,comment=comment)
+@conf
 def is_defined(self,key):
 	assert key and isinstance(key,str)
 	ban=key+'='
-	for x in self.env['DEFINES']:
+	for x in self.env.DEFINES:
 		if x.startswith(ban):
 			return True
 	return False
+@conf
 def get_define(self,key):
 	assert key and isinstance(key,str)
 	ban=key+'='
-	for x in self.env['DEFINES']:
+	for x in self.env.DEFINES:
 		if x.startswith(ban):
 			return x[len(ban):]
 	return None
+@conf
 def have_define(self,key):
-	return self.__dict__.get('HAVE_PAT','HAVE_%s')%Utils.quote_define_name(key)
-def write_config_header(self,configfile='',guard='',top=False,env=None,defines=True,headers=False,remove=True):
-	if not configfile:configfile=WAF_CONFIG_H
-	waf_guard=guard or'_%s_WAF'%Utils.quote_define_name(configfile)
+	return(self.env.HAVE_PAT or'HAVE_%s')%Utils.quote_define_name(key)
+@conf
+def write_config_header(self,configfile='',guard='',top=False,defines=True,headers=False,remove=True,define_prefix=''):
+	if not configfile:
+		configfile=WAF_CONFIG_H
+	waf_guard=guard or'W_%s_WAF'%Utils.quote_define_name(configfile)
 	node=top and self.bldnode or self.path.get_bld()
 	node=node.make_node(configfile)
 	node.parent.mkdir()
 	lst=['/* WARNING! All changes made to this file will be lost! */\n']
 	lst.append('#ifndef %s\n#define %s\n'%(waf_guard,waf_guard))
-	lst.append(self.get_config_header(defines,headers))
+	lst.append(self.get_config_header(defines,headers,define_prefix=define_prefix))
 	lst.append('\n#endif /* %s */\n'%waf_guard)
 	node.write('\n'.join(lst))
-	env=env or self.env
-	env.append_unique(Build.CFG_FILES,[node.abspath()])
+	self.env.append_unique(Build.CFG_FILES,[node.abspath()])
 	if remove:
 		for key in self.env[DEFKEYS]:
 			self.undefine(key)
 		self.env[DEFKEYS]=[]
-def get_config_header(self,defines=True,headers=False):
+@conf
+def get_config_header(self,defines=True,headers=False,define_prefix=''):
 	lst=[]
+	if self.env.WAF_CONFIG_H_PRELUDE:
+		lst.append(self.env.WAF_CONFIG_H_PRELUDE)
 	if headers:
 		for x in self.env[INCKEYS]:
 			lst.append('#include <%s>'%x)
 	if defines:
-		for x in self.env[DEFKEYS]:
-			if self.is_defined(x):
-				val=self.get_define(x)
-				lst.append('#define %s %s'%(x,val))
-			else:
-				lst.append('/* #undef %s */'%x)
+		tbl={}
+		for k in self.env.DEFINES:
+			a,_,b=k.partition('=')
+			tbl[a]=b
+		for k in self.env[DEFKEYS]:
+			caption=self.get_define_comment(k)
+			if caption:
+				caption=' /* %s */'%caption
+			try:
+				txt='#define %s%s %s%s'%(define_prefix,k,tbl[k],caption)
+			except KeyError:
+				txt='/* #undef %s%s */%s'%(define_prefix,k,caption)
+			lst.append(txt)
 	return"\n".join(lst)
+@conf
 def cc_add_flags(conf):
-	conf.add_os_flags('CPPFLAGS','CFLAGS')
-	conf.add_os_flags('CFLAGS')
+	conf.add_os_flags('CPPFLAGS',dup=False)
+	conf.add_os_flags('CFLAGS',dup=False)
+@conf
 def cxx_add_flags(conf):
-	conf.add_os_flags('CPPFLAGS','CXXFLAGS')
-	conf.add_os_flags('CXXFLAGS')
+	conf.add_os_flags('CPPFLAGS',dup=False)
+	conf.add_os_flags('CXXFLAGS',dup=False)
+@conf
 def link_add_flags(conf):
-	conf.add_os_flags('LINKFLAGS')
-	conf.add_os_flags('LDFLAGS','LINKFLAGS')
+	conf.add_os_flags('LINKFLAGS',dup=False)
+	conf.add_os_flags('LDFLAGS',dup=False)
+@conf
 def cc_load_tools(conf):
 	if not conf.env.DEST_OS:
 		conf.env.DEST_OS=Utils.unversioned_sys_platform()
 	conf.load('c')
+@conf
 def cxx_load_tools(conf):
 	if not conf.env.DEST_OS:
 		conf.env.DEST_OS=Utils.unversioned_sys_platform()
 	conf.load('cxx')
-def get_cc_version(conf,cc,gcc=False,icc=False):
+@conf
+def get_cc_version(conf,cc,gcc=False,icc=False,clang=False):
 	cmd=cc+['-dM','-E','-']
 	env=conf.env.env or None
 	try:
-		p=Utils.subprocess.Popen(cmd,stdin=Utils.subprocess.PIPE,stdout=Utils.subprocess.PIPE,stderr=Utils.subprocess.PIPE,env=env)
-		p.stdin.write('\n')
-		out=p.communicate()[0]
-	except:
+		out,err=conf.cmd_and_log(cmd,output=0,input='\n'.encode(),env=env)
+	except Errors.WafError:
 		conf.fatal('Could not determine the compiler version %r'%cmd)
-	if not isinstance(out,str):
-		out=out.decode(sys.stdout.encoding)
 	if gcc:
 		if out.find('__INTEL_COMPILER')>=0:
 			conf.fatal('The intel compiler pretends to be gcc')
-		if out.find('__GNUC__')<0:
+		if out.find('__GNUC__')<0 and out.find('__clang__')<0:
 			conf.fatal('Could not determine the compiler type')
 	if icc and out.find('__INTEL_COMPILER')<0:
 		conf.fatal('Not icc/icpc')
+	if clang and out.find('__clang__')<0:
+		conf.fatal('Not clang/clang++')
+	if not clang and out.find('__clang__')>=0:
+		conf.fatal('Could not find gcc/g++ (only Clang), if renamed try eg: CC=gcc48 CXX=g++48 waf configure')
 	k={}
-	if icc or gcc:
-		out=out.split('\n')
+	if icc or gcc or clang:
+		out=out.splitlines()
 		for line in out:
 			lst=shlex.split(line)
 			if len(lst)>2:
@@ -579,8 +595,6 @@ def get_cc_version(conf,cc,gcc=False,icc
 				k[key]=val
 		def isD(var):
 			return var in k
-		def isT(var):
-			return var in k and k[var]!='0'
 		if not conf.env.DEST_OS:
 			conf.env.DEST_OS=''
 		for i in MACRO_TO_DESTOS:
@@ -594,9 +608,11 @@ def get_cc_version(conf,cc,gcc=False,icc
 				conf.env.DEST_OS='generic'
 		if isD('__ELF__'):
 			conf.env.DEST_BINFMT='elf'
-		elif isD('__WINNT__')or isD('__CYGWIN__'):
+		elif isD('__WINNT__')or isD('__CYGWIN__')or isD('_WIN32'):
 			conf.env.DEST_BINFMT='pe'
-			conf.env.LIBDIR=conf.env['PREFIX']+'/bin'
+			if not conf.env.IMPLIBDIR:
+				conf.env.IMPLIBDIR=conf.env.LIBDIR
+			conf.env.LIBDIR=conf.env.BINDIR
 		elif isD('__APPLE__'):
 			conf.env.DEST_BINFMT='mac-o'
 		if not conf.env.DEST_BINFMT:
@@ -608,31 +624,67 @@ def get_cc_version(conf,cc,gcc=False,icc
 		Logs.debug('ccroot: dest platform: '+' '.join([conf.env[x]or'?'for x in('DEST_OS','DEST_BINFMT','DEST_CPU')]))
 		if icc:
 			ver=k['__INTEL_COMPILER']
-			conf.env['CC_VERSION']=(ver[:-2],ver[-2],ver[-1])
+			conf.env.CC_VERSION=(ver[:-2],ver[-2],ver[-1])
 		else:
-			conf.env['CC_VERSION']=(k['__GNUC__'],k['__GNUC_MINOR__'],k['__GNUC_PATCHLEVEL__'])
+			if isD('__clang__')and isD('__clang_major__'):
+				conf.env.CC_VERSION=(k['__clang_major__'],k['__clang_minor__'],k['__clang_patchlevel__'])
+			else:
+				conf.env.CC_VERSION=(k['__GNUC__'],k['__GNUC_MINOR__'],k.get('__GNUC_PATCHLEVEL__','0'))
 	return k
+@conf
 def get_xlc_version(conf,cc):
-	version_re=re.compile(r"IBM XL C/C\+\+.*, V(?P<major>\d*)\.(?P<minor>\d*)",re.I).search
 	cmd=cc+['-qversion']
 	try:
 		out,err=conf.cmd_and_log(cmd,output=0)
 	except Errors.WafError:
 		conf.fatal('Could not find xlc %r'%cmd)
-	if out:match=version_re(out)
-	else:match=version_re(err)
-	if not match:
+	for v in(r"IBM XL C/C\+\+.* V(?P<major>\d*)\.(?P<minor>\d*)",):
+		version_re=re.compile(v,re.I).search
+		match=version_re(out or err)
+		if match:
+			k=match.groupdict()
+			conf.env.CC_VERSION=(k['major'],k['minor'])
+			break
+	else:
 		conf.fatal('Could not determine the XLC version.')
-	k=match.groupdict()
-	conf.env['CC_VERSION']=(k['major'],k['minor'])
+@conf
+def get_suncc_version(conf,cc):
+	cmd=cc+['-V']
+	try:
+		out,err=conf.cmd_and_log(cmd,output=0)
+	except Errors.WafError as e:
+		if not(hasattr(e,'returncode')and hasattr(e,'stdout')and hasattr(e,'stderr')):
+			conf.fatal('Could not find suncc %r'%cmd)
+		out=e.stdout
+		err=e.stderr
+	version=(out or err)
+	version=version.splitlines()[0]
+	version_re=re.compile(r'cc: (studio.*?|\s+)?(sun\s+(c\+\+|c)|(WorkShop\s+Compilers))?\s+(?P<major>\d*)\.(?P<minor>\d*)',re.I).search
+	match=version_re(version)
+	if match:
+		k=match.groupdict()
+		conf.env.CC_VERSION=(k['major'],k['minor'])
+	else:
+		conf.fatal('Could not determine the suncc version.')
+@conf
 def add_as_needed(self):
 	if self.env.DEST_BINFMT=='elf'and'gcc'in(self.env.CXX_NAME,self.env.CC_NAME):
-		self.env.append_unique('LINKFLAGS','--as-needed')
-class cfgtask(Task.TaskBase):
+		self.env.append_unique('LINKFLAGS','-Wl,--as-needed')
+class cfgtask(Task.Task):
+	def __init__(self,*k,**kw):
+		Task.Task.__init__(self,*k,**kw)
+		self.run_after=set()
 	def display(self):
 		return''
 	def runnable_status(self):
+		for x in self.run_after:
+			if not x.hasrun:
+				return Task.ASK_LATER
 		return Task.RUN_ME
+	def uid(self):
+		return Utils.SIG_NIL
+	def signature(self):
+		return Utils.SIG_NIL
 	def run(self):
 		conf=self.conf
 		bld=Build.BuildContext(top_dir=conf.srcnode.abspath(),out_dir=conf.bldnode.abspath())
@@ -640,74 +692,115 @@ class cfgtask(Task.TaskBase):
 		bld.init_dirs()
 		bld.in_msg=1
 		bld.logger=self.logger
+		bld.multicheck_task=self
+		args=self.args
 		try:
-			bld.check(**self.args)
-		except:
+			if'func'in args:
+				bld.test(build_fun=args['func'],msg=args.get('msg',''),okmsg=args.get('okmsg',''),errmsg=args.get('errmsg',''),)
+			else:
+				args['multicheck_mandatory']=args.get('mandatory',True)
+				args['mandatory']=True
+				try:
+					bld.check(**args)
+				finally:
+					args['mandatory']=args['multicheck_mandatory']
+		except Exception:
 			return 1
+	def process(self):
+		Task.Task.process(self)
+		if'msg'in self.args:
+			with self.generator.bld.multicheck_lock:
+				self.conf.start_msg(self.args['msg'])
+				if self.hasrun==Task.NOT_RUN:
+					self.conf.end_msg('test cancelled','YELLOW')
+				elif self.hasrun!=Task.SUCCESS:
+					self.conf.end_msg(self.args.get('errmsg','no'),'YELLOW')
+				else:
+					self.conf.end_msg(self.args.get('okmsg','yes'),'GREEN')
+@conf
 def multicheck(self,*k,**kw):
-	self.start_msg(kw.get('msg','Executing %d configuration tests'%len(k)))
+	self.start_msg(kw.get('msg','Executing %d configuration tests'%len(k)),**kw)
+	for var in('DEFINES',DEFKEYS):
+		self.env.append_value(var,[])
+	self.env.DEFINE_COMMENTS=self.env.DEFINE_COMMENTS or{}
 	class par(object):
 		def __init__(self):
 			self.keep=False
-			self.cache_global=Options.cache_global
-			self.nocache=Options.options.nocache
-			self.returned_tasks=[]
+			self.task_sigs={}
+			self.progress_bar=0
 		def total(self):
 			return len(tasks)
 		def to_log(self,*k,**kw):
 			return
 	bld=par()
+	bld.keep=kw.get('run_all_tests',True)
+	bld.imp_sigs={}
 	tasks=[]
+	id_to_task={}
 	for dct in k:
-		x=cfgtask(bld=bld)
+		x=Task.classes['cfgtask'](bld=bld,env=None)
 		tasks.append(x)
 		x.args=dct
 		x.bld=bld
 		x.conf=self
 		x.args=dct
 		x.logger=Logs.make_mem_logger(str(id(x)),self.logger)
+		if'id'in dct:
+			id_to_task[dct['id']]=x
+	for x in tasks:
+		for key in Utils.to_list(x.args.get('before_tests',[])):
+			tsk=id_to_task[key]
+			if not tsk:
+				raise ValueError('No test named %r'%key)
+			tsk.run_after.add(x)
+		for key in Utils.to_list(x.args.get('after_tests',[])):
+			tsk=id_to_task[key]
+			if not tsk:
+				raise ValueError('No test named %r'%key)
+			x.run_after.add(tsk)
 	def it():
 		yield tasks
 		while 1:
 			yield[]
-	p=Runner.Parallel(bld,Options.options.jobs)
+	bld.producer=p=Runner.Parallel(bld,Options.options.jobs)
+	bld.multicheck_lock=Utils.threading.Lock()
 	p.biter=it()
+	self.end_msg('started')
 	p.start()
 	for x in tasks:
 		x.logger.memhandler.flush()
+	self.start_msg('-> processing test results')
+	if p.error:
+		for x in p.error:
+			if getattr(x,'err_msg',None):
+				self.to_log(x.err_msg)
+				self.end_msg('fail',color='RED')
+				raise Errors.WafError('There is an error in the library, read config.log for more information')
+	failure_count=0
+	for x in tasks:
+		if x.hasrun not in(Task.SUCCESS,Task.NOT_RUN):
+			failure_count+=1
+	if failure_count:
+		self.end_msg(kw.get('errmsg','%s test failed'%failure_count),color='YELLOW',**kw)
+	else:
+		self.end_msg('all ok',**kw)
 	for x in tasks:
 		if x.hasrun!=Task.SUCCESS:
-			self.end_msg(kw.get('errmsg','no'),color='YELLOW')
-			self.fatal(kw.get('fatalmsg',None)or'One of the tests has failed, see the config.log for more information')
-	self.end_msg('ok')
-
-conf(parse_flags)
-conf(ret_msg)
-conf(validate_cfg)
-conf(exec_cfg)
-conf(check_cfg)
-conf(validate_c)
-conf(post_check)
-conf(check)
-feature('test_exec')(test_exec_fun)
-after_method('apply_link')(test_exec_fun)
-conf(run_c_code)
-conf(check_cxx)
-conf(check_cc)
-conf(define)
-conf(undefine)
-conf(define_cond)
-conf(is_defined)
-conf(get_define)
-conf(have_define)
-conf(write_config_header)
-conf(get_config_header)
-conf(cc_add_flags)
-conf(cxx_add_flags)
-conf(link_add_flags)
-conf(cc_load_tools)
-conf(cxx_load_tools)
-conf(get_cc_version)
-conf(get_xlc_version)
-conf(add_as_needed)
-conf(multicheck)
\ No newline at end of file
+			if x.args.get('mandatory',True):
+				self.fatal(kw.get('fatalmsg')or'One of the tests has failed, read config.log for more information')
+@conf
+def check_gcc_o_space(self,mode='c'):
+	if int(self.env.CC_VERSION[0])>4:
+		return
+	self.env.stash()
+	if mode=='c':
+		self.env.CCLNK_TGT_F=['-o','']
+	elif mode=='cxx':
+		self.env.CXXLNK_TGT_F=['-o','']
+	features='%s %sshlib'%(mode,mode)
+	try:
+		self.check(msg='Checking if the -o link must be split from arguments',fragment=SNIP_EMPTY_PROGRAM,features=features)
+	except self.errors.ConfigurationError:
+		self.env.revert()
+	else:
+		self.env.commit()
--- pugl-0~svn32+dfsg0.orig/waflib/Tools/c_osx.py
+++ pugl-0~svn32+dfsg0/waflib/Tools/c_osx.py
@@ -1,9 +1,9 @@
 #! /usr/bin/env python
 # encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
 
-import os,shutil,sys,platform
-from waflib import TaskGen,Task,Build,Options,Utils,Errors
+import os,shutil,platform
+from waflib import Task,Utils
 from waflib.TaskGen import taskgen_method,feature,after_method,before_method
 app_info='''
 <?xml version="1.0" encoding="UTF-8"?>
@@ -19,18 +19,19 @@ app_info='''
 	<key>NOTE</key>
 	<string>THIS IS A GENERATED FILE, DO NOT MODIFY</string>
 	<key>CFBundleExecutable</key>
-	<string>%s</string>
+	<string>{app_name}</string>
 </dict>
 </plist>
 '''
+@feature('c','cxx')
 def set_macosx_deployment_target(self):
-	if self.env['MACOSX_DEPLOYMENT_TARGET']:
-		os.environ['MACOSX_DEPLOYMENT_TARGET']=self.env['MACOSX_DEPLOYMENT_TARGET']
+	if self.env.MACOSX_DEPLOYMENT_TARGET:
+		os.environ['MACOSX_DEPLOYMENT_TARGET']=self.env.MACOSX_DEPLOYMENT_TARGET
 	elif'MACOSX_DEPLOYMENT_TARGET'not in os.environ:
 		if Utils.unversioned_sys_platform()=='darwin':
 			os.environ['MACOSX_DEPLOYMENT_TARGET']='.'.join(platform.mac_ver()[0].split('.')[:2])
+@taskgen_method
 def create_bundle_dirs(self,name,out):
-	bld=self.bld
 	dir=out.parent.find_or_declare(name)
 	dir.mkdir()
 	macos=dir.find_or_declare(['Contents','MacOS'])
@@ -44,40 +45,44 @@ def bundle_name_for_output(out):
 	else:
 		name=name+'.app'
 	return name
+@feature('cprogram','cxxprogram')
+@after_method('apply_link')
 def create_task_macapp(self):
-	if self.env['MACAPP']or getattr(self,'mac_app',False):
+	if self.env.MACAPP or getattr(self,'mac_app',False):
 		out=self.link_task.outputs[0]
 		name=bundle_name_for_output(out)
 		dir=self.create_bundle_dirs(name,out)
 		n1=dir.find_or_declare(['Contents','MacOS',out.name])
 		self.apptask=self.create_task('macapp',self.link_task.outputs,n1)
 		inst_to=getattr(self,'install_path','/Applications')+'/%s/Contents/MacOS/'%name
-		self.bld.install_files(inst_to,n1,chmod=Utils.O755)
-		if getattr(self,'mac_resources',None):
+		self.add_install_files(install_to=inst_to,install_from=n1,chmod=Utils.O755)
+		if getattr(self,'mac_files',None):
+			mac_files_root=getattr(self,'mac_files_root',None)
+			if isinstance(mac_files_root,str):
+				mac_files_root=self.path.find_node(mac_files_root)
+				if not mac_files_root:
+					self.bld.fatal('Invalid mac_files_root %r'%self.mac_files_root)
 			res_dir=n1.parent.parent.make_node('Resources')
 			inst_to=getattr(self,'install_path','/Applications')+'/%s/Resources'%name
-			for x in self.to_list(self.mac_resources):
-				node=self.path.find_node(x)
-				if not node:
-					raise Errors.WafError('Missing mac_resource %r in %r'%(x,self))
-				parent=node.parent
-				if os.path.isdir(node.abspath()):
-					nodes=node.ant_glob('**')
-				else:
-					nodes=[node]
-				for node in nodes:
-					rel=node.path_from(parent)
-					tsk=self.create_task('macapp',node,res_dir.make_node(rel))
-					self.bld.install_as(inst_to+'/%s'%rel,node)
+			for node in self.to_nodes(self.mac_files):
+				relpath=node.path_from(mac_files_root or node.parent)
+				self.create_task('macapp',node,res_dir.make_node(relpath))
+				self.add_install_as(install_to=os.path.join(inst_to,relpath),install_from=node)
 		if getattr(self.bld,'is_install',None):
 			self.install_task.hasrun=Task.SKIP_ME
+@feature('cprogram','cxxprogram')
+@after_method('apply_link')
 def create_task_macplist(self):
-	if self.env['MACAPP']or getattr(self,'mac_app',False):
+	if self.env.MACAPP or getattr(self,'mac_app',False):
 		out=self.link_task.outputs[0]
 		name=bundle_name_for_output(out)
 		dir=self.create_bundle_dirs(name,out)
 		n1=dir.find_or_declare(['Contents','Info.plist'])
 		self.plisttask=plisttask=self.create_task('macplist',[],n1)
+		plisttask.context={'app_name':self.link_task.outputs[0].name,'env':self.env}
+		plist_ctx=getattr(self,'plist_context',None)
+		if(plist_ctx):
+			plisttask.context.update(plist_ctx)
 		if getattr(self,'mac_plist',False):
 			node=self.path.find_resource(self.mac_plist)
 			if node:
@@ -85,13 +90,15 @@ def create_task_macplist(self):
 			else:
 				plisttask.code=self.mac_plist
 		else:
-			plisttask.code=app_info%self.link_task.outputs[0].name
+			plisttask.code=app_info
 		inst_to=getattr(self,'install_path','/Applications')+'/%s/Contents/'%name
-		self.bld.install_files(inst_to,n1)
+		self.add_install_files(install_to=inst_to,install_from=n1)
+@feature('cshlib','cxxshlib')
+@before_method('apply_link','propagate_uselib_vars')
 def apply_bundle(self):
-	if self.env['MACBUNDLE']or getattr(self,'mac_bundle',False):
-		self.env['LINKFLAGS_cshlib']=self.env['LINKFLAGS_cxxshlib']=[]
-		self.env['cshlib_PATTERN']=self.env['cxxshlib_PATTERN']=self.env['macbundle_PATTERN']
+	if self.env.MACBUNDLE or getattr(self,'mac_bundle',False):
+		self.env.LINKFLAGS_cshlib=self.env.LINKFLAGS_cxxshlib=[]
+		self.env.cshlib_PATTERN=self.env.cxxshlib_PATTERN=self.env.macbundle_PATTERN
 		use=self.use=self.to_list(getattr(self,'use',[]))
 		if not'MACBUNDLE'in use:
 			use.append('MACBUNDLE')
@@ -109,13 +116,6 @@ class macplist(Task.Task):
 			txt=self.code
 		else:
 			txt=self.inputs[0].read()
+		context=getattr(self,'context',{})
+		txt=txt.format(**context)
 		self.outputs[0].write(txt)
-
-feature('c','cxx')(set_macosx_deployment_target)
-taskgen_method(create_bundle_dirs)
-feature('cprogram','cxxprogram')(create_task_macapp)
-after_method('apply_link')(create_task_macapp)
-feature('cprogram','cxxprogram')(create_task_macplist)
-after_method('apply_link')(create_task_macplist)
-feature('cshlib','cxxshlib')(apply_bundle)
-before_method('apply_link','propagate_uselib_vars')(apply_bundle)
\ No newline at end of file
--- pugl-0~svn32+dfsg0.orig/waflib/Tools/c_preproc.py
+++ pugl-0~svn32+dfsg0/waflib/Tools/c_preproc.py
@@ -1,29 +1,28 @@
 #! /usr/bin/env python
 # encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
 
-import sys
-if sys.hexversion < 0x020400f0: from sets import Set as set
-import re,sys,os,string,traceback
-from waflib import Logs,Build,Utils,Errors
-from waflib.Logs import debug,error
+import re,string,traceback
+from waflib import Logs,Utils,Errors
 class PreprocError(Errors.WafError):
 	pass
+FILE_CACHE_SIZE=100000
+LINE_CACHE_SIZE=100000
 POPFILE='-'
 recursion_limit=150
 go_absolute=False
-standard_includes=['/usr/include']
+standard_includes=['/usr/local/include','/usr/include']
 if Utils.is_win32:
 	standard_includes=[]
 use_trigraphs=0
 strict_quotes=0
-g_optrans={'not':'!','and':'&&','bitand':'&','and_eq':'&=','or':'||','bitor':'|','or_eq':'|=','xor':'^','xor_eq':'^=','compl':'~',}
-re_lines=re.compile('^[ \t]*(#|%:)[ \t]*(ifdef|ifndef|if|else|elif|endif|include|import|define|undef|pragma)[ \t]*(.*)\r*$',re.IGNORECASE|re.MULTILINE)
-re_mac=re.compile("^[a-zA-Z_]\w*")
+g_optrans={'not':'!','not_eq':'!','and':'&&','and_eq':'&=','or':'||','or_eq':'|=','xor':'^','xor_eq':'^=','bitand':'&','bitor':'|','compl':'~',}
+re_lines=re.compile('^[ \t]*(?:#|%:)[ \t]*(ifdef|ifndef|if|else|elif|endif|include|import|define|undef|pragma)[ \t]*(.*)\r*$',re.IGNORECASE|re.MULTILINE)
+re_mac=re.compile(r"^[a-zA-Z_]\w*")
 re_fun=re.compile('^[a-zA-Z_][a-zA-Z0-9_]*[(]')
-re_pragma_once=re.compile('^\s*once\s*',re.IGNORECASE)
+re_pragma_once=re.compile(r'^\s*once\s*',re.IGNORECASE)
 re_nl=re.compile('\\\\\r*\n',re.MULTILINE)
-re_cpp=re.compile(r"""(/\*[^*]*\*+([^/*][^*]*\*+)*/)|//[^\n]*|("(\\.|[^"\\])*"|'(\\.|[^'\\])*'|.[^/"'\\]*)""",re.MULTILINE)
+re_cpp=re.compile(r'//.*?$|/\*.*?\*/|\'(?:\\.|[^\\\'])*\'|"(?:\\.|[^\\"])*"',re.DOTALL|re.MULTILINE)
 trig_def=[('??'+a,b)for a,b in zip("=-/!'()<>",r'#~\|^[]{}')]
 chr_esc={'0':0,'a':7,'b':8,'t':9,'n':10,'f':11,'v':12,'r':13,'\\':92,"'":39}
 NUM='i'
@@ -39,57 +38,69 @@ ignored='i'
 undefined='u'
 skipped='s'
 def repl(m):
-	s=m.group(1)
-	if s:
+	s=m.group()
+	if s[0]=='/':
 		return' '
-	return m.group(3)or''
-def filter_comments(filename):
-	code=Utils.readf(filename)
-	if use_trigraphs:
-		for(a,b)in trig_def:code=code.split(a).join(b)
-	code=re_nl.sub('',code)
-	code=re_cpp.sub(repl,code)
-	return[(m.group(2),m.group(3))for m in re.finditer(re_lines,code)]
+	return s
 prec={}
 ops=['* / %','+ -','<< >>','< <= >= >','== !=','& | ^','&& ||',',']
-for x in range(len(ops)):
-	syms=ops[x]
+for x,syms in enumerate(ops):
 	for u in syms.split():
 		prec[u]=x
-def trimquotes(s):
-	if not s:return''
-	s=s.rstrip()
-	if s[0]=="'"and s[-1]=="'":return s[1:-1]
-	return s
 def reduce_nums(val_1,val_2,val_op):
-	try:a=0+val_1
-	except TypeError:a=int(val_1)
-	try:b=0+val_2
-	except TypeError:b=int(val_2)
+	try:
+		a=0+val_1
+	except TypeError:
+		a=int(val_1)
+	try:
+		b=0+val_2
+	except TypeError:
+		b=int(val_2)
 	d=val_op
-	if d=='%':c=a%b
-	elif d=='+':c=a+b
-	elif d=='-':c=a-b
-	elif d=='*':c=a*b
-	elif d=='/':c=a/b
-	elif d=='^':c=a^b
-	elif d=='|':c=a|b
-	elif d=='||':c=int(a or b)
-	elif d=='&':c=a&b
-	elif d=='&&':c=int(a and b)
-	elif d=='==':c=int(a==b)
-	elif d=='!=':c=int(a!=b)
-	elif d=='<=':c=int(a<=b)
-	elif d=='<':c=int(a<b)
-	elif d=='>':c=int(a>b)
-	elif d=='>=':c=int(a>=b)
-	elif d=='^':c=int(a^b)
-	elif d=='<<':c=a<<b
-	elif d=='>>':c=a>>b
-	else:c=0
+	if d=='%':
+		c=a%b
+	elif d=='+':
+		c=a+b
+	elif d=='-':
+		c=a-b
+	elif d=='*':
+		c=a*b
+	elif d=='/':
+		c=a/b
+	elif d=='^':
+		c=a^b
+	elif d=='==':
+		c=int(a==b)
+	elif d=='|'or d=='bitor':
+		c=a|b
+	elif d=='||'or d=='or':
+		c=int(a or b)
+	elif d=='&'or d=='bitand':
+		c=a&b
+	elif d=='&&'or d=='and':
+		c=int(a and b)
+	elif d=='!='or d=='not_eq':
+		c=int(a!=b)
+	elif d=='^'or d=='xor':
+		c=int(a^b)
+	elif d=='<=':
+		c=int(a<=b)
+	elif d=='<':
+		c=int(a<b)
+	elif d=='>':
+		c=int(a>b)
+	elif d=='>=':
+		c=int(a>=b)
+	elif d=='<<':
+		c=a<<b
+	elif d=='>>':
+		c=a>>b
+	else:
+		c=0
 	return c
 def get_num(lst):
-	if not lst:raise PreprocError("empty list for get_num")
+	if not lst:
+		raise PreprocError('empty list for get_num')
 	(p,v)=lst[0]
 	if p==OP:
 		if v=='(':
@@ -106,7 +117,7 @@ def get_num(lst):
 						count_par+=1
 				i+=1
 			else:
-				raise PreprocError("rparen expected %r"%lst)
+				raise PreprocError('rparen expected %r'%lst)
 			(num,_)=get_term(lst[1:i])
 			return(num,lst[i+1:])
 		elif v=='+':
@@ -118,27 +129,25 @@ def get_num(lst):
 			num,lst=get_num(lst[1:])
 			return(int(not int(num)),lst)
 		elif v=='~':
+			num,lst=get_num(lst[1:])
 			return(~int(num),lst)
 		else:
-			raise PreprocError("Invalid op token %r for get_num"%lst)
+			raise PreprocError('Invalid op token %r for get_num'%lst)
 	elif p==NUM:
 		return v,lst[1:]
 	elif p==IDENT:
 		return 0,lst[1:]
 	else:
-		raise PreprocError("Invalid token %r for get_num"%lst)
+		raise PreprocError('Invalid token %r for get_num'%lst)
 def get_term(lst):
-	if not lst:raise PreprocError("empty list for get_term")
+	if not lst:
+		raise PreprocError('empty list for get_term')
 	num,lst=get_num(lst)
 	if not lst:
 		return(num,[])
 	(p,v)=lst[0]
 	if p==OP:
-		if v=='&&'and not num:
-			return(num,[])
-		elif v=='||'and num:
-			return(num,[])
-		elif v==',':
+		if v==',':
 			return get_term(lst[1:])
 		elif v=='?':
 			count_par=0
@@ -155,7 +164,7 @@ def get_term(lst):
 							break
 				i+=1
 			else:
-				raise PreprocError("rparen expected %r"%lst)
+				raise PreprocError('rparen expected %r'%lst)
 			if int(num):
 				return get_term(lst[1:i])
 			else:
@@ -167,7 +176,7 @@ def get_term(lst):
 				return get_term([(NUM,num2)]+lst)
 			p2,v2=lst[0]
 			if p2!=OP:
-				raise PreprocError("op expected %r"%lst)
+				raise PreprocError('op expected %r'%lst)
 			if prec[v2]>=prec[v]:
 				num2=reduce_nums(num,num2,v)
 				return get_term([(NUM,num2)]+lst)
@@ -175,7 +184,7 @@ def get_term(lst):
 				num3,lst=get_num(lst[1:])
 				num3=reduce_nums(num2,num3,v2)
 				return get_term([(NUM,num),(p,v),(NUM,num3)]+lst)
-	raise PreprocError("cannot reduce %r"%lst)
+	raise PreprocError('cannot reduce %r'%lst)
 def reduce_eval(lst):
 	num,lst=get_term(lst)
 	return(NUM,num)
@@ -215,7 +224,7 @@ def reduce_tokens(lst,defs,ban=[]):
 					else:
 						lst[i]=(NUM,0)
 				else:
-					raise PreprocError("Invalid define expression %r"%lst)
+					raise PreprocError('Invalid define expression %r'%lst)
 		elif p==IDENT and v in defs:
 			if isinstance(defs[v],str):
 				a,b=extract_macro(defs[v])
@@ -224,17 +233,19 @@ def reduce_tokens(lst,defs,ban=[]):
 			to_add=macro_def[1]
 			if isinstance(macro_def[0],list):
 				del lst[i]
-				for x in range(len(to_add)):
-					lst.insert(i,to_add[x])
+				accu=to_add[:]
+				reduce_tokens(accu,defs,ban+[v])
+				for tmp in accu:
+					lst.insert(i,tmp)
 					i+=1
 			else:
 				args=[]
 				del lst[i]
 				if i>=len(lst):
-					raise PreprocError("expected '(' after %r (got nothing)"%v)
+					raise PreprocError('expected ( after %r (got nothing)'%v)
 				(p2,v2)=lst[i]
 				if p2!=OP or v2!='(':
-					raise PreprocError("expected '(' after %r"%v)
+					raise PreprocError('expected ( after %r'%v)
 				del lst[i]
 				one_param=[]
 				count_paren=0
@@ -246,18 +257,22 @@ def reduce_tokens(lst,defs,ban=[]):
 							one_param.append((p2,v2))
 							count_paren+=1
 						elif v2==')':
-							if one_param:args.append(one_param)
+							if one_param:
+								args.append(one_param)
 							break
 						elif v2==',':
-							if not one_param:raise PreprocError("empty param in funcall %s"%p)
+							if not one_param:
+								raise PreprocError('empty param in funcall %r'%v)
 							args.append(one_param)
 							one_param=[]
 						else:
 							one_param.append((p2,v2))
 					else:
 						one_param.append((p2,v2))
-						if v2=='(':count_paren+=1
-						elif v2==')':count_paren-=1
+						if v2=='(':
+							count_paren+=1
+						elif v2==')':
+							count_paren-=1
 				else:
 					raise PreprocError('malformed macro')
 				accu=[]
@@ -290,7 +305,8 @@ def reduce_tokens(lst,defs,ban=[]):
 								for x in args[pt-st+1:]:
 									va_toks.extend(x)
 									va_toks.append((OP,','))
-								if va_toks:va_toks.pop()
+								if va_toks:
+									va_toks.pop()
 								if len(accu)>1:
 									(p3,v3)=accu[-1]
 									(p4,v4)=accu[-2]
@@ -317,15 +333,21 @@ def reduce_tokens(lst,defs,ban=[]):
 		i+=1
 def eval_macro(lst,defs):
 	reduce_tokens(lst,defs,[])
-	if not lst:raise PreprocError("missing tokens to evaluate")
-	(p,v)=reduce_eval(lst)
+	if not lst:
+		raise PreprocError('missing tokens to evaluate')
+	if lst:
+		p,v=lst[0]
+		if p==IDENT and v not in defs:
+			raise PreprocError('missing macro %r'%lst)
+	p,v=reduce_eval(lst)
 	return int(v)!=0
 def extract_macro(txt):
 	t=tokenize(txt)
 	if re_fun.search(txt):
 		p,name=t[0]
 		p,v=t[1]
-		if p!=OP:raise PreprocError("expected open parenthesis")
+		if p!=OP:
+			raise PreprocError('expected (')
 		i=1
 		pindex=0
 		params={}
@@ -341,65 +363,76 @@ def extract_macro(txt):
 				elif p==OP and v==')':
 					break
 				else:
-					raise PreprocError("unexpected token (3)")
+					raise PreprocError('unexpected token (3)')
 			elif prev==IDENT:
 				if p==OP and v==',':
 					prev=v
 				elif p==OP and v==')':
 					break
 				else:
-					raise PreprocError("comma or ... expected")
+					raise PreprocError('comma or ... expected')
 			elif prev==',':
 				if p==IDENT:
 					params[v]=pindex
 					pindex+=1
 					prev=p
 				elif p==OP and v=='...':
-					raise PreprocError("not implemented (1)")
+					raise PreprocError('not implemented (1)')
 				else:
-					raise PreprocError("comma or ... expected (2)")
+					raise PreprocError('comma or ... expected (2)')
 			elif prev=='...':
-				raise PreprocError("not implemented (2)")
+				raise PreprocError('not implemented (2)')
 			else:
-				raise PreprocError("unexpected else")
+				raise PreprocError('unexpected else')
 		return(name,[params,t[i+1:]])
 	else:
 		(p,v)=t[0]
-		return(v,[[],t[1:]])
-re_include=re.compile('^\s*(<(?P<a>.*)>|"(?P<b>.*)")')
+		if len(t)>1:
+			return(v,[[],t[1:]])
+		else:
+			return(v,[[],[('T','')]])
+re_include=re.compile(r'^\s*(<(?:.*)>|"(?:.*)")')
 def extract_include(txt,defs):
 	m=re_include.search(txt)
 	if m:
-		if m.group('a'):return'<',m.group('a')
-		if m.group('b'):return'"',m.group('b')
+		txt=m.group(1)
+		return txt[0],txt[1:-1]
 	toks=tokenize(txt)
 	reduce_tokens(toks,defs,['waf_include'])
 	if not toks:
-		raise PreprocError("could not parse include %s"%txt)
+		raise PreprocError('could not parse include %r'%txt)
 	if len(toks)==1:
 		if toks[0][0]==STR:
 			return'"',toks[0][1]
 	else:
 		if toks[0][1]=='<'and toks[-1][1]=='>':
-			return stringize(toks).lstrip('<').rstrip('>')
-	raise PreprocError("could not parse include %s."%txt)
+			ret='<',stringize(toks).lstrip('<').rstrip('>')
+			return ret
+	raise PreprocError('could not parse include %r'%txt)
 def parse_char(txt):
-	if not txt:raise PreprocError("attempted to parse a null char")
+	if not txt:
+		raise PreprocError('attempted to parse a null char')
 	if txt[0]!='\\':
 		return ord(txt)
 	c=txt[1]
 	if c=='x':
-		if len(txt)==4 and txt[3]in string.hexdigits:return int(txt[2:],16)
+		if len(txt)==4 and txt[3]in string.hexdigits:
+			return int(txt[2:],16)
 		return int(txt[2:],16)
 	elif c.isdigit():
-		if c=='0'and len(txt)==2:return 0
+		if c=='0'and len(txt)==2:
+			return 0
 		for i in 3,2,1:
 			if len(txt)>i and txt[1:1+i].isdigit():
 				return(1+i,int(txt[1:1+i],8))
 	else:
-		try:return chr_esc[c]
-		except KeyError:raise PreprocError("could not parse char literal '%s'"%txt)
+		try:
+			return chr_esc[c]
+		except KeyError:
+			raise PreprocError('could not parse char literal %r'%txt)
 def tokenize(s):
+	return tokenize_private(s)[:]
+def tokenize_private(s):
 	ret=[]
 	for match in re_clexer.finditer(s):
 		m=match.group
@@ -407,32 +440,49 @@ def tokenize(s):
 			v=m(name)
 			if v:
 				if name==IDENT:
-					try:v=g_optrans[v];name=OP
-					except KeyError:
-						if v.lower()=="true":
-							v=1
-							name=NUM
-						elif v.lower()=="false":
-							v=0
-							name=NUM
+					if v in g_optrans:
+						name=OP
+					elif v.lower()=="true":
+						v=1
+						name=NUM
+					elif v.lower()=="false":
+						v=0
+						name=NUM
 				elif name==NUM:
-					if m('oct'):v=int(v,8)
-					elif m('hex'):v=int(m('hex'),16)
-					elif m('n0'):v=m('n0')
+					if m('oct'):
+						v=int(v,8)
+					elif m('hex'):
+						v=int(m('hex'),16)
+					elif m('n0'):
+						v=m('n0')
 					else:
 						v=m('char')
-						if v:v=parse_char(v)
-						else:v=m('n2')or m('n4')
+						if v:
+							v=parse_char(v)
+						else:
+							v=m('n2')or m('n4')
 				elif name==OP:
-					if v=='%:':v='#'
-					elif v=='%:%:':v='##'
+					if v=='%:':
+						v='#'
+					elif v=='%:%:':
+						v='##'
 				elif name==STR:
 					v=v[1:-1]
 				ret.append((name,v))
 				break
 	return ret
-def define_name(line):
-	return re_mac.match(line).group(0)
+def format_defines(lst):
+	ret=[]
+	for y in lst:
+		if y:
+			pos=y.find('=')
+			if pos==-1:
+				ret.append(y)
+			elif pos>0:
+				ret.append('%s %s'%(y[:pos],y[pos+1:]))
+			else:
+				raise ValueError('Invalid define expression %r'%y)
+	return ret
 class c_parser(object):
 	def __init__(self,nodepaths=None,defines=None):
 		self.lines=[]
@@ -447,84 +497,101 @@ class c_parser(object):
 		self.nodes=[]
 		self.names=[]
 		self.curfile=''
-		self.ban_includes=set([])
+		self.ban_includes=set()
+		self.listed=set()
 	def cached_find_resource(self,node,filename):
 		try:
-			nd=node.ctx.cache_nd
-		except:
-			nd=node.ctx.cache_nd={}
-		tup=(node,filename)
+			cache=node.ctx.preproc_cache_node
+		except AttributeError:
+			cache=node.ctx.preproc_cache_node=Utils.lru_cache(FILE_CACHE_SIZE)
+		key=(node,filename)
 		try:
-			return nd[tup]
+			return cache[key]
 		except KeyError:
 			ret=node.find_resource(filename)
 			if ret:
 				if getattr(ret,'children',None):
 					ret=None
 				elif ret.is_child_of(node.ctx.bldnode):
-					tmp=node.ctx.srcnode.search(ret.path_from(node.ctx.bldnode))
+					tmp=node.ctx.srcnode.search_node(ret.path_from(node.ctx.bldnode))
 					if tmp and getattr(tmp,'children',None):
 						ret=None
-			nd[tup]=ret
+			cache[key]=ret
 			return ret
-	def tryfind(self,filename):
+	def tryfind(self,filename,kind='"',env=None):
+		if filename.endswith('.moc'):
+			self.names.append(filename)
+			return None
 		self.curfile=filename
-		found=self.cached_find_resource(self.currentnode_stack[-1],filename)
-		for n in self.nodepaths:
-			if found:
-				break
-			found=self.cached_find_resource(n,filename)
-		if found:
-			self.nodes.append(found)
-			if filename[-4:]!='.moc':
-				self.addlines(found)
+		found=None
+		if kind=='"':
+			if env.MSVC_VERSION:
+				for n in reversed(self.currentnode_stack):
+					found=self.cached_find_resource(n,filename)
+					if found:
+						break
+			else:
+				found=self.cached_find_resource(self.currentnode_stack[-1],filename)
+		if not found:
+			for n in self.nodepaths:
+				found=self.cached_find_resource(n,filename)
+				if found:
+					break
+		listed=self.listed
+		if found and not found in self.ban_includes:
+			if found not in listed:
+				listed.add(found)
+				self.nodes.append(found)
+			self.addlines(found)
 		else:
-			if not filename in self.names:
+			if filename not in listed:
+				listed.add(filename)
 				self.names.append(filename)
 		return found
+	def filter_comments(self,node):
+		code=node.read()
+		if use_trigraphs:
+			for(a,b)in trig_def:
+				code=code.split(a).join(b)
+		code=re_nl.sub('',code)
+		code=re_cpp.sub(repl,code)
+		return re_lines.findall(code)
+	def parse_lines(self,node):
+		try:
+			cache=node.ctx.preproc_cache_lines
+		except AttributeError:
+			cache=node.ctx.preproc_cache_lines=Utils.lru_cache(LINE_CACHE_SIZE)
+		try:
+			return cache[node]
+		except KeyError:
+			cache[node]=lines=self.filter_comments(node)
+			lines.append((POPFILE,''))
+			lines.reverse()
+			return lines
 	def addlines(self,node):
 		self.currentnode_stack.append(node.parent)
-		filepath=node.abspath()
 		self.count_files+=1
 		if self.count_files>recursion_limit:
-			raise PreprocError("recursion limit exceeded")
-		pc=self.parse_cache
-		debug('preproc: reading file %r',filepath)
+			raise PreprocError('recursion limit exceeded')
+		if Logs.verbose:
+			Logs.debug('preproc: reading file %r',node)
 		try:
-			lns=pc[filepath]
-		except KeyError:
-			pass
-		else:
-			self.lines.extend(lns)
-			return
-		try:
-			lines=filter_comments(filepath)
-			lines.append((POPFILE,''))
-			lines.reverse()
-			pc[filepath]=lines
-			self.lines.extend(lines)
-		except IOError:
-			raise PreprocError("could not read the file %s"%filepath)
+			lines=self.parse_lines(node)
+		except EnvironmentError:
+			raise PreprocError('could not read the file %r'%node)
 		except Exception:
 			if Logs.verbose>0:
-				error("parsing %s failed"%filepath)
-				traceback.print_exc()
+				Logs.error('parsing %r failed %s',node,traceback.format_exc())
+		else:
+			self.lines.extend(lines)
 	def start(self,node,env):
-		debug('preproc: scanning %s (in %s)',node.name,node.parent.name)
-		bld=node.ctx
-		try:
-			self.parse_cache=bld.parse_cache
-		except AttributeError:
-			bld.parse_cache={}
-			self.parse_cache=bld.parse_cache
+		Logs.debug('preproc: scanning %s (in %s)',node.name,node.parent.name)
+		self.current_file=node
 		self.addlines(node)
-		if env['DEFINES']:
-			try:
-				lst=['%s %s'%(x[0],trimquotes('='.join(x[1:])))for x in[y.split('=')for y in env['DEFINES']]]
-				lst.reverse()
-				self.lines.extend([('define',x)for x in lst])
-			except AttributeError:
-				pass
+		if env.DEFINES:
+			lst=format_defines(env.DEFINES)
+			lst.reverse()
+			self.lines.extend([('define',x)for x in lst])
 		while self.lines:
 			(token,line)=self.lines.pop()
 			if token==POPFILE:
@@ -532,8 +599,6 @@ class c_parser(object):
 				self.currentnode_stack.pop()
 				continue
 			try:
-				ve=Logs.verbose
-				if ve:debug('preproc: line is %s - %s state is %s',token,line,self.state)
 				state=self.state
 				if token[:2]=='if':
 					state.append(undefined)
@@ -544,24 +609,27 @@ class c_parser(object):
 						continue
 				if token=='if':
 					ret=eval_macro(tokenize(line),self.defs)
-					if ret:state[-1]=accepted
-					else:state[-1]=ignored
+					if ret:
+						state[-1]=accepted
+					else:
+						state[-1]=ignored
 				elif token=='ifdef':
 					m=re_mac.match(line)
-					if m and m.group(0)in self.defs:state[-1]=accepted
-					else:state[-1]=ignored
+					if m and m.group()in self.defs:
+						state[-1]=accepted
+					else:
+						state[-1]=ignored
 				elif token=='ifndef':
 					m=re_mac.match(line)
-					if m and m.group(0)in self.defs:state[-1]=ignored
-					else:state[-1]=accepted
+					if m and m.group()in self.defs:
+						state[-1]=ignored
+					else:
+						state[-1]=accepted
 				elif token=='include'or token=='import':
 					(kind,inc)=extract_include(line,self.defs)
-					if inc in self.ban_includes:
-						continue
-					if token=='import':self.ban_includes.add(inc)
-					if ve:debug('preproc: include found %s    (%s) ',inc,kind)
-					if kind=='"'or not strict_quotes:
-						self.tryfind(inc)
+					self.current_file=self.tryfind(inc,kind,env)
+					if token=='import':
+						self.ban_includes.add(self.current_file)
 				elif token=='elif':
 					if state[-1]==accepted:
 						state[-1]=skipped
@@ -569,38 +637,36 @@ class c_parser(object):
 						if eval_macro(tokenize(line),self.defs):
 							state[-1]=accepted
 				elif token=='else':
-					if state[-1]==accepted:state[-1]=skipped
-					elif state[-1]==ignored:state[-1]=accepted
+					if state[-1]==accepted:
+						state[-1]=skipped
+					elif state[-1]==ignored:
+						state[-1]=accepted
 				elif token=='define':
 					try:
-						self.defs[define_name(line)]=line
-					except:
-						raise PreprocError("Invalid define line %s"%line)
+						self.defs[self.define_name(line)]=line
+					except AttributeError:
+						raise PreprocError('Invalid define line %r'%line)
 				elif token=='undef':
 					m=re_mac.match(line)
-					if m and m.group(0)in self.defs:
-						self.defs.__delitem__(m.group(0))
+					if m and m.group()in self.defs:
+						self.defs.__delitem__(m.group())
 				elif token=='pragma':
 					if re_pragma_once.match(line.lower()):
-						self.ban_includes.add(self.curfile)
-			except Exception ,e:
+						self.ban_includes.add(self.current_file)
+			except Exception as e:
 				if Logs.verbose:
-					debug('preproc: line parsing failed (%s): %s %s',e,line,Utils.ex_stack())
+					Logs.debug('preproc: line parsing failed (%s): %s %s',e,line,traceback.format_exc())
+	def define_name(self,line):
+		return re_mac.match(line).group()
 def scan(task):
-	global go_absolute
 	try:
 		incn=task.generator.includes_nodes
 	except AttributeError:
 		raise Errors.WafError('%r is missing a feature such as "c", "cxx" or "includes": '%task.generator)
 	if go_absolute:
-		nodepaths=incn+standard_includes
+		nodepaths=incn+[task.generator.bld.root.find_dir(x)for x in standard_includes]
 	else:
 		nodepaths=[x for x in incn if x.is_child_of(x.ctx.srcnode)or x.is_child_of(x.ctx.bldnode)]
 	tmp=c_parser(nodepaths)
 	tmp.start(task.inputs[0],task.env)
-	if Logs.verbose:
-		debug('deps: deps for %r: %r; unresolved %r'%(task.inputs,tmp.nodes,tmp.names))
 	return(tmp.nodes,tmp.names)
-
-Utils.run_once(tokenize)
-Utils.run_once(define_name)
\ No newline at end of file
--- pugl-0~svn32+dfsg0.orig/waflib/Tools/c_tests.py
+++ pugl-0~svn32+dfsg0/waflib/Tools/c_tests.py
@@ -1,11 +1,10 @@
 #! /usr/bin/env python
 # encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
 
 from waflib import Task
 from waflib.Configure import conf
 from waflib.TaskGen import feature,before_method,after_method
-import sys
 LIB_CODE='''
 #ifdef _MSC_VER
 #define testEXPORT __declspec(dllexport)
@@ -21,8 +20,13 @@ MAIN_CODE='''
 #define testEXPORT
 #endif
 testEXPORT int lib_func(void);
-int main(void) {return !(lib_func() == 9);}
+int main(int argc, char **argv) {
+	(void)argc; (void)argv;
+	return !(lib_func() == 9);
+}
 '''
+@feature('link_lib_test')
+@before_method('process_source')
 def link_lib_test_fun(self):
 	def write_test_file(task):
 		task.outputs[0].write(task.generator.code)
@@ -37,12 +41,13 @@ def link_lib_test_fun(self):
 	bld(rule=write_test_file,target='main.'+mode,code=MAIN_CODE)
 	bld(features='%sshlib'%m,source='test.'+mode,target='test')
 	bld(features='%sprogram %s'%(m,ex),source='main.'+mode,target='app',use='test',rpath=rpath)
+@conf
 def check_library(self,mode=None,test_exec=True):
 	if not mode:
 		mode='c'
 		if self.env.CXX:
 			mode='cxx'
-	self.check(compile_filename=[],features='link_lib_test',msg='Checking for libraries',mode=mode,test_exec=test_exec,)
+	self.check(compile_filename=[],features='link_lib_test',msg='Checking for libraries',mode=mode,test_exec=test_exec)
 INLINE_CODE='''
 typedef int foo_t;
 static %s foo_t static_foo () {return 0; }
@@ -51,6 +56,7 @@ static %s foo_t static_foo () {return 0;
 }
 '''
 INLINE_VALUES=['inline','__inline__','__inline']
+@conf
 def check_inline(self,**kw):
 	self.start_msg('Checking for inline')
 	if not'define_name'in kw:
@@ -72,7 +78,13 @@ def check_inline(self,**kw):
 				self.define('inline',x,quote=False)
 			return x
 	self.fatal('could not use inline functions')
-LARGE_FRAGMENT='#include <unistd.h>\nint main() { return !(sizeof(off_t) >= 8); }\n'
+LARGE_FRAGMENT='''#include <unistd.h>
+int main(int argc, char **argv) {
+	(void)argc; (void)argv;
+	return !(sizeof(off_t) >= 8);
+}
+'''
+@conf
 def check_large_file(self,**kw):
 	if not'define_name'in kw:
 		kw['define_name']='HAVE_LARGEFILE'
@@ -105,9 +117,15 @@ def check_large_file(self,**kw):
 		return ret
 	self.fatal('There is no support for large files')
 ENDIAN_FRAGMENT='''
+#ifdef _MSC_VER
+#define testshlib_EXPORT __declspec(dllexport)
+#else
+#define testshlib_EXPORT
+#endif
+
 short int ascii_mm[] = { 0x4249, 0x4765, 0x6E44, 0x6961, 0x6E53, 0x7953, 0 };
 short int ascii_ii[] = { 0x694C, 0x5454, 0x656C, 0x6E45, 0x6944, 0x6E61, 0 };
-int use_ascii (int i) {
+int testshlib_EXPORT use_ascii (int i) {
 	return ascii_mm[i] + ascii_ii[i];
 }
 short int ebcdic_ii[] = { 0x89D3, 0xE3E3, 0x8593, 0x95C5, 0x89C4, 0x9581, 0 };
@@ -120,27 +138,21 @@ extern int foo;
 class grep_for_endianness(Task.Task):
 	color='PINK'
 	def run(self):
-		txt=self.inputs[0].read(flags='rb').decode('iso8859-1')
+		txt=self.inputs[0].read(flags='rb').decode('latin-1')
 		if txt.find('LiTTleEnDian')>-1:
 			self.generator.tmp.append('little')
 		elif txt.find('BIGenDianSyS')>-1:
 			self.generator.tmp.append('big')
 		else:
 			return-1
+@feature('grep_for_endianness')
+@after_method('apply_link')
 def grep_for_endianness_fun(self):
-	self.create_task('grep_for_endianness',self.compiled_tasks[0].outputs[0])
+	self.create_task('grep_for_endianness',self.link_task.outputs[0])
+@conf
 def check_endianness(self):
 	tmp=[]
 	def check_msg(self):
 		return tmp[0]
-	self.check(fragment=ENDIAN_FRAGMENT,features='c grep_for_endianness',msg="Checking for endianness",define='ENDIANNESS',tmp=tmp,okmsg=check_msg)
+	self.check(fragment=ENDIAN_FRAGMENT,features='c cshlib grep_for_endianness',msg='Checking for endianness',define='ENDIANNESS',tmp=tmp,okmsg=check_msg,confcache=None)
 	return tmp[0]
-
-feature('link_lib_test')(link_lib_test_fun)
-before_method('process_source')(link_lib_test_fun)
-conf(check_library)
-conf(check_inline)
-conf(check_large_file)
-feature('grep_for_endianness')(grep_for_endianness_fun)
-after_method('process_source')(grep_for_endianness_fun)
-conf(check_endianness)
\ No newline at end of file
--- pugl-0~svn32+dfsg0.orig/waflib/Tools/ccroot.py
+++ pugl-0~svn32+dfsg0/waflib/Tools/ccroot.py
@@ -1,28 +1,26 @@
 #! /usr/bin/env python
 # encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
 
-import sys
-if sys.hexversion < 0x020400f0: from sets import Set as set
-import os,sys,re
-from waflib import TaskGen,Task,Utils,Logs,Build,Options,Node,Errors
-from waflib.Logs import error,debug,warn
+import os,re
+from waflib import Task,Utils,Node,Errors,Logs
 from waflib.TaskGen import after_method,before_method,feature,taskgen_method,extension
 from waflib.Tools import c_aliases,c_preproc,c_config,c_osx,c_tests
 from waflib.Configure import conf
+SYSTEM_LIB_PATHS=['/usr/lib64','/usr/lib','/usr/local/lib64','/usr/local/lib']
 USELIB_VARS=Utils.defaultdict(set)
 USELIB_VARS['c']=set(['INCLUDES','FRAMEWORKPATH','DEFINES','CPPFLAGS','CCDEPS','CFLAGS','ARCH'])
 USELIB_VARS['cxx']=set(['INCLUDES','FRAMEWORKPATH','DEFINES','CPPFLAGS','CXXDEPS','CXXFLAGS','ARCH'])
 USELIB_VARS['d']=set(['INCLUDES','DFLAGS'])
-USELIB_VARS['cprogram']=USELIB_VARS['cxxprogram']=set(['LIB','STLIB','LIBPATH','STLIBPATH','LINKFLAGS','RPATH','LINKDEPS','FRAMEWORK','FRAMEWORKPATH','ARCH'])
-USELIB_VARS['cshlib']=USELIB_VARS['cxxshlib']=set(['LIB','STLIB','LIBPATH','STLIBPATH','LINKFLAGS','RPATH','LINKDEPS','FRAMEWORK','FRAMEWORKPATH','ARCH'])
+USELIB_VARS['includes']=set(['INCLUDES','FRAMEWORKPATH','ARCH'])
+USELIB_VARS['cprogram']=USELIB_VARS['cxxprogram']=set(['LIB','STLIB','LIBPATH','STLIBPATH','LINKFLAGS','RPATH','LINKDEPS','FRAMEWORK','FRAMEWORKPATH','ARCH','LDFLAGS'])
+USELIB_VARS['cshlib']=USELIB_VARS['cxxshlib']=set(['LIB','STLIB','LIBPATH','STLIBPATH','LINKFLAGS','RPATH','LINKDEPS','FRAMEWORK','FRAMEWORKPATH','ARCH','LDFLAGS'])
 USELIB_VARS['cstlib']=USELIB_VARS['cxxstlib']=set(['ARFLAGS','LINKDEPS'])
 USELIB_VARS['dprogram']=set(['LIB','STLIB','LIBPATH','STLIBPATH','LINKFLAGS','RPATH','LINKDEPS'])
 USELIB_VARS['dshlib']=set(['LIB','STLIB','LIBPATH','STLIBPATH','LINKFLAGS','RPATH','LINKDEPS'])
 USELIB_VARS['dstlib']=set(['ARFLAGS','LINKDEPS'])
-USELIB_VARS['go']=set(['GOCFLAGS'])
-USELIB_VARS['goprogram']=set(['GOLFLAGS'])
 USELIB_VARS['asm']=set(['ASFLAGS'])
+@taskgen_method
 def create_compiled_task(self,name,node):
 	out='%s.%d.o'%(node.name,self.idx)
 	task=self.create_task(name,node,node.parent.find_or_declare(out))
@@ -31,9 +29,10 @@ def create_compiled_task(self,name,node)
 	except AttributeError:
 		self.compiled_tasks=[task]
 	return task
+@taskgen_method
 def to_incnodes(self,inlst):
 	lst=[]
-	seen=set([])
+	seen=set()
 	for x in self.to_list(inlst):
 		if x in seen or not x:
 			continue
@@ -55,36 +54,88 @@ def to_incnodes(self,inlst):
 				lst.append(p)
 				lst.append(v)
 	return lst
+@feature('c','cxx','d','asm','fc','includes')
+@after_method('propagate_uselib_vars','process_source')
 def apply_incpaths(self):
-	lst=self.to_incnodes(self.to_list(getattr(self,'includes',[]))+self.env['INCLUDES'])
+	lst=self.to_incnodes(self.to_list(getattr(self,'includes',[]))+self.env.INCLUDES)
 	self.includes_nodes=lst
-	self.env['INCPATHS']=[x.abspath()for x in lst]
+	cwd=self.get_cwd()
+	self.env.INCPATHS=[x.path_from(cwd)for x in lst]
 class link_task(Task.Task):
 	color='YELLOW'
+	weight=3
 	inst_to=None
-	chmod=Utils.O644
+	chmod=Utils.O755
 	def add_target(self,target):
 		if isinstance(target,str):
+			base=self.generator.path
+			if target.startswith('#'):
+				target=target[1:]
+				base=self.generator.bld.bldnode
 			pattern=self.env[self.__class__.__name__+'_PATTERN']
 			if not pattern:
 				pattern='%s'
 			folder,name=os.path.split(target)
-			if self.__class__.__name__.find('shlib')>0:
-				if self.env.DEST_BINFMT=='pe'and getattr(self.generator,'vnum',None):
-					name=name+'-'+self.generator.vnum.split('.')[0]
-			tmp=folder+os.sep+pattern%name
-			target=self.generator.path.find_or_declare(tmp)
+			if self.__class__.__name__.find('shlib')>0 and getattr(self.generator,'vnum',None):
+				nums=self.generator.vnum.split('.')
+				if self.env.DEST_BINFMT=='pe':
+					name=name+'-'+nums[0]
+				elif self.env.DEST_OS=='openbsd':
+					pattern='%s.%s'%(pattern,nums[0])
+					if len(nums)>=2:
+						pattern+='.%s'%nums[1]
+			if folder:
+				tmp=folder+os.sep+pattern%name
+			else:
+				tmp=pattern%name
+			target=base.find_or_declare(tmp)
 		self.set_outputs(target)
+	def exec_command(self,*k,**kw):
+		ret=super(link_task,self).exec_command(*k,**kw)
+		if not ret and self.env.DO_MANIFEST:
+			ret=self.exec_mf()
+		return ret
+	def exec_mf(self):
+		if not self.env.MT:
+			return 0
+		manifest=None
+		for out_node in self.outputs:
+			if out_node.name.endswith('.manifest'):
+				manifest=out_node.abspath()
+				break
+		else:
+			return 0
+		mode=''
+		for x in Utils.to_list(self.generator.features):
+			if x in('cprogram','cxxprogram','fcprogram','fcprogram_test'):
+				mode=1
+			elif x in('cshlib','cxxshlib','fcshlib'):
+				mode=2
+		Logs.debug('msvc: embedding manifest in mode %r',mode)
+		lst=[]+self.env.MT
+		lst.extend(Utils.to_list(self.env.MTFLAGS))
+		lst.extend(['-manifest',manifest])
+		lst.append('-outputresource:%s;%s'%(self.outputs[0].abspath(),mode))
+		return super(link_task,self).exec_command(lst)
 class stlink_task(link_task):
 	run_str='${AR} ${ARFLAGS} ${AR_TGT_F}${TGT} ${AR_SRC_F}${SRC}'
+	chmod=Utils.O644
 def rm_tgt(cls):
 	old=cls.run
 	def wrap(self):
-		try:os.remove(self.outputs[0].abspath())
-		except OSError:pass
+		try:
+			os.remove(self.outputs[0].abspath())
+		except OSError:
+			pass
 		return old(self)
 	setattr(cls,'run',wrap)
 rm_tgt(stlink_task)
+@feature('skip_stlib_link_deps')
+@before_method('process_use')
+def apply_skip_stlib_link_deps(self):
+	self.env.SKIP_STLIB_LINK_DEPS=True
+@feature('c','cxx','d','fc','asm')
+@after_method('process_source')
 def apply_link(self):
 	for x in self.features:
 		if x=='cprogram'and'cxx'in self.features:
@@ -103,9 +154,10 @@ def apply_link(self):
 	try:
 		inst_to=self.install_path
 	except AttributeError:
-		inst_to=self.link_task.__class__.inst_to
+		inst_to=self.link_task.inst_to
 	if inst_to:
-		self.install_task=self.bld.install_files(inst_to,self.link_task.outputs[:],env=self.env,chmod=self.link_task.chmod)
+		self.install_task=self.add_install_files(install_to=inst_to,install_from=self.link_task.outputs[:],chmod=self.link_task.chmod,task=self.link_task)
+@taskgen_method
 def use_rec(self,name,**kw):
 	if name in self.tmp_use_not or name in self.tmp_use_seen:
 		return
@@ -125,21 +177,26 @@ def use_rec(self,name,**kw):
 		y.tmp_use_var=''
 	else:
 		objects=False
-		if not isinstance(y.link_task,stlink_task):
+		if not isinstance(link_task,stlink_task):
 			stlib=False
 			y.tmp_use_var='LIB'
 		else:
 			y.tmp_use_var='STLIB'
 	p=self.tmp_use_prec
 	for x in self.to_list(getattr(y,'use',[])):
+		if self.env["STLIB_"+x]:
+			continue
 		try:
 			p[x].append(name)
-		except:
+		except KeyError:
 			p[x]=[name]
 		self.use_rec(x,objects=objects,stlib=stlib)
+@feature('c','cxx','d','use','fc')
+@before_method('apply_incpaths','propagate_uselib_vars')
+@after_method('apply_link','process_source')
 def process_use(self):
-	use_not=self.tmp_use_not=set([])
-	use_seen=self.tmp_use_seen=[]
+	use_not=self.tmp_use_not=set()
+	self.tmp_use_seen=[]
 	use_prec=self.tmp_use_prec={}
 	self.uselib=self.to_list(getattr(self,'uselib',[]))
 	self.includes=self.to_list(getattr(self,'includes',[]))
@@ -149,7 +206,7 @@ def process_use(self):
 	for x in use_not:
 		if x in use_prec:
 			del use_prec[x]
-	out=[]
+	out=self.tmp_use_sorted=[]
 	tmp=[]
 	for x in self.tmp_use_seen:
 		for k in use_prec.values():
@@ -180,26 +237,34 @@ def process_use(self):
 		y=self.bld.get_tgen_by_name(x)
 		var=y.tmp_use_var
 		if var and link_task:
-			if var=='LIB'or y.tmp_use_stlib:
+			if self.env.SKIP_STLIB_LINK_DEPS and isinstance(link_task,stlink_task):
+				pass
+			elif var=='LIB'or y.tmp_use_stlib or x in names:
 				self.env.append_value(var,[y.target[y.target.rfind(os.sep)+1:]])
 				self.link_task.dep_nodes.extend(y.link_task.outputs)
-				tmp_path=y.link_task.outputs[0].parent.path_from(self.bld.bldnode)
-				self.env.append_value(var+'PATH',[tmp_path])
+				tmp_path=y.link_task.outputs[0].parent.path_from(self.get_cwd())
+				self.env.append_unique(var+'PATH',[tmp_path])
 		else:
 			if y.tmp_use_objects:
 				self.add_objects_from_tgen(y)
 		if getattr(y,'export_includes',None):
-			self.includes.extend(y.to_incnodes(y.export_includes))
+			self.includes=self.includes+y.to_incnodes(y.export_includes)
+		if getattr(y,'export_defines',None):
+			self.env.append_value('DEFINES',self.to_list(y.export_defines))
 	for x in names:
 		try:
 			y=self.bld.get_tgen_by_name(x)
-		except:
+		except Errors.WafError:
 			if not self.env['STLIB_'+x]and not x in self.uselib:
 				self.uselib.append(x)
 		else:
-			for k in self.to_list(getattr(y,'uselib',[])):
+			for k in self.to_list(getattr(y,'use',[])):
 				if not self.env['STLIB_'+k]and not k in self.uselib:
 					self.uselib.append(k)
+@taskgen_method
+def accept_node_to_link(self,node):
+	return not node.name.endswith('.pdb')
+@taskgen_method
 def add_objects_from_tgen(self,tg):
 	try:
 		link_task=self.link_task
@@ -208,27 +273,33 @@ def add_objects_from_tgen(self,tg):
 	else:
 		for tsk in getattr(tg,'compiled_tasks',[]):
 			for x in tsk.outputs:
-				if x.name.endswith('.o')or x.name.endswith('.obj'):
+				if self.accept_node_to_link(x):
 					link_task.inputs.append(x)
+@taskgen_method
 def get_uselib_vars(self):
-	_vars=set([])
+	_vars=set()
 	for x in self.features:
 		if x in USELIB_VARS:
 			_vars|=USELIB_VARS[x]
 	return _vars
+@feature('c','cxx','d','fc','javac','cs','uselib','asm')
+@after_method('process_use')
 def propagate_uselib_vars(self):
 	_vars=self.get_uselib_vars()
 	env=self.env
-	for x in _vars:
-		y=x.lower()
-		env.append_unique(x,self.to_list(getattr(self,y,[])))
-	for x in self.features:
-		for var in _vars:
-			compvar='%s_%s'%(var,x)
-			env.append_value(var,env[compvar])
-	for x in self.to_list(getattr(self,'uselib',[])):
-		for v in _vars:
-			env.append_value(v,env[v+'_'+x])
+	app=env.append_value
+	feature_uselib=self.features+self.to_list(getattr(self,'uselib',[]))
+	for var in _vars:
+		y=var.lower()
+		val=getattr(self,y,[])
+		if val:
+			app(var,self.to_list(val))
+		for x in feature_uselib:
+			val=env['%s_%s'%(var,x)]
+			if val:
+				app(var,val)
+@feature('cshlib','cxxshlib','fcshlib')
+@after_method('apply_link')
 def apply_implib(self):
 	if not self.env.DEST_BINFMT=='pe':
 		return
@@ -237,58 +308,93 @@ def apply_implib(self):
 		name=self.target.name
 	else:
 		name=os.path.split(self.target)[1]
-	implib=self.env['implib_PATTERN']%name
+	implib=self.env.implib_PATTERN%name
 	implib=dll.parent.find_or_declare(implib)
-	self.env.append_value('LINKFLAGS',self.env['IMPLIB_ST']%implib.bldpath())
+	self.env.append_value('LINKFLAGS',self.env.IMPLIB_ST%implib.bldpath())
 	self.link_task.outputs.append(implib)
 	if getattr(self,'defs',None)and self.env.DEST_BINFMT=='pe':
 		node=self.path.find_resource(self.defs)
 		if not node:
 			raise Errors.WafError('invalid def file %r'%self.defs)
-		if'msvc'in(self.env.CC_NAME,self.env.CXX_NAME):
-			self.env.append_value('LINKFLAGS','/def:%s'%node.path_from(self.bld.bldnode))
+		if self.env.def_PATTERN:
+			self.env.append_value('LINKFLAGS',self.env.def_PATTERN%node.path_from(self.get_cwd()))
 			self.link_task.dep_nodes.append(node)
 		else:
 			self.link_task.inputs.append(node)
-	try:
-		inst_to=self.install_path
-	except AttributeError:
-		inst_to=self.link_task.__class__.inst_to
-	if not inst_to:
-		return
-	self.implib_install_task=self.bld.install_as('${PREFIX}/lib/%s'%implib.name,implib,self.env)
+	if getattr(self,'install_task',None):
+		try:
+			inst_to=self.install_path_implib
+		except AttributeError:
+			try:
+				inst_to=self.install_path
+			except AttributeError:
+				inst_to='${IMPLIBDIR}'
+				self.install_task.install_to='${BINDIR}'
+				if not self.env.IMPLIBDIR:
+					self.env.IMPLIBDIR=self.env.LIBDIR
+		self.implib_install_task=self.add_install_files(install_to=inst_to,install_from=implib,chmod=self.link_task.chmod,task=self.link_task)
+re_vnum=re.compile('^([1-9]\\d*|0)([.]([1-9]\\d*|0)){0,2}?$')
+@feature('cshlib','cxxshlib','dshlib','fcshlib','vnum')
+@after_method('apply_link','propagate_uselib_vars')
 def apply_vnum(self):
 	if not getattr(self,'vnum','')or os.name!='posix'or self.env.DEST_BINFMT not in('elf','mac-o'):
 		return
 	link=self.link_task
+	if not re_vnum.match(self.vnum):
+		raise Errors.WafError('Invalid vnum %r for target %r'%(self.vnum,getattr(self,'name',self)))
 	nums=self.vnum.split('.')
 	node=link.outputs[0]
+	cnum=getattr(self,'cnum',str(nums[0]))
+	cnums=cnum.split('.')
+	if len(cnums)>len(nums)or nums[0:len(cnums)]!=cnums:
+		raise Errors.WafError('invalid compatibility version %s'%cnum)
 	libname=node.name
 	if libname.endswith('.dylib'):
 		name3=libname.replace('.dylib','.%s.dylib'%self.vnum)
-		name2=libname.replace('.dylib','.%s.dylib'%nums[0])
+		name2=libname.replace('.dylib','.%s.dylib'%cnum)
 	else:
 		name3=libname+'.'+self.vnum
-		name2=libname+'.'+nums[0]
+		name2=libname+'.'+cnum
 	if self.env.SONAME_ST:
 		v=self.env.SONAME_ST%name2
 		self.env.append_value('LINKFLAGS',v.split())
-	tsk=self.create_task('vnum',node,[node.parent.find_or_declare(name2),node.parent.find_or_declare(name3)])
-	if getattr(self.bld,'is_install',None):
-		self.install_task.hasrun=Task.SKIP_ME
-		bld=self.bld
-		path=self.install_task.dest
-		t1=bld.install_as(path+os.sep+name3,node,env=self.env,chmod=self.link_task.chmod)
-		t2=bld.symlink_as(path+os.sep+name2,name3)
-		t3=bld.symlink_as(path+os.sep+libname,name3)
-		self.vnum_install_task=(t1,t2,t3)
-	if'-dynamiclib'in self.env['LINKFLAGS']and getattr(self,'install_task',None):
-		path=os.path.join(self.install_task.get_install_path(),self.link_task.outputs[0].name)
-		self.env.append_value('LINKFLAGS',['-install_name',path])
+	if self.env.DEST_OS!='openbsd':
+		outs=[node.parent.make_node(name3)]
+		if name2!=name3:
+			outs.append(node.parent.make_node(name2))
+		self.create_task('vnum',node,outs)
+	if getattr(self,'install_task',None):
+		self.install_task.hasrun=Task.SKIPPED
+		self.install_task.no_errcheck_out=True
+		path=self.install_task.install_to
+		if self.env.DEST_OS=='openbsd':
+			libname=self.link_task.outputs[0].name
+			t1=self.add_install_as(install_to='%s/%s'%(path,libname),install_from=node,chmod=self.link_task.chmod)
+			self.vnum_install_task=(t1,)
+		else:
+			t1=self.add_install_as(install_to=path+os.sep+name3,install_from=node,chmod=self.link_task.chmod)
+			t3=self.add_symlink_as(install_to=path+os.sep+libname,install_from=name3)
+			if name2!=name3:
+				t2=self.add_symlink_as(install_to=path+os.sep+name2,install_from=name3)
+				self.vnum_install_task=(t1,t2,t3)
+			else:
+				self.vnum_install_task=(t1,t3)
+	if'-dynamiclib'in self.env.LINKFLAGS:
+		try:
+			inst_to=self.install_path
+		except AttributeError:
+			inst_to=self.link_task.inst_to
+		if inst_to:
+			p=Utils.subst_vars(inst_to,self.env)
+			path=os.path.join(p,name2)
+			self.env.append_value('LINKFLAGS',['-install_name',path])
+			self.env.append_value('LINKFLAGS','-Wl,-compatibility_version,%s'%cnum)
+			self.env.append_value('LINKFLAGS','-Wl,-current_version,%s'%self.vnum)
 class vnum(Task.Task):
 	color='CYAN'
-	quient=True
 	ext_in=['.bin']
+	def keyword(self):
+		return'Symlinking'
 	def run(self):
 		for x in self.outputs:
 			path=x.abspath()
@@ -305,26 +411,25 @@ class fake_shlib(link_task):
 		for t in self.run_after:
 			if not t.hasrun:
 				return Task.ASK_LATER
-		for x in self.outputs:
-			x.sig=Utils.h_file(x.abspath())
 		return Task.SKIP_ME
 class fake_stlib(stlink_task):
 	def runnable_status(self):
 		for t in self.run_after:
 			if not t.hasrun:
 				return Task.ASK_LATER
-		for x in self.outputs:
-			x.sig=Utils.h_file(x.abspath())
 		return Task.SKIP_ME
-def read_shlib(self,name,paths=[]):
-	return self(name=name,features='fake_lib',lib_paths=paths,lib_type='shlib')
-def read_stlib(self,name,paths=[]):
-	return self(name=name,features='fake_lib',lib_paths=paths,lib_type='stlib')
-lib_patterns={'shlib':['lib%s.so','%s.so','lib%s.dll','%s.dll'],'stlib':['lib%s.a','%s.a','lib%s.dll','%s.dll','lib%s.lib','%s.lib'],}
+@conf
+def read_shlib(self,name,paths=[],export_includes=[],export_defines=[]):
+	return self(name=name,features='fake_lib',lib_paths=paths,lib_type='shlib',export_includes=export_includes,export_defines=export_defines)
+@conf
+def read_stlib(self,name,paths=[],export_includes=[],export_defines=[]):
+	return self(name=name,features='fake_lib',lib_paths=paths,lib_type='stlib',export_includes=export_includes,export_defines=export_defines)
+lib_patterns={'shlib':['lib%s.so','%s.so','lib%s.dylib','lib%s.dll','%s.dll'],'stlib':['lib%s.a','%s.a','lib%s.dll','%s.dll','lib%s.lib','%s.lib'],}
+@feature('fake_lib')
 def process_lib(self):
 	node=None
 	names=[x%self.name for x in lib_patterns[self.lib_type]]
-	for x in self.lib_paths+[self.path,'/usr/lib64','/usr/lib','/usr/local/lib64','/usr/local/lib']:
+	for x in self.lib_paths+[self.path]+SYSTEM_LIB_PATHS:
 		if not isinstance(x,Node.Node):
 			x=self.bld.root.find_node(x)or self.path.find_node(x)
 			if not x:
@@ -332,7 +437,10 @@ def process_lib(self):
 		for y in names:
 			node=x.find_node(y)
 			if node:
-				node.sig=Utils.h_file(node.abspath())
+				try:
+					Utils.h_file(node.abspath())
+				except EnvironmentError:
+					raise ValueError('Could not read %r'%y)
 				break
 		else:
 			continue
@@ -344,32 +452,35 @@ def process_lib(self):
 class fake_o(Task.Task):
 	def runnable_status(self):
 		return Task.SKIP_ME
+@extension('.o','.obj')
 def add_those_o_files(self,node):
 	tsk=self.create_task('fake_o',[],node)
 	try:
 		self.compiled_tasks.append(tsk)
 	except AttributeError:
 		self.compiled_tasks=[tsk]
-
-taskgen_method(create_compiled_task)
-taskgen_method(to_incnodes)
-feature('c','cxx','d','go','asm','fc','includes')(apply_incpaths)
-after_method('propagate_uselib_vars','process_source')(apply_incpaths)
-feature('c','cxx','d','go','fc','asm')(apply_link)
-after_method('process_source')(apply_link)
-taskgen_method(use_rec)
-feature('c','cxx','d','use','fc')(process_use)
-before_method('apply_incpaths','propagate_uselib_vars')(process_use)
-after_method('apply_link','process_source')(process_use)
-taskgen_method(add_objects_from_tgen)
-taskgen_method(get_uselib_vars)
-feature('c','cxx','d','fc','javac','cs','uselib')(propagate_uselib_vars)
-after_method('process_use')(propagate_uselib_vars)
-feature('cshlib','cxxshlib','fcshlib')(apply_implib)
-after_method('apply_link')(apply_implib)
-feature('cshlib','cxxshlib','dshlib','fcshlib','vnum')(apply_vnum)
-after_method('apply_link')(apply_vnum)
-conf(read_shlib)
-conf(read_stlib)
-feature('fake_lib')(process_lib)
-extension('.o','.obj')(add_those_o_files)
\ No newline at end of file
+@feature('fake_obj')
+@before_method('process_source')
+def process_objs(self):
+	for node in self.to_nodes(self.source):
+		self.add_those_o_files(node)
+	self.source=[]
+@conf
+def read_object(self,obj):
+	if not isinstance(obj,self.path.__class__):
+		obj=self.path.find_resource(obj)
+	return self(features='fake_obj',source=obj,name=obj.name)
+@feature('cxxprogram','cprogram')
+@after_method('apply_link','process_use')
+def set_full_paths_hpux(self):
+	if self.env.DEST_OS!='hp-ux':
+		return
+	base=self.bld.bldnode.abspath()
+	for var in['LIBPATH','STLIBPATH']:
+		lst=[]
+		for x in self.env[var]:
+			if x.startswith('/'):
+				lst.append(x)
+			else:
+				lst.append(os.path.normpath(os.path.join(base,x)))
+		self.env[var]=lst
--- /dev/null
+++ pugl-0~svn32+dfsg0/waflib/Tools/clang.py
@@ -0,0 +1,20 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+from waflib.Tools import ccroot,ar,gcc
+from waflib.Configure import conf
+@conf
+def find_clang(conf):
+	cc=conf.find_program('clang',var='CC')
+	conf.get_cc_version(cc,clang=True)
+	conf.env.CC_NAME='clang'
+def configure(conf):
+	conf.find_clang()
+	conf.find_program(['llvm-ar','ar'],var='AR')
+	conf.find_ar()
+	conf.gcc_common_flags()
+	conf.gcc_modifier_platform()
+	conf.cc_load_tools()
+	conf.cc_add_flags()
+	conf.link_add_flags()
--- /dev/null
+++ pugl-0~svn32+dfsg0/waflib/Tools/clangxx.py
@@ -0,0 +1,20 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+from waflib.Tools import ccroot,ar,gxx
+from waflib.Configure import conf
+@conf
+def find_clangxx(conf):
+	cxx=conf.find_program('clang++',var='CXX')
+	conf.get_cc_version(cxx,clang=True)
+	conf.env.CXX_NAME='clang'
+def configure(conf):
+	conf.find_clangxx()
+	conf.find_program(['llvm-ar','ar'],var='AR')
+	conf.find_ar()
+	conf.gxx_common_flags()
+	conf.gxx_modifier_platform()
+	conf.cxx_load_tools()
+	conf.cxx_add_flags()
+	conf.link_add_flags()
--- pugl-0~svn32+dfsg0.orig/waflib/Tools/compiler_c.py
+++ pugl-0~svn32+dfsg0/waflib/Tools/compiler_c.py
@@ -1,39 +1,44 @@
 #! /usr/bin/env python
 # encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
 
-import os,sys,imp,types
+import re
 from waflib.Tools import ccroot
-from waflib import Utils,Configure
+from waflib import Utils
 from waflib.Logs import debug
-c_compiler={'win32':['msvc','gcc'],'cygwin':['gcc'],'darwin':['gcc'],'aix':['xlc','gcc'],'linux':['gcc','icc'],'sunos':['suncc','gcc'],'irix':['gcc','irixcc'],'hpux':['gcc'],'gnu':['gcc'],'java':['gcc','msvc','icc'],'default':['gcc'],}
+c_compiler={'win32':['msvc','gcc','clang'],'cygwin':['gcc'],'darwin':['clang','gcc'],'aix':['xlc','gcc','clang'],'linux':['gcc','clang','icc'],'sunos':['suncc','gcc'],'irix':['gcc','irixcc'],'hpux':['gcc'],'osf1V':['gcc'],'gnu':['gcc','clang'],'java':['gcc','msvc','clang','icc'],'default':['clang','gcc'],}
+def default_compilers():
+	build_platform=Utils.unversioned_sys_platform()
+	possible_compiler_list=c_compiler.get(build_platform,c_compiler['default'])
+	return' '.join(possible_compiler_list)
 def configure(conf):
-	try:test_for_compiler=conf.options.check_c_compiler
-	except AttributeError:conf.fatal("Add options(opt): opt.load('compiler_c')")
-	for compiler in test_for_compiler.split():
+	try:
+		test_for_compiler=conf.options.check_c_compiler or default_compilers()
+	except AttributeError:
+		conf.fatal("Add options(opt): opt.load('compiler_c')")
+	for compiler in re.split('[ ,]+',test_for_compiler):
 		conf.env.stash()
-		conf.start_msg('Checking for %r (c compiler)'%compiler)
+		conf.start_msg('Checking for %r (C compiler)'%compiler)
 		try:
 			conf.load(compiler)
-		except conf.errors.ConfigurationError ,e:
+		except conf.errors.ConfigurationError as e:
 			conf.env.revert()
 			conf.end_msg(False)
-			debug('compiler_c: %r'%e)
+			debug('compiler_c: %r',e)
 		else:
-			if conf.env['CC']:
+			if conf.env.CC:
 				conf.end_msg(conf.env.get_flat('CC'))
-				conf.env['COMPILER_CC']=compiler
+				conf.env.COMPILER_CC=compiler
+				conf.env.commit()
 				break
+			conf.env.revert()
 			conf.end_msg(False)
 	else:
-		conf.fatal('could not configure a c compiler!')
+		conf.fatal('could not configure a C compiler!')
 def options(opt):
+	test_for_compiler=default_compilers()
 	opt.load_special_tools('c_*.py',ban=['c_dumbpreproc.py'])
-	global c_compiler
-	build_platform=Utils.unversioned_sys_platform()
-	possible_compiler_list=c_compiler[build_platform in c_compiler and build_platform or'default']
-	test_for_compiler=' '.join(possible_compiler_list)
-	cc_compiler_opts=opt.add_option_group("C Compiler Options")
-	cc_compiler_opts.add_option('--check-c-compiler',default="%s"%test_for_compiler,help='On this platform (%s) the following C-Compiler will be checked by default: "%s"'%(build_platform,test_for_compiler),dest="check_c_compiler")
+	cc_compiler_opts=opt.add_option_group('Configuration options')
+	cc_compiler_opts.add_option('--check-c-compiler',default=None,help='list of C compilers to try [%s]'%test_for_compiler,dest="check_c_compiler")
 	for x in test_for_compiler.split():
 		opt.load('%s'%x)
--- pugl-0~svn32+dfsg0.orig/waflib/Tools/compiler_cxx.py
+++ pugl-0~svn32+dfsg0/waflib/Tools/compiler_cxx.py
@@ -1,39 +1,44 @@
 #! /usr/bin/env python
 # encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
 
-import os,sys,imp,types
+import re
 from waflib.Tools import ccroot
-from waflib import Utils,Configure
+from waflib import Utils
 from waflib.Logs import debug
-cxx_compiler={'win32':['msvc','g++'],'cygwin':['g++'],'darwin':['g++'],'aix':['xlc++','g++'],'linux':['g++','icpc'],'sunos':['sunc++','g++'],'irix':['g++'],'hpux':['g++'],'gnu':['g++'],'java':['g++','msvc','icpc'],'default':['g++']}
+cxx_compiler={'win32':['msvc','g++','clang++'],'cygwin':['g++'],'darwin':['clang++','g++'],'aix':['xlc++','g++','clang++'],'linux':['g++','clang++','icpc'],'sunos':['sunc++','g++'],'irix':['g++'],'hpux':['g++'],'osf1V':['g++'],'gnu':['g++','clang++'],'java':['g++','msvc','clang++','icpc'],'default':['clang++','g++']}
+def default_compilers():
+	build_platform=Utils.unversioned_sys_platform()
+	possible_compiler_list=cxx_compiler.get(build_platform,cxx_compiler['default'])
+	return' '.join(possible_compiler_list)
 def configure(conf):
-	try:test_for_compiler=conf.options.check_cxx_compiler
-	except AttributeError:conf.fatal("Add options(opt): opt.load('compiler_cxx')")
-	for compiler in test_for_compiler.split():
+	try:
+		test_for_compiler=conf.options.check_cxx_compiler or default_compilers()
+	except AttributeError:
+		conf.fatal("Add options(opt): opt.load('compiler_cxx')")
+	for compiler in re.split('[ ,]+',test_for_compiler):
 		conf.env.stash()
-		conf.start_msg('Checking for %r (c++ compiler)'%compiler)
+		conf.start_msg('Checking for %r (C++ compiler)'%compiler)
 		try:
 			conf.load(compiler)
-		except conf.errors.ConfigurationError ,e:
+		except conf.errors.ConfigurationError as e:
 			conf.env.revert()
 			conf.end_msg(False)
-			debug('compiler_cxx: %r'%e)
+			debug('compiler_cxx: %r',e)
 		else:
-			if conf.env['CXX']:
+			if conf.env.CXX:
 				conf.end_msg(conf.env.get_flat('CXX'))
-				conf.env['COMPILER_CXX']=compiler
+				conf.env.COMPILER_CXX=compiler
+				conf.env.commit()
 				break
+			conf.env.revert()
 			conf.end_msg(False)
 	else:
-		conf.fatal('could not configure a c++ compiler!')
+		conf.fatal('could not configure a C++ compiler!')
 def options(opt):
+	test_for_compiler=default_compilers()
 	opt.load_special_tools('cxx_*.py')
-	global cxx_compiler
-	build_platform=Utils.unversioned_sys_platform()
-	possible_compiler_list=cxx_compiler[build_platform in cxx_compiler and build_platform or'default']
-	test_for_compiler=' '.join(possible_compiler_list)
-	cxx_compiler_opts=opt.add_option_group('C++ Compiler Options')
-	cxx_compiler_opts.add_option('--check-cxx-compiler',default="%s"%test_for_compiler,help='On this platform (%s) the following C++ Compiler will be checked by default: "%s"'%(build_platform,test_for_compiler),dest="check_cxx_compiler")
+	cxx_compiler_opts=opt.add_option_group('Configuration options')
+	cxx_compiler_opts.add_option('--check-cxx-compiler',default=None,help='list of C++ compilers to try [%s]'%test_for_compiler,dest="check_cxx_compiler")
 	for x in test_for_compiler.split():
 		opt.load('%s'%x)
--- pugl-0~svn32+dfsg0.orig/waflib/Tools/compiler_d.py
+++ pugl-0~svn32+dfsg0/waflib/Tools/compiler_d.py
@@ -1,30 +1,41 @@
 #! /usr/bin/env python
 # encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
 
-import os,sys,imp,types
-from waflib import Utils,Configure,Options,Logs
+import re
+from waflib import Utils,Logs
+d_compiler={'default':['gdc','dmd','ldc2']}
+def default_compilers():
+	build_platform=Utils.unversioned_sys_platform()
+	possible_compiler_list=d_compiler.get(build_platform,d_compiler['default'])
+	return' '.join(possible_compiler_list)
 def configure(conf):
-	for compiler in conf.options.dcheck.split(','):
+	try:
+		test_for_compiler=conf.options.check_d_compiler or default_compilers()
+	except AttributeError:
+		conf.fatal("Add options(opt): opt.load('compiler_d')")
+	for compiler in re.split('[ ,]+',test_for_compiler):
 		conf.env.stash()
-		conf.start_msg('Checking for %r (d compiler)'%compiler)
+		conf.start_msg('Checking for %r (D compiler)'%compiler)
 		try:
 			conf.load(compiler)
-		except conf.errors.ConfigurationError ,e:
+		except conf.errors.ConfigurationError as e:
 			conf.env.revert()
 			conf.end_msg(False)
-			Logs.debug('compiler_cxx: %r'%e)
+			Logs.debug('compiler_d: %r',e)
 		else:
 			if conf.env.D:
 				conf.end_msg(conf.env.get_flat('D'))
-				conf.env['COMPILER_D']=compiler
-				conf.env.D_COMPILER=conf.env.D
+				conf.env.COMPILER_D=compiler
+				conf.env.commit()
 				break
+			conf.env.revert()
 			conf.end_msg(False)
 	else:
-		conf.fatal('no suitable d compiler was found')
+		conf.fatal('could not configure a D compiler!')
 def options(opt):
-	d_compiler_opts=opt.add_option_group('D Compiler Options')
-	d_compiler_opts.add_option('--check-d-compiler',default='gdc,dmd',action='store',help='check for the compiler [Default:gdc,dmd]',dest='dcheck')
-	for d_compiler in['gdc','dmd']:
-		opt.load('%s'%d_compiler)
+	test_for_compiler=default_compilers()
+	d_compiler_opts=opt.add_option_group('Configuration options')
+	d_compiler_opts.add_option('--check-d-compiler',default=None,help='list of D compilers to try [%s]'%test_for_compiler,dest='check_d_compiler')
+	for x in test_for_compiler.split():
+		opt.load('%s'%x)
--- pugl-0~svn32+dfsg0.orig/waflib/Tools/compiler_fc.py
+++ pugl-0~svn32+dfsg0/waflib/Tools/compiler_fc.py
@@ -1,43 +1,43 @@
 #! /usr/bin/env python
 # encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
 
-import os,sys,imp,types
-from waflib import Utils,Configure,Options,Logs,Errors
+import re
+from waflib import Utils,Logs
 from waflib.Tools import fc
 fc_compiler={'win32':['gfortran','ifort'],'darwin':['gfortran','g95','ifort'],'linux':['gfortran','g95','ifort'],'java':['gfortran','g95','ifort'],'default':['gfortran'],'aix':['gfortran']}
-def __list_possible_compiler(platform):
-	try:
-		return fc_compiler[platform]
-	except KeyError:
-		return fc_compiler["default"]
+def default_compilers():
+	build_platform=Utils.unversioned_sys_platform()
+	possible_compiler_list=fc_compiler.get(build_platform,fc_compiler['default'])
+	return' '.join(possible_compiler_list)
 def configure(conf):
-	try:test_for_compiler=conf.options.check_fc
-	except AttributeError:conf.fatal("Add options(opt): opt.load('compiler_fc')")
-	for compiler in test_for_compiler.split():
+	try:
+		test_for_compiler=conf.options.check_fortran_compiler or default_compilers()
+	except AttributeError:
+		conf.fatal("Add options(opt): opt.load('compiler_fc')")
+	for compiler in re.split('[ ,]+',test_for_compiler):
 		conf.env.stash()
-		conf.start_msg('Checking for %r (fortran compiler)'%compiler)
+		conf.start_msg('Checking for %r (Fortran compiler)'%compiler)
 		try:
 			conf.load(compiler)
-		except conf.errors.ConfigurationError ,e:
+		except conf.errors.ConfigurationError as e:
 			conf.env.revert()
 			conf.end_msg(False)
-			Logs.debug('compiler_fortran: %r'%e)
+			Logs.debug('compiler_fortran: %r',e)
 		else:
-			if conf.env['FC']:
+			if conf.env.FC:
 				conf.end_msg(conf.env.get_flat('FC'))
 				conf.env.COMPILER_FORTRAN=compiler
+				conf.env.commit()
 				break
+			conf.env.revert()
 			conf.end_msg(False)
 	else:
-		conf.fatal('could not configure a fortran compiler!')
+		conf.fatal('could not configure a Fortran compiler!')
 def options(opt):
+	test_for_compiler=default_compilers()
 	opt.load_special_tools('fc_*.py')
-	build_platform=Utils.unversioned_sys_platform()
-	detected_platform=Options.platform
-	possible_compiler_list=__list_possible_compiler(detected_platform)
-	test_for_compiler=' '.join(possible_compiler_list)
-	fortran_compiler_opts=opt.add_option_group("Fortran Compiler Options")
-	fortran_compiler_opts.add_option('--check-fortran-compiler',default="%s"%test_for_compiler,help='On this platform (%s) the following Fortran Compiler will be checked by default: "%s"'%(detected_platform,test_for_compiler),dest="check_fc")
-	for compiler in test_for_compiler.split():
-		opt.load('%s'%compiler)
+	fortran_compiler_opts=opt.add_option_group('Configuration options')
+	fortran_compiler_opts.add_option('--check-fortran-compiler',default=None,help='list of Fortran compiler to try [%s]'%test_for_compiler,dest="check_fortran_compiler")
+	for x in test_for_compiler.split():
+		opt.load('%s'%x)
--- pugl-0~svn32+dfsg0.orig/waflib/Tools/cs.py
+++ pugl-0~svn32+dfsg0/waflib/Tools/cs.py
@@ -1,15 +1,15 @@
 #! /usr/bin/env python
 # encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
 
-import sys
-if sys.hexversion < 0x020400f0: from sets import Set as set
-from waflib import Utils,Task,Options,Logs,Errors
+from waflib import Utils,Task,Options,Errors
 from waflib.TaskGen import before_method,after_method,feature
 from waflib.Tools import ccroot
 from waflib.Configure import conf
 ccroot.USELIB_VARS['cs']=set(['CSFLAGS','ASSEMBLIES','RESOURCES'])
 ccroot.lib_patterns['csshlib']=['%s']
+@feature('cs')
+@before_method('process_source')
 def apply_cs(self):
 	cs_nodes=[]
 	no_nodes=[]
@@ -19,14 +19,17 @@ def apply_cs(self):
 		else:
 			no_nodes.append(x)
 	self.source=no_nodes
-	bintype=getattr(self,'type',self.gen.endswith('.dll')and'library'or'exe')
+	bintype=getattr(self,'bintype',self.gen.endswith('.dll')and'library'or'exe')
 	self.cs_task=tsk=self.create_task('mcs',cs_nodes,self.path.find_or_declare(self.gen))
 	tsk.env.CSTYPE='/target:%s'%bintype
 	tsk.env.OUT='/out:%s'%tsk.outputs[0].abspath()
+	self.env.append_value('CSFLAGS','/platform:%s'%getattr(self,'platform','anycpu'))
 	inst_to=getattr(self,'install_path',bintype=='exe'and'${BINDIR}'or'${LIBDIR}')
 	if inst_to:
 		mod=getattr(self,'chmod',bintype=='exe'and Utils.O755 or Utils.O644)
-		self.install_task=self.bld.install_files(inst_to,self.cs_task.outputs[:],env=self.env,chmod=mod)
+		self.install_task=self.add_install_files(install_to=inst_to,install_from=self.cs_task.outputs[:],chmod=mod)
+@feature('cs')
+@after_method('apply_cs')
 def use_cs(self):
 	names=self.to_list(getattr(self,'use',[]))
 	get=self.bld.get_tgen_by_name
@@ -34,7 +37,7 @@ def use_cs(self):
 		try:
 			y=get(x)
 		except Errors.WafError:
-			self.cs_task.env.append_value('CSFLAGS','/reference:%s'%x)
+			self.env.append_value('CSFLAGS','/reference:%s'%x)
 			continue
 		y.post()
 		tsk=getattr(y,'cs_task',None)or getattr(y,'link_task',None)
@@ -42,7 +45,9 @@ def use_cs(self):
 			self.bld.fatal('cs task has no link task for use %r'%self)
 		self.cs_task.dep_nodes.extend(tsk.outputs)
 		self.cs_task.set_run_after(tsk)
-		self.cs_task.env.append_value('CSFLAGS','/reference:%s'%tsk.outputs[0].abspath())
+		self.env.append_value('CSFLAGS','/reference:%s'%tsk.outputs[0].abspath())
+@feature('cs')
+@after_method('apply_cs','use_cs')
 def debug_cs(self):
 	csdebug=getattr(self,'csdebug',self.env.CSDEBUG)
 	if not csdebug:
@@ -53,20 +58,39 @@ def debug_cs(self):
 	else:
 		out=node.change_ext('.pdb')
 	self.cs_task.outputs.append(out)
-	try:
-		self.install_task.source.append(out)
-	except AttributeError:
-		pass
+	if getattr(self,'install_task',None):
+		self.pdb_install_task=self.add_install_files(install_to=self.install_task.install_to,install_from=out)
 	if csdebug=='pdbonly':
 		val=['/debug+','/debug:pdbonly']
 	elif csdebug=='full':
 		val=['/debug+','/debug:full']
 	else:
 		val=['/debug-']
-	self.cs_task.env.append_value('CSFLAGS',val)
+	self.env.append_value('CSFLAGS',val)
+@feature('cs')
+@after_method('debug_cs')
+def doc_cs(self):
+	csdoc=getattr(self,'csdoc',self.env.CSDOC)
+	if not csdoc:
+		return
+	node=self.cs_task.outputs[0]
+	out=node.change_ext('.xml')
+	self.cs_task.outputs.append(out)
+	if getattr(self,'install_task',None):
+		self.doc_install_task=self.add_install_files(install_to=self.install_task.install_to,install_from=out)
+	self.env.append_value('CSFLAGS','/doc:%s'%out.abspath())
 class mcs(Task.Task):
 	color='YELLOW'
 	run_str='${MCS} ${CSTYPE} ${CSFLAGS} ${ASS_ST:ASSEMBLIES} ${RES_ST:RESOURCES} ${OUT} ${SRC}'
+	def split_argfile(self,cmd):
+		inline=[cmd[0]]
+		infile=[]
+		for x in cmd[1:]:
+			if x.lower()=='/noconfig':
+				inline.append(x)
+			else:
+				infile.append(self.quote_flag(x))
+		return(inline,infile)
 def configure(conf):
 	csc=getattr(Options.options,'cscbinary',None)
 	if csc:
@@ -83,16 +107,7 @@ class fake_csshlib(Task.Task):
 	color='YELLOW'
 	inst_to=None
 	def runnable_status(self):
-		for x in self.outputs:
-			x.sig=Utils.h_file(x.abspath())
 		return Task.SKIP_ME
+@conf
 def read_csshlib(self,name,paths=[]):
 	return self(name=name,features='fake_lib',lib_paths=paths,lib_type='csshlib')
-
-feature('cs')(apply_cs)
-before_method('process_source')(apply_cs)
-feature('cs')(use_cs)
-after_method('apply_cs')(use_cs)
-feature('cs')(debug_cs)
-after_method('apply_cs','use_cs')(debug_cs)
-conf(read_csshlib)
\ No newline at end of file
--- pugl-0~svn32+dfsg0.orig/waflib/Tools/cxx.py
+++ pugl-0~svn32+dfsg0/waflib/Tools/cxx.py
@@ -1,26 +1,25 @@
 #! /usr/bin/env python
 # encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
 
-from waflib import TaskGen,Task,Utils
+from waflib import TaskGen,Task
 from waflib.Tools import c_preproc
 from waflib.Tools.ccroot import link_task,stlink_task
+@TaskGen.extension('.cpp','.cc','.cxx','.C','.c++')
 def cxx_hook(self,node):
 	return self.create_compiled_task('cxx',node)
-TaskGen.extension('.cpp','.cc','.cxx','.C','.c++')(cxx_hook)
 if not'.c'in TaskGen.task_gen.mappings:
 	TaskGen.task_gen.mappings['.c']=TaskGen.task_gen.mappings['.cpp']
 class cxx(Task.Task):
-	run_str='${CXX} ${ARCH_ST:ARCH} ${CXXFLAGS} ${CPPFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${CXX_SRC_F}${SRC} ${CXX_TGT_F}${TGT}'
+	run_str='${CXX} ${ARCH_ST:ARCH} ${CXXFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${CXX_SRC_F}${SRC} ${CXX_TGT_F}${TGT[0].abspath()} ${CPPFLAGS}'
 	vars=['CXXDEPS']
 	ext_in=['.h']
 	scan=c_preproc.scan
 class cxxprogram(link_task):
-	run_str='${LINK_CXX} ${LINKFLAGS} ${CXXLNK_SRC_F}${SRC} ${CXXLNK_TGT_F}${TGT[0].abspath()} ${RPATH_ST:RPATH} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${FRAMEWORK_ST:FRAMEWORK} ${ARCH_ST:ARCH} ${STLIB_MARKER} ${STLIBPATH_ST:STLIBPATH} ${STLIB_ST:STLIB} ${SHLIB_MARKER} ${LIBPATH_ST:LIBPATH} ${LIB_ST:LIB}'
+	run_str='${LINK_CXX} ${LINKFLAGS} ${CXXLNK_SRC_F}${SRC} ${CXXLNK_TGT_F}${TGT[0].abspath()} ${RPATH_ST:RPATH} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${FRAMEWORK_ST:FRAMEWORK} ${ARCH_ST:ARCH} ${STLIB_MARKER} ${STLIBPATH_ST:STLIBPATH} ${STLIB_ST:STLIB} ${SHLIB_MARKER} ${LIBPATH_ST:LIBPATH} ${LIB_ST:LIB} ${LDFLAGS}'
 	vars=['LINKDEPS']
 	ext_out=['.bin']
 	inst_to='${BINDIR}'
-	chmod=Utils.O755
 class cxxshlib(cxxprogram):
 	inst_to='${LIBDIR}'
 class cxxstlib(stlink_task):
--- pugl-0~svn32+dfsg0.orig/waflib/Tools/d.py
+++ pugl-0~svn32+dfsg0/waflib/Tools/d.py
@@ -1,6 +1,6 @@
 #! /usr/bin/env python
 # encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
 
 from waflib import Utils,Task,Errors
 from waflib.TaskGen import taskgen_method,feature,extension
@@ -18,11 +18,11 @@ class d_header(Task.Task):
 class dprogram(link_task):
 	run_str='${D_LINKER} ${LINKFLAGS} ${DLNK_SRC_F}${SRC} ${DLNK_TGT_F:TGT} ${RPATH_ST:RPATH} ${DSTLIB_MARKER} ${DSTLIBPATH_ST:STLIBPATH} ${DSTLIB_ST:STLIB} ${DSHLIB_MARKER} ${DLIBPATH_ST:LIBPATH} ${DSHLIB_ST:LIB}'
 	inst_to='${BINDIR}'
-	chmod=Utils.O755
 class dshlib(dprogram):
 	inst_to='${LIBDIR}'
 class dstlib(stlink_task):
 	pass
+@extension('.d','.di','.D')
 def d_hook(self,node):
 	ext=Utils.destos_to_binfmt(self.env.DEST_OS)=='pe'and'obj'or'o'
 	out='%s.%d.%s'%(node.name,self.idx,ext)
@@ -35,22 +35,20 @@ def d_hook(self,node):
 		return task
 	if getattr(self,'generate_headers',None):
 		tsk=create_compiled_task(self,'d_with_header',node)
-		tsk.outputs.append(node.change_ext(self.env['DHEADER_ext']))
+		tsk.outputs.append(node.change_ext(self.env.DHEADER_ext))
 	else:
 		tsk=create_compiled_task(self,'d',node)
 	return tsk
-def generate_header(self,filename,install_path=None):
+@taskgen_method
+def generate_header(self,filename):
 	try:
-		self.header_lst.append([filename,install_path])
+		self.header_lst.append([filename,self.install_path])
 	except AttributeError:
-		self.header_lst=[[filename,install_path]]
+		self.header_lst=[[filename,self.install_path]]
+@feature('d')
 def process_header(self):
 	for i in getattr(self,'header_lst',[]):
 		node=self.path.find_resource(i[0])
 		if not node:
 			raise Errors.WafError('file %r not found on d obj'%i[0])
 		self.create_task('d_header',node,node.change_ext('.di'))
-
-extension('.d','.di','.D')(d_hook)
-taskgen_method(generate_header)
-feature('d')(process_header)
\ No newline at end of file
--- pugl-0~svn32+dfsg0.orig/waflib/Tools/d_config.py
+++ pugl-0~svn32+dfsg0/waflib/Tools/d_config.py
@@ -1,21 +1,27 @@
 #! /usr/bin/env python
 # encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
 
 from waflib import Utils
 from waflib.Configure import conf
+@conf
 def d_platform_flags(self):
 	v=self.env
 	if not v.DEST_OS:
 		v.DEST_OS=Utils.unversioned_sys_platform()
-	if Utils.destos_to_binfmt(self.env.DEST_OS)=='pe':
-		v['dprogram_PATTERN']='%s.exe'
-		v['dshlib_PATTERN']='lib%s.dll'
-		v['dstlib_PATTERN']='lib%s.a'
+	binfmt=Utils.destos_to_binfmt(self.env.DEST_OS)
+	if binfmt=='pe':
+		v.dprogram_PATTERN='%s.exe'
+		v.dshlib_PATTERN='lib%s.dll'
+		v.dstlib_PATTERN='lib%s.a'
+	elif binfmt=='mac-o':
+		v.dprogram_PATTERN='%s'
+		v.dshlib_PATTERN='lib%s.dylib'
+		v.dstlib_PATTERN='lib%s.a'
 	else:
-		v['dprogram_PATTERN']='%s'
-		v['dshlib_PATTERN']='lib%s.so'
-		v['dstlib_PATTERN']='lib%s.a'
+		v.dprogram_PATTERN='%s'
+		v.dshlib_PATTERN='lib%s.so'
+		v.dstlib_PATTERN='lib%s.a'
 DLIB='''
 version(D_Version2) {
 	import std.stdio;
@@ -39,9 +45,8 @@ version(D_Version2) {
 	}
 }
 '''
-def check_dlibrary(self):
-	ret=self.check_cc(features='d dprogram',fragment=DLIB,compile_filename='test.d',execute=True,define_ret=True)
-	self.env.DLIBRARY=ret.strip()
-
-conf(d_platform_flags)
-conf(check_dlibrary)
\ No newline at end of file
+@conf
+def check_dlibrary(self,execute=True):
+	ret=self.check_cc(features='d dprogram',fragment=DLIB,compile_filename='test.d',execute=execute,define_ret=True)
+	if execute:
+		self.env.DLIBRARY=ret.strip()
--- pugl-0~svn32+dfsg0.orig/waflib/Tools/d_scan.py
+++ pugl-0~svn32+dfsg0/waflib/Tools/d_scan.py
@@ -1,9 +1,9 @@
 #! /usr/bin/env python
 # encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
 
 import re
-from waflib import Utils,Logs
+from waflib import Utils
 def filter_comments(filename):
 	txt=Utils.readf(filename)
 	i=0
@@ -18,7 +18,8 @@ def filter_comments(filename):
 			i+=1
 			while i<max:
 				c=txt[i]
-				if c==delim:break
+				if c==delim:
+					break
 				elif c=='\\':
 					i+=1
 				i+=1
@@ -27,7 +28,8 @@ def filter_comments(filename):
 		elif c=='/':
 			buf.append(txt[begin:i])
 			i+=1
-			if i==max:break
+			if i==max:
+				break
 			c=txt[i]
 			if c=='+':
 				i+=1
@@ -41,7 +43,8 @@ def filter_comments(filename):
 						c=None
 					elif prev=='+'and c=='/':
 						nesting-=1
-						if nesting==0:break
+						if nesting==0:
+							break
 						c=None
 					i+=1
 			elif c=='*':
@@ -50,7 +53,8 @@ def filter_comments(filename):
 				while i<max:
 					prev=c
 					c=txt[i]
-					if prev=='*'and c=='/':break
+					if prev=='*'and c=='/':
+						break
 					i+=1
 			elif c=='/':
 				i+=1
@@ -69,8 +73,8 @@ def filter_comments(filename):
 class d_parser(object):
 	def __init__(self,env,incpaths):
 		self.allnames=[]
-		self.re_module=re.compile("module\s+([^;]+)")
-		self.re_import=re.compile("import\s+([^;]+)")
+		self.re_module=re.compile(r"module\s+([^;]+)")
+		self.re_import=re.compile(r"import\s+([^;]+)")
 		self.re_import_bindings=re.compile("([^:]+):(.*)")
 		self.re_import_alias=re.compile("[^=]+=(.+)")
 		self.env=env
@@ -93,11 +97,11 @@ class d_parser(object):
 		lst=[]
 		mod_name=self.re_module.search(code)
 		if mod_name:
-			self.module=re.sub('\s+','',mod_name.group(1))
+			self.module=re.sub(r'\s+','',mod_name.group(1))
 		import_iterator=self.re_import.finditer(code)
 		if import_iterator:
 			for import_match in import_iterator:
-				import_match_str=re.sub('\s+','',import_match.group(1))
+				import_match_str=re.sub(r'\s+','',import_match.group(1))
 				bindings_match=self.re_import_bindings.match(import_match_str)
 				if bindings_match:
 					import_match_str=bindings_match.group(1)
@@ -118,7 +122,8 @@ class d_parser(object):
 		code="".join(filter_comments(path))
 		names=self.get_strings(code)
 		for x in names:
-			if x in self.allnames:continue
+			if x in self.allnames:
+				continue
 			self.allnames.append(x)
 			self.tryfind(x)
 def scan(self):
@@ -128,6 +133,4 @@ def scan(self):
 	gruik.start(node)
 	nodes=gruik.nodes
 	names=gruik.names
-	if Logs.verbose:
-		Logs.debug('deps: deps for %s: %r; unresolved %r'%(str(node),nodes,names))
 	return(nodes,names)
--- pugl-0~svn32+dfsg0.orig/waflib/Tools/dbus.py
+++ pugl-0~svn32+dfsg0/waflib/Tools/dbus.py
@@ -1,15 +1,17 @@
 #! /usr/bin/env python
 # encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
 
 from waflib import Task,Errors
 from waflib.TaskGen import taskgen_method,before_method
+@taskgen_method
 def add_dbus_file(self,filename,prefix,mode):
 	if not hasattr(self,'dbus_lst'):
 		self.dbus_lst=[]
 	if not'process_dbus'in self.meths:
 		self.meths.append('process_dbus')
 	self.dbus_lst.append([filename,prefix,mode])
+@before_method('process_source')
 def process_dbus(self):
 	for filename,prefix,mode in getattr(self,'dbus_lst',[]):
 		node=self.path.find_resource(filename)
@@ -24,7 +26,4 @@ class dbus_binding_tool(Task.Task):
 	run_str='${DBUS_BINDING_TOOL} --prefix=${DBUS_BINDING_TOOL_PREFIX} --mode=${DBUS_BINDING_TOOL_MODE} --output=${TGT} ${SRC}'
 	shell=True
 def configure(conf):
-	dbus_binding_tool=conf.find_program('dbus-binding-tool',var='DBUS_BINDING_TOOL')
-
-taskgen_method(add_dbus_file)
-before_method('apply_core')(process_dbus)
\ No newline at end of file
+	conf.find_program('dbus-binding-tool',var='DBUS_BINDING_TOOL')
--- pugl-0~svn32+dfsg0.orig/waflib/Tools/dmd.py
+++ pugl-0~svn32+dfsg0/waflib/Tools/dmd.py
@@ -1,47 +1,51 @@
 #! /usr/bin/env python
 # encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
 
 import sys
 from waflib.Tools import ar,d
 from waflib.Configure import conf
+@conf
 def find_dmd(conf):
-	conf.find_program(['dmd','ldc'],var='D')
+	conf.find_program(['dmd','dmd2','ldc'],var='D')
+	out=conf.cmd_and_log(conf.env.D+['--help'])
+	if out.find("D Compiler v")==-1:
+		out=conf.cmd_and_log(conf.env.D+['-version'])
+		if out.find("based on DMD v1.")==-1:
+			conf.fatal("detected compiler is not dmd/ldc")
+@conf
 def common_flags_ldc(conf):
 	v=conf.env
-	v['DFLAGS']=['-d-version=Posix']
-	v['LINKFLAGS']=[]
-	v['DFLAGS_dshlib']=['-relocation-model=pic']
+	v.DFLAGS=['-d-version=Posix']
+	v.LINKFLAGS=[]
+	v.DFLAGS_dshlib=['-relocation-model=pic']
+@conf
 def common_flags_dmd(conf):
 	v=conf.env
-	v['D_SRC_F']=['-c']
-	v['D_TGT_F']='-of%s'
-	v['D_LINKER']=v['D']
-	v['DLNK_SRC_F']=''
-	v['DLNK_TGT_F']='-of%s'
-	v['DINC_ST']='-I%s'
-	v['DSHLIB_MARKER']=v['DSTLIB_MARKER']=''
-	v['DSTLIB_ST']=v['DSHLIB_ST']='-L-l%s'
-	v['DSTLIBPATH_ST']=v['DLIBPATH_ST']='-L-L%s'
-	v['LINKFLAGS_dprogram']=['-quiet']
-	v['DFLAGS_dshlib']=['-fPIC']
-	v['LINKFLAGS_dshlib']=['-L-shared']
-	v['DHEADER_ext']='.di'
+	v.D_SRC_F=['-c']
+	v.D_TGT_F='-of%s'
+	v.D_LINKER=v.D
+	v.DLNK_SRC_F=''
+	v.DLNK_TGT_F='-of%s'
+	v.DINC_ST='-I%s'
+	v.DSHLIB_MARKER=v.DSTLIB_MARKER=''
+	v.DSTLIB_ST=v.DSHLIB_ST='-L-l%s'
+	v.DSTLIBPATH_ST=v.DLIBPATH_ST='-L-L%s'
+	v.LINKFLAGS_dprogram=['-quiet']
+	v.DFLAGS_dshlib=['-fPIC']
+	v.LINKFLAGS_dshlib=['-L-shared']
+	v.DHEADER_ext='.di'
 	v.DFLAGS_d_with_header=['-H','-Hf']
-	v['D_HDR_F']='%s'
+	v.D_HDR_F='%s'
 def configure(conf):
 	conf.find_dmd()
 	if sys.platform=='win32':
-		out=conf.cmd_and_log([conf.env.D,'--help'])
-		if out.find("D Compiler v2.")>-1:
-			conf.fatal('dmd2 on Windows is not supported, use gdc or ldc instead')
+		out=conf.cmd_and_log(conf.env.D+['--help'])
+		if out.find('D Compiler v2.')>-1:
+			conf.fatal('dmd2 on Windows is not supported, use gdc or ldc2 instead')
 	conf.load('ar')
 	conf.load('d')
 	conf.common_flags_dmd()
 	conf.d_platform_flags()
 	if str(conf.env.D).find('ldc')>-1:
 		conf.common_flags_ldc()
-
-conf(find_dmd)
-conf(common_flags_ldc)
-conf(common_flags_dmd)
\ No newline at end of file
--- pugl-0~svn32+dfsg0.orig/waflib/Tools/errcheck.py
+++ pugl-0~svn32+dfsg0/waflib/Tools/errcheck.py
@@ -1,24 +1,25 @@
 #! /usr/bin/env python
 # encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
 
-import sys
-if sys.hexversion < 0x020400f0: from sets import Set as set
-typos={'feature':'features','sources':'source','targets':'target','include':'includes','export_include':'export_includes','define':'defines','importpath':'includes','installpath':'install_path',}
+typos={'feature':'features','sources':'source','targets':'target','include':'includes','export_include':'export_includes','define':'defines','importpath':'includes','installpath':'install_path','iscopy':'is_copy','uses':'use',}
 meths_typos=['__call__','program','shlib','stlib','objects']
+import sys
 from waflib import Logs,Build,Node,Task,TaskGen,ConfigSet,Errors,Utils
-import waflib.Tools.ccroot
+from waflib.Tools import ccroot
 def check_same_targets(self):
 	mp=Utils.defaultdict(list)
 	uids={}
 	def check_task(tsk):
 		if not isinstance(tsk,Task.Task):
 			return
+		if hasattr(tsk,'no_errcheck_out'):
+			return
 		for node in tsk.outputs:
 			mp[node].append(tsk)
 		try:
 			uids[tsk.uid()].append(tsk)
-		except:
+		except KeyError:
 			uids[tsk.uid()]=[tsk]
 	for g in self.groups:
 		for tg in g:
@@ -31,47 +32,54 @@ def check_same_targets(self):
 	for(k,v)in mp.items():
 		if len(v)>1:
 			dupe=True
-			msg='* Node %r is created by more than once%s. The task generators are:'%(k,Logs.verbose==1 and" (full message on 'waf -v -v')"or"")
+			msg='* Node %r is created more than once%s. The task generators are:'%(k,Logs.verbose==1 and" (full message on 'waf -v -v')"or"")
 			Logs.error(msg)
 			for x in v:
 				if Logs.verbose>1:
-					Logs.error('  %d. %r'%(1+v.index(x),x.generator))
+					Logs.error('  %d. %r',1+v.index(x),x.generator)
 				else:
-					Logs.error('  %d. %r in %r'%(1+v.index(x),x.generator.name,getattr(x.generator,'path',None)))
+					Logs.error('  %d. %r in %r',1+v.index(x),x.generator.name,getattr(x.generator,'path',None))
+			Logs.error('If you think that this is an error, set no_errcheck_out on the task instance')
 	if not dupe:
 		for(k,v)in uids.items():
 			if len(v)>1:
-				Logs.error('* Several tasks use the same identifier. Please check the information on\n   http://waf.googlecode.com/git/docs/apidocs/Task.html#waflib.Task.Task.uid')
+				Logs.error('* Several tasks use the same identifier. Please check the information on\n   https://waf.io/apidocs/Task.html?highlight=uid#waflib.Task.Task.uid')
+				tg_details=tsk.generator.name
+				if Logs.verbose>2:
+					tg_details=tsk.generator
 				for tsk in v:
-					Logs.error('  - object %r (%r) defined in %r'%(tsk.__class__.__name__,tsk,tsk.generator))
+					Logs.error('  - object %r (%r) defined in %r',tsk.__class__.__name__,tsk,tg_details)
 def check_invalid_constraints(self):
-	feat=set([])
+	feat=set()
 	for x in list(TaskGen.feats.values()):
 		feat.union(set(x))
 	for(x,y)in TaskGen.task_gen.prec.items():
 		feat.add(x)
 		feat.union(set(y))
-	ext=set([])
+	ext=set()
 	for x in TaskGen.task_gen.mappings.values():
 		ext.add(x.__name__)
 	invalid=ext&feat
 	if invalid:
-		Logs.error('The methods %r have invalid annotations:  @extension <-> @feature/@before_method/@after_method'%list(invalid))
+		Logs.error('The methods %r have invalid annotations:  @extension <-> @feature/@before_method/@after_method',list(invalid))
 	for cls in list(Task.classes.values()):
+		if sys.hexversion>0x3000000 and issubclass(cls,Task.Task)and isinstance(cls.hcode,str):
+			raise Errors.WafError('Class %r has hcode value %r of type <str>, expecting <bytes> (use Utils.h_cmd() ?)'%(cls,cls.hcode))
 		for x in('before','after'):
 			for y in Utils.to_list(getattr(cls,x,[])):
-				if not Task.classes.get(y,None):
-					Logs.error('Erroneous order constraint %r=%r on task class %r'%(x,y,cls.__name__))
+				if not Task.classes.get(y):
+					Logs.error('Erroneous order constraint %r=%r on task class %r',x,y,cls.__name__)
 		if getattr(cls,'rule',None):
-			Logs.error('Erroneous attribute "rule" on task class %r (rename to "run_str")'%cls.__name__)
+			Logs.error('Erroneous attribute "rule" on task class %r (rename to "run_str")',cls.__name__)
 def replace(m):
 	oldcall=getattr(Build.BuildContext,m)
 	def call(self,*k,**kw):
 		ret=oldcall(self,*k,**kw)
 		for x in typos:
 			if x in kw:
-				err=True
-				Logs.error('Fix the typo %r -> %r on %r'%(x,typos[x],ret))
+				if x=='iscopy'and'subst'in getattr(self,'features',''):
+					continue
+				Logs.error('Fix the typo %r -> %r on %r',x,typos[x],ret)
 		return ret
 	setattr(Build.BuildContext,m,call)
 def enhance_lib():
@@ -81,22 +89,30 @@ def enhance_lib():
 		if k:
 			lst=Utils.to_list(k[0])
 			for pat in lst:
-				if'..'in pat.split('/'):
-					Logs.error("In ant_glob pattern %r: '..' means 'two dots', not 'parent directory'"%k[0])
-		if kw.get('remove',True):
-			try:
-				if self.is_child_of(self.ctx.bldnode)and not kw.get('quiet',False):
-					Logs.error('Using ant_glob on the build folder (%r) is dangerous (quiet=True to disable this warning)'%self)
-			except AttributeError:
-				pass
+				sp=pat.split('/')
+				if'..'in sp:
+					Logs.error("In ant_glob pattern %r: '..' means 'two dots', not 'parent directory'",k[0])
+				if'.'in sp:
+					Logs.error("In ant_glob pattern %r: '.' means 'one dot', not 'current directory'",k[0])
 		return self.old_ant_glob(*k,**kw)
 	Node.Node.old_ant_glob=Node.Node.ant_glob
 	Node.Node.ant_glob=ant_glob
+	def ant_iter(self,accept=None,maxdepth=25,pats=[],dir=False,src=True,remove=True,quiet=False):
+		if remove:
+			try:
+				if self.is_child_of(self.ctx.bldnode)and not quiet:
+					quiet=True
+					Logs.error('Calling ant_glob on build folders (%r) is dangerous: add quiet=True / remove=False',self)
+			except AttributeError:
+				pass
+		return self.old_ant_iter(accept,maxdepth,pats,dir,src,remove,quiet)
+	Node.Node.old_ant_iter=Node.Node.ant_iter
+	Node.Node.ant_iter=ant_iter
 	old=Task.is_before
 	def is_before(t1,t2):
 		ret=old(t1,t2)
 		if ret and old(t2,t1):
-			Logs.error('Contradictory order constraints in classes %r %r'%(t1,t2))
+			Logs.error('Contradictory order constraints in classes %r %r',t1,t2)
 		return ret
 	Task.is_before=is_before
 	def check_err_features(self):
@@ -105,18 +121,18 @@ def enhance_lib():
 			Logs.error('feature shlib -> cshlib, dshlib or cxxshlib')
 		for x in('c','cxx','d','fc'):
 			if not x in lst and lst and lst[0]in[x+y for y in('program','shlib','stlib')]:
-				Logs.error('%r features is probably missing %r'%(self,x))
+				Logs.error('%r features is probably missing %r',self,x)
 	TaskGen.feature('*')(check_err_features)
 	def check_err_order(self):
-		if not hasattr(self,'rule'):
+		if not hasattr(self,'rule')and not'subst'in Utils.to_list(self.features):
 			for x in('before','after','ext_in','ext_out'):
 				if hasattr(self,x):
-					Logs.warn('Erroneous order constraint %r on non-rule based task generator %r'%(x,self))
+					Logs.warn('Erroneous order constraint %r on non-rule based task generator %r',x,self)
 		else:
 			for x in('before','after'):
 				for y in self.to_list(getattr(self,x,[])):
-					if not Task.classes.get(y,None):
-						Logs.error('Erroneous order constraint %s=%r on %r'%(x,y,self))
+					if not Task.classes.get(y):
+						Logs.error('Erroneous order constraint %s=%r on %r (no such class)',x,y,self)
 	TaskGen.feature('*')(check_err_order)
 	def check_compile(self):
 		check_invalid_constraints(self)
@@ -145,17 +161,15 @@ def enhance_lib():
 		self.orig_use_rec(name,**kw)
 	TaskGen.task_gen.orig_use_rec=TaskGen.task_gen.use_rec
 	TaskGen.task_gen.use_rec=use_rec
-	def getattri(self,name,default=None):
+	def _getattr(self,name,default=None):
 		if name=='append'or name=='add':
 			raise Errors.WafError('env.append and env.add do not exist: use env.append_value/env.append_unique')
 		elif name=='prepend':
 			raise Errors.WafError('env.prepend does not exist: use env.prepend_value')
 		if name in self.__slots__:
-			return object.__getattr__(self,name,default)
+			return super(ConfigSet.ConfigSet,self).__getattr__(name,default)
 		else:
 			return self[name]
-	ConfigSet.ConfigSet.__getattr__=getattri
+	ConfigSet.ConfigSet.__getattr__=_getattr
 def options(opt):
 	enhance_lib()
-def configure(conf):
-	pass
--- pugl-0~svn32+dfsg0.orig/waflib/Tools/fc.py
+++ pugl-0~svn32+dfsg0/waflib/Tools/fc.py
@@ -1,38 +1,41 @@
 #! /usr/bin/env python
 # encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
 
-import sys
-if sys.hexversion < 0x020400f0: from sets import Set as set
-import re
-from waflib import Utils,Task,TaskGen,Logs
+from waflib import Utils,Task,Errors
 from waflib.Tools import ccroot,fc_config,fc_scan
-from waflib.TaskGen import feature,before_method,after_method,extension
+from waflib.TaskGen import extension
 from waflib.Configure import conf
-ccroot.USELIB_VARS['fc']=set(['FCFLAGS','DEFINES','INCLUDES'])
-ccroot.USELIB_VARS['fcprogram_test']=ccroot.USELIB_VARS['fcprogram']=set(['LIB','STLIB','LIBPATH','STLIBPATH','LINKFLAGS','RPATH','LINKDEPS'])
-ccroot.USELIB_VARS['fcshlib']=set(['LIB','STLIB','LIBPATH','STLIBPATH','LINKFLAGS','RPATH','LINKDEPS'])
+ccroot.USELIB_VARS['fc']=set(['FCFLAGS','DEFINES','INCLUDES','FCPPFLAGS'])
+ccroot.USELIB_VARS['fcprogram_test']=ccroot.USELIB_VARS['fcprogram']=set(['LIB','STLIB','LIBPATH','STLIBPATH','LINKFLAGS','RPATH','LINKDEPS','LDFLAGS'])
+ccroot.USELIB_VARS['fcshlib']=set(['LIB','STLIB','LIBPATH','STLIBPATH','LINKFLAGS','RPATH','LINKDEPS','LDFLAGS'])
 ccroot.USELIB_VARS['fcstlib']=set(['ARFLAGS','LINKDEPS'])
-def dummy(self):
-	pass
+@extension('.f','.F','.f90','.F90','.for','.FOR','.f95','.F95','.f03','.F03','.f08','.F08')
 def fc_hook(self,node):
 	return self.create_compiled_task('fc',node)
+@conf
 def modfile(conf,name):
-	return{'lower':name.lower()+'.mod','lower.MOD':name.upper()+'.MOD','UPPER.mod':name.upper()+'.mod','UPPER':name.upper()+'.MOD'}[conf.env.FC_MOD_CAPITALIZATION or'lower']
+	if name.find(':')>=0:
+		separator=conf.env.FC_SUBMOD_SEPARATOR or'@'
+		modpath=name.split(':')
+		modname=modpath[0]+separator+modpath[-1]
+		suffix=conf.env.FC_SUBMOD_SUFFIX or'.smod'
+	else:
+		modname=name
+		suffix='.mod'
+	return{'lower':modname.lower()+suffix.lower(),'lower.MOD':modname.lower()+suffix.upper(),'UPPER.mod':modname.upper()+suffix.lower(),'UPPER':modname.upper()+suffix.upper()}[conf.env.FC_MOD_CAPITALIZATION or'lower']
 def get_fortran_tasks(tsk):
 	bld=tsk.generator.bld
 	tasks=bld.get_tasks_group(bld.get_group_idx(tsk.generator))
 	return[x for x in tasks if isinstance(x,fc)and not getattr(x,'nomod',None)and not getattr(x,'mod_fortran_done',None)]
 class fc(Task.Task):
 	color='GREEN'
-	run_str='${FC} ${FCFLAGS} ${FCINCPATH_ST:INCPATHS} ${FCDEFINES_ST:DEFINES} ${_FCMODOUTFLAGS} ${FC_TGT_F}${TGT[0].abspath()} ${FC_SRC_F}${SRC[0].abspath()}'
+	run_str='${FC} ${FCFLAGS} ${FCINCPATH_ST:INCPATHS} ${FCDEFINES_ST:DEFINES} ${_FCMODOUTFLAGS} ${FC_TGT_F}${TGT[0].abspath()} ${FC_SRC_F}${SRC[0].abspath()} ${FCPPFLAGS}'
 	vars=["FORTRANMODPATHFLAG"]
 	def scan(self):
 		tmp=fc_scan.fortran_parser(self.generator.includes_nodes)
 		tmp.task=self
 		tmp.start(self.inputs[0])
-		if Logs.verbose:
-			Logs.debug('deps: deps for %r: %r; unresolved %r'%(self.inputs,tmp.nodes,tmp.names))
 		return(tmp.nodes,tmp.names)
 	def runnable_status(self):
 		if getattr(self,'mod_fortran_done',None):
@@ -56,7 +59,7 @@ class fc(Task.Task):
 					name=bld.modfile(x.replace('MOD@',''))
 					node=bld.srcnode.find_or_declare(name)
 					tsk.set_outputs(node)
-					outs[id(node)].add(tsk)
+					outs[node].add(tsk)
 		for tsk in lst:
 			key=tsk.uid()
 			for x in bld.raw_deps[key]:
@@ -66,18 +69,17 @@ class fc(Task.Task):
 					if node and node not in tsk.outputs:
 						if not node in bld.node_deps[key]:
 							bld.node_deps[key].append(node)
-						ins[id(node)].add(tsk)
+						ins[node].add(tsk)
 		for k in ins.keys():
 			for a in ins[k]:
 				a.run_after.update(outs[k])
+				for x in outs[k]:
+					self.generator.bld.producer.revdeps[x].add(a)
 				tmp=[]
 				for t in outs[k]:
 					tmp.extend(t.outputs)
 				a.dep_nodes.extend(tmp)
-				try:
-					a.dep_nodes.sort(key=lambda x:x.abspath())
-				except:
-					a.dep_nodes.sort(lambda x,y:cmp(x.abspath(),y.abspath()))
+				a.dep_nodes.sort(key=lambda x:x.abspath())
 		for tsk in lst:
 			try:
 				delattr(tsk,'cache_sig')
@@ -86,14 +88,13 @@ class fc(Task.Task):
 		return super(fc,self).runnable_status()
 class fcprogram(ccroot.link_task):
 	color='YELLOW'
-	run_str='${FC} ${LINKFLAGS} ${FCLNK_SRC_F}${SRC} ${FCLNK_TGT_F}${TGT[0].abspath()} ${RPATH_ST:RPATH} ${FCSTLIB_MARKER} ${FCSTLIBPATH_ST:STLIBPATH} ${FCSTLIB_ST:STLIB} ${FCSHLIB_MARKER} ${FCLIBPATH_ST:LIBPATH} ${FCLIB_ST:LIB}'
+	run_str='${FC} ${LINKFLAGS} ${FCLNK_SRC_F}${SRC} ${FCLNK_TGT_F}${TGT[0].abspath()} ${RPATH_ST:RPATH} ${FCSTLIB_MARKER} ${FCSTLIBPATH_ST:STLIBPATH} ${FCSTLIB_ST:STLIB} ${FCSHLIB_MARKER} ${FCLIBPATH_ST:LIBPATH} ${FCLIB_ST:LIB} ${LDFLAGS}'
 	inst_to='${BINDIR}'
-	chmod=Utils.O755
 class fcshlib(fcprogram):
 	inst_to='${LIBDIR}'
+class fcstlib(ccroot.stlink_task):
+	pass
 class fcprogram_test(fcprogram):
-	def can_retrieve_cache(self):
-		return False
 	def runnable_status(self):
 		ret=super(fcprogram_test,self).runnable_status()
 		if ret==Task.SKIP_ME:
@@ -103,21 +104,15 @@ class fcprogram_test(fcprogram):
 		bld=self.generator.bld
 		kw['shell']=isinstance(cmd,str)
 		kw['stdout']=kw['stderr']=Utils.subprocess.PIPE
-		kw['cwd']=bld.variant_dir
+		kw['cwd']=self.get_cwd()
 		bld.out=bld.err=''
 		bld.to_log('command: %s\n'%cmd)
 		kw['output']=0
 		try:
 			(bld.out,bld.err)=bld.cmd_and_log(cmd,**kw)
-		except Exception ,e:
+		except Errors.WafError:
 			return-1
 		if bld.out:
-			bld.to_log("out: %s\n"%bld.out)
+			bld.to_log('out: %s\n'%bld.out)
 		if bld.err:
-			bld.to_log("err: %s\n"%bld.err)
-class fcstlib(ccroot.stlink_task):
-	pass
-
-feature('fcprogram','fcshlib','fcstlib','fcprogram_test')(dummy)
-extension('.f','.f90','.F','.F90','.for','.FOR')(fc_hook)
-conf(modfile)
\ No newline at end of file
+			bld.to_log('err: %s\n'%bld.err)
--- pugl-0~svn32+dfsg0.orig/waflib/Tools/fc_config.py
+++ pugl-0~svn32+dfsg0/waflib/Tools/fc_config.py
@@ -1,35 +1,44 @@
 #! /usr/bin/env python
 # encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
 
-import re,shutil,os,sys,string,shlex
+import re,os,sys,shlex
 from waflib.Configure import conf
-from waflib.TaskGen import feature,after_method,before_method
-from waflib import Build,Utils
+from waflib.TaskGen import feature,before_method
 FC_FRAGMENT='        program main\n        end     program main\n'
 FC_FRAGMENT2='        PROGRAM MAIN\n        END\n'
+@conf
 def fc_flags(conf):
 	v=conf.env
-	v['FC_SRC_F']=[]
-	v['FC_TGT_F']=['-c','-o']
-	v['FCINCPATH_ST']='-I%s'
-	v['FCDEFINES_ST']='-D%s'
-	if not v['LINK_FC']:v['LINK_FC']=v['FC']
-	v['FCLNK_SRC_F']=[]
-	v['FCLNK_TGT_F']=['-o']
-	v['FCFLAGS_fcshlib']=['-fpic']
-	v['LINKFLAGS_fcshlib']=['-shared']
-	v['fcshlib_PATTERN']='lib%s.so'
-	v['fcstlib_PATTERN']='lib%s.a'
-	v['FCLIB_ST']='-l%s'
-	v['FCLIBPATH_ST']='-L%s'
-	v['FCSTLIB_ST']='-l%s'
-	v['FCSTLIBPATH_ST']='-L%s'
-	v['FCSTLIB_MARKER']='-Wl,-Bstatic'
-	v['FCSHLIB_MARKER']='-Wl,-Bdynamic'
-	v['SONAME_ST']='-Wl,-h,%s'
+	v.FC_SRC_F=[]
+	v.FC_TGT_F=['-c','-o']
+	v.FCINCPATH_ST='-I%s'
+	v.FCDEFINES_ST='-D%s'
+	if not v.LINK_FC:
+		v.LINK_FC=v.FC
+	v.FCLNK_SRC_F=[]
+	v.FCLNK_TGT_F=['-o']
+	v.FCFLAGS_fcshlib=['-fpic']
+	v.LINKFLAGS_fcshlib=['-shared']
+	v.fcshlib_PATTERN='lib%s.so'
+	v.fcstlib_PATTERN='lib%s.a'
+	v.FCLIB_ST='-l%s'
+	v.FCLIBPATH_ST='-L%s'
+	v.FCSTLIB_ST='-l%s'
+	v.FCSTLIBPATH_ST='-L%s'
+	v.FCSTLIB_MARKER='-Wl,-Bstatic'
+	v.FCSHLIB_MARKER='-Wl,-Bdynamic'
+	v.SONAME_ST='-Wl,-h,%s'
+@conf
+def fc_add_flags(conf):
+	conf.add_os_flags('FCPPFLAGS',dup=False)
+	conf.add_os_flags('FCFLAGS',dup=False)
+	conf.add_os_flags('LINKFLAGS',dup=False)
+	conf.add_os_flags('LDFLAGS',dup=False)
+@conf
 def check_fortran(self,*k,**kw):
 	self.check_cc(fragment=FC_FRAGMENT,compile_filename='test.f',features='fc fcprogram',msg='Compiling a simple fortran app')
+@conf
 def check_fc(self,*k,**kw):
 	kw['compiler']='fc'
 	if not'compile_mode'in kw:
@@ -41,32 +50,35 @@ def check_fc(self,*k,**kw):
 	if not'code'in kw:
 		kw['code']=FC_FRAGMENT
 	return self.check(*k,**kw)
+@conf
 def fortran_modifier_darwin(conf):
 	v=conf.env
-	v['FCFLAGS_fcshlib']=['-fPIC','-compatibility_version','1','-current_version','1']
-	v['LINKFLAGS_fcshlib']=['-dynamiclib']
-	v['fcshlib_PATTERN']='lib%s.dylib'
-	v['FRAMEWORKPATH_ST']='-F%s'
-	v['FRAMEWORK_ST']='-framework %s'
-	v['LINKFLAGS_fcstlib']=[]
-	v['FCSHLIB_MARKER']=''
-	v['FCSTLIB_MARKER']=''
-	v['SONAME_ST']=''
+	v.FCFLAGS_fcshlib=['-fPIC']
+	v.LINKFLAGS_fcshlib=['-dynamiclib']
+	v.fcshlib_PATTERN='lib%s.dylib'
+	v.FRAMEWORKPATH_ST='-F%s'
+	v.FRAMEWORK_ST=['-framework']
+	v.LINKFLAGS_fcstlib=[]
+	v.FCSHLIB_MARKER=''
+	v.FCSTLIB_MARKER=''
+	v.SONAME_ST=''
+@conf
 def fortran_modifier_win32(conf):
 	v=conf.env
-	v['fcprogram_PATTERN']=v['fcprogram_test_PATTERN']='%s.exe'
-	v['fcshlib_PATTERN']='%s.dll'
-	v['implib_PATTERN']='lib%s.dll.a'
-	v['IMPLIB_ST']='-Wl,--out-implib,%s'
-	v['FCFLAGS_fcshlib']=[]
-	v.append_value('FCFLAGS_fcshlib',['-DDLL_EXPORT'])
+	v.fcprogram_PATTERN=v.fcprogram_test_PATTERN='%s.exe'
+	v.fcshlib_PATTERN='%s.dll'
+	v.implib_PATTERN='%s.dll.a'
+	v.IMPLIB_ST='-Wl,--out-implib,%s'
+	v.FCFLAGS_fcshlib=[]
 	v.append_value('LINKFLAGS',['-Wl,--enable-auto-import'])
+@conf
 def fortran_modifier_cygwin(conf):
 	fortran_modifier_win32(conf)
 	v=conf.env
-	v['fcshlib_PATTERN']='cyg%s.dll'
+	v.fcshlib_PATTERN='cyg%s.dll'
 	v.append_value('LINKFLAGS_fcshlib',['-Wl,--enable-auto-image-base'])
-	v['FCFLAGS_fcshlib']=[]
+	v.FCFLAGS_fcshlib=[]
+@conf
 def check_fortran_dummy_main(self,*k,**kw):
 	if not self.env.CC:
 		self.fatal('A c compiler is required for check_fortran_dummy_main')
@@ -91,8 +103,9 @@ def check_fortran_dummy_main(self,*k,**k
 		self.end_msg('not found')
 		self.fatal('could not detect whether fortran requires a dummy main, see the config.log')
 GCC_DRIVER_LINE=re.compile('^Driving:')
-POSIX_STATIC_EXT=re.compile('\S+\.a')
-POSIX_LIB_FLAGS=re.compile('-l\S+')
+POSIX_STATIC_EXT=re.compile(r'\S+\.a')
+POSIX_LIB_FLAGS=re.compile(r'-l\S+')
+@conf
 def is_link_verbose(self,txt):
 	assert isinstance(txt,str)
 	for line in txt.splitlines():
@@ -100,9 +113,10 @@ def is_link_verbose(self,txt):
 			if POSIX_STATIC_EXT.search(line)or POSIX_LIB_FLAGS.search(line):
 				return True
 	return False
+@conf
 def check_fortran_verbose_flag(self,*k,**kw):
 	self.start_msg('fortran link verbose flag')
-	for x in['-v','--verbose','-verbose','-V']:
+	for x in('-v','--verbose','-verbose','-V'):
 		try:
 			self.check_cc(features='fc fcprogram_test',fragment=FC_FRAGMENT2,compile_filename='test.f',linkflags=[x],mandatory=True)
 		except self.errors.ConfigurationError:
@@ -135,41 +149,42 @@ def parse_fortran_link(lines):
 	return final_flags
 SPACE_OPTS=re.compile('^-[LRuYz]$')
 NOSPACE_OPTS=re.compile('^-[RL]')
+def _parse_flink_token(lexer,token,tmp_flags):
+	if _match_ignore(token):
+		pass
+	elif token.startswith('-lkernel32')and sys.platform=='cygwin':
+		tmp_flags.append(token)
+	elif SPACE_OPTS.match(token):
+		t=lexer.get_token()
+		if t.startswith('P,'):
+			t=t[2:]
+		for opt in t.split(os.pathsep):
+			tmp_flags.append('-L%s'%opt)
+	elif NOSPACE_OPTS.match(token):
+		tmp_flags.append(token)
+	elif POSIX_LIB_FLAGS.match(token):
+		tmp_flags.append(token)
+	else:
+		pass
+	t=lexer.get_token()
+	return t
 def _parse_flink_line(line,final_flags):
 	lexer=shlex.shlex(line,posix=True)
 	lexer.whitespace_split=True
 	t=lexer.get_token()
 	tmp_flags=[]
 	while t:
-		def parse(token):
-			if _match_ignore(token):
-				pass
-			elif token.startswith('-lkernel32')and sys.platform=='cygwin':
-				tmp_flags.append(token)
-			elif SPACE_OPTS.match(token):
-				t=lexer.get_token()
-				if t.startswith('P,'):
-					t=t[2:]
-				for opt in t.split(os.pathsep):
-					tmp_flags.append('-L%s'%opt)
-			elif NOSPACE_OPTS.match(token):
-				tmp_flags.append(token)
-			elif POSIX_LIB_FLAGS.match(token):
-				tmp_flags.append(token)
-			else:
-				pass
-			t=lexer.get_token()
-			return t
-		t=parse(t)
+		t=_parse_flink_token(lexer,t,tmp_flags)
 	final_flags.extend(tmp_flags)
 	return final_flags
+@conf
 def check_fortran_clib(self,autoadd=True,*k,**kw):
 	if not self.env.FC_VERBOSE_FLAG:
 		self.fatal('env.FC_VERBOSE_FLAG is not set: execute check_fortran_verbose_flag?')
 	self.start_msg('Getting fortran runtime link flags')
 	try:
 		self.check_cc(fragment=FC_FRAGMENT2,compile_filename='test.f',features='fc fcprogram_test',linkflags=[self.env.FC_VERBOSE_FLAG])
-	except:
+	except Exception:
 		self.end_msg(False)
 		if kw.get('mandatory',True):
 			conf.fatal('Could not find the c library flags')
@@ -181,24 +196,24 @@ def check_fortran_clib(self,autoadd=True
 		return flags
 	return[]
 def getoutput(conf,cmd,stdin=False):
-	if stdin:
-		stdin=Utils.subprocess.PIPE
+	from waflib import Errors
+	if conf.env.env:
+		env=conf.env.env
 	else:
-		stdin=None
-	env=conf.env.env or None
+		env=dict(os.environ)
+		env['LANG']='C'
+	input=stdin and'\n'.encode()or None
 	try:
-		p=Utils.subprocess.Popen(cmd,stdin=stdin,stdout=Utils.subprocess.PIPE,stderr=Utils.subprocess.PIPE,env=env)
-		if stdin:
-			p.stdin.write('\n')
-		stdout,stderr=p.communicate()
-	except:
+		out,err=conf.cmd_and_log(cmd,env=env,output=0,input=input)
+	except Errors.WafError as e:
+		if not(hasattr(e,'stderr')and hasattr(e,'stdout')):
+			raise e
+		else:
+			out=e.stdout
+			err=e.stderr
+	except Exception:
 		conf.fatal('could not determine the compiler version %r'%cmd)
-	else:
-		if not isinstance(stdout,str):
-			stdout=stdout.decode(sys.stdout.encoding)
-		if not isinstance(stderr,str):
-			stderr=stderr.decode(sys.stdout.encoding)
-		return stdout,stderr
+	return(out,err)
 ROUTINES_CODE="""\
       subroutine foobar()
       return
@@ -216,6 +231,8 @@ int %(main_func_name)s() {
   return 0;
 }
 """
+@feature('link_main_routines_func')
+@before_method('process_source')
 def link_main_routines_tg_method(self):
 	def write_test_file(task):
 		task.outputs[0].write(task.generator.code)
@@ -225,12 +242,13 @@ def link_main_routines_tg_method(self):
 	bld(features='fc fcstlib',source='test.f',target='test')
 	bld(features='c fcprogram',source='main.c',target='app',use='test')
 def mangling_schemes():
-	for u in['_','']:
-		for du in['','_']:
-			for c in["lower","upper"]:
+	for u in('_',''):
+		for du in('','_'):
+			for c in("lower","upper"):
 				yield(u,du,c)
 def mangle_name(u,du,c,name):
 	return getattr(name,c)()+u+(name.find('_')!=-1 and du or'')
+@conf
 def check_fortran_mangling(self,*k,**kw):
 	if not self.env.CC:
 		self.fatal('A c compiler is required for link_main_routines')
@@ -241,7 +259,7 @@ def check_fortran_mangling(self,*k,**kw)
 	self.start_msg('Getting fortran mangling scheme')
 	for(u,du,c)in mangling_schemes():
 		try:
-			self.check_cc(compile_filename=[],features='link_main_routines_func',msg='nomsg',errmsg='nomsg',mandatory=True,dummy_func_nounder=mangle_name(u,du,c,"foobar"),dummy_func_under=mangle_name(u,du,c,"foo_bar"),main_func_name=self.env.FC_MAIN)
+			self.check_cc(compile_filename=[],features='link_main_routines_func',msg='nomsg',errmsg='nomsg',dummy_func_nounder=mangle_name(u,du,c,'foobar'),dummy_func_under=mangle_name(u,du,c,'foo_bar'),main_func_name=self.env.FC_MAIN)
 		except self.errors.ConfigurationError:
 			pass
 		else:
@@ -252,10 +270,13 @@ def check_fortran_mangling(self,*k,**kw)
 		self.end_msg(False)
 		self.fatal('mangler not found')
 	return(u,du,c)
+@feature('pyext')
+@before_method('propagate_uselib_vars','apply_link')
 def set_lib_pat(self):
-	self.env['fcshlib_PATTERN']=self.env['pyext_PATTERN']
+	self.env.fcshlib_PATTERN=self.env.pyext_PATTERN
+@conf
 def detect_openmp(self):
-	for x in['-fopenmp','-openmp','-mp','-xopenmp','-omp','-qsmp=omp']:
+	for x in('-fopenmp','-openmp','-mp','-xopenmp','-omp','-qsmp=omp'):
 		try:
 			self.check_fc(msg='Checking for OpenMP flag %s'%x,fragment='program main\n  call omp_get_num_threads()\nend program main',fcflags=x,linkflags=x,uselib_store='OPENMP')
 		except self.errors.ConfigurationError:
@@ -264,20 +285,15 @@ def detect_openmp(self):
 			break
 	else:
 		self.fatal('Could not find OpenMP')
-
-conf(fc_flags)
-conf(check_fortran)
-conf(check_fc)
-conf(fortran_modifier_darwin)
-conf(fortran_modifier_win32)
-conf(fortran_modifier_cygwin)
-conf(check_fortran_dummy_main)
-conf(is_link_verbose)
-conf(check_fortran_verbose_flag)
-conf(check_fortran_clib)
-feature('link_main_routines_func')(link_main_routines_tg_method)
-before_method('process_source')(link_main_routines_tg_method)
-conf(check_fortran_mangling)
-feature('pyext')(set_lib_pat)
-before_method('propagate_uselib_vars','apply_link')(set_lib_pat)
-conf(detect_openmp)
\ No newline at end of file
+@conf
+def check_gfortran_o_space(self):
+	if self.env.FC_NAME!='GFORTRAN'or int(self.env.FC_VERSION[0])>4:
+		return
+	self.env.stash()
+	self.env.FCLNK_TGT_F=['-o','']
+	try:
+		self.check_fc(msg='Checking if the -o link must be split from arguments',fragment=FC_FRAGMENT,features='fc fcshlib')
+	except self.errors.ConfigurationError:
+		self.env.revert()
+	else:
+		self.env.commit()
--- pugl-0~svn32+dfsg0.orig/waflib/Tools/fc_scan.py
+++ pugl-0~svn32+dfsg0/waflib/Tools/fc_scan.py
@@ -1,17 +1,16 @@
 #! /usr/bin/env python
 # encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
 
 import re
-from waflib import Utils,Task,TaskGen,Logs
-from waflib.TaskGen import feature,before_method,after_method,extension
-from waflib.Configure import conf
-INC_REGEX="""(?:^|['">]\s*;)\s*INCLUDE\s+(?:\w+_)?[<"'](.+?)(?=["'>])"""
-USE_REGEX="""(?:^|;)\s*USE(?:\s+|(?:(?:\s*,\s*(?:NON_)?INTRINSIC)?\s*::))\s*(\w+)"""
-MOD_REGEX="""(?:^|;)\s*MODULE(?!\s*PROCEDURE)(?:\s+|(?:(?:\s*,\s*(?:NON_)?INTRINSIC)?\s*::))\s*(\w+)"""
+INC_REGEX=r"""(?:^|['">]\s*;)\s*(?:|#\s*)INCLUDE\s+(?:\w+_)?[<"'](.+?)(?=["'>])"""
+USE_REGEX=r"""(?:^|;)\s*USE(?:\s+|(?:(?:\s*,\s*(?:NON_)?INTRINSIC)?\s*::))\s*(\w+)"""
+MOD_REGEX=r"""(?:^|;)\s*MODULE(?!\s+(?:PROCEDURE|SUBROUTINE|FUNCTION))\s+(\w+)"""
+SMD_REGEX=r"""(?:^|;)\s*SUBMODULE\s*\(([\w:]+)\)\s*(\w+)"""
 re_inc=re.compile(INC_REGEX,re.I)
 re_use=re.compile(USE_REGEX,re.I)
 re_mod=re.compile(MOD_REGEX,re.I)
+re_smd=re.compile(SMD_REGEX,re.I)
 class fortran_parser(object):
 	def __init__(self,incpaths):
 		self.seen=[]
@@ -33,6 +32,10 @@ class fortran_parser(object):
 			m=re_mod.search(line)
 			if m:
 				mods.append(m.group(1))
+			m=re_smd.search(line)
+			if m:
+				uses.append(m.group(1))
+				mods.append('{0}:{1}'.format(m.group(1),m.group(2)))
 		return(incs,uses,mods)
 	def start(self,node):
 		self.waiting=[node]
@@ -40,7 +43,6 @@ class fortran_parser(object):
 			nd=self.waiting.pop(0)
 			self.iter(nd)
 	def iter(self,node):
-		path=node.abspath()
 		incs,uses,mods=self.find_deps(node)
 		for x in incs:
 			if x in self.seen:
--- pugl-0~svn32+dfsg0.orig/waflib/Tools/flex.py
+++ pugl-0~svn32+dfsg0/waflib/Tools/flex.py
@@ -1,8 +1,10 @@
 #! /usr/bin/env python
 # encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
 
-import waflib.TaskGen
+import os,re
+from waflib import Task,TaskGen
+from waflib.Tools import ccroot
 def decide_ext(self,node):
 	if'cxx'in self.features:
 		return['.lex.cc']
@@ -12,16 +14,25 @@ def flexfun(tsk):
 	bld=tsk.generator.bld
 	wd=bld.variant_dir
 	def to_list(xx):
-		if isinstance(xx,str):return[xx]
+		if isinstance(xx,str):
+			return[xx]
 		return xx
 	tsk.last_cmd=lst=[]
-	lst.extend(to_list(env['FLEX']))
-	lst.extend(to_list(env['FLEXFLAGS']))
-	lst.extend([a.path_from(bld.bldnode)for a in tsk.inputs])
+	lst.extend(to_list(env.FLEX))
+	lst.extend(to_list(env.FLEXFLAGS))
+	inputs=[a.path_from(tsk.get_cwd())for a in tsk.inputs]
+	if env.FLEX_MSYS:
+		inputs=[x.replace(os.sep,'/')for x in inputs]
+	lst.extend(inputs)
 	lst=[x for x in lst if x]
 	txt=bld.cmd_and_log(lst,cwd=wd,env=env.env or None,quiet=0)
-	tsk.outputs[0].write(txt)
-waflib.TaskGen.declare_chain(name='flex',rule=flexfun,ext_in='.l',decider=decide_ext,)
+	tsk.outputs[0].write(txt.replace('\r\n','\n').replace('\r','\n'))
+TaskGen.declare_chain(name='flex',rule=flexfun,ext_in='.l',decider=decide_ext,)
+Task.classes['flex'].vars=['FLEXFLAGS','FLEX']
+ccroot.USELIB_VARS['c'].add('FLEXFLAGS')
+ccroot.USELIB_VARS['cxx'].add('FLEXFLAGS')
 def configure(conf):
 	conf.find_program('flex',var='FLEX')
 	conf.env.FLEXFLAGS=['-t']
+	if re.search(r"\\msys\\[0-9.]+\\bin\\flex.exe$",conf.env.FLEX[0]):
+		conf.env.FLEX_MSYS=True
--- pugl-0~svn32+dfsg0.orig/waflib/Tools/g95.py
+++ pugl-0~svn32+dfsg0/waflib/Tools/g95.py
@@ -1,32 +1,38 @@
 #! /usr/bin/env python
 # encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
 
 import re
 from waflib import Utils
-from waflib.Tools import fc,fc_config,fc_scan
+from waflib.Tools import fc,fc_config,fc_scan,ar
 from waflib.Configure import conf
+@conf
 def find_g95(conf):
 	fc=conf.find_program('g95',var='FC')
-	fc=conf.cmd_to_list(fc)
 	conf.get_g95_version(fc)
 	conf.env.FC_NAME='G95'
+@conf
 def g95_flags(conf):
 	v=conf.env
-	v['FCFLAGS_fcshlib']=['-fPIC']
-	v['FORTRANMODFLAG']=['-fmod=','']
-	v['FCFLAGS_DEBUG']=['-Werror']
+	v.FCFLAGS_fcshlib=['-fPIC']
+	v.FORTRANMODFLAG=['-fmod=','']
+	v.FCFLAGS_DEBUG=['-Werror']
+@conf
 def g95_modifier_win32(conf):
 	fc_config.fortran_modifier_win32(conf)
+@conf
 def g95_modifier_cygwin(conf):
 	fc_config.fortran_modifier_cygwin(conf)
+@conf
 def g95_modifier_darwin(conf):
 	fc_config.fortran_modifier_darwin(conf)
+@conf
 def g95_modifier_platform(conf):
-	dest_os=conf.env['DEST_OS']or Utils.unversioned_sys_platform()
+	dest_os=conf.env.DEST_OS or Utils.unversioned_sys_platform()
 	g95_modifier_func=getattr(conf,'g95_modifier_'+dest_os,None)
 	if g95_modifier_func:
 		g95_modifier_func()
+@conf
 def get_g95_version(conf,fc):
 	version_re=re.compile(r"g95\s*(?P<major>\d*)\.(?P<minor>\d*)").search
 	cmd=fc+['--version']
@@ -38,18 +44,11 @@ def get_g95_version(conf,fc):
 	if not match:
 		conf.fatal('cannot determine g95 version')
 	k=match.groupdict()
-	conf.env['FC_VERSION']=(k['major'],k['minor'])
+	conf.env.FC_VERSION=(k['major'],k['minor'])
 def configure(conf):
 	conf.find_g95()
 	conf.find_ar()
 	conf.fc_flags()
+	conf.fc_add_flags()
 	conf.g95_flags()
 	conf.g95_modifier_platform()
-
-conf(find_g95)
-conf(g95_flags)
-conf(g95_modifier_win32)
-conf(g95_modifier_cygwin)
-conf(g95_modifier_darwin)
-conf(g95_modifier_platform)
-conf(get_g95_version)
\ No newline at end of file
--- pugl-0~svn32+dfsg0.orig/waflib/Tools/gas.py
+++ pugl-0~svn32+dfsg0/waflib/Tools/gas.py
@@ -1,11 +1,13 @@
 #! /usr/bin/env python
 # encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
 
 import waflib.Tools.asm
 from waflib.Tools import ar
 def configure(conf):
-	conf.find_program(['gas','as','gcc'],var='AS')
-	conf.env.AS_TGT_F=['-o']
+	conf.find_program(['gas','gcc'],var='AS')
+	conf.env.AS_TGT_F=['-c','-o']
 	conf.env.ASLNK_TGT_F=['-o']
 	conf.find_ar()
+	conf.load('asm')
+	conf.env.ASM_NAME='gas'
--- pugl-0~svn32+dfsg0.orig/waflib/Tools/gcc.py
+++ pugl-0~svn32+dfsg0/waflib/Tools/gcc.py
@@ -1,80 +1,94 @@
 #! /usr/bin/env python
 # encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
 
-import os,sys
-from waflib import Configure,Options,Utils
 from waflib.Tools import ccroot,ar
 from waflib.Configure import conf
+@conf
 def find_gcc(conf):
 	cc=conf.find_program(['gcc','cc'],var='CC')
-	cc=conf.cmd_to_list(cc)
 	conf.get_cc_version(cc,gcc=True)
 	conf.env.CC_NAME='gcc'
-	conf.env.CC=cc
+@conf
 def gcc_common_flags(conf):
 	v=conf.env
-	v['CC_SRC_F']=[]
-	v['CC_TGT_F']=['-c','-o']
-	if not v['LINK_CC']:v['LINK_CC']=v['CC']
-	v['CCLNK_SRC_F']=[]
-	v['CCLNK_TGT_F']=['-o']
-	v['CPPPATH_ST']='-I%s'
-	v['DEFINES_ST']='-D%s'
-	v['LIB_ST']='-l%s'
-	v['LIBPATH_ST']='-L%s'
-	v['STLIB_ST']='-l%s'
-	v['STLIBPATH_ST']='-L%s'
-	v['RPATH_ST']='-Wl,-rpath,%s'
-	v['SONAME_ST']='-Wl,-h,%s'
-	v['SHLIB_MARKER']='-Wl,-Bdynamic'
-	v['STLIB_MARKER']='-Wl,-Bstatic'
-	v['cprogram_PATTERN']='%s'
-	v['CFLAGS_cshlib']=['-fPIC']
-	v['LINKFLAGS_cshlib']=['-shared']
-	v['cshlib_PATTERN']='lib%s.so'
-	v['LINKFLAGS_cstlib']=['-Wl,-Bstatic']
-	v['cstlib_PATTERN']='lib%s.a'
-	v['LINKFLAGS_MACBUNDLE']=['-bundle','-undefined','dynamic_lookup']
-	v['CFLAGS_MACBUNDLE']=['-fPIC']
-	v['macbundle_PATTERN']='%s.bundle'
+	v.CC_SRC_F=[]
+	v.CC_TGT_F=['-c','-o']
+	if not v.LINK_CC:
+		v.LINK_CC=v.CC
+	v.CCLNK_SRC_F=[]
+	v.CCLNK_TGT_F=['-o']
+	v.CPPPATH_ST='-I%s'
+	v.DEFINES_ST='-D%s'
+	v.LIB_ST='-l%s'
+	v.LIBPATH_ST='-L%s'
+	v.STLIB_ST='-l%s'
+	v.STLIBPATH_ST='-L%s'
+	v.RPATH_ST='-Wl,-rpath,%s'
+	v.SONAME_ST='-Wl,-h,%s'
+	v.SHLIB_MARKER='-Wl,-Bdynamic'
+	v.STLIB_MARKER='-Wl,-Bstatic'
+	v.cprogram_PATTERN='%s'
+	v.CFLAGS_cshlib=['-fPIC']
+	v.LINKFLAGS_cshlib=['-shared']
+	v.cshlib_PATTERN='lib%s.so'
+	v.LINKFLAGS_cstlib=['-Wl,-Bstatic']
+	v.cstlib_PATTERN='lib%s.a'
+	v.LINKFLAGS_MACBUNDLE=['-bundle','-undefined','dynamic_lookup']
+	v.CFLAGS_MACBUNDLE=['-fPIC']
+	v.macbundle_PATTERN='%s.bundle'
+@conf
 def gcc_modifier_win32(conf):
 	v=conf.env
-	v['cprogram_PATTERN']='%s.exe'
-	v['cshlib_PATTERN']='%s.dll'
-	v['implib_PATTERN']='lib%s.dll.a'
-	v['IMPLIB_ST']='-Wl,--out-implib,%s'
-	v['CFLAGS_cshlib']=[]
-	v.append_value('CFLAGS_cshlib',['-DDLL_EXPORT'])
+	v.cprogram_PATTERN='%s.exe'
+	v.cshlib_PATTERN='%s.dll'
+	v.implib_PATTERN='%s.dll.a'
+	v.IMPLIB_ST='-Wl,--out-implib,%s'
+	v.CFLAGS_cshlib=[]
 	v.append_value('LINKFLAGS',['-Wl,--enable-auto-import'])
+@conf
 def gcc_modifier_cygwin(conf):
 	gcc_modifier_win32(conf)
 	v=conf.env
-	v['cshlib_PATTERN']='cyg%s.dll'
+	v.cshlib_PATTERN='cyg%s.dll'
 	v.append_value('LINKFLAGS_cshlib',['-Wl,--enable-auto-image-base'])
-	v['CFLAGS_cshlib']=[]
+	v.CFLAGS_cshlib=[]
+@conf
 def gcc_modifier_darwin(conf):
 	v=conf.env
-	v['CFLAGS_cshlib']=['-fPIC','-compatibility_version','1','-current_version','1']
-	v['LINKFLAGS_cshlib']=['-dynamiclib']
-	v['cshlib_PATTERN']='lib%s.dylib'
-	v['FRAMEWORKPATH_ST']='-F%s'
-	v['FRAMEWORK_ST']=['-framework']
-	v['ARCH_ST']=['-arch']
-	v['LINKFLAGS_cstlib']=[]
-	v['SHLIB_MARKER']=[]
-	v['STLIB_MARKER']=[]
-	v['SONAME_ST']=[]
+	v.CFLAGS_cshlib=['-fPIC']
+	v.LINKFLAGS_cshlib=['-dynamiclib']
+	v.cshlib_PATTERN='lib%s.dylib'
+	v.FRAMEWORKPATH_ST='-F%s'
+	v.FRAMEWORK_ST=['-framework']
+	v.ARCH_ST=['-arch']
+	v.LINKFLAGS_cstlib=[]
+	v.SHLIB_MARKER=[]
+	v.STLIB_MARKER=[]
+	v.SONAME_ST=[]
+@conf
 def gcc_modifier_aix(conf):
 	v=conf.env
-	v['LINKFLAGS_cprogram']=['-Wl,-brtl']
-	v['LINKFLAGS_cshlib']=['-shared','-Wl,-brtl,-bexpfull']
-	v['SHLIB_MARKER']=[]
+	v.LINKFLAGS_cprogram=['-Wl,-brtl']
+	v.LINKFLAGS_cshlib=['-shared','-Wl,-brtl,-bexpfull']
+	v.SHLIB_MARKER=[]
+@conf
 def gcc_modifier_hpux(conf):
 	v=conf.env
-	v['SHLIB_MARKER']=[]
-	v['CFLAGS_cshlib']=['-fPIC','-DPIC']
-	v['cshlib_PATTERN']='lib%s.sl'
+	v.SHLIB_MARKER=[]
+	v.STLIB_MARKER=[]
+	v.CFLAGS_cshlib=['-fPIC','-DPIC']
+	v.cshlib_PATTERN='lib%s.sl'
+@conf
+def gcc_modifier_openbsd(conf):
+	conf.env.SONAME_ST=[]
+@conf
+def gcc_modifier_osf1V(conf):
+	v=conf.env
+	v.SHLIB_MARKER=[]
+	v.STLIB_MARKER=[]
+	v.SONAME_ST=[]
+@conf
 def gcc_modifier_platform(conf):
 	gcc_modifier_func=getattr(conf,'gcc_modifier_'+conf.env.DEST_OS,None)
 	if gcc_modifier_func:
@@ -87,12 +101,4 @@ def configure(conf):
 	conf.cc_load_tools()
 	conf.cc_add_flags()
 	conf.link_add_flags()
-
-conf(find_gcc)
-conf(gcc_common_flags)
-conf(gcc_modifier_win32)
-conf(gcc_modifier_cygwin)
-conf(gcc_modifier_darwin)
-conf(gcc_modifier_aix)
-conf(gcc_modifier_hpux)
-conf(gcc_modifier_platform)
\ No newline at end of file
+	conf.check_gcc_o_space()
--- pugl-0~svn32+dfsg0.orig/waflib/Tools/gdc.py
+++ pugl-0~svn32+dfsg0/waflib/Tools/gdc.py
@@ -1,34 +1,35 @@
 #! /usr/bin/env python
 # encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
 
-import sys
 from waflib.Tools import ar,d
 from waflib.Configure import conf
+@conf
 def find_gdc(conf):
 	conf.find_program('gdc',var='D')
+	out=conf.cmd_and_log(conf.env.D+['--version'])
+	if out.find("gdc")==-1:
+		conf.fatal("detected compiler is not gdc")
+@conf
 def common_flags_gdc(conf):
 	v=conf.env
-	v['DFLAGS']=[]
-	v['D_SRC_F']=['-c']
-	v['D_TGT_F']='-o%s'
-	v['D_LINKER']=v['D']
-	v['DLNK_SRC_F']=''
-	v['DLNK_TGT_F']='-o%s'
-	v['DINC_ST']='-I%s'
-	v['DSHLIB_MARKER']=v['DSTLIB_MARKER']=''
-	v['DSTLIB_ST']=v['DSHLIB_ST']='-l%s'
-	v['DSTLIBPATH_ST']=v['DLIBPATH_ST']='-L%s'
-	v['LINKFLAGS_dshlib']=['-shared']
-	v['DHEADER_ext']='.di'
+	v.DFLAGS=[]
+	v.D_SRC_F=['-c']
+	v.D_TGT_F='-o%s'
+	v.D_LINKER=v.D
+	v.DLNK_SRC_F=''
+	v.DLNK_TGT_F='-o%s'
+	v.DINC_ST='-I%s'
+	v.DSHLIB_MARKER=v.DSTLIB_MARKER=''
+	v.DSTLIB_ST=v.DSHLIB_ST='-l%s'
+	v.DSTLIBPATH_ST=v.DLIBPATH_ST='-L%s'
+	v.LINKFLAGS_dshlib=['-shared']
+	v.DHEADER_ext='.di'
 	v.DFLAGS_d_with_header='-fintfc'
-	v['D_HDR_F']='-fintfc-file=%s'
+	v.D_HDR_F='-fintfc-file=%s'
 def configure(conf):
 	conf.find_gdc()
 	conf.load('ar')
 	conf.load('d')
 	conf.common_flags_gdc()
 	conf.d_platform_flags()
-
-conf(find_gdc)
-conf(common_flags_gdc)
\ No newline at end of file
--- pugl-0~svn32+dfsg0.orig/waflib/Tools/gfortran.py
+++ pugl-0~svn32+dfsg0/waflib/Tools/gfortran.py
@@ -1,38 +1,46 @@
 #! /usr/bin/env python
 # encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
 
 import re
 from waflib import Utils
-from waflib.Tools import fc,fc_config,fc_scan
+from waflib.Tools import fc,fc_config,fc_scan,ar
 from waflib.Configure import conf
+@conf
 def find_gfortran(conf):
 	fc=conf.find_program(['gfortran','g77'],var='FC')
-	fc=conf.cmd_to_list(fc)
 	conf.get_gfortran_version(fc)
 	conf.env.FC_NAME='GFORTRAN'
+@conf
 def gfortran_flags(conf):
 	v=conf.env
-	v['FCFLAGS_fcshlib']=['-fPIC']
-	v['FORTRANMODFLAG']=['-J','']
-	v['FCFLAGS_DEBUG']=['-Werror']
+	v.FCFLAGS_fcshlib=['-fPIC']
+	v.FORTRANMODFLAG=['-J','']
+	v.FCFLAGS_DEBUG=['-Werror']
+@conf
 def gfortran_modifier_win32(conf):
 	fc_config.fortran_modifier_win32(conf)
+@conf
 def gfortran_modifier_cygwin(conf):
 	fc_config.fortran_modifier_cygwin(conf)
+@conf
 def gfortran_modifier_darwin(conf):
 	fc_config.fortran_modifier_darwin(conf)
+@conf
 def gfortran_modifier_platform(conf):
-	dest_os=conf.env['DEST_OS']or Utils.unversioned_sys_platform()
+	dest_os=conf.env.DEST_OS or Utils.unversioned_sys_platform()
 	gfortran_modifier_func=getattr(conf,'gfortran_modifier_'+dest_os,None)
 	if gfortran_modifier_func:
 		gfortran_modifier_func()
+@conf
 def get_gfortran_version(conf,fc):
 	version_re=re.compile(r"GNU\s*Fortran",re.I).search
 	cmd=fc+['--version']
 	out,err=fc_config.getoutput(conf,cmd,stdin=False)
-	if out:match=version_re(out)
-	else:match=version_re(err)
+	if out:
+		match=version_re(out)
+	else:
+		match=version_re(err)
 	if not match:
 		conf.fatal('Could not determine the compiler type')
 	cmd=fc+['-dM','-E','-']
@@ -40,7 +48,7 @@ def get_gfortran_version(conf,fc):
 	if out.find('__GNUC__')<0:
 		conf.fatal('Could not determine the compiler type')
 	k={}
-	out=out.split('\n')
+	out=out.splitlines()
 	import shlex
 	for line in out:
 		lst=shlex.split(line)
@@ -52,18 +60,12 @@ def get_gfortran_version(conf,fc):
 		return var in k
 	def isT(var):
 		return var in k and k[var]!='0'
-	conf.env['FC_VERSION']=(k['__GNUC__'],k['__GNUC_MINOR__'],k['__GNUC_PATCHLEVEL__'])
+	conf.env.FC_VERSION=(k['__GNUC__'],k['__GNUC_MINOR__'],k['__GNUC_PATCHLEVEL__'])
 def configure(conf):
 	conf.find_gfortran()
 	conf.find_ar()
 	conf.fc_flags()
+	conf.fc_add_flags()
 	conf.gfortran_flags()
 	conf.gfortran_modifier_platform()
-
-conf(find_gfortran)
-conf(gfortran_flags)
-conf(gfortran_modifier_win32)
-conf(gfortran_modifier_cygwin)
-conf(gfortran_modifier_darwin)
-conf(gfortran_modifier_platform)
-conf(get_gfortran_version)
\ No newline at end of file
+	conf.check_gfortran_o_space()
--- pugl-0~svn32+dfsg0.orig/waflib/Tools/glib2.py
+++ pugl-0~svn32+dfsg0/waflib/Tools/glib2.py
@@ -1,15 +1,19 @@
 #! /usr/bin/env python
 # encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
 
 import os
-from waflib import Task,Utils,Options,Errors,Logs
-from waflib.TaskGen import taskgen_method,before_method,after_method,feature
+import functools
+from waflib import Context,Task,Utils,Options,Errors,Logs
+from waflib.TaskGen import taskgen_method,before_method,feature,extension
+from waflib.Configure import conf
+@taskgen_method
 def add_marshal_file(self,filename,prefix):
 	if not hasattr(self,'marshal_list'):
 		self.marshal_list=[]
 	self.meths.append('process_marshal')
 	self.marshal_list.append((filename,prefix))
+@before_method('process_source')
 def process_marshal(self):
 	for f,prefix in getattr(self,'marshal_list',[]):
 		node=self.path.find_resource(f)
@@ -22,29 +26,33 @@ def process_marshal(self):
 	self.source=self.to_nodes(getattr(self,'source',[]))
 	self.source.append(c_node)
 class glib_genmarshal(Task.Task):
+	vars=['GLIB_GENMARSHAL_PREFIX','GLIB_GENMARSHAL']
+	color='BLUE'
+	ext_out=['.h']
 	def run(self):
-		bld=self.inputs[0].__class__.ctx
+		bld=self.generator.bld
 		get=self.env.get_flat
 		cmd1="%s %s --prefix=%s --header > %s"%(get('GLIB_GENMARSHAL'),self.inputs[0].srcpath(),get('GLIB_GENMARSHAL_PREFIX'),self.outputs[0].abspath())
 		ret=bld.exec_command(cmd1)
-		if ret:return ret
+		if ret:
+			return ret
 		c='''#include "%s"\n'''%self.outputs[0].name
 		self.outputs[1].write(c)
 		cmd2="%s %s --prefix=%s --body >> %s"%(get('GLIB_GENMARSHAL'),self.inputs[0].srcpath(),get('GLIB_GENMARSHAL_PREFIX'),self.outputs[1].abspath())
 		return bld.exec_command(cmd2)
-	vars=['GLIB_GENMARSHAL_PREFIX','GLIB_GENMARSHAL']
-	color='BLUE'
-	ext_out=['.h']
+@taskgen_method
 def add_enums_from_template(self,source='',target='',template='',comments=''):
 	if not hasattr(self,'enums_list'):
 		self.enums_list=[]
 	self.meths.append('process_enums')
 	self.enums_list.append({'source':source,'target':target,'template':template,'file-head':'','file-prod':'','file-tail':'','enum-prod':'','value-head':'','value-prod':'','value-tail':'','comments':comments})
+@taskgen_method
 def add_enums(self,source='',target='',file_head='',file_prod='',file_tail='',enum_prod='',value_head='',value_prod='',value_tail='',comments=''):
 	if not hasattr(self,'enums_list'):
 		self.enums_list=[]
 	self.meths.append('process_enums')
 	self.enums_list.append({'source':source,'template':'','target':target,'file-head':file_head,'file-prod':file_prod,'file-tail':file_tail,'enum-prod':enum_prod,'value-head':value_head,'value-prod':value_prod,'value-tail':value_tail,'comments':comments})
+@before_method('process_source')
 def process_enums(self):
 	for enum in getattr(self,'enums_list',[]):
 		task=self.create_task('glib_mkenums')
@@ -55,13 +63,13 @@ def process_enums(self):
 			raise Errors.WafError('missing source '+str(enum))
 		source_list=[self.path.find_resource(k)for k in source_list]
 		inputs+=source_list
-		env['GLIB_MKENUMS_SOURCE']=[k.abspath()for k in source_list]
+		env.GLIB_MKENUMS_SOURCE=[k.abspath()for k in source_list]
 		if not enum['target']:
 			raise Errors.WafError('missing target '+str(enum))
 		tgt_node=self.path.find_or_declare(enum['target'])
 		if tgt_node.name.endswith('.c'):
 			self.source.append(tgt_node)
-		env['GLIB_MKENUMS_TARGET']=tgt_node.abspath()
+		env.GLIB_MKENUMS_TARGET=tgt_node.abspath()
 		options=[]
 		if enum['template']:
 			template_node=self.path.find_resource(enum['template'])
@@ -71,104 +79,164 @@ def process_enums(self):
 		for param,option in params.items():
 			if enum[param]:
 				options.append('%s %r'%(option,enum[param]))
-		env['GLIB_MKENUMS_OPTIONS']=' '.join(options)
+		env.GLIB_MKENUMS_OPTIONS=' '.join(options)
 		task.set_inputs(inputs)
 		task.set_outputs(tgt_node)
 class glib_mkenums(Task.Task):
 	run_str='${GLIB_MKENUMS} ${GLIB_MKENUMS_OPTIONS} ${GLIB_MKENUMS_SOURCE} > ${GLIB_MKENUMS_TARGET}'
 	color='PINK'
 	ext_out=['.h']
+@taskgen_method
 def add_settings_schemas(self,filename_list):
 	if not hasattr(self,'settings_schema_files'):
 		self.settings_schema_files=[]
 	if not isinstance(filename_list,list):
 		filename_list=[filename_list]
 	self.settings_schema_files.extend(filename_list)
+@taskgen_method
 def add_settings_enums(self,namespace,filename_list):
 	if hasattr(self,'settings_enum_namespace'):
-		raise Errors.WafError("Tried to add gsettings enums to '%s' more than once"%self.name)
+		raise Errors.WafError("Tried to add gsettings enums to %r more than once"%self.name)
 	self.settings_enum_namespace=namespace
-	if type(filename_list)!='list':
+	if not isinstance(filename_list,list):
 		filename_list=[filename_list]
 	self.settings_enum_files=filename_list
-def r_change_ext(self,ext):
-	name=self.name
-	k=name.rfind('.')
-	if k>=0:
-		name=name[:k]+ext
-	else:
-		name=name+ext
-	return self.parent.find_or_declare([name])
+@feature('glib2')
 def process_settings(self):
 	enums_tgt_node=[]
 	install_files=[]
 	settings_schema_files=getattr(self,'settings_schema_files',[])
-	if settings_schema_files and not self.env['GLIB_COMPILE_SCHEMAS']:
+	if settings_schema_files and not self.env.GLIB_COMPILE_SCHEMAS:
 		raise Errors.WafError("Unable to process GSettings schemas - glib-compile-schemas was not found during configure")
 	if hasattr(self,'settings_enum_files'):
 		enums_task=self.create_task('glib_mkenums')
 		source_list=self.settings_enum_files
 		source_list=[self.path.find_resource(k)for k in source_list]
 		enums_task.set_inputs(source_list)
-		enums_task.env['GLIB_MKENUMS_SOURCE']=[k.abspath()for k in source_list]
+		enums_task.env.GLIB_MKENUMS_SOURCE=[k.abspath()for k in source_list]
 		target=self.settings_enum_namespace+'.enums.xml'
 		tgt_node=self.path.find_or_declare(target)
 		enums_task.set_outputs(tgt_node)
-		enums_task.env['GLIB_MKENUMS_TARGET']=tgt_node.abspath()
+		enums_task.env.GLIB_MKENUMS_TARGET=tgt_node.abspath()
 		enums_tgt_node=[tgt_node]
 		install_files.append(tgt_node)
 		options='--comments "<!-- @comment@ -->" --fhead "<schemalist>" --vhead "  <@type@ id=\\"%s.@EnumName@\\">" --vprod "    <value nick=\\"@valuenick@\\" value=\\"@valuenum@\\"/>" --vtail "  </@type@>" --ftail "</schemalist>" '%(self.settings_enum_namespace)
-		enums_task.env['GLIB_MKENUMS_OPTIONS']=options
+		enums_task.env.GLIB_MKENUMS_OPTIONS=options
 	for schema in settings_schema_files:
 		schema_task=self.create_task('glib_validate_schema')
 		schema_node=self.path.find_resource(schema)
 		if not schema_node:
-			raise Errors.WafError("Cannot find the schema file '%s'"%schema)
+			raise Errors.WafError("Cannot find the schema file %r"%schema)
 		install_files.append(schema_node)
 		source_list=enums_tgt_node+[schema_node]
 		schema_task.set_inputs(source_list)
-		schema_task.env['GLIB_COMPILE_SCHEMAS_OPTIONS']=[("--schema-file="+k.abspath())for k in source_list]
-		target_node=r_change_ext(schema_node,'.xml.valid')
+		schema_task.env.GLIB_COMPILE_SCHEMAS_OPTIONS=[("--schema-file="+k.abspath())for k in source_list]
+		target_node=schema_node.change_ext('.xml.valid')
 		schema_task.set_outputs(target_node)
-		schema_task.env['GLIB_VALIDATE_SCHEMA_OUTPUT']=target_node.abspath()
+		schema_task.env.GLIB_VALIDATE_SCHEMA_OUTPUT=target_node.abspath()
 	def compile_schemas_callback(bld):
-		if not bld.is_install:return
-		Logs.pprint('YELLOW','Updating GSettings schema cache')
-		command=Utils.subst_vars("${GLIB_COMPILE_SCHEMAS} ${GSETTINGSSCHEMADIR}",bld.env)
-		ret=self.bld.exec_command(command)
+		if not bld.is_install:
+			return
+		compile_schemas=Utils.to_list(bld.env.GLIB_COMPILE_SCHEMAS)
+		destdir=Options.options.destdir
+		paths=bld._compile_schemas_registered
+		if destdir:
+			paths=(os.path.join(destdir,path.lstrip(os.sep))for path in paths)
+		for path in paths:
+			Logs.pprint('YELLOW','Updating GSettings schema cache %r'%path)
+			if self.bld.exec_command(compile_schemas+[path]):
+				Logs.warn('Could not update GSettings schema cache %r'%path)
 	if self.bld.is_install:
-		if not self.env['GSETTINGSSCHEMADIR']:
+		schemadir=self.env.GSETTINGSSCHEMADIR
+		if not schemadir:
 			raise Errors.WafError('GSETTINGSSCHEMADIR not defined (should have been set up automatically during configure)')
 		if install_files:
-			self.bld.install_files(self.env['GSETTINGSSCHEMADIR'],install_files)
-			if not hasattr(self.bld,'_compile_schemas_registered'):
+			self.add_install_files(install_to=schemadir,install_from=install_files)
+			registered_schemas=getattr(self.bld,'_compile_schemas_registered',None)
+			if not registered_schemas:
+				registered_schemas=self.bld._compile_schemas_registered=set()
 				self.bld.add_post_fun(compile_schemas_callback)
-				self.bld._compile_schemas_registered=True
+			registered_schemas.add(schemadir)
 class glib_validate_schema(Task.Task):
 	run_str='rm -f ${GLIB_VALIDATE_SCHEMA_OUTPUT} && ${GLIB_COMPILE_SCHEMAS} --dry-run ${GLIB_COMPILE_SCHEMAS_OPTIONS} && touch ${GLIB_VALIDATE_SCHEMA_OUTPUT}'
 	color='PINK'
-def configure(conf):
+@extension('.gresource.xml')
+def process_gresource_source(self,node):
+	if not self.env.GLIB_COMPILE_RESOURCES:
+		raise Errors.WafError("Unable to process GResource file - glib-compile-resources was not found during configure")
+	if'gresource'in self.features:
+		return
+	h_node=node.change_ext('_xml.h')
+	c_node=node.change_ext('_xml.c')
+	self.create_task('glib_gresource_source',node,[h_node,c_node])
+	self.source.append(c_node)
+@feature('gresource')
+def process_gresource_bundle(self):
+	for i in self.to_list(self.source):
+		node=self.path.find_resource(i)
+		task=self.create_task('glib_gresource_bundle',node,node.change_ext(''))
+		inst_to=getattr(self,'install_path',None)
+		if inst_to:
+			self.add_install_files(install_to=inst_to,install_from=task.outputs)
+class glib_gresource_base(Task.Task):
+	color='BLUE'
+	base_cmd='${GLIB_COMPILE_RESOURCES} --sourcedir=${SRC[0].parent.srcpath()} --sourcedir=${SRC[0].bld_dir()}'
+	def scan(self):
+		bld=self.generator.bld
+		kw={}
+		kw['cwd']=self.get_cwd()
+		kw['quiet']=Context.BOTH
+		cmd=Utils.subst_vars('${GLIB_COMPILE_RESOURCES} --sourcedir=%s --sourcedir=%s --generate-dependencies %s'%(self.inputs[0].parent.srcpath(),self.inputs[0].bld_dir(),self.inputs[0].bldpath()),self.env)
+		output=bld.cmd_and_log(cmd,**kw)
+		nodes=[]
+		names=[]
+		for dep in output.splitlines():
+			if dep:
+				node=bld.bldnode.find_node(dep)
+				if node:
+					nodes.append(node)
+				else:
+					names.append(dep)
+		return(nodes,names)
+class glib_gresource_source(glib_gresource_base):
+	vars=['GLIB_COMPILE_RESOURCES']
+	fun_h=Task.compile_fun_shell(glib_gresource_base.base_cmd+' --target=${TGT[0].abspath()} --generate-header ${SRC}')
+	fun_c=Task.compile_fun_shell(glib_gresource_base.base_cmd+' --target=${TGT[1].abspath()} --generate-source ${SRC}')
+	ext_out=['.h']
+	def run(self):
+		return self.fun_h[0](self)or self.fun_c[0](self)
+class glib_gresource_bundle(glib_gresource_base):
+	run_str=glib_gresource_base.base_cmd+' --target=${TGT} ${SRC}'
+	shell=True
+@conf
+def find_glib_genmarshal(conf):
 	conf.find_program('glib-genmarshal',var='GLIB_GENMARSHAL')
-	conf.find_perl_program('glib-mkenums',var='GLIB_MKENUMS')
-	conf.find_program('glib-compile-schemas',var='GLIB_COMPILE_SCHEMAS',mandatory=False)
+@conf
+def find_glib_mkenums(conf):
+	if not conf.env.PERL:
+		conf.find_program('perl',var='PERL')
+	conf.find_program('glib-mkenums',interpreter='PERL',var='GLIB_MKENUMS')
+@conf
+def find_glib_compile_schemas(conf):
+	conf.find_program('glib-compile-schemas',var='GLIB_COMPILE_SCHEMAS')
 	def getstr(varname):
 		return getattr(Options.options,varname,getattr(conf.env,varname,''))
 	gsettingsschemadir=getstr('GSETTINGSSCHEMADIR')
 	if not gsettingsschemadir:
 		datadir=getstr('DATADIR')
 		if not datadir:
-			prefix=conf.env['PREFIX']
+			prefix=conf.env.PREFIX
 			datadir=os.path.join(prefix,'share')
 		gsettingsschemadir=os.path.join(datadir,'glib-2.0','schemas')
-	conf.env['GSETTINGSSCHEMADIR']=gsettingsschemadir
+	conf.env.GSETTINGSSCHEMADIR=gsettingsschemadir
+@conf
+def find_glib_compile_resources(conf):
+	conf.find_program('glib-compile-resources',var='GLIB_COMPILE_RESOURCES')
+def configure(conf):
+	conf.find_glib_genmarshal()
+	conf.find_glib_mkenums()
+	conf.find_glib_compile_schemas(mandatory=False)
+	conf.find_glib_compile_resources(mandatory=False)
 def options(opt):
-	opt.add_option('--gsettingsschemadir',help='GSettings schema location [Default: ${datadir}/glib-2.0/schemas]',default='',dest='GSETTINGSSCHEMADIR')
-
-taskgen_method(add_marshal_file)
-before_method('process_source')(process_marshal)
-taskgen_method(add_enums_from_template)
-taskgen_method(add_enums)
-before_method('process_source')(process_enums)
-taskgen_method(add_settings_schemas)
-taskgen_method(add_settings_enums)
-feature('glib2')(process_settings)
\ No newline at end of file
+	gr=opt.add_option_group('Installation directories')
+	gr.add_option('--gsettingsschemadir',help='GSettings schema location [DATADIR/glib-2.0/schemas]',default='',dest='GSETTINGSSCHEMADIR')
--- pugl-0~svn32+dfsg0.orig/waflib/Tools/gnu_dirs.py
+++ pugl-0~svn32+dfsg0/waflib/Tools/gnu_dirs.py
@@ -1,37 +1,38 @@
 #! /usr/bin/env python
 # encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
 
-import os
+import os,re
 from waflib import Utils,Options,Context
-_options=[x.split(', ')for x in'''
-bindir, user executables, ${EXEC_PREFIX}/bin
-sbindir, system admin executables, ${EXEC_PREFIX}/sbin
-libexecdir, program executables, ${EXEC_PREFIX}/libexec
-sysconfdir, read-only single-machine data, ${PREFIX}/etc
-sharedstatedir, modifiable architecture-independent data, ${PREFIX}/com
-localstatedir, modifiable single-machine data, ${PREFIX}/var
-libdir, object code libraries, ${EXEC_PREFIX}/lib
-includedir, C header files, ${PREFIX}/include
-oldincludedir, C header files for non-gcc, /usr/include
-datarootdir, read-only arch.-independent data root, ${PREFIX}/share
-datadir, read-only architecture-independent data, ${DATAROOTDIR}
-infodir, info documentation, ${DATAROOTDIR}/info
+gnuopts='''
+bindir, user commands, ${EXEC_PREFIX}/bin
+sbindir, system binaries, ${EXEC_PREFIX}/sbin
+libexecdir, program-specific binaries, ${EXEC_PREFIX}/libexec
+sysconfdir, host-specific configuration, ${PREFIX}/etc
+sharedstatedir, architecture-independent variable data, ${PREFIX}/com
+localstatedir, variable data, ${PREFIX}/var
+libdir, object code libraries, ${EXEC_PREFIX}/lib%s
+includedir, header files, ${PREFIX}/include
+oldincludedir, header files for non-GCC compilers, /usr/include
+datarootdir, architecture-independent data root, ${PREFIX}/share
+datadir, architecture-independent data, ${DATAROOTDIR}
+infodir, GNU "info" documentation, ${DATAROOTDIR}/info
 localedir, locale-dependent data, ${DATAROOTDIR}/locale
-mandir, man documentation, ${DATAROOTDIR}/man
+mandir, manual pages, ${DATAROOTDIR}/man
 docdir, documentation root, ${DATAROOTDIR}/doc/${PACKAGE}
-htmldir, html documentation, ${DOCDIR}
-dvidir, dvi documentation, ${DOCDIR}
-pdfdir, pdf documentation, ${DOCDIR}
-psdir, ps documentation, ${DOCDIR}
-'''.split('\n')if x]
+htmldir, HTML documentation, ${DOCDIR}
+dvidir, DVI documentation, ${DOCDIR}
+pdfdir, PDF documentation, ${DOCDIR}
+psdir, PostScript documentation, ${DOCDIR}
+'''%Utils.lib64()
+_options=[x.split(', ')for x in gnuopts.splitlines()if x]
 def configure(conf):
 	def get_param(varname,default):
 		return getattr(Options.options,varname,'')or default
 	env=conf.env
-	conf.env.LIBDIR=conf.env.BINDIR=[]
-	env['EXEC_PREFIX']=get_param('EXEC_PREFIX',env['PREFIX'])
-	env['PACKAGE']=getattr(Context.g_module,'APPNAME',None)or env['PACKAGE']
+	env.LIBDIR=env.BINDIR=[]
+	env.EXEC_PREFIX=get_param('EXEC_PREFIX',env.PREFIX)
+	env.PACKAGE=getattr(Context.g_module,'APPNAME',None)or env.PACKAGE
 	complete=False
 	iter=0
 	while not complete and iter<len(_options)+1:
@@ -45,10 +46,10 @@ def configure(conf):
 				except TypeError:
 					complete=False
 	if not complete:
-		lst=[name for name,_,_ in _options if not env[name.upper()]]
+		lst=[x for x,_,_ in _options if not env[x.upper()]]
 		raise conf.errors.WafError('Variable substitution failure %r'%lst)
 def options(opt):
-	inst_dir=opt.add_option_group('Installation directories','By default, "waf install" will put the files in\
+	inst_dir=opt.add_option_group('Installation prefix','By default, "waf install" will put the files in\
  "/usr/local/bin", "/usr/local/lib" etc. An installation prefix other\
  than "/usr/local" can be given using "--prefix", for example "--prefix=$HOME"')
 	for k in('--prefix','--destdir'):
@@ -56,10 +57,10 @@ def options(opt):
 		if option:
 			opt.parser.remove_option(k)
 			inst_dir.add_option(option)
-	inst_dir.add_option('--exec-prefix',help='installation prefix [Default: ${PREFIX}]',default='',dest='EXEC_PREFIX')
-	dirs_options=opt.add_option_group('Pre-defined installation directories','')
+	inst_dir.add_option('--exec-prefix',help='installation prefix for binaries [PREFIX]',default='',dest='EXEC_PREFIX')
+	dirs_options=opt.add_option_group('Installation directories')
 	for name,help,default in _options:
 		option_name='--'+name
 		str_default=default
-		str_help='%s [Default: %s]'%(help,str_default)
+		str_help='%s [%s]'%(help,re.sub(r'\$\{([^}]+)\}',r'\1',str_default))
 		dirs_options.add_option(option_name,help=str_help,default='',dest=name.upper())
--- pugl-0~svn32+dfsg0.orig/waflib/Tools/gxx.py
+++ pugl-0~svn32+dfsg0/waflib/Tools/gxx.py
@@ -1,80 +1,94 @@
 #! /usr/bin/env python
 # encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
 
-import os,sys
-from waflib import Configure,Options,Utils
 from waflib.Tools import ccroot,ar
 from waflib.Configure import conf
+@conf
 def find_gxx(conf):
 	cxx=conf.find_program(['g++','c++'],var='CXX')
-	cxx=conf.cmd_to_list(cxx)
 	conf.get_cc_version(cxx,gcc=True)
 	conf.env.CXX_NAME='gcc'
-	conf.env.CXX=cxx
+@conf
 def gxx_common_flags(conf):
 	v=conf.env
-	v['CXX_SRC_F']=[]
-	v['CXX_TGT_F']=['-c','-o']
-	if not v['LINK_CXX']:v['LINK_CXX']=v['CXX']
-	v['CXXLNK_SRC_F']=[]
-	v['CXXLNK_TGT_F']=['-o']
-	v['CPPPATH_ST']='-I%s'
-	v['DEFINES_ST']='-D%s'
-	v['LIB_ST']='-l%s'
-	v['LIBPATH_ST']='-L%s'
-	v['STLIB_ST']='-l%s'
-	v['STLIBPATH_ST']='-L%s'
-	v['RPATH_ST']='-Wl,-rpath,%s'
-	v['SONAME_ST']='-Wl,-h,%s'
-	v['SHLIB_MARKER']='-Wl,-Bdynamic'
-	v['STLIB_MARKER']='-Wl,-Bstatic'
-	v['cxxprogram_PATTERN']='%s'
-	v['CXXFLAGS_cxxshlib']=['-fPIC']
-	v['LINKFLAGS_cxxshlib']=['-shared']
-	v['cxxshlib_PATTERN']='lib%s.so'
-	v['LINKFLAGS_cxxstlib']=['-Wl,-Bstatic']
-	v['cxxstlib_PATTERN']='lib%s.a'
-	v['LINKFLAGS_MACBUNDLE']=['-bundle','-undefined','dynamic_lookup']
-	v['CXXFLAGS_MACBUNDLE']=['-fPIC']
-	v['macbundle_PATTERN']='%s.bundle'
+	v.CXX_SRC_F=[]
+	v.CXX_TGT_F=['-c','-o']
+	if not v.LINK_CXX:
+		v.LINK_CXX=v.CXX
+	v.CXXLNK_SRC_F=[]
+	v.CXXLNK_TGT_F=['-o']
+	v.CPPPATH_ST='-I%s'
+	v.DEFINES_ST='-D%s'
+	v.LIB_ST='-l%s'
+	v.LIBPATH_ST='-L%s'
+	v.STLIB_ST='-l%s'
+	v.STLIBPATH_ST='-L%s'
+	v.RPATH_ST='-Wl,-rpath,%s'
+	v.SONAME_ST='-Wl,-h,%s'
+	v.SHLIB_MARKER='-Wl,-Bdynamic'
+	v.STLIB_MARKER='-Wl,-Bstatic'
+	v.cxxprogram_PATTERN='%s'
+	v.CXXFLAGS_cxxshlib=['-fPIC']
+	v.LINKFLAGS_cxxshlib=['-shared']
+	v.cxxshlib_PATTERN='lib%s.so'
+	v.LINKFLAGS_cxxstlib=['-Wl,-Bstatic']
+	v.cxxstlib_PATTERN='lib%s.a'
+	v.LINKFLAGS_MACBUNDLE=['-bundle','-undefined','dynamic_lookup']
+	v.CXXFLAGS_MACBUNDLE=['-fPIC']
+	v.macbundle_PATTERN='%s.bundle'
+@conf
 def gxx_modifier_win32(conf):
 	v=conf.env
-	v['cxxprogram_PATTERN']='%s.exe'
-	v['cxxshlib_PATTERN']='%s.dll'
-	v['implib_PATTERN']='lib%s.dll.a'
-	v['IMPLIB_ST']='-Wl,--out-implib,%s'
-	v['CXXFLAGS_cxxshlib']=[]
-	v.append_value('CXXFLAGS_cxxshlib',['-DDLL_EXPORT'])
+	v.cxxprogram_PATTERN='%s.exe'
+	v.cxxshlib_PATTERN='%s.dll'
+	v.implib_PATTERN='%s.dll.a'
+	v.IMPLIB_ST='-Wl,--out-implib,%s'
+	v.CXXFLAGS_cxxshlib=[]
 	v.append_value('LINKFLAGS',['-Wl,--enable-auto-import'])
+@conf
 def gxx_modifier_cygwin(conf):
 	gxx_modifier_win32(conf)
 	v=conf.env
-	v['cxxshlib_PATTERN']='cyg%s.dll'
+	v.cxxshlib_PATTERN='cyg%s.dll'
 	v.append_value('LINKFLAGS_cxxshlib',['-Wl,--enable-auto-image-base'])
-	v['CXXFLAGS_cxxshlib']=[]
+	v.CXXFLAGS_cxxshlib=[]
+@conf
 def gxx_modifier_darwin(conf):
 	v=conf.env
-	v['CXXFLAGS_cxxshlib']=['-fPIC','-compatibility_version','1','-current_version','1']
-	v['LINKFLAGS_cxxshlib']=['-dynamiclib']
-	v['cxxshlib_PATTERN']='lib%s.dylib'
-	v['FRAMEWORKPATH_ST']='-F%s'
-	v['FRAMEWORK_ST']=['-framework']
-	v['ARCH_ST']=['-arch']
-	v['LINKFLAGS_cxxstlib']=[]
-	v['SHLIB_MARKER']=[]
-	v['STLIB_MARKER']=[]
-	v['SONAME_ST']=[]
+	v.CXXFLAGS_cxxshlib=['-fPIC']
+	v.LINKFLAGS_cxxshlib=['-dynamiclib']
+	v.cxxshlib_PATTERN='lib%s.dylib'
+	v.FRAMEWORKPATH_ST='-F%s'
+	v.FRAMEWORK_ST=['-framework']
+	v.ARCH_ST=['-arch']
+	v.LINKFLAGS_cxxstlib=[]
+	v.SHLIB_MARKER=[]
+	v.STLIB_MARKER=[]
+	v.SONAME_ST=[]
+@conf
 def gxx_modifier_aix(conf):
 	v=conf.env
-	v['LINKFLAGS_cxxprogram']=['-Wl,-brtl']
-	v['LINKFLAGS_cxxshlib']=['-shared','-Wl,-brtl,-bexpfull']
-	v['SHLIB_MARKER']=[]
+	v.LINKFLAGS_cxxprogram=['-Wl,-brtl']
+	v.LINKFLAGS_cxxshlib=['-shared','-Wl,-brtl,-bexpfull']
+	v.SHLIB_MARKER=[]
+@conf
 def gxx_modifier_hpux(conf):
 	v=conf.env
-	v['SHLIB_MARKER']=[]
-	v['CFLAGS_cxxshlib']=['-fPIC','-DPIC']
-	v['cxxshlib_PATTERN']='lib%s.sl'
+	v.SHLIB_MARKER=[]
+	v.STLIB_MARKER=[]
+	v.CFLAGS_cxxshlib=['-fPIC','-DPIC']
+	v.cxxshlib_PATTERN='lib%s.sl'
+@conf
+def gxx_modifier_openbsd(conf):
+	conf.env.SONAME_ST=[]
+@conf
+def gcc_modifier_osf1V(conf):
+	v=conf.env
+	v.SHLIB_MARKER=[]
+	v.STLIB_MARKER=[]
+	v.SONAME_ST=[]
+@conf
 def gxx_modifier_platform(conf):
 	gxx_modifier_func=getattr(conf,'gxx_modifier_'+conf.env.DEST_OS,None)
 	if gxx_modifier_func:
@@ -87,12 +101,4 @@ def configure(conf):
 	conf.cxx_load_tools()
 	conf.cxx_add_flags()
 	conf.link_add_flags()
-
-conf(find_gxx)
-conf(gxx_common_flags)
-conf(gxx_modifier_win32)
-conf(gxx_modifier_cygwin)
-conf(gxx_modifier_darwin)
-conf(gxx_modifier_aix)
-conf(gxx_modifier_hpux)
-conf(gxx_modifier_platform)
\ No newline at end of file
+	conf.check_gcc_o_space('cxx')
--- pugl-0~svn32+dfsg0.orig/waflib/Tools/icc.py
+++ pugl-0~svn32+dfsg0/waflib/Tools/icc.py
@@ -1,24 +1,15 @@
 #! /usr/bin/env python
 # encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
 
-import os,sys
+import sys
 from waflib.Tools import ccroot,ar,gcc
 from waflib.Configure import conf
+@conf
 def find_icc(conf):
-	if sys.platform=='cygwin':
-		conf.fatal('The Intel compiler does not work on Cygwin')
-	v=conf.env
-	cc=None
-	if v['CC']:cc=v['CC']
-	elif'CC'in conf.environ:cc=conf.environ['CC']
-	if not cc:cc=conf.find_program('icc',var='CC')
-	if not cc:cc=conf.find_program('ICL',var='CC')
-	if not cc:conf.fatal('Intel C Compiler (icc) was not found')
-	cc=conf.cmd_to_list(cc)
+	cc=conf.find_program(['icc','ICL'],var='CC')
 	conf.get_cc_version(cc,icc=True)
-	v['CC']=cc
-	v['CC_NAME']='icc'
+	conf.env.CC_NAME='icc'
 def configure(conf):
 	conf.find_icc()
 	conf.find_ar()
@@ -27,5 +18,3 @@ def configure(conf):
 	conf.cc_load_tools()
 	conf.cc_add_flags()
 	conf.link_add_flags()
-
-conf(find_icc)
\ No newline at end of file
--- pugl-0~svn32+dfsg0.orig/waflib/Tools/icpc.py
+++ pugl-0~svn32+dfsg0/waflib/Tools/icpc.py
@@ -1,23 +1,15 @@
 #! /usr/bin/env python
 # encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
 
-import os,sys
+import sys
 from waflib.Tools import ccroot,ar,gxx
 from waflib.Configure import conf
+@conf
 def find_icpc(conf):
-	if sys.platform=='cygwin':
-		conf.fatal('The Intel compiler does not work on Cygwin')
-	v=conf.env
-	cxx=None
-	if v['CXX']:cxx=v['CXX']
-	elif'CXX'in conf.environ:cxx=conf.environ['CXX']
-	if not cxx:cxx=conf.find_program('icpc',var='CXX')
-	if not cxx:conf.fatal('Intel C++ Compiler (icpc) was not found')
-	cxx=conf.cmd_to_list(cxx)
+	cxx=conf.find_program('icpc',var='CXX')
 	conf.get_cc_version(cxx,icc=True)
-	v['CXX']=cxx
-	v['CXX_NAME']='icc'
+	conf.env.CXX_NAME='icc'
 def configure(conf):
 	conf.find_icpc()
 	conf.find_ar()
@@ -26,5 +18,3 @@ def configure(conf):
 	conf.cxx_load_tools()
 	conf.cxx_add_flags()
 	conf.link_add_flags()
-
-conf(find_icpc)
\ No newline at end of file
--- pugl-0~svn32+dfsg0.orig/waflib/Tools/ifort.py
+++ pugl-0~svn32+dfsg0/waflib/Tools/ifort.py
@@ -1,49 +1,303 @@
 #! /usr/bin/env python
 # encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
 
-import re
-from waflib import Utils
-from waflib.Tools import fc,fc_config,fc_scan
+import os,re,traceback
+from waflib import Utils,Logs,Errors
+from waflib.Tools import fc,fc_config,fc_scan,ar,ccroot
 from waflib.Configure import conf
+from waflib.TaskGen import after_method,feature
+@conf
 def find_ifort(conf):
 	fc=conf.find_program('ifort',var='FC')
-	fc=conf.cmd_to_list(fc)
 	conf.get_ifort_version(fc)
 	conf.env.FC_NAME='IFORT'
-def ifort_modifier_cygwin(conf):
-	raise NotImplementedError("Ifort on cygwin not yet implemented")
-def ifort_modifier_win32(conf):
-	fc_config.fortran_modifier_win32(conf)
+@conf
+def ifort_modifier_win32(self):
+	v=self.env
+	v.IFORT_WIN32=True
+	v.FCSTLIB_MARKER=''
+	v.FCSHLIB_MARKER=''
+	v.FCLIB_ST=v.FCSTLIB_ST='%s.lib'
+	v.FCLIBPATH_ST=v.STLIBPATH_ST='/LIBPATH:%s'
+	v.FCINCPATH_ST='/I%s'
+	v.FCDEFINES_ST='/D%s'
+	v.fcprogram_PATTERN=v.fcprogram_test_PATTERN='%s.exe'
+	v.fcshlib_PATTERN='%s.dll'
+	v.fcstlib_PATTERN=v.implib_PATTERN='%s.lib'
+	v.FCLNK_TGT_F='/out:'
+	v.FC_TGT_F=['/c','/o','']
+	v.FCFLAGS_fcshlib=''
+	v.LINKFLAGS_fcshlib='/DLL'
+	v.AR_TGT_F='/out:'
+	v.IMPLIB_ST='/IMPLIB:%s'
+	v.append_value('LINKFLAGS','/subsystem:console')
+	if v.IFORT_MANIFEST:
+		v.append_value('LINKFLAGS',['/MANIFEST'])
+@conf
 def ifort_modifier_darwin(conf):
 	fc_config.fortran_modifier_darwin(conf)
+@conf
 def ifort_modifier_platform(conf):
-	dest_os=conf.env['DEST_OS']or Utils.unversioned_sys_platform()
+	dest_os=conf.env.DEST_OS or Utils.unversioned_sys_platform()
 	ifort_modifier_func=getattr(conf,'ifort_modifier_'+dest_os,None)
 	if ifort_modifier_func:
 		ifort_modifier_func()
+@conf
 def get_ifort_version(conf,fc):
-	version_re=re.compile(r"ifort\s*\(IFORT\)\s*(?P<major>\d*)\.(?P<minor>\d*)",re.I).search
-	cmd=fc+['--version']
-	out,err=fc_config.getoutput(conf,cmd,stdin=False)
-	if out:
-		match=version_re(out)
+	version_re=re.compile(r"\bIntel\b.*\bVersion\s*(?P<major>\d*)\.(?P<minor>\d*)",re.I).search
+	if Utils.is_win32:
+		cmd=fc
 	else:
-		match=version_re(err)
+		cmd=fc+['-logo']
+	out,err=fc_config.getoutput(conf,cmd,stdin=False)
+	match=version_re(out)or version_re(err)
 	if not match:
 		conf.fatal('cannot determine ifort version.')
 	k=match.groupdict()
-	conf.env['FC_VERSION']=(k['major'],k['minor'])
+	conf.env.FC_VERSION=(k['major'],k['minor'])
 def configure(conf):
-	conf.find_ifort()
-	conf.find_program('xiar',var='AR')
-	conf.env.ARFLAGS='rcs'
-	conf.fc_flags()
-	conf.ifort_modifier_platform()
-
-conf(find_ifort)
-conf(ifort_modifier_cygwin)
-conf(ifort_modifier_win32)
-conf(ifort_modifier_darwin)
-conf(ifort_modifier_platform)
-conf(get_ifort_version)
\ No newline at end of file
+	if Utils.is_win32:
+		compiler,version,path,includes,libdirs,arch=conf.detect_ifort()
+		v=conf.env
+		v.DEST_CPU=arch
+		v.PATH=path
+		v.INCLUDES=includes
+		v.LIBPATH=libdirs
+		v.MSVC_COMPILER=compiler
+		try:
+			v.MSVC_VERSION=float(version)
+		except ValueError:
+			v.MSVC_VERSION=float(version[:-3])
+		conf.find_ifort_win32()
+		conf.ifort_modifier_win32()
+	else:
+		conf.find_ifort()
+		conf.find_program('xiar',var='AR')
+		conf.find_ar()
+		conf.fc_flags()
+		conf.fc_add_flags()
+		conf.ifort_modifier_platform()
+all_ifort_platforms=[('intel64','amd64'),('em64t','amd64'),('ia32','x86'),('Itanium','ia64')]
+@conf
+def gather_ifort_versions(conf,versions):
+	version_pattern=re.compile(r'^...?.?\....?.?')
+	try:
+		all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Wow6432node\\Intel\\Compilers\\Fortran')
+	except OSError:
+		try:
+			all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Intel\\Compilers\\Fortran')
+		except OSError:
+			return
+	index=0
+	while 1:
+		try:
+			version=Utils.winreg.EnumKey(all_versions,index)
+		except OSError:
+			break
+		index+=1
+		if not version_pattern.match(version):
+			continue
+		targets={}
+		for target,arch in all_ifort_platforms:
+			if target=='intel64':
+				targetDir='EM64T_NATIVE'
+			else:
+				targetDir=target
+			try:
+				Utils.winreg.OpenKey(all_versions,version+'\\'+targetDir)
+				icl_version=Utils.winreg.OpenKey(all_versions,version)
+				path,type=Utils.winreg.QueryValueEx(icl_version,'ProductDir')
+			except OSError:
+				pass
+			else:
+				batch_file=os.path.join(path,'bin','ifortvars.bat')
+				if os.path.isfile(batch_file):
+					targets[target]=target_compiler(conf,'intel',arch,version,target,batch_file)
+		for target,arch in all_ifort_platforms:
+			try:
+				icl_version=Utils.winreg.OpenKey(all_versions,version+'\\'+target)
+				path,type=Utils.winreg.QueryValueEx(icl_version,'ProductDir')
+			except OSError:
+				continue
+			else:
+				batch_file=os.path.join(path,'bin','ifortvars.bat')
+				if os.path.isfile(batch_file):
+					targets[target]=target_compiler(conf,'intel',arch,version,target,batch_file)
+		major=version[0:2]
+		versions['intel '+major]=targets
+@conf
+def setup_ifort(conf,versiondict):
+	platforms=Utils.to_list(conf.env.MSVC_TARGETS)or[i for i,j in all_ifort_platforms]
+	desired_versions=conf.env.MSVC_VERSIONS or list(reversed(list(versiondict.keys())))
+	for version in desired_versions:
+		try:
+			targets=versiondict[version]
+		except KeyError:
+			continue
+		for arch in platforms:
+			try:
+				cfg=targets[arch]
+			except KeyError:
+				continue
+			cfg.evaluate()
+			if cfg.is_valid:
+				compiler,revision=version.rsplit(' ',1)
+				return compiler,revision,cfg.bindirs,cfg.incdirs,cfg.libdirs,cfg.cpu
+	conf.fatal('ifort: Impossible to find a valid architecture for building %r - %r'%(desired_versions,list(versiondict.keys())))
+@conf
+def get_ifort_version_win32(conf,compiler,version,target,vcvars):
+	try:
+		conf.msvc_cnt+=1
+	except AttributeError:
+		conf.msvc_cnt=1
+	batfile=conf.bldnode.make_node('waf-print-msvc-%d.bat'%conf.msvc_cnt)
+	batfile.write("""@echo off
+set INCLUDE=
+set LIB=
+call "%s" %s
+echo PATH=%%PATH%%
+echo INCLUDE=%%INCLUDE%%
+echo LIB=%%LIB%%;%%LIBPATH%%
+"""%(vcvars,target))
+	sout=conf.cmd_and_log(['cmd.exe','/E:on','/V:on','/C',batfile.abspath()])
+	batfile.delete()
+	lines=sout.splitlines()
+	if not lines[0]:
+		lines.pop(0)
+	MSVC_PATH=MSVC_INCDIR=MSVC_LIBDIR=None
+	for line in lines:
+		if line.startswith('PATH='):
+			path=line[5:]
+			MSVC_PATH=path.split(';')
+		elif line.startswith('INCLUDE='):
+			MSVC_INCDIR=[i for i in line[8:].split(';')if i]
+		elif line.startswith('LIB='):
+			MSVC_LIBDIR=[i for i in line[4:].split(';')if i]
+	if None in(MSVC_PATH,MSVC_INCDIR,MSVC_LIBDIR):
+		conf.fatal('ifort: Could not find a valid architecture for building (get_ifort_version_win32)')
+	env=dict(os.environ)
+	env.update(PATH=path)
+	compiler_name,linker_name,lib_name=_get_prog_names(conf,compiler)
+	fc=conf.find_program(compiler_name,path_list=MSVC_PATH)
+	if'CL'in env:
+		del(env['CL'])
+	try:
+		conf.cmd_and_log(fc+['/help'],env=env)
+	except UnicodeError:
+		st=traceback.format_exc()
+		if conf.logger:
+			conf.logger.error(st)
+		conf.fatal('ifort: Unicode error - check the code page?')
+	except Exception as e:
+		Logs.debug('ifort: get_ifort_version: %r %r %r -> failure %s',compiler,version,target,str(e))
+		conf.fatal('ifort: cannot run the compiler in get_ifort_version (run with -v to display errors)')
+	else:
+		Logs.debug('ifort: get_ifort_version: %r %r %r -> OK',compiler,version,target)
+	finally:
+		conf.env[compiler_name]=''
+	return(MSVC_PATH,MSVC_INCDIR,MSVC_LIBDIR)
+class target_compiler(object):
+	def __init__(self,ctx,compiler,cpu,version,bat_target,bat,callback=None):
+		self.conf=ctx
+		self.name=None
+		self.is_valid=False
+		self.is_done=False
+		self.compiler=compiler
+		self.cpu=cpu
+		self.version=version
+		self.bat_target=bat_target
+		self.bat=bat
+		self.callback=callback
+	def evaluate(self):
+		if self.is_done:
+			return
+		self.is_done=True
+		try:
+			vs=self.conf.get_ifort_version_win32(self.compiler,self.version,self.bat_target,self.bat)
+		except Errors.ConfigurationError:
+			self.is_valid=False
+			return
+		if self.callback:
+			vs=self.callback(self,vs)
+		self.is_valid=True
+		(self.bindirs,self.incdirs,self.libdirs)=vs
+	def __str__(self):
+		return str((self.bindirs,self.incdirs,self.libdirs))
+	def __repr__(self):
+		return repr((self.bindirs,self.incdirs,self.libdirs))
+@conf
+def detect_ifort(self):
+	return self.setup_ifort(self.get_ifort_versions(False))
+@conf
+def get_ifort_versions(self,eval_and_save=True):
+	dct={}
+	self.gather_ifort_versions(dct)
+	return dct
+def _get_prog_names(self,compiler):
+	if compiler=='intel':
+		compiler_name='ifort'
+		linker_name='XILINK'
+		lib_name='XILIB'
+	else:
+		compiler_name='CL'
+		linker_name='LINK'
+		lib_name='LIB'
+	return compiler_name,linker_name,lib_name
+@conf
+def find_ifort_win32(conf):
+	v=conf.env
+	path=v.PATH
+	compiler=v.MSVC_COMPILER
+	version=v.MSVC_VERSION
+	compiler_name,linker_name,lib_name=_get_prog_names(conf,compiler)
+	v.IFORT_MANIFEST=(compiler=='intel'and version>=11)
+	fc=conf.find_program(compiler_name,var='FC',path_list=path)
+	env=dict(conf.environ)
+	if path:
+		env.update(PATH=';'.join(path))
+	if not conf.cmd_and_log(fc+['/nologo','/help'],env=env):
+		conf.fatal('not intel fortran compiler could not be identified')
+	v.FC_NAME='IFORT'
+	if not v.LINK_FC:
+		conf.find_program(linker_name,var='LINK_FC',path_list=path,mandatory=True)
+	if not v.AR:
+		conf.find_program(lib_name,path_list=path,var='AR',mandatory=True)
+		v.ARFLAGS=['/nologo']
+	if v.IFORT_MANIFEST:
+		conf.find_program('MT',path_list=path,var='MT')
+		v.MTFLAGS=['/nologo']
+	try:
+		conf.load('winres')
+	except Errors.WafError:
+		Logs.warn('Resource compiler not found. Compiling resource file is disabled')
+@after_method('apply_link')
+@feature('fc')
+def apply_flags_ifort(self):
+	if not self.env.IFORT_WIN32 or not getattr(self,'link_task',None):
+		return
+	is_static=isinstance(self.link_task,ccroot.stlink_task)
+	subsystem=getattr(self,'subsystem','')
+	if subsystem:
+		subsystem='/subsystem:%s'%subsystem
+		flags=is_static and'ARFLAGS'or'LINKFLAGS'
+		self.env.append_value(flags,subsystem)
+	if not is_static:
+		for f in self.env.LINKFLAGS:
+			d=f.lower()
+			if d[1:]=='debug':
+				pdbnode=self.link_task.outputs[0].change_ext('.pdb')
+				self.link_task.outputs.append(pdbnode)
+				if getattr(self,'install_task',None):
+					self.pdb_install_task=self.add_install_files(install_to=self.install_task.install_to,install_from=pdbnode)
+				break
+@feature('fcprogram','fcshlib','fcprogram_test')
+@after_method('apply_link')
+def apply_manifest_ifort(self):
+	if self.env.IFORT_WIN32 and getattr(self,'link_task',None):
+		self.link_task.env.FC=self.env.LINK_FC
+	if self.env.IFORT_WIN32 and self.env.IFORT_MANIFEST and getattr(self,'link_task',None):
+		out_node=self.link_task.outputs[0]
+		man_node=out_node.parent.find_or_declare(out_node.name+'.manifest')
+		self.link_task.outputs.append(man_node)
+		self.env.DO_MANIFEST=True
--- pugl-0~svn32+dfsg0.orig/waflib/Tools/intltool.py
+++ pugl-0~svn32+dfsg0/waflib/Tools/intltool.py
@@ -1,48 +1,71 @@
 #! /usr/bin/env python
 # encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
 
+from __future__ import with_statement
 import os,re
-from waflib import Configure,TaskGen,Task,Utils,Runner,Options,Build,Logs
+from waflib import Context,Task,Utils,Logs
 import waflib.Tools.ccroot
-from waflib.TaskGen import feature,before_method
+from waflib.TaskGen import feature,before_method,taskgen_method
 from waflib.Logs import error
-def apply_intltool_in_f(self):
-	try:self.meths.remove('process_source')
-	except ValueError:pass
+from waflib.Configure import conf
+_style_flags={'ba':'-b','desktop':'-d','keys':'-k','quoted':'--quoted-style','quotedxml':'--quotedxml-style','rfc822deb':'-r','schemas':'-s','xml':'-x',}
+@taskgen_method
+def ensure_localedir(self):
 	if not self.env.LOCALEDIR:
-		self.env.LOCALEDIR=self.env.PREFIX+'/share/locale'
+		if self.env.DATAROOTDIR:
+			self.env.LOCALEDIR=os.path.join(self.env.DATAROOTDIR,'locale')
+		else:
+			self.env.LOCALEDIR=os.path.join(self.env.PREFIX,'share','locale')
+@before_method('process_source')
+@feature('intltool_in')
+def apply_intltool_in_f(self):
+	try:
+		self.meths.remove('process_source')
+	except ValueError:
+		pass
+	self.ensure_localedir()
+	podir=getattr(self,'podir','.')
+	podirnode=self.path.find_dir(podir)
+	if not podirnode:
+		error("could not find the podir %r"%podir)
+		return
+	cache=getattr(self,'intlcache','.intlcache')
+	self.env.INTLCACHE=[os.path.join(str(self.path.get_bld()),podir,cache)]
+	self.env.INTLPODIR=podirnode.bldpath()
+	self.env.append_value('INTLFLAGS',getattr(self,'flags',self.env.INTLFLAGS_DEFAULT))
+	if'-c'in self.env.INTLFLAGS:
+		self.bld.fatal('Redundant -c flag in intltool task %r'%self)
+	style=getattr(self,'style',None)
+	if style:
+		try:
+			style_flag=_style_flags[style]
+		except KeyError:
+			self.bld.fatal('intltool_in style "%s" is not valid'%style)
+		self.env.append_unique('INTLFLAGS',[style_flag])
 	for i in self.to_list(self.source):
 		node=self.path.find_resource(i)
-		podir=getattr(self,'podir','po')
-		podirnode=self.path.find_dir(podir)
-		if not podirnode:
-			error("could not find the podir %r"%podir)
-			continue
-		cache=getattr(self,'intlcache','.intlcache')
-		self.env['INTLCACHE']=os.path.join(self.path.bldpath(),podir,cache)
-		self.env['INTLPODIR']=podirnode.bldpath()
-		self.env['INTLFLAGS']=getattr(self,'flags',['-q','-u','-c'])
 		task=self.create_task('intltool',node,node.change_ext(''))
-		inst=getattr(self,'install_path','${LOCALEDIR}')
+		inst=getattr(self,'install_path',None)
 		if inst:
-			self.bld.install_files(inst,task.outputs)
+			self.add_install_files(install_to=inst,install_from=task.outputs)
+@feature('intltool_po')
 def apply_intltool_po(self):
-	try:self.meths.remove('process_source')
-	except ValueError:pass
-	if not self.env.LOCALEDIR:
-		self.env.LOCALEDIR=self.env.PREFIX+'/share/locale'
-	appname=getattr(self,'appname','set_your_app_name')
-	podir=getattr(self,'podir','')
+	try:
+		self.meths.remove('process_source')
+	except ValueError:
+		pass
+	self.ensure_localedir()
+	appname=getattr(self,'appname',getattr(Context.g_module,Context.APPNAME,'set_your_app_name'))
+	podir=getattr(self,'podir','.')
 	inst=getattr(self,'install_path','${LOCALEDIR}')
 	linguas=self.path.find_node(os.path.join(podir,'LINGUAS'))
 	if linguas:
-		file=open(linguas.abspath())
-		langs=[]
-		for line in file.readlines():
-			if not line.startswith('#'):
-				langs+=line.split()
-		file.close()
+		with open(linguas.abspath())as f:
+			langs=[]
+			for line in f.readlines():
+				if not line.startswith('#'):
+					langs+=line.split()
 		re_linguas=re.compile('[-a-zA-Z_@.]+')
 		for lang in langs:
 			if re_linguas.match(lang):
@@ -52,27 +75,27 @@ def apply_intltool_po(self):
 					filename=task.outputs[0].name
 					(langname,ext)=os.path.splitext(filename)
 					inst_file=inst+os.sep+langname+os.sep+'LC_MESSAGES'+os.sep+appname+'.mo'
-					self.bld.install_as(inst_file,task.outputs[0],chmod=getattr(self,'chmod',Utils.O644),env=task.env)
+					self.add_install_as(install_to=inst_file,install_from=task.outputs[0],chmod=getattr(self,'chmod',Utils.O644))
 	else:
 		Logs.pprint('RED',"Error no LINGUAS file found in po directory")
 class po(Task.Task):
 	run_str='${MSGFMT} -o ${TGT} ${SRC}'
 	color='BLUE'
 class intltool(Task.Task):
-	run_str='${INTLTOOL} ${INTLFLAGS} ${INTLCACHE} ${INTLPODIR} ${SRC} ${TGT}'
+	run_str='${INTLTOOL} ${INTLFLAGS} ${INTLCACHE_ST:INTLCACHE} ${INTLPODIR} ${SRC} ${TGT}'
 	color='BLUE'
-def configure(conf):
+@conf
+def find_msgfmt(conf):
 	conf.find_program('msgfmt',var='MSGFMT')
-	conf.find_perl_program('intltool-merge',var='INTLTOOL')
-	prefix=conf.env.PREFIX
-	datadir=conf.env.DATADIR
-	if not datadir:
-		datadir=os.path.join(prefix,'share')
-	conf.define('LOCALEDIR',os.path.join(datadir,'locale').replace('\\','\\\\'))
-	conf.define('DATADIR',datadir.replace('\\','\\\\'))
+@conf
+def find_intltool_merge(conf):
+	if not conf.env.PERL:
+		conf.find_program('perl',var='PERL')
+	conf.env.INTLCACHE_ST='--cache=%s'
+	conf.env.INTLFLAGS_DEFAULT=['-q','-u']
+	conf.find_program('intltool-merge',interpreter='PERL',var='INTLTOOL')
+def configure(conf):
+	conf.find_msgfmt()
+	conf.find_intltool_merge()
 	if conf.env.CC or conf.env.CXX:
 		conf.check(header_name='locale.h')
-
-before_method('process_source')(apply_intltool_in_f)
-feature('intltool_in')(apply_intltool_in_f)
-feature('intltool_po')(apply_intltool_po)
\ No newline at end of file
--- pugl-0~svn32+dfsg0.orig/waflib/Tools/irixcc.py
+++ pugl-0~svn32+dfsg0/waflib/Tools/irixcc.py
@@ -1,41 +1,46 @@
 #! /usr/bin/env python
 # encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
 
-import os
-from waflib import Utils
+from waflib import Errors
 from waflib.Tools import ccroot,ar
 from waflib.Configure import conf
+@conf
 def find_irixcc(conf):
 	v=conf.env
 	cc=None
-	if v['CC']:cc=v['CC']
-	elif'CC'in conf.environ:cc=conf.environ['CC']
-	if not cc:cc=conf.find_program('cc',var='CC')
-	if not cc:conf.fatal('irixcc was not found')
-	cc=conf.cmd_to_list(cc)
+	if v.CC:
+		cc=v.CC
+	elif'CC'in conf.environ:
+		cc=conf.environ['CC']
+	if not cc:
+		cc=conf.find_program('cc',var='CC')
+	if not cc:
+		conf.fatal('irixcc was not found')
 	try:
 		conf.cmd_and_log(cc+['-version'])
-	except:
+	except Errors.WafError:
 		conf.fatal('%r -version could not be executed'%cc)
-	v['CC']=cc
-	v['CC_NAME']='irix'
+	v.CC=cc
+	v.CC_NAME='irix'
+@conf
 def irixcc_common_flags(conf):
 	v=conf.env
-	v['CC_SRC_F']=''
-	v['CC_TGT_F']=['-c','-o']
-	v['CPPPATH_ST']='-I%s'
-	v['DEFINES_ST']='-D%s'
-	if not v['LINK_CC']:v['LINK_CC']=v['CC']
-	v['CCLNK_SRC_F']=''
-	v['CCLNK_TGT_F']=['-o']
-	v['LIB_ST']='-l%s'
-	v['LIBPATH_ST']='-L%s'
-	v['STLIB_ST']='-l%s'
-	v['STLIBPATH_ST']='-L%s'
-	v['cprogram_PATTERN']='%s'
-	v['cshlib_PATTERN']='lib%s.so'
-	v['cstlib_PATTERN']='lib%s.a'
+	v.CC_SRC_F=''
+	v.CC_TGT_F=['-c','-o']
+	v.CPPPATH_ST='-I%s'
+	v.DEFINES_ST='-D%s'
+	if not v.LINK_CC:
+		v.LINK_CC=v.CC
+	v.CCLNK_SRC_F=''
+	v.CCLNK_TGT_F=['-o']
+	v.LIB_ST='-l%s'
+	v.LIBPATH_ST='-L%s'
+	v.STLIB_ST='-l%s'
+	v.STLIBPATH_ST='-L%s'
+	v.cprogram_PATTERN='%s'
+	v.cshlib_PATTERN='lib%s.so'
+	v.cstlib_PATTERN='lib%s.a'
 def configure(conf):
 	conf.find_irixcc()
 	conf.find_cpp()
@@ -44,6 +49,3 @@ def configure(conf):
 	conf.cc_load_tools()
 	conf.cc_add_flags()
 	conf.link_add_flags()
-
-conf(find_irixcc)
-conf(irixcc_common_flags)
\ No newline at end of file
--- pugl-0~svn32+dfsg0.orig/waflib/Tools/javaw.py
+++ pugl-0~svn32+dfsg0/waflib/Tools/javaw.py
@@ -1,13 +1,11 @@
 #! /usr/bin/env python
 # encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
 
-import sys
-if sys.hexversion < 0x020400f0: from sets import Set as set
-import os,re,tempfile,shutil
+import os,shutil
+from waflib import Task,Utils,Errors,Node
 from waflib.Configure import conf
-from waflib import TaskGen,Task,Utils,Options,Build,Errors,Node,Logs
-from waflib.TaskGen import feature,before_method,after_method
+from waflib.TaskGen import feature,before_method,after_method,taskgen_method
 from waflib.Tools import ccroot
 ccroot.USELIB_VARS['javac']=set(['CLASSPATH','JAVACFLAGS'])
 SOURCE_RE='**/*.java'
@@ -31,9 +29,10 @@ public class Test {
 	}
 }
 '''
+@feature('javac')
+@before_method('process_source')
 def apply_java(self):
 	Utils.def_attrs(self,jarname='',classpath='',sourcepath='.',srcdir='.',jar_mf_attributes={},jar_mf_classpath=[])
-	nodes_lst=[]
 	outdir=getattr(self,'outdir',None)
 	if outdir:
 		if not isinstance(outdir,Node.Node):
@@ -42,7 +41,7 @@ def apply_java(self):
 		outdir=self.path.get_bld()
 	outdir.mkdir()
 	self.outdir=outdir
-	self.env['OUTDIR']=outdir.abspath()
+	self.env.OUTDIR=outdir.abspath()
 	self.javac_task=tsk=self.create_task('javac')
 	tmp=[]
 	srcdir=getattr(self,'srcdir','')
@@ -58,7 +57,7 @@ def apply_java(self):
 		tmp.append(y)
 	tsk.srcdir=tmp
 	if getattr(self,'compat',None):
-		tsk.env.append_value('JAVACFLAGS',['-source',self.compat])
+		tsk.env.append_value('JAVACFLAGS',['-source',str(self.compat)])
 	if hasattr(self,'sourcepath'):
 		fold=[isinstance(x,Node.Node)and x or self.path.find_dir(x)for x in self.to_list(self.sourcepath)]
 		names=os.pathsep.join([x.srcpath()for x in fold])
@@ -66,26 +65,69 @@ def apply_java(self):
 		names=[x.srcpath()for x in tsk.srcdir]
 	if names:
 		tsk.env.append_value('JAVACFLAGS',['-sourcepath',names])
+@taskgen_method
+def java_use_rec(self,name,**kw):
+	if name in self.tmp_use_seen:
+		return
+	self.tmp_use_seen.append(name)
+	try:
+		y=self.bld.get_tgen_by_name(name)
+	except Errors.WafError:
+		self.uselib.append(name)
+		return
+	else:
+		y.post()
+		if hasattr(y,'jar_task'):
+			self.use_lst.append(y.jar_task.outputs[0].abspath())
+		else:
+			if hasattr(y,'outdir'):
+				self.use_lst.append(y.outdir.abspath())
+			else:
+				self.use_lst.append(y.path.get_bld().abspath())
+	for x in self.to_list(getattr(y,'use',[])):
+		self.java_use_rec(x)
+@feature('javac')
+@before_method('propagate_uselib_vars')
+@after_method('apply_java')
 def use_javac_files(self):
-	lst=[]
+	self.use_lst=[]
+	self.tmp_use_seen=[]
 	self.uselib=self.to_list(getattr(self,'uselib',[]))
 	names=self.to_list(getattr(self,'use',[]))
 	get=self.bld.get_tgen_by_name
 	for x in names:
 		try:
-			y=get(x)
-		except:
+			tg=get(x)
+		except Errors.WafError:
 			self.uselib.append(x)
 		else:
-			y.post()
-			lst.append(y.jar_task.outputs[0].abspath())
-			self.javac_task.set_run_after(y.jar_task)
-	if lst:
-		self.env.append_value('CLASSPATH',lst)
+			tg.post()
+			if hasattr(tg,'jar_task'):
+				self.use_lst.append(tg.jar_task.outputs[0].abspath())
+				self.javac_task.set_run_after(tg.jar_task)
+				self.javac_task.dep_nodes.extend(tg.jar_task.outputs)
+			else:
+				if hasattr(tg,'outdir'):
+					base_node=tg.outdir
+				else:
+					base_node=tg.path.get_bld()
+				self.use_lst.append(base_node.abspath())
+				self.javac_task.dep_nodes.extend([x for x in base_node.ant_glob(JAR_RE,remove=False,quiet=True)])
+				for tsk in tg.tasks:
+					self.javac_task.set_run_after(tsk)
+		if getattr(self,'recurse_use',False)or self.bld.env.RECURSE_JAVA:
+			self.java_use_rec(x)
+	self.env.append_value('CLASSPATH',self.use_lst)
+@feature('javac')
+@after_method('apply_java','propagate_uselib_vars','use_javac_files')
 def set_classpath(self):
-	self.env.append_value('CLASSPATH',getattr(self,'classpath',[]))
+	if getattr(self,'classpath',None):
+		self.env.append_unique('CLASSPATH',getattr(self,'classpath',[]))
 	for x in self.tasks:
 		x.env.CLASSPATH=os.pathsep.join(self.env.CLASSPATH)+os.pathsep
+@feature('jar')
+@after_method('apply_java','use_javac_files')
+@before_method('process_source')
 def jar_files(self):
 	destfile=getattr(self,'destfile','test.jar')
 	jaropts=getattr(self,'jaropts',[])
@@ -101,7 +143,12 @@ def jar_files(self):
 	self.jar_task=tsk=self.create_task('jar_create')
 	if manifest:
 		jarcreate=getattr(self,'jarcreate','cfm')
-		node=self.path.find_node(manifest)
+		if not isinstance(manifest,Node.Node):
+			node=self.path.find_resource(manifest)
+		else:
+			node=manifest
+		if not node:
+			self.bld.fatal('invalid manifest file %r for %r'%(manifest,self))
 		tsk.dep_nodes.append(node)
 		jaropts.insert(0,node.abspath())
 	else:
@@ -115,24 +162,35 @@ def jar_files(self):
 	jaropts.append('-C')
 	jaropts.append(basedir.bldpath())
 	jaropts.append('.')
-	tsk.env['JAROPTS']=jaropts
-	tsk.env['JARCREATE']=jarcreate
+	tsk.env.JAROPTS=jaropts
+	tsk.env.JARCREATE=jarcreate
 	if getattr(self,'javac_task',None):
 		tsk.set_run_after(self.javac_task)
+@feature('jar')
+@after_method('jar_files')
 def use_jar_files(self):
-	lst=[]
 	self.uselib=self.to_list(getattr(self,'uselib',[]))
 	names=self.to_list(getattr(self,'use',[]))
 	get=self.bld.get_tgen_by_name
 	for x in names:
 		try:
 			y=get(x)
-		except:
+		except Errors.WafError:
 			self.uselib.append(x)
 		else:
 			y.post()
 			self.jar_task.run_after.update(y.tasks)
-class jar_create(Task.Task):
+class JTask(Task.Task):
+	def split_argfile(self,cmd):
+		inline=[cmd[0]]
+		infile=[]
+		for x in cmd[1:]:
+			if x.startswith('-J'):
+				inline.append(x)
+			else:
+				infile.append(self.quote_flag(x))
+		return(inline,infile)
+class jar_create(JTask):
 	color='GREEN'
 	run_str='${JAR} ${JARCREATE} ${TGT} ${JAROPTS}'
 	def runnable_status(self):
@@ -140,107 +198,116 @@ class jar_create(Task.Task):
 			if not t.hasrun:
 				return Task.ASK_LATER
 		if not self.inputs:
-			global JAR_RE
 			try:
-				self.inputs=[x for x in self.basedir.ant_glob(JAR_RE,remove=False)if id(x)!=id(self.outputs[0])]
-			except:
+				self.inputs=[x for x in self.basedir.ant_glob(JAR_RE,remove=False,quiet=True)if id(x)!=id(self.outputs[0])]
+			except Exception:
 				raise Errors.WafError('Could not find the basedir %r for %r'%(self.basedir,self))
 		return super(jar_create,self).runnable_status()
-class javac(Task.Task):
+class javac(JTask):
 	color='BLUE'
-	nocache=True
+	run_str='${JAVAC} -classpath ${CLASSPATH} -d ${OUTDIR} ${JAVACFLAGS} ${SRC}'
 	vars=['CLASSPATH','JAVACFLAGS','JAVAC','OUTDIR']
+	def uid(self):
+		lst=[self.__class__.__name__,self.generator.outdir.abspath()]
+		for x in self.srcdir:
+			lst.append(x.abspath())
+		return Utils.h_list(lst)
 	def runnable_status(self):
 		for t in self.run_after:
 			if not t.hasrun:
 				return Task.ASK_LATER
 		if not self.inputs:
-			global SOURCE_RE
 			self.inputs=[]
 			for x in self.srcdir:
-				self.inputs.extend(x.ant_glob(SOURCE_RE,remove=False))
+				if x.exists():
+					self.inputs.extend(x.ant_glob(SOURCE_RE,remove=False,quiet=True))
 		return super(javac,self).runnable_status()
+	def post_run(self):
+		for node in self.generator.outdir.ant_glob('**/*.class',quiet=True):
+			self.generator.bld.node_sigs[node]=self.uid()
+		self.generator.bld.task_sigs[self.uid()]=self.cache_sig
+@feature('javadoc')
+@after_method('process_rule')
+def create_javadoc(self):
+	tsk=self.create_task('javadoc')
+	tsk.classpath=getattr(self,'classpath',[])
+	self.javadoc_package=Utils.to_list(self.javadoc_package)
+	if not isinstance(self.javadoc_output,Node.Node):
+		self.javadoc_output=self.bld.path.find_or_declare(self.javadoc_output)
+class javadoc(Task.Task):
+	color='BLUE'
+	def __str__(self):
+		return'%s: %s -> %s\n'%(self.__class__.__name__,self.generator.srcdir,self.generator.javadoc_output)
 	def run(self):
 		env=self.env
-		gen=self.generator
-		bld=gen.bld
-		wd=bld.bldnode.abspath()
-		def to_list(xx):
-			if isinstance(xx,str):return[xx]
-			return xx
-		cmd=[]
-		cmd.extend(to_list(env['JAVAC']))
-		cmd.extend(['-classpath'])
-		cmd.extend(to_list(env['CLASSPATH']))
-		cmd.extend(['-d'])
-		cmd.extend(to_list(env['OUTDIR']))
-		cmd.extend(to_list(env['JAVACFLAGS']))
-		files=[a.path_from(bld.bldnode)for a in self.inputs]
-		tmp=None
-		try:
-			if len(str(files))+len(str(cmd))>8192:
-				(fd,tmp)=tempfile.mkstemp(dir=bld.bldnode.abspath())
-				try:
-					os.write(fd,'\n'.join(files))
-				finally:
-					if tmp:
-						os.close(fd)
-				if Logs.verbose:
-					Logs.debug('runner: %r'%(cmd+files))
-				cmd.append('@'+tmp)
-			else:
-				cmd+=files
-			ret=self.exec_command(cmd,cwd=wd,env=env.env or None)
-		finally:
-			if tmp:
-				os.unlink(tmp)
-		return ret
+		bld=self.generator.bld
+		wd=bld.bldnode
+		srcpath=self.generator.path.abspath()+os.sep+self.generator.srcdir
+		srcpath+=os.pathsep
+		srcpath+=self.generator.path.get_bld().abspath()+os.sep+self.generator.srcdir
+		classpath=env.CLASSPATH
+		classpath+=os.pathsep
+		classpath+=os.pathsep.join(self.classpath)
+		classpath="".join(classpath)
+		self.last_cmd=lst=[]
+		lst.extend(Utils.to_list(env.JAVADOC))
+		lst.extend(['-d',self.generator.javadoc_output.abspath()])
+		lst.extend(['-sourcepath',srcpath])
+		lst.extend(['-classpath',classpath])
+		lst.extend(['-subpackages'])
+		lst.extend(self.generator.javadoc_package)
+		lst=[x for x in lst if x]
+		self.generator.bld.cmd_and_log(lst,cwd=wd,env=env.env or None,quiet=0)
 	def post_run(self):
-		for n in self.generator.outdir.ant_glob('**/*.class'):
-			n.sig=Utils.h_file(n.abspath())
+		nodes=self.generator.javadoc_output.ant_glob('**',quiet=True)
+		for node in nodes:
+			self.generator.bld.node_sigs[node]=self.uid()
 		self.generator.bld.task_sigs[self.uid()]=self.cache_sig
 def configure(self):
 	java_path=self.environ['PATH'].split(os.pathsep)
 	v=self.env
 	if'JAVA_HOME'in self.environ:
 		java_path=[os.path.join(self.environ['JAVA_HOME'],'bin')]+java_path
-		self.env['JAVA_HOME']=[self.environ['JAVA_HOME']]
-	for x in'javac java jar'.split():
-		self.find_program(x,var=x.upper(),path_list=java_path)
-		self.env[x.upper()]=self.cmd_to_list(self.env[x.upper()])
+		self.env.JAVA_HOME=[self.environ['JAVA_HOME']]
+	for x in'javac java jar javadoc'.split():
+		self.find_program(x,var=x.upper(),path_list=java_path,mandatory=(x not in('javadoc')))
 	if'CLASSPATH'in self.environ:
-		v['CLASSPATH']=self.environ['CLASSPATH']
-	if not v['JAR']:self.fatal('jar is required for making java packages')
-	if not v['JAVAC']:self.fatal('javac is required for compiling java classes')
-	v['JARCREATE']='cf'
-	v['JAVACFLAGS']=[]
+		v.CLASSPATH=self.environ['CLASSPATH']
+	if not v.JAR:
+		self.fatal('jar is required for making java packages')
+	if not v.JAVAC:
+		self.fatal('javac is required for compiling java classes')
+	v.JARCREATE='cf'
+	v.JAVACFLAGS=[]
+@conf
 def check_java_class(self,classname,with_classpath=None):
 	javatestdir='.waf-javatest'
 	classpath=javatestdir
-	if self.env['CLASSPATH']:
-		classpath+=os.pathsep+self.env['CLASSPATH']
+	if self.env.CLASSPATH:
+		classpath+=os.pathsep+self.env.CLASSPATH
 	if isinstance(with_classpath,str):
 		classpath+=os.pathsep+with_classpath
 	shutil.rmtree(javatestdir,True)
 	os.mkdir(javatestdir)
-	java_file=open(os.path.join(javatestdir,'Test.java'),'w')
-	java_file.write(class_check_source)
-	java_file.close()
-	self.exec_command(self.env['JAVAC']+[os.path.join(javatestdir,'Test.java')],shell=False)
-	cmd=self.env['JAVA']+['-cp',classpath,'Test',classname]
+	Utils.writef(os.path.join(javatestdir,'Test.java'),class_check_source)
+	self.exec_command(self.env.JAVAC+[os.path.join(javatestdir,'Test.java')],shell=False)
+	cmd=self.env.JAVA+['-cp',classpath,'Test',classname]
 	self.to_log("%s\n"%str(cmd))
 	found=self.exec_command(cmd,shell=False)
 	self.msg('Checking for java class %s'%classname,not found)
 	shutil.rmtree(javatestdir,True)
 	return found
+@conf
 def check_jni_headers(conf):
 	if not conf.env.CC_NAME and not conf.env.CXX_NAME:
 		conf.fatal('load a compiler first (gcc, g++, ..)')
 	if not conf.env.JAVA_HOME:
 		conf.fatal('set JAVA_HOME in the system environment')
-	javaHome=conf.env['JAVA_HOME'][0]
+	javaHome=conf.env.JAVA_HOME[0]
 	dir=conf.root.find_dir(conf.env.JAVA_HOME[0]+'/include')
 	if dir is None:
+		dir=conf.root.find_dir(conf.env.JAVA_HOME[0]+'/../Headers')
+	if dir is None:
 		conf.fatal('JAVA_HOME does not seem to be set properly')
 	f=dir.ant_glob('**/(jni|jni_md).h')
 	incDirs=[x.parent.abspath()for x in f]
@@ -250,26 +317,14 @@ def check_jni_headers(conf):
 	f=dir.ant_glob('**/*jvm.(lib)')
 	if f:
 		libDirs=[[x,y.parent.abspath()]for x in libDirs for y in f]
+	if conf.env.DEST_OS=='freebsd':
+		conf.env.append_unique('LINKFLAGS_JAVA','-pthread')
 	for d in libDirs:
 		try:
 			conf.check(header_name='jni.h',define_name='HAVE_JNI_H',lib='jvm',libpath=d,includes=incDirs,uselib_store='JAVA',uselib='JAVA')
-		except:
+		except Exception:
 			pass
 		else:
 			break
 	else:
 		conf.fatal('could not find lib jvm in %r (see config.log)'%libDirs)
-
-feature('javac')(apply_java)
-before_method('process_source')(apply_java)
-feature('javac')(use_javac_files)
-after_method('apply_java')(use_javac_files)
-feature('javac')(set_classpath)
-after_method('apply_java','propagate_uselib_vars','use_javac_files')(set_classpath)
-feature('jar')(jar_files)
-after_method('apply_java','use_javac_files')(jar_files)
-before_method('process_source')(jar_files)
-feature('jar')(use_jar_files)
-after_method('jar_files')(use_jar_files)
-conf(check_java_class)
-conf(check_jni_headers)
\ No newline at end of file
--- /dev/null
+++ pugl-0~svn32+dfsg0/waflib/Tools/ldc2.py
@@ -0,0 +1,36 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+from waflib.Tools import ar,d
+from waflib.Configure import conf
+@conf
+def find_ldc2(conf):
+	conf.find_program(['ldc2'],var='D')
+	out=conf.cmd_and_log(conf.env.D+['-version'])
+	if out.find("based on DMD v2.")==-1:
+		conf.fatal("detected compiler is not ldc2")
+@conf
+def common_flags_ldc2(conf):
+	v=conf.env
+	v.D_SRC_F=['-c']
+	v.D_TGT_F='-of%s'
+	v.D_LINKER=v.D
+	v.DLNK_SRC_F=''
+	v.DLNK_TGT_F='-of%s'
+	v.DINC_ST='-I%s'
+	v.DSHLIB_MARKER=v.DSTLIB_MARKER=''
+	v.DSTLIB_ST=v.DSHLIB_ST='-L-l%s'
+	v.DSTLIBPATH_ST=v.DLIBPATH_ST='-L-L%s'
+	v.LINKFLAGS_dshlib=['-L-shared']
+	v.DHEADER_ext='.di'
+	v.DFLAGS_d_with_header=['-H','-Hf']
+	v.D_HDR_F='%s'
+	v.LINKFLAGS=[]
+	v.DFLAGS_dshlib=['-relocation-model=pic']
+def configure(conf):
+	conf.find_ldc2()
+	conf.load('ar')
+	conf.load('d')
+	conf.common_flags_ldc2()
+	conf.d_platform_flags()
--- pugl-0~svn32+dfsg0.orig/waflib/Tools/lua.py
+++ pugl-0~svn32+dfsg0/waflib/Tools/lua.py
@@ -1,19 +1,18 @@
 #! /usr/bin/env python
 # encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
 
 from waflib.TaskGen import extension
-from waflib import Task,Utils
+from waflib import Task
+@extension('.lua')
 def add_lua(self,node):
 	tsk=self.create_task('luac',node,node.change_ext('.luac'))
 	inst_to=getattr(self,'install_path',self.env.LUADIR and'${LUADIR}'or None)
 	if inst_to:
-		self.bld.install_files(inst_to,tsk.outputs)
+		self.add_install_files(install_to=inst_to,install_from=tsk.outputs)
 	return tsk
 class luac(Task.Task):
 	run_str='${LUAC} -s -o ${TGT} ${SRC}'
 	color='PINK'
 def configure(conf):
 	conf.find_program('luac',var='LUAC')
-
-extension('.lua')(add_lua)
\ No newline at end of file
--- /dev/null
+++ pugl-0~svn32+dfsg0/waflib/Tools/md5_tstamp.py
@@ -0,0 +1,24 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+import os,stat
+from waflib import Utils,Build,Node
+STRONGEST=True
+Build.SAVED_ATTRS.append('hashes_md5_tstamp')
+def h_file(self):
+	filename=self.abspath()
+	st=os.stat(filename)
+	cache=self.ctx.hashes_md5_tstamp
+	if filename in cache and cache[filename][0]==st.st_mtime:
+		return cache[filename][1]
+	if STRONGEST:
+		ret=Utils.h_file(filename)
+	else:
+		if stat.S_ISDIR(st[stat.ST_MODE]):
+			raise IOError('Not a file')
+		ret=Utils.md5(str((st.st_mtime,st.st_size)).encode()).digest()
+	cache[filename]=(st.st_mtime,ret)
+	return ret
+h_file.__doc__=Node.Node.h_file.__doc__
+Node.Node.h_file=h_file
--- pugl-0~svn32+dfsg0.orig/waflib/Tools/msvc.py
+++ pugl-0~svn32+dfsg0/waflib/Tools/msvc.py
@@ -1,20 +1,12 @@
 #! /usr/bin/env python
 # encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
 
-import os,sys,re,tempfile
-try:
-	import _winreg
-except:
-	try:
-		import winreg as _winreg
-	except:
-		_winreg=None
-from waflib import Utils,TaskGen,Runner,Configure,Task,Options
-from waflib.Logs import debug,info,warn,error
-from waflib.TaskGen import after_method,before_method,feature
+import os,sys,re,traceback
+from waflib import Utils,Logs,Options,Errors
+from waflib.TaskGen import after_method,feature
 from waflib.Configure import conf
-from waflib.Tools import ccroot,c,cxx,ar,winres
+from waflib.Tools import ccroot,c,cxx,ar
 g_msvc_systemlibs='''
 aclui activeds ad1 adptif adsiid advapi32 asycfilt authz bhsupp bits bufferoverflowu cabinet
 cap certadm certidl ciuuid clusapi comctl32 comdlg32 comsupp comsuppd comsuppw comsuppwd comsvcs
@@ -35,52 +27,75 @@ traffic unicows url urlmon user32 useren
 version vfw32 wbemuuid  webpost wiaguid wininet winmm winscard winspool winstrm
 wintrust wldap32 wmiutils wow32 ws2_32 wsnmp32 wsock32 wst wtsapi32 xaswitch xolehlp
 '''.split()
-all_msvc_platforms=[('x64','amd64'),('x86','x86'),('ia64','ia64'),('x86_amd64','amd64'),('x86_ia64','ia64')]
+all_msvc_platforms=[('x64','amd64'),('x86','x86'),('ia64','ia64'),('x86_amd64','amd64'),('x86_ia64','ia64'),('x86_arm','arm'),('x86_arm64','arm64'),('amd64_x86','x86'),('amd64_arm','arm'),('amd64_arm64','arm64')]
 all_wince_platforms=[('armv4','arm'),('armv4i','arm'),('mipsii','mips'),('mipsii_fp','mips'),('mipsiv','mips'),('mipsiv_fp','mips'),('sh4','sh'),('x86','cex86')]
 all_icl_platforms=[('intel64','amd64'),('em64t','amd64'),('ia32','x86'),('Itanium','ia64')]
 def options(opt):
 	opt.add_option('--msvc_version',type='string',help='msvc version, eg: "msvc 10.0,msvc 9.0"',default='')
 	opt.add_option('--msvc_targets',type='string',help='msvc targets, eg: "x64,arm"',default='')
-def setup_msvc(conf,versions):
+	opt.add_option('--no-msvc-lazy',action='store_false',help='lazily check msvc target environments',default=True,dest='msvc_lazy')
+@conf
+def setup_msvc(conf,versiondict):
 	platforms=getattr(Options.options,'msvc_targets','').split(',')
 	if platforms==['']:
-		platforms=Utils.to_list(conf.env['MSVC_TARGETS'])or[i for i,j in all_msvc_platforms+all_icl_platforms+all_wince_platforms]
+		platforms=Utils.to_list(conf.env.MSVC_TARGETS)or[i for i,j in all_msvc_platforms+all_icl_platforms+all_wince_platforms]
 	desired_versions=getattr(Options.options,'msvc_version','').split(',')
 	if desired_versions==['']:
-		desired_versions=conf.env['MSVC_VERSIONS']or[v for v,_ in versions][::-1]
-	versiondict=dict(versions)
+		desired_versions=conf.env.MSVC_VERSIONS or list(reversed(sorted(versiondict.keys())))
+	lazy_detect=getattr(Options.options,'msvc_lazy',True)
+	if conf.env.MSVC_LAZY_AUTODETECT is False:
+		lazy_detect=False
+	if not lazy_detect:
+		for val in versiondict.values():
+			for arch in list(val.keys()):
+				cfg=val[arch]
+				cfg.evaluate()
+				if not cfg.is_valid:
+					del val[arch]
+		conf.env.MSVC_INSTALLED_VERSIONS=versiondict
 	for version in desired_versions:
+		Logs.debug('msvc: detecting %r - %r',version,desired_versions)
 		try:
-			targets=dict(versiondict[version])
-			for target in platforms:
-				try:
-					arch,(p1,p2,p3)=targets[target]
-					compiler,revision=version.rsplit(' ',1)
-					return compiler,revision,p1,p2,p3
-				except KeyError:continue
-		except KeyError:continue
-	conf.fatal('msvc: Impossible to find a valid architecture for building (in setup_msvc)')
+			targets=versiondict[version]
+		except KeyError:
+			continue
+		seen=set()
+		for arch in platforms:
+			if arch in seen:
+				continue
+			else:
+				seen.add(arch)
+			try:
+				cfg=targets[arch]
+			except KeyError:
+				continue
+			cfg.evaluate()
+			if cfg.is_valid:
+				compiler,revision=version.rsplit(' ',1)
+				return compiler,revision,cfg.bindirs,cfg.incdirs,cfg.libdirs,cfg.cpu
+	conf.fatal('msvc: Impossible to find a valid architecture for building %r - %r'%(desired_versions,list(versiondict.keys())))
+@conf
 def get_msvc_version(conf,compiler,version,target,vcvars):
-	debug('msvc: get_msvc_version: %r %r %r',compiler,version,target)
-	batfile=conf.bldnode.make_node('waf-print-msvc.bat')
+	Logs.debug('msvc: get_msvc_version: %r %r %r',compiler,version,target)
+	try:
+		conf.msvc_cnt+=1
+	except AttributeError:
+		conf.msvc_cnt=1
+	batfile=conf.bldnode.make_node('waf-print-msvc-%d.bat'%conf.msvc_cnt)
 	batfile.write("""@echo off
 set INCLUDE=
 set LIB=
 call "%s" %s
 echo PATH=%%PATH%%
 echo INCLUDE=%%INCLUDE%%
-echo LIB=%%LIB%%
+echo LIB=%%LIB%%;%%LIBPATH%%
 """%(vcvars,target))
-	sout=conf.cmd_and_log(['cmd','/E:on','/V:on','/C',batfile.abspath()])
+	sout=conf.cmd_and_log(['cmd.exe','/E:on','/V:on','/C',batfile.abspath()])
 	lines=sout.splitlines()
-	if not lines[0]:lines=lines[1:]
-	for x in('Setting environment','Setting SDK environment','Intel(R) C++ Compiler','Intel Parallel Studio'):
-		if lines[0].find(x)!=-1:
-			break
-	else:
-		debug('msvc: get_msvc_version: %r %r %r -> not found',compiler,version,target)
-		conf.fatal('msvc: Impossible to find a valid architecture for building (in get_msvc_version)')
-	for line in lines[1:]:
+	if not lines[0]:
+		lines.pop(0)
+	MSVC_PATH=MSVC_INCDIR=MSVC_LIBDIR=None
+	for line in lines:
 		if line.startswith('PATH='):
 			path=line[5:]
 			MSVC_PATH=path.split(';')
@@ -88,177 +103,241 @@ echo LIB=%%LIB%%
 			MSVC_INCDIR=[i for i in line[8:].split(';')if i]
 		elif line.startswith('LIB='):
 			MSVC_LIBDIR=[i for i in line[4:].split(';')if i]
-	env={}
-	env.update(os.environ)
+	if None in(MSVC_PATH,MSVC_INCDIR,MSVC_LIBDIR):
+		conf.fatal('msvc: Could not find a valid architecture for building (get_msvc_version_3)')
+	env=dict(os.environ)
 	env.update(PATH=path)
 	compiler_name,linker_name,lib_name=_get_prog_names(conf,compiler)
 	cxx=conf.find_program(compiler_name,path_list=MSVC_PATH)
-	cxx=conf.cmd_to_list(cxx)
 	if'CL'in env:
 		del(env['CL'])
 	try:
-		try:
-			conf.cmd_and_log(cxx+['/help'],env=env)
-		except Exception ,e:
-			debug('msvc: get_msvc_version: %r %r %r -> failure'%(compiler,version,target))
-			debug(str(e))
-			conf.fatal('msvc: cannot run the compiler (in get_msvc_version)')
-		else:
-			debug('msvc: get_msvc_version: %r %r %r -> OK',compiler,version,target)
+		conf.cmd_and_log(cxx+['/help'],env=env)
+	except UnicodeError:
+		st=traceback.format_exc()
+		if conf.logger:
+			conf.logger.error(st)
+		conf.fatal('msvc: Unicode error - check the code page?')
+	except Exception as e:
+		Logs.debug('msvc: get_msvc_version: %r %r %r -> failure %s',compiler,version,target,str(e))
+		conf.fatal('msvc: cannot run the compiler in get_msvc_version (run with -v to display errors)')
+	else:
+		Logs.debug('msvc: get_msvc_version: %r %r %r -> OK',compiler,version,target)
 	finally:
 		conf.env[compiler_name]=''
 	return(MSVC_PATH,MSVC_INCDIR,MSVC_LIBDIR)
-def gather_wsdk_versions(conf,versions):
-	version_pattern=re.compile('^v..?.?\...?.?')
-	try:
-		all_versions=_winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Wow6432node\\Microsoft\\Microsoft SDKs\\Windows')
-	except WindowsError:
-		try:
-			all_versions=_winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Microsoft\\Microsoft SDKs\\Windows')
-		except WindowsError:
-			return
-	index=0
-	while 1:
-		try:
-			version=_winreg.EnumKey(all_versions,index)
-		except WindowsError:
-			break
-		index=index+1
-		if not version_pattern.match(version):
-			continue
-		try:
-			msvc_version=_winreg.OpenKey(all_versions,version)
-			path,type=_winreg.QueryValueEx(msvc_version,'InstallationFolder')
-		except WindowsError:
-			continue
-		if os.path.isfile(os.path.join(path,'bin','SetEnv.cmd')):
-			targets=[]
-			for target,arch in all_msvc_platforms:
-				try:
-					targets.append((target,(arch,conf.get_msvc_version('wsdk',version,'/'+target,os.path.join(path,'bin','SetEnv.cmd')))))
-				except conf.errors.ConfigurationError:
-					pass
-			versions.append(('wsdk '+version[1:],targets))
 def gather_wince_supported_platforms():
 	supported_wince_platforms=[]
 	try:
-		ce_sdk=_winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Wow6432node\\Microsoft\\Windows CE Tools\\SDKs')
-	except WindowsError:
+		ce_sdk=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Wow6432node\\Microsoft\\Windows CE Tools\\SDKs')
+	except OSError:
 		try:
-			ce_sdk=_winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Microsoft\\Windows CE Tools\\SDKs')
-		except WindowsError:
+			ce_sdk=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Microsoft\\Windows CE Tools\\SDKs')
+		except OSError:
 			ce_sdk=''
 	if not ce_sdk:
 		return supported_wince_platforms
-	ce_index=0
+	index=0
 	while 1:
 		try:
-			sdk_device=_winreg.EnumKey(ce_sdk,ce_index)
-		except WindowsError:
+			sdk_device=Utils.winreg.EnumKey(ce_sdk,index)
+			sdk=Utils.winreg.OpenKey(ce_sdk,sdk_device)
+		except OSError:
 			break
-		ce_index=ce_index+1
-		sdk=_winreg.OpenKey(ce_sdk,sdk_device)
+		index+=1
 		try:
-			path,type=_winreg.QueryValueEx(sdk,'SDKRootDir')
-		except WindowsError:
+			path,type=Utils.winreg.QueryValueEx(sdk,'SDKRootDir')
+		except OSError:
 			try:
-				path,type=_winreg.QueryValueEx(sdk,'SDKInformation')
-				path,xml=os.path.split(path)
-			except WindowsError:
+				path,type=Utils.winreg.QueryValueEx(sdk,'SDKInformation')
+			except OSError:
 				continue
+			path,xml=os.path.split(path)
 		path=str(path)
 		path,device=os.path.split(path)
 		if not device:
 			path,device=os.path.split(path)
+		platforms=[]
 		for arch,compiler in all_wince_platforms:
-			platforms=[]
 			if os.path.isdir(os.path.join(path,device,'Lib',arch)):
 				platforms.append((arch,compiler,os.path.join(path,device,'Include',arch),os.path.join(path,device,'Lib',arch)))
-			if platforms:
-				supported_wince_platforms.append((device,platforms))
+		if platforms:
+			supported_wince_platforms.append((device,platforms))
 	return supported_wince_platforms
 def gather_msvc_detected_versions():
-	version_pattern=re.compile('^(\d\d?\.\d\d?)(Exp)?$')
+	version_pattern=re.compile(r'^(\d\d?\.\d\d?)(Exp)?$')
 	detected_versions=[]
-	for vcver,vcvar in[('VCExpress','Exp'),('VisualStudio','')]:
+	for vcver,vcvar in(('VCExpress','Exp'),('VisualStudio','')):
+		prefix='SOFTWARE\\Wow6432node\\Microsoft\\'+vcver
 		try:
-			prefix='SOFTWARE\\Wow6432node\\Microsoft\\'+vcver
-			all_versions=_winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE,prefix)
-		except WindowsError:
+			all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,prefix)
+		except OSError:
+			prefix='SOFTWARE\\Microsoft\\'+vcver
 			try:
-				prefix='SOFTWARE\\Microsoft\\'+vcver
-				all_versions=_winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE,prefix)
-			except WindowsError:
+				all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,prefix)
+			except OSError:
 				continue
 		index=0
 		while 1:
 			try:
-				version=_winreg.EnumKey(all_versions,index)
-			except WindowsError:
+				version=Utils.winreg.EnumKey(all_versions,index)
+			except OSError:
 				break
-			index=index+1
+			index+=1
 			match=version_pattern.match(version)
-			if not match:
-				continue
-			else:
+			if match:
 				versionnumber=float(match.group(1))
-			detected_versions.append((versionnumber,version+vcvar,prefix+"\\"+version))
+			else:
+				continue
+			detected_versions.append((versionnumber,version+vcvar,prefix+'\\'+version))
 	def fun(tup):
 		return tup[0]
-	try:
-		detected_versions.sort(key=fun)
-	except:
-		detected_versions.sort(lambda x,y:cmp(x[0],y[0]))
+	detected_versions.sort(key=fun)
 	return detected_versions
+class target_compiler(object):
+	def __init__(self,ctx,compiler,cpu,version,bat_target,bat,callback=None):
+		self.conf=ctx
+		self.name=None
+		self.is_valid=False
+		self.is_done=False
+		self.compiler=compiler
+		self.cpu=cpu
+		self.version=version
+		self.bat_target=bat_target
+		self.bat=bat
+		self.callback=callback
+	def evaluate(self):
+		if self.is_done:
+			return
+		self.is_done=True
+		try:
+			vs=self.conf.get_msvc_version(self.compiler,self.version,self.bat_target,self.bat)
+		except Errors.ConfigurationError:
+			self.is_valid=False
+			return
+		if self.callback:
+			vs=self.callback(self,vs)
+		self.is_valid=True
+		(self.bindirs,self.incdirs,self.libdirs)=vs
+	def __str__(self):
+		return str((self.compiler,self.cpu,self.version,self.bat_target,self.bat))
+	def __repr__(self):
+		return repr((self.compiler,self.cpu,self.version,self.bat_target,self.bat))
+@conf
+def gather_wsdk_versions(conf,versions):
+	version_pattern=re.compile(r'^v..?.?\...?.?')
+	try:
+		all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Wow6432node\\Microsoft\\Microsoft SDKs\\Windows')
+	except OSError:
+		try:
+			all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Microsoft\\Microsoft SDKs\\Windows')
+		except OSError:
+			return
+	index=0
+	while 1:
+		try:
+			version=Utils.winreg.EnumKey(all_versions,index)
+		except OSError:
+			break
+		index+=1
+		if not version_pattern.match(version):
+			continue
+		try:
+			msvc_version=Utils.winreg.OpenKey(all_versions,version)
+			path,type=Utils.winreg.QueryValueEx(msvc_version,'InstallationFolder')
+		except OSError:
+			continue
+		if path and os.path.isfile(os.path.join(path,'bin','SetEnv.cmd')):
+			targets={}
+			for target,arch in all_msvc_platforms:
+				targets[target]=target_compiler(conf,'wsdk',arch,version,'/'+target,os.path.join(path,'bin','SetEnv.cmd'))
+			versions['wsdk '+version[1:]]=targets
+@conf
 def gather_msvc_targets(conf,versions,version,vc_path):
-	targets=[]
-	if os.path.isfile(os.path.join(vc_path,'vcvarsall.bat')):
+	targets={}
+	if os.path.isfile(os.path.join(vc_path,'VC','Auxiliary','Build','vcvarsall.bat')):
 		for target,realtarget in all_msvc_platforms[::-1]:
-			try:
-				targets.append((target,(realtarget,conf.get_msvc_version('msvc',version,target,os.path.join(vc_path,'vcvarsall.bat')))))
-			except conf.errors.ConfigurationError:
-				pass
+			targets[target]=target_compiler(conf,'msvc',realtarget,version,target,os.path.join(vc_path,'VC','Auxiliary','Build','vcvarsall.bat'))
+	elif os.path.isfile(os.path.join(vc_path,'vcvarsall.bat')):
+		for target,realtarget in all_msvc_platforms[::-1]:
+			targets[target]=target_compiler(conf,'msvc',realtarget,version,target,os.path.join(vc_path,'vcvarsall.bat'))
 	elif os.path.isfile(os.path.join(vc_path,'Common7','Tools','vsvars32.bat')):
-		try:
-			targets.append(('x86',('x86',conf.get_msvc_version('msvc',version,'x86',os.path.join(vc_path,'Common7','Tools','vsvars32.bat')))))
-		except conf.errors.ConfigurationError:
-			pass
+		targets['x86']=target_compiler(conf,'msvc','x86',version,'x86',os.path.join(vc_path,'Common7','Tools','vsvars32.bat'))
 	elif os.path.isfile(os.path.join(vc_path,'Bin','vcvars32.bat')):
-		try:
-			targets.append(('x86',('x86',conf.get_msvc_version('msvc',version,'',os.path.join(vc_path,'Bin','vcvars32.bat')))))
-		except conf.errors.ConfigurationError:
-			pass
-	versions.append(('msvc '+version,targets))
+		targets['x86']=target_compiler(conf,'msvc','x86',version,'',os.path.join(vc_path,'Bin','vcvars32.bat'))
+	if targets:
+		versions['msvc %s'%version]=targets
+@conf
 def gather_wince_targets(conf,versions,version,vc_path,vsvars,supported_platforms):
 	for device,platforms in supported_platforms:
-		cetargets=[]
+		targets={}
 		for platform,compiler,include,lib in platforms:
 			winCEpath=os.path.join(vc_path,'ce')
 			if not os.path.isdir(winCEpath):
 				continue
-			try:
-				common_bindirs,_1,_2=conf.get_msvc_version('msvc',version,'x86',vsvars)
-			except conf.errors.ConfigurationError:
-				continue
 			if os.path.isdir(os.path.join(winCEpath,'lib',platform)):
-				bindirs=[os.path.join(winCEpath,'bin',compiler),os.path.join(winCEpath,'bin','x86_'+compiler)]+common_bindirs
+				bindirs=[os.path.join(winCEpath,'bin',compiler),os.path.join(winCEpath,'bin','x86_'+compiler)]
 				incdirs=[os.path.join(winCEpath,'include'),os.path.join(winCEpath,'atlmfc','include'),include]
 				libdirs=[os.path.join(winCEpath,'lib',platform),os.path.join(winCEpath,'atlmfc','lib',platform),lib]
-				cetargets.append((platform,(platform,(bindirs,incdirs,libdirs))))
-		if cetargets:
-			versions.append((device+' '+version,cetargets))
+				def combine_common(obj,compiler_env):
+					(common_bindirs,_1,_2)=compiler_env
+					return(bindirs+common_bindirs,incdirs,libdirs)
+				targets[platform]=target_compiler(conf,'msvc',platform,version,'x86',vsvars,combine_common)
+		if targets:
+			versions[device+' '+version]=targets
+@conf
+def gather_winphone_targets(conf,versions,version,vc_path,vsvars):
+	targets={}
+	for target,realtarget in all_msvc_platforms[::-1]:
+		targets[target]=target_compiler(conf,'winphone',realtarget,version,target,vsvars)
+	if targets:
+		versions['winphone '+version]=targets
+@conf
+def gather_vswhere_versions(conf,versions):
+	try:
+		import json
+	except ImportError:
+		Logs.error('Visual Studio 2017 detection requires Python 2.6')
+		return
+	prg_path=os.environ.get('ProgramFiles(x86)',os.environ.get('ProgramFiles','C:\\Program Files (x86)'))
+	vswhere=os.path.join(prg_path,'Microsoft Visual Studio','Installer','vswhere.exe')
+	args=[vswhere,'-products','*','-legacy','-format','json']
+	try:
+		txt=conf.cmd_and_log(args)
+	except Errors.WafError as e:
+		Logs.debug('msvc: vswhere.exe failed %s',e)
+		return
+	if sys.version_info[0]<3:
+		txt=txt.decode(Utils.console_encoding())
+	arr=json.loads(txt)
+	arr.sort(key=lambda x:x['installationVersion'])
+	for entry in arr:
+		ver=entry['installationVersion']
+		ver=str('.'.join(ver.split('.')[:2]))
+		path=str(os.path.abspath(entry['installationPath']))
+		if os.path.exists(path)and('msvc %s'%ver)not in versions:
+			conf.gather_msvc_targets(versions,ver,path)
+@conf
 def gather_msvc_versions(conf,versions):
 	vc_paths=[]
 	for(v,version,reg)in gather_msvc_detected_versions():
 		try:
 			try:
-				msvc_version=_winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE,reg+"\\Setup\\VC")
-			except WindowsError:
-				msvc_version=_winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE,reg+"\\Setup\\Microsoft Visual C++")
-			path,type=_winreg.QueryValueEx(msvc_version,'ProductDir')
-			vc_paths.append((version,os.path.abspath(str(path))))
-		except WindowsError:
+				msvc_version=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,reg+"\\Setup\\VC")
+			except OSError:
+				msvc_version=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,reg+"\\Setup\\Microsoft Visual C++")
+			path,type=Utils.winreg.QueryValueEx(msvc_version,'ProductDir')
+		except OSError:
+			try:
+				msvc_version=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,"SOFTWARE\\Wow6432node\\Microsoft\\VisualStudio\\SxS\\VS7")
+				path,type=Utils.winreg.QueryValueEx(msvc_version,version)
+			except OSError:
+				continue
+			else:
+				vc_paths.append((version,os.path.abspath(str(path))))
 			continue
+		else:
+			vc_paths.append((version,os.path.abspath(str(path))))
 	wince_supported_platforms=gather_wince_supported_platforms()
 	for version,vc_path in vc_paths:
 		vs_path=os.path.dirname(vc_path)
@@ -267,72 +346,133 @@ def gather_msvc_versions(conf,versions):
 			conf.gather_wince_targets(versions,version,vc_path,vsvars,wince_supported_platforms)
 	for version,vc_path in vc_paths:
 		vs_path=os.path.dirname(vc_path)
+		vsvars=os.path.join(vs_path,'VC','WPSDK','WP80','vcvarsphoneall.bat')
+		if os.path.isfile(vsvars):
+			conf.gather_winphone_targets(versions,'8.0',vc_path,vsvars)
+			break
+	for version,vc_path in vc_paths:
+		vs_path=os.path.dirname(vc_path)
 		conf.gather_msvc_targets(versions,version,vc_path)
+@conf
 def gather_icl_versions(conf,versions):
-	version_pattern=re.compile('^...?.?\....?.?')
+	version_pattern=re.compile(r'^...?.?\....?.?')
 	try:
-		all_versions=_winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Wow6432node\\Intel\\Compilers\\C++')
-	except WindowsError:
+		all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Wow6432node\\Intel\\Compilers\\C++')
+	except OSError:
 		try:
-			all_versions=_winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Intel\\Compilers\\C++')
-		except WindowsError:
+			all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Intel\\Compilers\\C++')
+		except OSError:
 			return
 	index=0
 	while 1:
 		try:
-			version=_winreg.EnumKey(all_versions,index)
-		except WindowsError:
+			version=Utils.winreg.EnumKey(all_versions,index)
+		except OSError:
 			break
-		index=index+1
+		index+=1
 		if not version_pattern.match(version):
 			continue
-		targets=[]
+		targets={}
 		for target,arch in all_icl_platforms:
+			if target=='intel64':
+				targetDir='EM64T_NATIVE'
+			else:
+				targetDir=target
 			try:
-				if target=='intel64':targetDir='EM64T_NATIVE'
-				else:targetDir=target
-				_winreg.OpenKey(all_versions,version+'\\'+targetDir)
-				icl_version=_winreg.OpenKey(all_versions,version)
-				path,type=_winreg.QueryValueEx(icl_version,'ProductDir')
-				if os.path.isfile(os.path.join(path,'bin','iclvars.bat')):
-					try:
-						targets.append((target,(arch,conf.get_msvc_version('intel',version,target,os.path.join(path,'bin','iclvars.bat')))))
-					except conf.errors.ConfigurationError:
-						pass
-			except WindowsError:
+				Utils.winreg.OpenKey(all_versions,version+'\\'+targetDir)
+				icl_version=Utils.winreg.OpenKey(all_versions,version)
+				path,type=Utils.winreg.QueryValueEx(icl_version,'ProductDir')
+			except OSError:
 				pass
+			else:
+				batch_file=os.path.join(path,'bin','iclvars.bat')
+				if os.path.isfile(batch_file):
+					targets[target]=target_compiler(conf,'intel',arch,version,target,batch_file)
 		for target,arch in all_icl_platforms:
 			try:
-				icl_version=_winreg.OpenKey(all_versions,version+'\\'+target)
-				path,type=_winreg.QueryValueEx(icl_version,'ProductDir')
-				if os.path.isfile(os.path.join(path,'bin','iclvars.bat')):
-					try:
-						targets.append((target,(arch,conf.get_msvc_version('intel',version,target,os.path.join(path,'bin','iclvars.bat')))))
-					except conf.errors.ConfigurationError:
-						pass
-			except WindowsError:
+				icl_version=Utils.winreg.OpenKey(all_versions,version+'\\'+target)
+				path,type=Utils.winreg.QueryValueEx(icl_version,'ProductDir')
+			except OSError:
 				continue
+			else:
+				batch_file=os.path.join(path,'bin','iclvars.bat')
+				if os.path.isfile(batch_file):
+					targets[target]=target_compiler(conf,'intel',arch,version,target,batch_file)
+		major=version[0:2]
+		versions['intel '+major]=targets
+@conf
+def gather_intel_composer_versions(conf,versions):
+	version_pattern=re.compile(r'^...?.?\...?.?.?')
+	try:
+		all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Wow6432node\\Intel\\Suites')
+	except OSError:
+		try:
+			all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Intel\\Suites')
+		except OSError:
+			return
+	index=0
+	while 1:
+		try:
+			version=Utils.winreg.EnumKey(all_versions,index)
+		except OSError:
+			break
+		index+=1
+		if not version_pattern.match(version):
+			continue
+		targets={}
+		for target,arch in all_icl_platforms:
+			if target=='intel64':
+				targetDir='EM64T_NATIVE'
+			else:
+				targetDir=target
+			try:
+				try:
+					defaults=Utils.winreg.OpenKey(all_versions,version+'\\Defaults\\C++\\'+targetDir)
+				except OSError:
+					if targetDir=='EM64T_NATIVE':
+						defaults=Utils.winreg.OpenKey(all_versions,version+'\\Defaults\\C++\\EM64T')
+					else:
+						raise
+				uid,type=Utils.winreg.QueryValueEx(defaults,'SubKey')
+				Utils.winreg.OpenKey(all_versions,version+'\\'+uid+'\\C++\\'+targetDir)
+				icl_version=Utils.winreg.OpenKey(all_versions,version+'\\'+uid+'\\C++')
+				path,type=Utils.winreg.QueryValueEx(icl_version,'ProductDir')
+			except OSError:
+				pass
+			else:
+				batch_file=os.path.join(path,'bin','iclvars.bat')
+				if os.path.isfile(batch_file):
+					targets[target]=target_compiler(conf,'intel',arch,version,target,batch_file)
+				compilervars_warning_attr='_compilervars_warning_key'
+				if version[0:2]=='13'and getattr(conf,compilervars_warning_attr,True):
+					setattr(conf,compilervars_warning_attr,False)
+					patch_url='http://software.intel.com/en-us/forums/topic/328487'
+					compilervars_arch=os.path.join(path,'bin','compilervars_arch.bat')
+					for vscomntool in('VS110COMNTOOLS','VS100COMNTOOLS'):
+						if vscomntool in os.environ:
+							vs_express_path=os.environ[vscomntool]+r'..\IDE\VSWinExpress.exe'
+							dev_env_path=os.environ[vscomntool]+r'..\IDE\devenv.exe'
+							if(r'if exist "%VS110COMNTOOLS%..\IDE\VSWinExpress.exe"'in Utils.readf(compilervars_arch)and not os.path.exists(vs_express_path)and not os.path.exists(dev_env_path)):
+								Logs.warn(('The Intel compilervar_arch.bat only checks for one Visual Studio SKU ''(VSWinExpress.exe) but it does not seem to be installed at %r. ''The intel command line set up will fail to configure unless the file %r''is patched. See: %s')%(vs_express_path,compilervars_arch,patch_url))
 		major=version[0:2]
-		versions.append(('intel '+major,targets))
-def get_msvc_versions(conf):
-	if not conf.env['MSVC_INSTALLED_VERSIONS']:
-		lst=[]
-		conf.gather_icl_versions(lst)
-		conf.gather_wsdk_versions(lst)
-		conf.gather_msvc_versions(lst)
-		conf.env['MSVC_INSTALLED_VERSIONS']=lst
-	return conf.env['MSVC_INSTALLED_VERSIONS']
-def print_all_msvc_detected(conf):
-	for version,targets in conf.env['MSVC_INSTALLED_VERSIONS']:
-		info(version)
-		for target,l in targets:
-			info("\t"+target)
-def detect_msvc(conf):
-	versions=get_msvc_versions(conf)
-	return setup_msvc(conf,versions)
+		versions['intel '+major]=targets
+@conf
+def detect_msvc(self):
+	return self.setup_msvc(self.get_msvc_versions())
+@conf
+def get_msvc_versions(self):
+	dct=Utils.ordered_iter_dict()
+	self.gather_icl_versions(dct)
+	self.gather_intel_composer_versions(dct)
+	self.gather_wsdk_versions(dct)
+	self.gather_msvc_versions(dct)
+	self.gather_vswhere_versions(dct)
+	Logs.debug('msvc: detected versions %r',list(dct.keys()))
+	return dct
+@conf
 def find_lt_names_msvc(self,libname,is_static=False):
 	lt_names=['lib%s.la'%libname,'%s.la'%libname,]
-	for path in self.env['LIBPATH']:
+	for path in self.env.LIBPATH:
 		for la in lt_names:
 			laf=os.path.join(path,la)
 			dll=None
@@ -344,7 +484,7 @@ def find_lt_names_msvc(self,libname,is_s
 				if not is_static and ltdict.get('library_names',''):
 					dllnames=ltdict['library_names'].split()
 					dll=dllnames[0].lower()
-					dll=re.sub('\.dll$','',dll)
+					dll=re.sub(r'\.dll$','',dll)
 					return(lt_libdir,dll,False)
 				elif ltdict.get('old_library',''):
 					olib=ltdict['old_library']
@@ -357,9 +497,10 @@ def find_lt_names_msvc(self,libname,is_s
 				else:
 					raise self.errors.WafError('invalid libtool object file: %s'%laf)
 	return(None,None,None)
+@conf
 def libname_msvc(self,libname,is_static=False):
 	lib=libname.lower()
-	lib=re.sub('\.lib$','',lib)
+	lib=re.sub(r'\.lib$','',lib)
 	if lib in g_msvc_systemlibs:
 		return lib
 	lib=re.sub('^lib','',lib)
@@ -367,12 +508,12 @@ def libname_msvc(self,libname,is_static=
 		return None
 	(lt_path,lt_libname,lt_static)=self.find_lt_names_msvc(lib,is_static)
 	if lt_path!=None and lt_libname!=None:
-		if lt_static==True:
+		if lt_static:
 			return os.path.join(lt_path,lt_libname)
 	if lt_path!=None:
-		_libpaths=[lt_path]+self.env['LIBPATH']
+		_libpaths=[lt_path]+self.env.LIBPATH
 	else:
-		_libpaths=self.env['LIBPATH']
+		_libpaths=self.env.LIBPATH
 	static_libs=['lib%ss.lib'%lib,'lib%s.lib'%lib,'%ss.lib'%lib,'%s.lib'%lib,]
 	dynamic_libs=['lib%s.dll.lib'%lib,'lib%s.dll.a'%lib,'%s.dll.lib'%lib,'%s.dll.a'%lib,'lib%s_d.lib'%lib,'%s_d.lib'%lib,'%s.lib'%lib,]
 	libnames=static_libs
@@ -381,10 +522,11 @@ def libname_msvc(self,libname,is_static=
 	for path in _libpaths:
 		for libn in libnames:
 			if os.path.exists(os.path.join(path,libn)):
-				debug('msvc: lib found: %s'%os.path.join(path,libn))
-				return re.sub('\.lib$','',libn)
-	self.fatal("The library %r could not be found"%libname)
-	return re.sub('\.lib$','',libname)
+				Logs.debug('msvc: lib found: %s',os.path.join(path,libn))
+				return re.sub(r'\.lib$','',libn)
+	self.fatal('The library %r could not be found'%libname)
+	return re.sub(r'\.lib$','',libname)
+@conf
 def check_lib_msvc(self,libname,is_static=False,uselib_store=None):
 	libn=self.libname_msvc(libname,is_static)
 	if not uselib_store:
@@ -393,11 +535,12 @@ def check_lib_msvc(self,libname,is_stati
 		self.env['STLIB_'+uselib_store]=[libn]
 	else:
 		self.env['LIB_'+uselib_store]=[libn]
+@conf
 def check_libs_msvc(self,libnames,is_static=False):
 	for libname in Utils.to_list(libnames):
 		self.check_lib_msvc(libname,is_static)
 def configure(conf):
-	conf.autodetect()
+	conf.autodetect(True)
 	conf.find_msvc()
 	conf.msvc_common_flags()
 	conf.cc_load_tools()
@@ -406,22 +549,26 @@ def configure(conf):
 	conf.cxx_add_flags()
 	conf.link_add_flags()
 	conf.visual_studio_add_flags()
+@conf
 def no_autodetect(conf):
 	conf.env.NO_MSVC_DETECT=1
 	configure(conf)
-def autodetect(conf):
+@conf
+def autodetect(conf,arch=False):
 	v=conf.env
 	if v.NO_MSVC_DETECT:
 		return
-	compiler,version,path,includes,libdirs=conf.detect_msvc()
-	v['PATH']=path
-	v['INCLUDES']=includes
-	v['LIBPATH']=libdirs
-	v['MSVC_COMPILER']=compiler
+	compiler,version,path,includes,libdirs,cpu=conf.detect_msvc()
+	if arch:
+		v.DEST_CPU=cpu
+	v.PATH=path
+	v.INCLUDES=includes
+	v.LIBPATH=libdirs
+	v.MSVC_COMPILER=compiler
 	try:
-		v['MSVC_VERSION']=float(version)
-	except:
-		v['MSVC_VERSION']=float(version[:-3])
+		v.MSVC_VERSION=float(version)
+	except ValueError:
+		v.MSVC_VERSION=float(version[:-3])
 def _get_prog_names(conf,compiler):
 	if compiler=='intel':
 		compiler_name='ICL'
@@ -432,90 +579,86 @@ def _get_prog_names(conf,compiler):
 		linker_name='LINK'
 		lib_name='LIB'
 	return compiler_name,linker_name,lib_name
+@conf
 def find_msvc(conf):
 	if sys.platform=='cygwin':
 		conf.fatal('MSVC module does not work under cygwin Python!')
 	v=conf.env
-	path=v['PATH']
-	compiler=v['MSVC_COMPILER']
-	version=v['MSVC_VERSION']
+	path=v.PATH
+	compiler=v.MSVC_COMPILER
+	version=v.MSVC_VERSION
 	compiler_name,linker_name,lib_name=_get_prog_names(conf,compiler)
 	v.MSVC_MANIFEST=(compiler=='msvc'and version>=8)or(compiler=='wsdk'and version>=6)or(compiler=='intel'and version>=11)
-	cxx=None
-	if v['CXX']:cxx=v['CXX']
-	elif'CXX'in conf.environ:cxx=conf.environ['CXX']
 	cxx=conf.find_program(compiler_name,var='CXX',path_list=path)
-	cxx=conf.cmd_to_list(cxx)
 	env=dict(conf.environ)
-	if path:env.update(PATH=';'.join(path))
+	if path:
+		env.update(PATH=';'.join(path))
 	if not conf.cmd_and_log(cxx+['/nologo','/help'],env=env):
 		conf.fatal('the msvc compiler could not be identified')
-	v['CC']=v['CXX']=cxx
-	v['CC_NAME']=v['CXX_NAME']='msvc'
-	if not v['LINK_CXX']:
-		link=conf.find_program(linker_name,path_list=path)
-		if link:v['LINK_CXX']=link
-		else:conf.fatal('%s was not found (linker)'%linker_name)
-		v['LINK']=link
-	if not v['LINK_CC']:
-		v['LINK_CC']=v['LINK_CXX']
-	if not v['AR']:
+	v.CC=v.CXX=cxx
+	v.CC_NAME=v.CXX_NAME='msvc'
+	if not v.LINK_CXX:
+		conf.find_program(linker_name,path_list=path,errmsg='%s was not found (linker)'%linker_name,var='LINK_CXX')
+	if not v.LINK_CC:
+		v.LINK_CC=v.LINK_CXX
+	if not v.AR:
 		stliblink=conf.find_program(lib_name,path_list=path,var='AR')
-		if not stliblink:return
-		v['ARFLAGS']=['/NOLOGO']
+		if not stliblink:
+			return
+		v.ARFLAGS=['/nologo']
 	if v.MSVC_MANIFEST:
-		mt=conf.find_program('MT',path_list=path,var='MT')
-		v['MTFLAGS']=['/NOLOGO']
-	conf.load('winres')
-	if not conf.env['WINRC']:
-		warn('Resource compiler not found. Compiling resource file is disabled')
+		conf.find_program('MT',path_list=path,var='MT')
+		v.MTFLAGS=['/nologo']
+	try:
+		conf.load('winres')
+	except Errors.ConfigurationError:
+		Logs.warn('Resource compiler not found. Compiling resource file is disabled')
+@conf
 def visual_studio_add_flags(self):
 	v=self.env
-	try:v.prepend_value('INCLUDES',self.environ['INCLUDE'].split(';'))
-	except:pass
-	try:v.prepend_value('LIBPATH',self.environ['LIB'].split(';'))
-	except:pass
+	if self.environ.get('INCLUDE'):
+		v.prepend_value('INCLUDES',[x for x in self.environ['INCLUDE'].split(';')if x])
+	if self.environ.get('LIB'):
+		v.prepend_value('LIBPATH',[x for x in self.environ['LIB'].split(';')if x])
+@conf
 def msvc_common_flags(conf):
 	v=conf.env
-	v['DEST_BINFMT']='pe'
+	v.DEST_BINFMT='pe'
 	v.append_value('CFLAGS',['/nologo'])
 	v.append_value('CXXFLAGS',['/nologo'])
-	v['DEFINES_ST']='/D%s'
-	v['CC_SRC_F']=''
-	v['CC_TGT_F']=['/c','/Fo']
-	if v['MSVC_VERSION']>=8:
-		v['CC_TGT_F']=['/FC']+v['CC_TGT_F']
-	v['CXX_SRC_F']=''
-	v['CXX_TGT_F']=['/c','/Fo']
-	if v['MSVC_VERSION']>=8:
-		v['CXX_TGT_F']=['/FC']+v['CXX_TGT_F']
-	v['CPPPATH_ST']='/I%s'
-	v['AR_TGT_F']=v['CCLNK_TGT_F']=v['CXXLNK_TGT_F']='/OUT:'
-	v['CFLAGS_CONSOLE']=v['CXXFLAGS_CONSOLE']=['/SUBSYSTEM:CONSOLE']
-	v['CFLAGS_NATIVE']=v['CXXFLAGS_NATIVE']=['/SUBSYSTEM:NATIVE']
-	v['CFLAGS_POSIX']=v['CXXFLAGS_POSIX']=['/SUBSYSTEM:POSIX']
-	v['CFLAGS_WINDOWS']=v['CXXFLAGS_WINDOWS']=['/SUBSYSTEM:WINDOWS']
-	v['CFLAGS_WINDOWSCE']=v['CXXFLAGS_WINDOWSCE']=['/SUBSYSTEM:WINDOWSCE']
-	v['CFLAGS_CRT_MULTITHREADED']=v['CXXFLAGS_CRT_MULTITHREADED']=['/MT']
-	v['CFLAGS_CRT_MULTITHREADED_DLL']=v['CXXFLAGS_CRT_MULTITHREADED_DLL']=['/MD']
-	v['CFLAGS_CRT_MULTITHREADED_DBG']=v['CXXFLAGS_CRT_MULTITHREADED_DBG']=['/MTd']
-	v['CFLAGS_CRT_MULTITHREADED_DLL_DBG']=v['CXXFLAGS_CRT_MULTITHREADED_DLL_DBG']=['/MDd']
-	v['LIB_ST']='%s.lib'
-	v['LIBPATH_ST']='/LIBPATH:%s'
-	v['STLIB_ST']='lib%s.lib'
-	v['STLIBPATH_ST']='/LIBPATH:%s'
-	v.append_value('LINKFLAGS',['/NOLOGO'])
-	if v['MSVC_MANIFEST']:
+	v.append_value('LINKFLAGS',['/nologo'])
+	v.DEFINES_ST='/D%s'
+	v.CC_SRC_F=''
+	v.CC_TGT_F=['/c','/Fo']
+	v.CXX_SRC_F=''
+	v.CXX_TGT_F=['/c','/Fo']
+	if(v.MSVC_COMPILER=='msvc'and v.MSVC_VERSION>=8)or(v.MSVC_COMPILER=='wsdk'and v.MSVC_VERSION>=6):
+		v.CC_TGT_F=['/FC']+v.CC_TGT_F
+		v.CXX_TGT_F=['/FC']+v.CXX_TGT_F
+	v.CPPPATH_ST='/I%s'
+	v.AR_TGT_F=v.CCLNK_TGT_F=v.CXXLNK_TGT_F='/OUT:'
+	v.CFLAGS_CRT_MULTITHREADED=v.CXXFLAGS_CRT_MULTITHREADED=['/MT']
+	v.CFLAGS_CRT_MULTITHREADED_DLL=v.CXXFLAGS_CRT_MULTITHREADED_DLL=['/MD']
+	v.CFLAGS_CRT_MULTITHREADED_DBG=v.CXXFLAGS_CRT_MULTITHREADED_DBG=['/MTd']
+	v.CFLAGS_CRT_MULTITHREADED_DLL_DBG=v.CXXFLAGS_CRT_MULTITHREADED_DLL_DBG=['/MDd']
+	v.LIB_ST='%s.lib'
+	v.LIBPATH_ST='/LIBPATH:%s'
+	v.STLIB_ST='%s.lib'
+	v.STLIBPATH_ST='/LIBPATH:%s'
+	if v.MSVC_MANIFEST:
 		v.append_value('LINKFLAGS',['/MANIFEST'])
-	v['CFLAGS_cshlib']=[]
-	v['CXXFLAGS_cxxshlib']=[]
-	v['LINKFLAGS_cshlib']=v['LINKFLAGS_cxxshlib']=['/DLL']
-	v['cshlib_PATTERN']=v['cxxshlib_PATTERN']='%s.dll'
-	v['implib_PATTERN']='%s.lib'
-	v['IMPLIB_ST']='/IMPLIB:%s'
-	v['LINKFLAGS_cstlib']=[]
-	v['cstlib_PATTERN']=v['cxxstlib_PATTERN']='lib%s.lib'
-	v['cprogram_PATTERN']=v['cxxprogram_PATTERN']='%s.exe'
+	v.CFLAGS_cshlib=[]
+	v.CXXFLAGS_cxxshlib=[]
+	v.LINKFLAGS_cshlib=v.LINKFLAGS_cxxshlib=['/DLL']
+	v.cshlib_PATTERN=v.cxxshlib_PATTERN='%s.dll'
+	v.implib_PATTERN='%s.lib'
+	v.IMPLIB_ST='/IMPLIB:%s'
+	v.LINKFLAGS_cstlib=[]
+	v.cstlib_PATTERN=v.cxxstlib_PATTERN='%s.lib'
+	v.cprogram_PATTERN=v.cxxprogram_PATTERN='%s.exe'
+	v.def_PATTERN='/def:%s'
+@after_method('apply_link')
+@feature('c','cxx')
 def apply_flags_msvc(self):
 	if self.env.CC_NAME!='msvc'or not getattr(self,'link_task',None):
 		return
@@ -528,127 +671,34 @@ def apply_flags_msvc(self):
 	if not is_static:
 		for f in self.env.LINKFLAGS:
 			d=f.lower()
-			if d[1:]=='debug':
+			if d[1:]in('debug','debug:full','debug:fastlink'):
 				pdbnode=self.link_task.outputs[0].change_ext('.pdb')
 				self.link_task.outputs.append(pdbnode)
-				try:
-					self.install_task.source.append(pdbnode)
-				except AttributeError:
-					pass
+				if getattr(self,'install_task',None):
+					self.pdb_install_task=self.add_install_files(install_to=self.install_task.install_to,install_from=pdbnode)
 				break
+@feature('cprogram','cshlib','cxxprogram','cxxshlib')
+@after_method('apply_link')
 def apply_manifest(self):
 	if self.env.CC_NAME=='msvc'and self.env.MSVC_MANIFEST and getattr(self,'link_task',None):
 		out_node=self.link_task.outputs[0]
 		man_node=out_node.parent.find_or_declare(out_node.name+'.manifest')
 		self.link_task.outputs.append(man_node)
-		self.link_task.do_manifest=True
-def exec_mf(self):
-	env=self.env
-	mtool=env['MT']
-	if not mtool:
-		return 0
-	self.do_manifest=False
-	outfile=self.outputs[0].abspath()
-	manifest=None
-	for out_node in self.outputs:
-		if out_node.name.endswith('.manifest'):
-			manifest=out_node.abspath()
-			break
-	if manifest is None:
-		return 0
-	mode=''
-	if'cprogram'in self.generator.features or'cxxprogram'in self.generator.features:
-		mode='1'
-	elif'cshlib'in self.generator.features or'cxxshlib'in self.generator.features:
-		mode='2'
-	debug('msvc: embedding manifest in mode %r'%mode)
-	lst=[]
-	lst.append(env['MT'])
-	lst.extend(Utils.to_list(env['MTFLAGS']))
-	lst.extend(['-manifest',manifest])
-	lst.append('-outputresource:%s;%s'%(outfile,mode))
-	lst=[lst]
-	return self.exec_command(*lst)
-def quote_response_command(self,flag):
-	if flag.find(' ')>-1:
-		for x in('/LIBPATH:','/IMPLIB:','/OUT:','/I'):
-			if flag.startswith(x):
-				flag='%s"%s"'%(x,flag[len(x):])
-				break
-		else:
-			flag='"%s"'%flag
-	return flag
-def exec_response_command(self,cmd,**kw):
-	try:
-		tmp=None
-		if sys.platform.startswith('win')and isinstance(cmd,list)and len(' '.join(cmd))>=8192:
-			program=cmd[0]
-			cmd=[self.quote_response_command(x)for x in cmd]
-			(fd,tmp)=tempfile.mkstemp()
-			os.write(fd,'\r\n'.join(i.replace('\\','\\\\')for i in cmd[1:]))
-			os.close(fd)
-			cmd=[program,'@'+tmp]
-		ret=self.generator.bld.exec_command(cmd,**kw)
-	finally:
-		if tmp:
-			try:
-				os.remove(tmp)
-			except:
-				pass
-	return ret
-def exec_command_msvc(self,*k,**kw):
-	if self.env['CC_NAME']=='msvc':
-		if isinstance(k[0],list):
-			lst=[]
-			carry=''
-			for a in k[0]:
-				if a=='/Fo'or a=='/doc'or a[-1]==':':
-					carry=a
-				else:
-					lst.append(carry+a)
-					carry=''
-			k=[lst]
-		if self.env['PATH']:
-			env=dict(os.environ)
-			env.update(PATH=';'.join(self.env['PATH']))
-			kw['env']=env
-	bld=self.generator.bld
-	try:
-		if not kw.get('cwd',None):
-			kw['cwd']=bld.cwd
-	except AttributeError:
-		bld.cwd=kw['cwd']=bld.variant_dir
-	ret=self.exec_response_command(k[0],**kw)
-	if not ret and getattr(self,'do_manifest',None):
-		ret=self.exec_mf()
-	return ret
-for k in'c cxx cprogram cxxprogram cshlib cxxshlib cstlib cxxstlib'.split():
-	cls=Task.classes.get(k,None)
-	if cls:
-		cls.exec_command=exec_command_msvc
-		cls.exec_response_command=exec_response_command
-		cls.quote_response_command=quote_response_command
-		cls.exec_mf=exec_mf
-
-conf(get_msvc_version)
-conf(gather_wsdk_versions)
-conf(gather_msvc_targets)
-conf(gather_wince_targets)
-conf(gather_msvc_versions)
-conf(gather_icl_versions)
-conf(get_msvc_versions)
-conf(print_all_msvc_detected)
-conf(detect_msvc)
-conf(find_lt_names_msvc)
-conf(libname_msvc)
-conf(check_lib_msvc)
-conf(check_libs_msvc)
-conf(no_autodetect)
-conf(autodetect)
-conf(find_msvc)
-conf(visual_studio_add_flags)
-conf(msvc_common_flags)
-after_method('apply_link')(apply_flags_msvc)
-feature('c','cxx')(apply_flags_msvc)
-feature('cprogram','cshlib','cxxprogram','cxxshlib')(apply_manifest)
-after_method('apply_link')(apply_manifest)
\ No newline at end of file
+		self.env.DO_MANIFEST=True
+def make_winapp(self,family):
+	append=self.env.append_unique
+	append('DEFINES','WINAPI_FAMILY=%s'%family)
+	append('CXXFLAGS',['/ZW','/TP'])
+	for lib_path in self.env.LIBPATH:
+		append('CXXFLAGS','/AI%s'%lib_path)
+@feature('winphoneapp')
+@after_method('process_use')
+@after_method('propagate_uselib_vars')
+def make_winphone_app(self):
+	make_winapp(self,'WINAPI_FAMILY_PHONE_APP')
+	self.env.append_unique('LINKFLAGS',['/NODEFAULTLIB:ole32.lib','PhoneAppModelHost.lib'])
+@feature('winapp')
+@after_method('process_use')
+@after_method('propagate_uselib_vars')
+def make_windows_app(self):
+	make_winapp(self,'WINAPI_FAMILY_DESKTOP_APP')
--- pugl-0~svn32+dfsg0.orig/waflib/Tools/nasm.py
+++ pugl-0~svn32+dfsg0/waflib/Tools/nasm.py
@@ -1,14 +1,21 @@
 #! /usr/bin/env python
 # encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
 
+import os
 import waflib.Tools.asm
 from waflib.TaskGen import feature
+@feature('asm')
 def apply_nasm_vars(self):
 	self.env.append_value('ASFLAGS',self.to_list(getattr(self,'nasm_flags',[])))
 def configure(conf):
-	nasm=conf.find_program(['nasm','yasm'],var='AS')
+	conf.find_program(['nasm','yasm'],var='AS')
 	conf.env.AS_TGT_F=['-o']
 	conf.env.ASLNK_TGT_F=['-o']
-
-feature('asm')(apply_nasm_vars)
\ No newline at end of file
+	conf.load('asm')
+	conf.env.ASMPATH_ST='-I%s'+os.sep
+	txt=conf.cmd_and_log(conf.env.AS+['--version'])
+	if'yasm'in txt.lower():
+		conf.env.ASM_NAME='yasm'
+	else:
+		conf.env.ASM_NAME='nasm'
--- /dev/null
+++ pugl-0~svn32+dfsg0/waflib/Tools/nobuild.py
@@ -0,0 +1,11 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+from waflib import Task
+def build(bld):
+	def run(self):
+		for x in self.outputs:
+			x.write('')
+	for(name,cls)in Task.classes.items():
+		cls.run=run
--- pugl-0~svn32+dfsg0.orig/waflib/Tools/perl.py
+++ pugl-0~svn32+dfsg0/waflib/Tools/perl.py
@@ -1,15 +1,19 @@
 #! /usr/bin/env python
 # encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
 
 import os
-from waflib import Task,Options,Utils
+from waflib import Task,Options,Utils,Errors
 from waflib.Configure import conf
 from waflib.TaskGen import extension,feature,before_method
+@before_method('apply_incpaths','apply_link','propagate_uselib_vars')
+@feature('perlext')
 def init_perlext(self):
 	self.uselib=self.to_list(getattr(self,'uselib',[]))
-	if not'PERLEXT'in self.uselib:self.uselib.append('PERLEXT')
-	self.env['cshlib_PATTERN']=self.env['cxxshlib_PATTERN']=self.env['perlext_PATTERN']
+	if not'PERLEXT'in self.uselib:
+		self.uselib.append('PERLEXT')
+	self.env.cshlib_PATTERN=self.env.cxxshlib_PATTERN=self.env.perlext_PATTERN
+@extension('.xs')
 def xsubpp_file(self,node):
 	outnode=node.change_ext('.c')
 	self.create_task('xsubpp',node,outnode)
@@ -18,6 +22,7 @@ class xsubpp(Task.Task):
 	run_str='${PERL} ${XSUBPP} -noprototypes -typemap ${EXTUTILS_TYPEMAP} ${SRC} > ${TGT}'
 	color='BLUE'
 	ext_out=['.h']
+@conf
 def check_perl_version(self,minver=None):
 	res=True
 	if minver:
@@ -25,14 +30,8 @@ def check_perl_version(self,minver=None)
 	else:
 		cver=''
 	self.start_msg('Checking for minimum perl version %s'%cver)
-	perl=getattr(Options.options,'perlbinary',None)
-	if not perl:
-		perl=self.find_program('perl',var='PERL')
-	if not perl:
-		self.end_msg("Perl not found",color="YELLOW")
-		return False
-	self.env['PERL']=perl
-	version=self.cmd_and_log([perl,"-e",'printf \"%vd\", $^V'])
+	perl=self.find_program('perl',var='PERL',value=getattr(Options.options,'perlbinary',None))
+	version=self.cmd_and_log(perl+["-e",'printf \"%vd\", $^V'])
 	if not version:
 		res=False
 		version="Unknown"
@@ -40,42 +39,47 @@ def check_perl_version(self,minver=None)
 		ver=tuple(map(int,version.split(".")))
 		if ver<minver:
 			res=False
-	self.end_msg(version,color=res and"GREEN"or"YELLOW")
+	self.end_msg(version,color=res and'GREEN'or'YELLOW')
 	return res
+@conf
 def check_perl_module(self,module):
-	cmd=[self.env['PERL'],'-e','use %s'%module]
+	cmd=self.env.PERL+['-e','use %s'%module]
 	self.start_msg('perl module %s'%module)
 	try:
 		r=self.cmd_and_log(cmd)
-	except:
+	except Errors.WafError:
 		self.end_msg(False)
 		return None
 	self.end_msg(r or True)
 	return r
+@conf
 def check_perl_ext_devel(self):
 	env=self.env
 	perl=env.PERL
 	if not perl:
 		self.fatal('find perl first')
-	def read_out(cmd):
-		return Utils.to_list(self.cmd_and_log(perl+cmd))
-	env['LINKFLAGS_PERLEXT']=read_out(" -MConfig -e'print $Config{lddlflags}'")
-	env['INCLUDES_PERLEXT']=read_out(" -MConfig -e'print \"$Config{archlib}/CORE\"'")
-	env['CFLAGS_PERLEXT']=read_out(" -MConfig -e'print \"$Config{ccflags} $Config{cccdlflags}\"'")
-	env['XSUBPP']=read_out(" -MConfig -e'print \"$Config{privlib}/ExtUtils/xsubpp$Config{exe_ext}\"'")
-	env['EXTUTILS_TYPEMAP']=read_out(" -MConfig -e'print \"$Config{privlib}/ExtUtils/typemap\"'")
+	def cmd_perl_config(s):
+		return perl+['-MConfig','-e','print \"%s\"'%s]
+	def cfg_str(cfg):
+		return self.cmd_and_log(cmd_perl_config(cfg))
+	def cfg_lst(cfg):
+		return Utils.to_list(cfg_str(cfg))
+	def find_xsubpp():
+		for var in('privlib','vendorlib'):
+			xsubpp=cfg_lst('$Config{%s}/ExtUtils/xsubpp$Config{exe_ext}'%var)
+			if xsubpp and os.path.isfile(xsubpp[0]):
+				return xsubpp
+		return self.find_program('xsubpp')
+	env.LINKFLAGS_PERLEXT=cfg_lst('$Config{lddlflags}')
+	env.INCLUDES_PERLEXT=cfg_lst('$Config{archlib}/CORE')
+	env.CFLAGS_PERLEXT=cfg_lst('$Config{ccflags} $Config{cccdlflags}')
+	env.EXTUTILS_TYPEMAP=cfg_lst('$Config{privlib}/ExtUtils/typemap')
+	env.XSUBPP=find_xsubpp()
 	if not getattr(Options.options,'perlarchdir',None):
-		env['ARCHDIR_PERL']=self.cmd_and_log(perl+" -MConfig -e'print $Config{sitearch}'")
+		env.ARCHDIR_PERL=cfg_str('$Config{sitearch}')
 	else:
-		env['ARCHDIR_PERL']=getattr(Options.options,'perlarchdir')
-	env['perlext_PATTERN']='%s.'+self.cmd_and_log(perl+" -MConfig -e'print $Config{dlext}'")
+		env.ARCHDIR_PERL=getattr(Options.options,'perlarchdir')
+	env.perlext_PATTERN='%s.'+cfg_str('$Config{dlext}')
 def options(opt):
 	opt.add_option('--with-perl-binary',type='string',dest='perlbinary',help='Specify alternate perl binary',default=None)
 	opt.add_option('--with-perl-archdir',type='string',dest='perlarchdir',help='Specify directory where to install arch specific files',default=None)
-
-before_method('apply_incpaths','apply_link','propagate_uselib_vars')(init_perlext)
-feature('perlext')(init_perlext)
-extension('.xs')(xsubpp_file)
-conf(check_perl_version)
-conf(check_perl_module)
-conf(check_perl_ext_devel)
\ No newline at end of file
--- pugl-0~svn32+dfsg0.orig/waflib/Tools/python.py
+++ pugl-0~svn32+dfsg0/waflib/Tools/python.py
@@ -1,10 +1,9 @@
 #! /usr/bin/env python
 # encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
 
 import os,sys
-from waflib import Utils,Options,Errors
-from waflib.Logs import debug,warn,info,error
+from waflib import Errors,Logs,Node,Options,Task,Utils
 from waflib.TaskGen import extension,before_method,after_method,feature
 from waflib.Configure import conf
 FRAG='''
@@ -17,8 +16,9 @@ extern "C" {
 #ifdef __cplusplus
 }
 #endif
-int main()
+int main(int argc, char **argv)
 {
+   (void)argc; (void)argv;
    Py_Initialize();
    Py_Finalize();
    return 0;
@@ -26,79 +26,103 @@ int main()
 '''
 INST='''
 import sys, py_compile
-py_compile.compile(sys.argv[1], sys.argv[2], sys.argv[3])
+py_compile.compile(sys.argv[1], sys.argv[2], sys.argv[3], True)
 '''
 DISTUTILS_IMP=['from distutils.sysconfig import get_config_var, get_python_lib']
-def process_py(self,node):
-	try:
-		if not self.bld.is_install:
-			return
-	except:
-		return
-	try:
-		if not self.install_path:
-			return
-	except AttributeError:
-		self.install_path='${PYTHONDIR}'
-	def inst_py(ctx):
-		install_from=getattr(self,'install_from',None)
-		if install_from:
-			install_from=self.path.find_dir(install_from)
-		install_pyfile(self,node,install_from)
-	self.bld.add_post_fun(inst_py)
-def install_pyfile(self,node,install_from=None):
-	from_node=install_from or node.parent
-	tsk=self.bld.install_as(self.install_path+'/'+node.path_from(from_node),node,postpone=False)
-	path=tsk.get_install_path()
-	if self.bld.is_install<0:
-		info("+ removing byte compiled python files")
-		for x in'co':
-			try:
-				os.remove(path+x)
-			except OSError:
-				pass
-	if self.bld.is_install>0:
-		try:
-			st1=os.stat(path)
-		except:
-			error('The python file is missing, this should not happen')
-		for x in['c','o']:
-			do_inst=self.env['PY'+x.upper()]
-			try:
-				st2=os.stat(path+x)
-			except OSError:
-				pass
-			else:
-				if st1.st_mtime<=st2.st_mtime:
-					do_inst=False
-			if do_inst:
-				lst=(x=='o')and[self.env['PYFLAGS_OPT']]or[]
-				(a,b,c)=(path,path+x,tsk.get_install_path(destdir=False)+x)
-				argv=self.env['PYTHON']+lst+['-c',INST,a,b,c]
-				info('+ byte compiling %r'%(path+x))
-				env=self.env.env or None
-				ret=Utils.subprocess.Popen(argv,env=env).wait()
-				if ret:
-					raise Errors.WafError('py%s compilation failed %r'%(x,path))
+@before_method('process_source')
+@feature('py')
 def feature_py(self):
-	pass
+	self.install_path=getattr(self,'install_path','${PYTHONDIR}')
+	install_from=getattr(self,'install_from',None)
+	if install_from and not isinstance(install_from,Node.Node):
+		install_from=self.path.find_dir(install_from)
+	self.install_from=install_from
+	ver=self.env.PYTHON_VERSION
+	if not ver:
+		self.bld.fatal('Installing python files requires PYTHON_VERSION, try conf.check_python_version')
+	if int(ver.replace('.',''))>31:
+		self.install_32=True
+@extension('.py')
+def process_py(self,node):
+	assert(hasattr(self,'install_path')),'add features="py" for target "%s" in "%s/wscript".'%(self.target,self.path.nice_path())
+	self.install_from=getattr(self,'install_from',None)
+	relative_trick=getattr(self,'relative_trick',True)
+	if self.install_from:
+		assert isinstance(self.install_from,Node.Node),'add features="py" for target "%s" in "%s/wscript" (%s).'%(self.target,self.path.nice_path(),type(self.install_from))
+	if self.install_path:
+		if self.install_from:
+			self.add_install_files(install_to=self.install_path,install_from=node,cwd=self.install_from,relative_trick=relative_trick)
+		else:
+			self.add_install_files(install_to=self.install_path,install_from=node,relative_trick=relative_trick)
+	lst=[]
+	if self.env.PYC:
+		lst.append('pyc')
+	if self.env.PYO:
+		lst.append('pyo')
+	if self.install_path:
+		if self.install_from:
+			target_dir=node.path_from(self.install_from)if relative_trick else node.name
+			pyd=Utils.subst_vars("%s/%s"%(self.install_path,target_dir),self.env)
+		else:
+			target_dir=node.path_from(self.path)if relative_trick else node.name
+			pyd=Utils.subst_vars("%s/%s"%(self.install_path,target_dir),self.env)
+	else:
+		pyd=node.abspath()
+	for ext in lst:
+		if self.env.PYTAG and not self.env.NOPYCACHE:
+			name=node.name[:-3]
+			pyobj=node.parent.get_bld().make_node('__pycache__').make_node("%s.%s.%s"%(name,self.env.PYTAG,ext))
+			pyobj.parent.mkdir()
+		else:
+			pyobj=node.change_ext(".%s"%ext)
+		tsk=self.create_task(ext,node,pyobj)
+		tsk.pyd=pyd
+		if self.install_path:
+			self.add_install_files(install_to=os.path.dirname(pyd),install_from=pyobj,cwd=node.parent.get_bld(),relative_trick=relative_trick)
+class pyc(Task.Task):
+	color='PINK'
+	def __str__(self):
+		node=self.outputs[0]
+		return node.path_from(node.ctx.launch_node())
+	def run(self):
+		cmd=[Utils.subst_vars('${PYTHON}',self.env),'-c',INST,self.inputs[0].abspath(),self.outputs[0].abspath(),self.pyd]
+		ret=self.generator.bld.exec_command(cmd)
+		return ret
+class pyo(Task.Task):
+	color='PINK'
+	def __str__(self):
+		node=self.outputs[0]
+		return node.path_from(node.ctx.launch_node())
+	def run(self):
+		cmd=[Utils.subst_vars('${PYTHON}',self.env),Utils.subst_vars('${PYFLAGS_OPT}',self.env),'-c',INST,self.inputs[0].abspath(),self.outputs[0].abspath(),self.pyd]
+		ret=self.generator.bld.exec_command(cmd)
+		return ret
+@feature('pyext')
+@before_method('propagate_uselib_vars','apply_link')
+@after_method('apply_bundle')
 def init_pyext(self):
+	self.uselib=self.to_list(getattr(self,'uselib',[]))
+	if not'PYEXT'in self.uselib:
+		self.uselib.append('PYEXT')
+	self.env.cshlib_PATTERN=self.env.cxxshlib_PATTERN=self.env.macbundle_PATTERN=self.env.pyext_PATTERN
+	self.env.fcshlib_PATTERN=self.env.dshlib_PATTERN=self.env.pyext_PATTERN
 	try:
 		if not self.install_path:
 			return
 	except AttributeError:
 		self.install_path='${PYTHONARCHDIR}'
-	self.uselib=self.to_list(getattr(self,'uselib',[]))
-	if not'PYEXT'in self.uselib:
-		self.uselib.append('PYEXT')
-	self.env['cshlib_PATTERN']=self.env['cxxshlib_PATTERN']=self.env['macbundle_PATTERN']=self.env['pyext_PATTERN']
+@feature('pyext')
+@before_method('apply_link','apply_bundle')
 def set_bundle(self):
 	if Utils.unversioned_sys_platform()=='darwin':
 		self.mac_bundle=True
+@before_method('propagate_uselib_vars')
+@feature('pyembed')
 def init_pyembed(self):
 	self.uselib=self.to_list(getattr(self,'uselib',[]))
 	if not'PYEMBED'in self.uselib:
 		self.uselib.append('PYEMBED')
+@conf
 def get_python_variables(self,variables,imports=None):
 	if not imports:
 		try:
@@ -118,48 +142,125 @@ def get_python_variables(self,variables,
 		out=self.cmd_and_log(self.env.PYTHON+['-c','\n'.join(program)],env=os_env)
 	except Errors.WafError:
 		self.fatal('The distutils module is unusable: install "python-devel"?')
+	self.to_log(out)
 	return_values=[]
-	for s in out.split('\n'):
+	for s in out.splitlines():
 		s=s.strip()
 		if not s:
 			continue
 		if s=='None':
 			return_values.append(None)
-		elif s[0]=="'"and s[-1]=="'":
-			return_values.append(s[1:-1])
+		elif(s[0]=="'"and s[-1]=="'")or(s[0]=='"'and s[-1]=='"'):
+			return_values.append(eval(s))
 		elif s[0].isdigit():
 			return_values.append(int(s))
 		else:break
 	return return_values
-def check_python_headers(conf):
-	if not conf.env['CC_NAME']and not conf.env['CXX_NAME']:
+@conf
+def test_pyembed(self,mode,msg='Testing pyembed configuration'):
+	self.check(header_name='Python.h',define_name='HAVE_PYEMBED',msg=msg,fragment=FRAG,errmsg='Could not build a python embedded interpreter',features='%s %sprogram pyembed'%(mode,mode))
+@conf
+def test_pyext(self,mode,msg='Testing pyext configuration'):
+	self.check(header_name='Python.h',define_name='HAVE_PYEXT',msg=msg,fragment=FRAG,errmsg='Could not build python extensions',features='%s %sshlib pyext'%(mode,mode))
+@conf
+def python_cross_compile(self,features='pyembed pyext'):
+	features=Utils.to_list(features)
+	if not('PYTHON_LDFLAGS'in self.environ or'PYTHON_PYEXT_LDFLAGS'in self.environ or'PYTHON_PYEMBED_LDFLAGS'in self.environ):
+		return False
+	for x in'PYTHON_VERSION PYTAG pyext_PATTERN'.split():
+		if not x in self.environ:
+			self.fatal('Please set %s in the os environment'%x)
+		else:
+			self.env[x]=self.environ[x]
+	xx=self.env.CXX_NAME and'cxx'or'c'
+	if'pyext'in features:
+		flags=self.environ.get('PYTHON_PYEXT_LDFLAGS',self.environ.get('PYTHON_LDFLAGS'))
+		if flags is None:
+			self.fatal('No flags provided through PYTHON_PYEXT_LDFLAGS as required')
+		else:
+			self.parse_flags(flags,'PYEXT')
+		self.test_pyext(xx)
+	if'pyembed'in features:
+		flags=self.environ.get('PYTHON_PYEMBED_LDFLAGS',self.environ.get('PYTHON_LDFLAGS'))
+		if flags is None:
+			self.fatal('No flags provided through PYTHON_PYEMBED_LDFLAGS as required')
+		else:
+			self.parse_flags(flags,'PYEMBED')
+		self.test_pyembed(xx)
+	return True
+@conf
+def check_python_headers(conf,features='pyembed pyext'):
+	features=Utils.to_list(features)
+	assert('pyembed'in features)or('pyext'in features),"check_python_headers features must include 'pyembed' and/or 'pyext'"
+	env=conf.env
+	if not env.CC_NAME and not env.CXX_NAME:
 		conf.fatal('load a compiler first (gcc, g++, ..)')
-	if not conf.env['PYTHON_VERSION']:
+	if conf.python_cross_compile(features):
+		return
+	if not env.PYTHON_VERSION:
 		conf.check_python_version()
-	env=conf.env
-	pybin=conf.env.PYTHON
+	pybin=env.PYTHON
 	if not pybin:
-		conf.fatal('could not find the python executable')
-	v='prefix SO LDFLAGS LIBDIR LIBPL INCLUDEPY Py_ENABLE_SHARED MACOSX_DEPLOYMENT_TARGET LDSHARED CFLAGS'.split()
+		conf.fatal('Could not find the python executable')
+	v='prefix SO LDFLAGS LIBDIR LIBPL INCLUDEPY Py_ENABLE_SHARED MACOSX_DEPLOYMENT_TARGET LDSHARED CFLAGS LDVERSION'.split()
 	try:
 		lst=conf.get_python_variables(["get_config_var('%s') or ''"%x for x in v])
 	except RuntimeError:
 		conf.fatal("Python development headers not found (-v for details).")
 	vals=['%s = %r'%(x,y)for(x,y)in zip(v,lst)]
-	conf.to_log("Configuration returned from %r:\n%r\n"%(pybin,'\n'.join(vals)))
+	conf.to_log("Configuration returned from %r:\n%s\n"%(pybin,'\n'.join(vals)))
 	dct=dict(zip(v,lst))
 	x='MACOSX_DEPLOYMENT_TARGET'
 	if dct[x]:
-		conf.env[x]=conf.environ[x]=dct[x]
-	env['pyext_PATTERN']='%s'+dct['SO']
+		env[x]=conf.environ[x]=dct[x]
+	env.pyext_PATTERN='%s'+dct['SO']
+	num='.'.join(env.PYTHON_VERSION.split('.')[:2])
+	conf.find_program([''.join(pybin)+'-config','python%s-config'%num,'python-config-%s'%num,'python%sm-config'%num],var='PYTHON_CONFIG',msg="python-config",mandatory=False)
+	if env.PYTHON_CONFIG:
+		if conf.env.HAVE_PYTHON_H:
+			return
+		all_flags=[['--cflags','--libs','--ldflags']]
+		if sys.hexversion<0x2070000:
+			all_flags=[[k]for k in all_flags[0]]
+		xx=env.CXX_NAME and'cxx'or'c'
+		if'pyembed'in features:
+			for flags in all_flags:
+				embedflags=flags+['--embed']
+				try:
+					conf.check_cfg(msg='Asking python-config for pyembed %r flags'%' '.join(embedflags),path=env.PYTHON_CONFIG,package='',uselib_store='PYEMBED',args=embedflags)
+				except conf.errors.ConfigurationError:
+					conf.check_cfg(msg='Asking python-config for pyembed %r flags'%' '.join(flags),path=env.PYTHON_CONFIG,package='',uselib_store='PYEMBED',args=flags)
+			try:
+				conf.test_pyembed(xx)
+			except conf.errors.ConfigurationError:
+				if dct['Py_ENABLE_SHARED']and dct['LIBDIR']:
+					env.append_unique('LIBPATH_PYEMBED',[dct['LIBDIR']])
+					conf.test_pyembed(xx)
+				else:
+					raise
+		if'pyext'in features:
+			for flags in all_flags:
+				conf.check_cfg(msg='Asking python-config for pyext %r flags'%' '.join(flags),path=env.PYTHON_CONFIG,package='',uselib_store='PYEXT',args=flags)
+			try:
+				conf.test_pyext(xx)
+			except conf.errors.ConfigurationError:
+				if dct['Py_ENABLE_SHARED']and dct['LIBDIR']:
+					env.append_unique('LIBPATH_PYEXT',[dct['LIBDIR']])
+					conf.test_pyext(xx)
+				else:
+					raise
+		conf.define('HAVE_PYTHON_H',1)
+		return
 	all_flags=dct['LDFLAGS']+' '+dct['CFLAGS']
 	conf.parse_flags(all_flags,'PYEMBED')
 	all_flags=dct['LDFLAGS']+' '+dct['LDSHARED']+' '+dct['CFLAGS']
 	conf.parse_flags(all_flags,'PYEXT')
 	result=None
-	for name in('python'+env['PYTHON_VERSION'],'python'+env['PYTHON_VERSION'].replace('.','')):
-		if not result and env['LIBPATH_PYEMBED']:
-			path=env['LIBPATH_PYEMBED']
+	if not dct["LDVERSION"]:
+		dct["LDVERSION"]=env.PYTHON_VERSION
+	for name in('python'+dct['LDVERSION'],'python'+env.PYTHON_VERSION+'m','python'+env.PYTHON_VERSION.replace('.','')):
+		if not result and env.LIBPATH_PYEMBED:
+			path=env.LIBPATH_PYEMBED
 			conf.to_log("\n\n# Trying default LIBPATH_PYEMBED: %r\n"%path)
 			result=conf.check(lib=name,uselib='PYEMBED',libpath=path,mandatory=False,msg='Checking for library %s in LIBPATH_PYEMBED'%name)
 		if not result and dct['LIBDIR']:
@@ -177,35 +278,22 @@ def check_python_headers(conf):
 		if result:
 			break
 	if result:
-		env['LIBPATH_PYEMBED']=path
+		env.LIBPATH_PYEMBED=path
 		env.append_value('LIB_PYEMBED',[name])
 	else:
 		conf.to_log("\n\n### LIB NOT FOUND\n")
-	if(Utils.is_win32 or sys.platform.startswith('os2')or dct['Py_ENABLE_SHARED']):
-		env['LIBPATH_PYEXT']=env['LIBPATH_PYEMBED']
-		env['LIB_PYEXT']=env['LIB_PYEMBED']
-	num='.'.join(env['PYTHON_VERSION'].split('.')[:2])
-	conf.find_program(['python%s-config'%num,'python-config-%s'%num,'python%sm-config'%num],var='PYTHON_CONFIG',mandatory=False)
-	includes=[]
-	if conf.env.PYTHON_CONFIG:
-		for incstr in conf.cmd_and_log([conf.env.PYTHON_CONFIG,'--includes']).strip().split():
-			if(incstr.startswith('-I')or incstr.startswith('/I')):
-				incstr=incstr[2:]
-			if incstr not in includes:
-				includes.append(incstr)
-		conf.to_log("Include path for Python extensions (found via python-config --includes): %r\n"%(includes,))
-		env['INCLUDES_PYEXT']=includes
-		env['INCLUDES_PYEMBED']=includes
-	else:
-		conf.to_log("Include path for Python extensions ""(found via distutils module): %r\n"%(dct['INCLUDEPY'],))
-		env['INCLUDES_PYEXT']=[dct['INCLUDEPY']]
-		env['INCLUDES_PYEMBED']=[dct['INCLUDEPY']]
-	if env['CC_NAME']=='gcc':
-		env.append_value('CFLAGS_PYEMBED',['-fno-strict-aliasing'])
-		env.append_value('CFLAGS_PYEXT',['-fno-strict-aliasing'])
-	if env['CXX_NAME']=='gcc':
-		env.append_value('CXXFLAGS_PYEMBED',['-fno-strict-aliasing'])
-		env.append_value('CXXFLAGS_PYEXT',['-fno-strict-aliasing'])
+	if Utils.is_win32 or dct['Py_ENABLE_SHARED']:
+		env.LIBPATH_PYEXT=env.LIBPATH_PYEMBED
+		env.LIB_PYEXT=env.LIB_PYEMBED
+	conf.to_log("Include path for Python extensions (found via distutils module): %r\n"%(dct['INCLUDEPY'],))
+	env.INCLUDES_PYEXT=[dct['INCLUDEPY']]
+	env.INCLUDES_PYEMBED=[dct['INCLUDEPY']]
+	if env.CC_NAME=='gcc':
+		env.append_unique('CFLAGS_PYEMBED',['-fno-strict-aliasing'])
+		env.append_unique('CFLAGS_PYEXT',['-fno-strict-aliasing'])
+	if env.CXX_NAME=='gcc':
+		env.append_unique('CXXFLAGS_PYEMBED',['-fno-strict-aliasing'])
+		env.append_unique('CXXFLAGS_PYEXT',['-fno-strict-aliasing'])
 	if env.CC_NAME=="msvc":
 		from distutils.msvccompiler import MSVCCompiler
 		dist_compiler=MSVCCompiler()
@@ -213,73 +301,75 @@ def check_python_headers(conf):
 		env.append_value('CFLAGS_PYEXT',dist_compiler.compile_options)
 		env.append_value('CXXFLAGS_PYEXT',dist_compiler.compile_options)
 		env.append_value('LINKFLAGS_PYEXT',dist_compiler.ldflags_shared)
-	try:
-		conf.check(header_name='Python.h',define_name='HAVE_PYTHON_H',uselib='PYEMBED',fragment=FRAG,errmsg='Could not find the python development headers')
-	except conf.errors.ConfigurationError:
-		conf.check_cfg(path=conf.env.PYTHON_CONFIG,package='',uselib_store='PYEMBED',args=['--cflags','--libs'])
-		conf.check(header_name='Python.h',define_name='HAVE_PYTHON_H',msg='Getting the python flags from python-config',uselib='PYEMBED',fragment=FRAG,errmsg='Could not find the python development headers elsewhere')
+	conf.check(header_name='Python.h',define_name='HAVE_PYTHON_H',uselib='PYEMBED',fragment=FRAG,errmsg='Distutils not installed? Broken python installation? Get python-config now!')
+@conf
 def check_python_version(conf,minver=None):
 	assert minver is None or isinstance(minver,tuple)
-	pybin=conf.env['PYTHON']
+	pybin=conf.env.PYTHON
 	if not pybin:
 		conf.fatal('could not find the python executable')
 	cmd=pybin+['-c','import sys\nfor x in sys.version_info: print(str(x))']
-	debug('python: Running python command %r'%cmd)
+	Logs.debug('python: Running python command %r',cmd)
 	lines=conf.cmd_and_log(cmd).split()
-	assert len(lines)==5,"found %i lines, expected 5: %r"%(len(lines),lines)
+	assert len(lines)==5,"found %r lines, expected 5: %r"%(len(lines),lines)
 	pyver_tuple=(int(lines[0]),int(lines[1]),int(lines[2]),lines[3],int(lines[4]))
 	result=(minver is None)or(pyver_tuple>=minver)
 	if result:
 		pyver='.'.join([str(x)for x in pyver_tuple[:2]])
-		conf.env['PYTHON_VERSION']=pyver
-		if'PYTHONDIR'in conf.environ:
+		conf.env.PYTHON_VERSION=pyver
+		if'PYTHONDIR'in conf.env:
+			pydir=conf.env.PYTHONDIR
+		elif'PYTHONDIR'in conf.environ:
 			pydir=conf.environ['PYTHONDIR']
 		else:
 			if Utils.is_win32:
-				(python_LIBDEST,pydir)=conf.get_python_variables(["get_config_var('LIBDEST') or ''","get_python_lib(standard_lib=0, prefix=%r) or ''"%conf.env['PREFIX']])
+				(python_LIBDEST,pydir)=conf.get_python_variables(["get_config_var('LIBDEST') or ''","get_python_lib(standard_lib=0) or ''"])
 			else:
 				python_LIBDEST=None
-				(pydir,)=conf.get_python_variables(["get_python_lib(standard_lib=0, prefix=%r) or ''"%conf.env['PREFIX']])
+				(pydir,)=conf.get_python_variables(["get_python_lib(standard_lib=0, prefix=%r) or ''"%conf.env.PREFIX])
 			if python_LIBDEST is None:
-				if conf.env['LIBDIR']:
-					python_LIBDEST=os.path.join(conf.env['LIBDIR'],"python"+pyver)
+				if conf.env.LIBDIR:
+					python_LIBDEST=os.path.join(conf.env.LIBDIR,'python'+pyver)
 				else:
-					python_LIBDEST=os.path.join(conf.env['PREFIX'],"lib","python"+pyver)
-		if'PYTHONARCHDIR'in conf.environ:
+					python_LIBDEST=os.path.join(conf.env.PREFIX,'lib','python'+pyver)
+		if'PYTHONARCHDIR'in conf.env:
+			pyarchdir=conf.env.PYTHONARCHDIR
+		elif'PYTHONARCHDIR'in conf.environ:
 			pyarchdir=conf.environ['PYTHONARCHDIR']
 		else:
-			(pyarchdir,)=conf.get_python_variables(["get_python_lib(plat_specific=1, standard_lib=0, prefix=%r) or ''"%conf.env['PREFIX']])
+			(pyarchdir,)=conf.get_python_variables(["get_python_lib(plat_specific=1, standard_lib=0, prefix=%r) or ''"%conf.env.PREFIX])
 			if not pyarchdir:
 				pyarchdir=pydir
 		if hasattr(conf,'define'):
 			conf.define('PYTHONDIR',pydir)
 			conf.define('PYTHONARCHDIR',pyarchdir)
-		conf.env['PYTHONDIR']=pydir
-		conf.env['PYTHONARCHDIR']=pyarchdir
+		conf.env.PYTHONDIR=pydir
+		conf.env.PYTHONARCHDIR=pyarchdir
 	pyver_full='.'.join(map(str,pyver_tuple[:3]))
 	if minver is None:
 		conf.msg('Checking for python version',pyver_full)
 	else:
 		minver_str='.'.join(map(str,minver))
-		conf.msg('Checking for python version',pyver_tuple,">= %s"%(minver_str,)and'GREEN'or'YELLOW')
+		conf.msg('Checking for python version >= %s'%(minver_str,),pyver_full,color=result and'GREEN'or'YELLOW')
 	if not result:
 		conf.fatal('The python version is too old, expecting %r'%(minver,))
 PYTHON_MODULE_TEMPLATE='''
 import %s as current_module
 version = getattr(current_module, '__version__', None)
 if version is not None:
-    print(str(version))
+	print(str(version))
 else:
-    print('unknown version')
+	print('unknown version')
 '''
+@conf
 def check_python_module(conf,module_name,condition=''):
-	msg='Python module %s'%module_name
+	msg="Checking for python module %r"%module_name
 	if condition:
 		msg='%s (%s)'%(msg,condition)
 	conf.start_msg(msg)
 	try:
-		ret=conf.cmd_and_log(conf.env['PYTHON']+['-c',PYTHON_MODULE_TEMPLATE%module_name])
-	except Exception:
+		ret=conf.cmd_and_log(conf.env.PYTHON+['-c',PYTHON_MODULE_TEMPLATE%module_name])
+	except Errors.WafError:
 		conf.end_msg(False)
 		conf.fatal('Could not find the python module %r'%module_name)
 	ret=ret.strip()
@@ -303,34 +393,30 @@ def check_python_module(conf,module_name
 		else:
 			conf.end_msg(ret)
 def configure(conf):
-	try:
-		conf.find_program('python',var='PYTHON')
-	except conf.errors.ConfigurationError:
-		warn("could not find a python executable, setting to sys.executable '%s'"%sys.executable)
-		conf.env.PYTHON=sys.executable
-	if conf.env.PYTHON!=sys.executable:
-		warn("python executable '%s' different from sys.executable '%s'"%(conf.env.PYTHON,sys.executable))
-	conf.env.PYTHON=conf.cmd_to_list(conf.env.PYTHON)
 	v=conf.env
-	v['PYCMD']='"import sys, py_compile;py_compile.compile(sys.argv[1], sys.argv[2])"'
-	v['PYFLAGS']=''
-	v['PYFLAGS_OPT']='-O'
-	v['PYC']=getattr(Options.options,'pyc',1)
-	v['PYO']=getattr(Options.options,'pyo',1)
+	if getattr(Options.options,'pythondir',None):
+		v.PYTHONDIR=Options.options.pythondir
+	if getattr(Options.options,'pythonarchdir',None):
+		v.PYTHONARCHDIR=Options.options.pythonarchdir
+	if getattr(Options.options,'nopycache',None):
+		v.NOPYCACHE=Options.options.nopycache
+	if not v.PYTHON:
+		v.PYTHON=[getattr(Options.options,'python',None)or sys.executable]
+	v.PYTHON=Utils.to_list(v.PYTHON)
+	conf.find_program('python',var='PYTHON')
+	v.PYFLAGS=''
+	v.PYFLAGS_OPT='-O'
+	v.PYC=getattr(Options.options,'pyc',1)
+	v.PYO=getattr(Options.options,'pyo',1)
+	try:
+		v.PYTAG=conf.cmd_and_log(conf.env.PYTHON+['-c',"import sys\ntry:\n print(sys.implementation.cache_tag)\nexcept AttributeError:\n import imp\n print(imp.get_tag())\n"]).strip()
+	except Errors.WafError:
+		pass
 def options(opt):
-	opt.add_option('--nopyc',action='store_false',default=1,help='Do not install bytecode compiled .pyc files (configuration) [Default:install]',dest='pyc')
-	opt.add_option('--nopyo',action='store_false',default=1,help='Do not install optimised compiled .pyo files (configuration) [Default:install]',dest='pyo')
-
-extension('.py')(process_py)
-feature('py')(feature_py)
-feature('pyext')(init_pyext)
-before_method('propagate_uselib_vars','apply_link')(init_pyext)
-after_method('apply_bundle')(init_pyext)
-feature('pyext')(set_bundle)
-before_method('apply_link','apply_bundle')(set_bundle)
-before_method('propagate_uselib_vars')(init_pyembed)
-feature('pyembed')(init_pyembed)
-conf(get_python_variables)
-conf(check_python_headers)
-conf(check_python_version)
-conf(check_python_module)
\ No newline at end of file
+	pyopt=opt.add_option_group("Python Options")
+	pyopt.add_option('--nopyc',dest='pyc',action='store_false',default=1,help='Do not install bytecode compiled .pyc files (configuration) [Default:install]')
+	pyopt.add_option('--nopyo',dest='pyo',action='store_false',default=1,help='Do not install optimised compiled .pyo files (configuration) [Default:install]')
+	pyopt.add_option('--nopycache',dest='nopycache',action='store_true',help='Do not use __pycache__ directory to install objects [Default:auto]')
+	pyopt.add_option('--python',dest="python",help='python binary to be used [Default: %s]'%sys.executable)
+	pyopt.add_option('--pythondir',dest='pythondir',help='Installation path for python modules (py, platform-independent .py and .pyc files)')
+	pyopt.add_option('--pythonarchdir',dest='pythonarchdir',help='Installation path for python extension (pyext, platform-dependent .so or .dylib files)')
--- /dev/null
+++ pugl-0~svn32+dfsg0/waflib/Tools/qt5.py
@@ -0,0 +1,499 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+from __future__ import with_statement
+try:
+	from xml.sax import make_parser
+	from xml.sax.handler import ContentHandler
+except ImportError:
+	has_xml=False
+	ContentHandler=object
+else:
+	has_xml=True
+import os,sys,re
+from waflib.Tools import cxx
+from waflib import Build,Task,Utils,Options,Errors,Context
+from waflib.TaskGen import feature,after_method,extension,before_method
+from waflib.Configure import conf
+from waflib import Logs
+MOC_H=['.h','.hpp','.hxx','.hh']
+EXT_RCC=['.qrc']
+EXT_UI=['.ui']
+EXT_QT5=['.cpp','.cc','.cxx','.C']
+class qxx(Task.classes['cxx']):
+	def __init__(self,*k,**kw):
+		Task.Task.__init__(self,*k,**kw)
+		self.moc_done=0
+	def runnable_status(self):
+		if self.moc_done:
+			return Task.Task.runnable_status(self)
+		else:
+			for t in self.run_after:
+				if not t.hasrun:
+					return Task.ASK_LATER
+			self.add_moc_tasks()
+			return Task.Task.runnable_status(self)
+	def create_moc_task(self,h_node,m_node):
+		try:
+			moc_cache=self.generator.bld.moc_cache
+		except AttributeError:
+			moc_cache=self.generator.bld.moc_cache={}
+		try:
+			return moc_cache[h_node]
+		except KeyError:
+			tsk=moc_cache[h_node]=Task.classes['moc'](env=self.env,generator=self.generator)
+			tsk.set_inputs(h_node)
+			tsk.set_outputs(m_node)
+			tsk.env.append_unique('MOC_FLAGS','-i')
+			if self.generator:
+				self.generator.tasks.append(tsk)
+			gen=self.generator.bld.producer
+			gen.outstanding.append(tsk)
+			gen.total+=1
+			return tsk
+		else:
+			delattr(self,'cache_sig')
+	def add_moc_tasks(self):
+		node=self.inputs[0]
+		bld=self.generator.bld
+		if bld.is_install==Build.UNINSTALL:
+			return
+		try:
+			self.signature()
+		except KeyError:
+			pass
+		else:
+			delattr(self,'cache_sig')
+		include_nodes=[node.parent]+self.generator.includes_nodes
+		moctasks=[]
+		mocfiles=set()
+		for d in bld.raw_deps.get(self.uid(),[]):
+			if not d.endswith('.moc'):
+				continue
+			if d in mocfiles:
+				continue
+			mocfiles.add(d)
+			h_node=None
+			base2=d[:-4]
+			prefix=node.name[:node.name.rfind('.')]
+			if base2==prefix:
+				h_node=node
+			else:
+				for x in include_nodes:
+					for e in MOC_H:
+						h_node=x.find_node(base2+e)
+						if h_node:
+							break
+					else:
+						continue
+					break
+			if h_node:
+				m_node=h_node.change_ext('.moc')
+			else:
+				raise Errors.WafError('No source found for %r which is a moc file'%d)
+			task=self.create_moc_task(h_node,m_node)
+			moctasks.append(task)
+		self.run_after.update(set(moctasks))
+		self.moc_done=1
+class trans_update(Task.Task):
+	run_str='${QT_LUPDATE} ${SRC} -ts ${TGT}'
+	color='BLUE'
+class XMLHandler(ContentHandler):
+	def __init__(self):
+		ContentHandler.__init__(self)
+		self.buf=[]
+		self.files=[]
+	def startElement(self,name,attrs):
+		if name=='file':
+			self.buf=[]
+	def endElement(self,name):
+		if name=='file':
+			self.files.append(str(''.join(self.buf)))
+	def characters(self,cars):
+		self.buf.append(cars)
+@extension(*EXT_RCC)
+def create_rcc_task(self,node):
+	rcnode=node.change_ext('_rc.%d.cpp'%self.idx)
+	self.create_task('rcc',node,rcnode)
+	cpptask=self.create_task('cxx',rcnode,rcnode.change_ext('.o'))
+	try:
+		self.compiled_tasks.append(cpptask)
+	except AttributeError:
+		self.compiled_tasks=[cpptask]
+	return cpptask
+@extension(*EXT_UI)
+def create_uic_task(self,node):
+	try:
+		uic_cache=self.bld.uic_cache
+	except AttributeError:
+		uic_cache=self.bld.uic_cache={}
+	if node not in uic_cache:
+		uictask=uic_cache[node]=self.create_task('ui5',node)
+		uictask.outputs=[node.parent.find_or_declare(self.env.ui_PATTERN%node.name[:-3])]
+@extension('.ts')
+def add_lang(self,node):
+	self.lang=self.to_list(getattr(self,'lang',[]))+[node]
+@feature('qt5')
+@before_method('process_source')
+def process_mocs(self):
+	lst=self.to_nodes(getattr(self,'moc',[]))
+	self.source=self.to_list(getattr(self,'source',[]))
+	for x in lst:
+		prefix=x.name[:x.name.rfind('.')]
+		moc_target='moc_%s.%d.cpp'%(prefix,self.idx)
+		moc_node=x.parent.find_or_declare(moc_target)
+		self.source.append(moc_node)
+		self.create_task('moc',x,moc_node)
+@feature('qt5')
+@after_method('apply_link')
+def apply_qt5(self):
+	if getattr(self,'lang',None):
+		qmtasks=[]
+		for x in self.to_list(self.lang):
+			if isinstance(x,str):
+				x=self.path.find_resource(x+'.ts')
+			qmtasks.append(self.create_task('ts2qm',x,x.change_ext('.%d.qm'%self.idx)))
+		if getattr(self,'update',None)and Options.options.trans_qt5:
+			cxxnodes=[a.inputs[0]for a in self.compiled_tasks]+[a.inputs[0]for a in self.tasks if a.inputs and a.inputs[0].name.endswith('.ui')]
+			for x in qmtasks:
+				self.create_task('trans_update',cxxnodes,x.inputs)
+		if getattr(self,'langname',None):
+			qmnodes=[x.outputs[0]for x in qmtasks]
+			rcnode=self.langname
+			if isinstance(rcnode,str):
+				rcnode=self.path.find_or_declare(rcnode+('.%d.qrc'%self.idx))
+			t=self.create_task('qm2rcc',qmnodes,rcnode)
+			k=create_rcc_task(self,t.outputs[0])
+			self.link_task.inputs.append(k.outputs[0])
+	lst=[]
+	for flag in self.to_list(self.env.CXXFLAGS):
+		if len(flag)<2:
+			continue
+		f=flag[0:2]
+		if f in('-D','-I','/D','/I'):
+			if(f[0]=='/'):
+				lst.append('-'+flag[1:])
+			else:
+				lst.append(flag)
+	self.env.append_value('MOC_FLAGS',lst)
+@extension(*EXT_QT5)
+def cxx_hook(self,node):
+	return self.create_compiled_task('qxx',node)
+class rcc(Task.Task):
+	color='BLUE'
+	run_str='${QT_RCC} -name ${tsk.rcname()} ${SRC[0].abspath()} ${RCC_ST} -o ${TGT}'
+	ext_out=['.h']
+	def rcname(self):
+		return os.path.splitext(self.inputs[0].name)[0]
+	def scan(self):
+		if not has_xml:
+			Logs.error('No xml.sax support was found, rcc dependencies will be incomplete!')
+			return([],[])
+		parser=make_parser()
+		curHandler=XMLHandler()
+		parser.setContentHandler(curHandler)
+		with open(self.inputs[0].abspath(),'r')as f:
+			parser.parse(f)
+		nodes=[]
+		names=[]
+		root=self.inputs[0].parent
+		for x in curHandler.files:
+			nd=root.find_resource(x)
+			if nd:
+				nodes.append(nd)
+			else:
+				names.append(x)
+		return(nodes,names)
+	def quote_flag(self,x):
+		return x
+class moc(Task.Task):
+	color='BLUE'
+	run_str='${QT_MOC} ${MOC_FLAGS} ${MOCCPPPATH_ST:INCPATHS} ${MOCDEFINES_ST:DEFINES} ${SRC} ${MOC_ST} ${TGT}'
+	def quote_flag(self,x):
+		return x
+class ui5(Task.Task):
+	color='BLUE'
+	run_str='${QT_UIC} ${SRC} -o ${TGT}'
+	ext_out=['.h']
+class ts2qm(Task.Task):
+	color='BLUE'
+	run_str='${QT_LRELEASE} ${QT_LRELEASE_FLAGS} ${SRC} -qm ${TGT}'
+class qm2rcc(Task.Task):
+	color='BLUE'
+	after='ts2qm'
+	def run(self):
+		txt='\n'.join(['<file>%s</file>'%k.path_from(self.outputs[0].parent)for k in self.inputs])
+		code='<!DOCTYPE RCC><RCC version="1.0">\n<qresource>\n%s\n</qresource>\n</RCC>'%txt
+		self.outputs[0].write(code)
+def configure(self):
+	self.find_qt5_binaries()
+	self.set_qt5_libs_dir()
+	self.set_qt5_libs_to_check()
+	self.set_qt5_defines()
+	self.find_qt5_libraries()
+	self.add_qt5_rpath()
+	self.simplify_qt5_libs()
+	if not has_xml:
+		Logs.error('No xml.sax support was found, rcc dependencies will be incomplete!')
+	if'COMPILER_CXX'not in self.env:
+		self.fatal('No CXX compiler defined: did you forget to configure compiler_cxx first?')
+	frag='#include <QMap>\nint main(int argc, char **argv) {QMap<int,int> m;return m.keys().size();}\n'
+	uses='QT5CORE'
+	for flag in[[],'-fPIE','-fPIC','-std=c++11',['-std=c++11','-fPIE'],['-std=c++11','-fPIC']]:
+		msg='See if Qt files compile '
+		if flag:
+			msg+='with %s'%flag
+		try:
+			self.check(features='qt5 cxx',use=uses,uselib_store='qt5',cxxflags=flag,fragment=frag,msg=msg)
+		except self.errors.ConfigurationError:
+			pass
+		else:
+			break
+	else:
+		self.fatal('Could not build a simple Qt application')
+	if Utils.unversioned_sys_platform()=='freebsd':
+		frag='#include <QMap>\nint main(int argc, char **argv) {QMap<int,int> m;return m.keys().size();}\n'
+		try:
+			self.check(features='qt5 cxx cxxprogram',use=uses,fragment=frag,msg='Can we link Qt programs on FreeBSD directly?')
+		except self.errors.ConfigurationError:
+			self.check(features='qt5 cxx cxxprogram',use=uses,uselib_store='qt5',libpath='/usr/local/lib',fragment=frag,msg='Is /usr/local/lib required?')
+@conf
+def find_qt5_binaries(self):
+	env=self.env
+	opt=Options.options
+	qtdir=getattr(opt,'qtdir','')
+	qtbin=getattr(opt,'qtbin','')
+	paths=[]
+	if qtdir:
+		qtbin=os.path.join(qtdir,'bin')
+	if not qtdir:
+		qtdir=self.environ.get('QT5_ROOT','')
+		qtbin=self.environ.get('QT5_BIN')or os.path.join(qtdir,'bin')
+	if qtbin:
+		paths=[qtbin]
+	if not qtdir:
+		paths=self.environ.get('PATH','').split(os.pathsep)
+		paths.extend(['/usr/share/qt5/bin','/usr/local/lib/qt5/bin'])
+		try:
+			lst=Utils.listdir('/usr/local/Trolltech/')
+		except OSError:
+			pass
+		else:
+			if lst:
+				lst.sort()
+				lst.reverse()
+				qtdir='/usr/local/Trolltech/%s/'%lst[0]
+				qtbin=os.path.join(qtdir,'bin')
+				paths.append(qtbin)
+	cand=None
+	prev_ver=['5','0','0']
+	for qmk in('qmake-qt5','qmake5','qmake'):
+		try:
+			qmake=self.find_program(qmk,path_list=paths)
+		except self.errors.ConfigurationError:
+			pass
+		else:
+			try:
+				version=self.cmd_and_log(qmake+['-query','QT_VERSION']).strip()
+			except self.errors.WafError:
+				pass
+			else:
+				if version:
+					new_ver=version.split('.')
+					if new_ver>prev_ver:
+						cand=qmake
+						prev_ver=new_ver
+	if not cand:
+		try:
+			self.find_program('qtchooser')
+		except self.errors.ConfigurationError:
+			pass
+		else:
+			cmd=self.env.QTCHOOSER+['-qt=5','-run-tool=qmake']
+			try:
+				version=self.cmd_and_log(cmd+['-query','QT_VERSION'])
+			except self.errors.WafError:
+				pass
+			else:
+				cand=cmd
+	if cand:
+		self.env.QMAKE=cand
+	else:
+		self.fatal('Could not find qmake for qt5')
+	self.env.QT_HOST_BINS=qtbin=self.cmd_and_log(self.env.QMAKE+['-query','QT_HOST_BINS']).strip()
+	paths.insert(0,qtbin)
+	def find_bin(lst,var):
+		if var in env:
+			return
+		for f in lst:
+			try:
+				ret=self.find_program(f,path_list=paths)
+			except self.errors.ConfigurationError:
+				pass
+			else:
+				env[var]=ret
+				break
+	find_bin(['uic-qt5','uic'],'QT_UIC')
+	if not env.QT_UIC:
+		self.fatal('cannot find the uic compiler for qt5')
+	self.start_msg('Checking for uic version')
+	uicver=self.cmd_and_log(env.QT_UIC+['-version'],output=Context.BOTH)
+	uicver=''.join(uicver).strip()
+	uicver=uicver.replace('Qt User Interface Compiler ','').replace('User Interface Compiler for Qt','')
+	self.end_msg(uicver)
+	if uicver.find(' 3.')!=-1 or uicver.find(' 4.')!=-1:
+		self.fatal('this uic compiler is for qt3 or qt4, add uic for qt5 to your path')
+	find_bin(['moc-qt5','moc'],'QT_MOC')
+	find_bin(['rcc-qt5','rcc'],'QT_RCC')
+	find_bin(['lrelease-qt5','lrelease'],'QT_LRELEASE')
+	find_bin(['lupdate-qt5','lupdate'],'QT_LUPDATE')
+	env.UIC_ST='%s -o %s'
+	env.MOC_ST='-o'
+	env.ui_PATTERN='ui_%s.h'
+	env.QT_LRELEASE_FLAGS=['-silent']
+	env.MOCCPPPATH_ST='-I%s'
+	env.MOCDEFINES_ST='-D%s'
+@conf
+def set_qt5_libs_dir(self):
+	env=self.env
+	qtlibs=getattr(Options.options,'qtlibs',None)or self.environ.get('QT5_LIBDIR')
+	if not qtlibs:
+		try:
+			qtlibs=self.cmd_and_log(env.QMAKE+['-query','QT_INSTALL_LIBS']).strip()
+		except Errors.WafError:
+			qtdir=self.cmd_and_log(env.QMAKE+['-query','QT_INSTALL_PREFIX']).strip()
+			qtlibs=os.path.join(qtdir,'lib')
+	self.msg('Found the Qt5 libraries in',qtlibs)
+	env.QTLIBS=qtlibs
+@conf
+def find_single_qt5_lib(self,name,uselib,qtlibs,qtincludes,force_static):
+	env=self.env
+	if force_static:
+		exts=('.a','.lib')
+		prefix='STLIB'
+	else:
+		exts=('.so','.lib')
+		prefix='LIB'
+	def lib_names():
+		for x in exts:
+			for k in('','5')if Utils.is_win32 else['']:
+				for p in('lib',''):
+					yield(p,name,k,x)
+	for tup in lib_names():
+		k=''.join(tup)
+		path=os.path.join(qtlibs,k)
+		if os.path.exists(path):
+			if env.DEST_OS=='win32':
+				libval=''.join(tup[:-1])
+			else:
+				libval=name
+			env.append_unique(prefix+'_'+uselib,libval)
+			env.append_unique('%sPATH_%s'%(prefix,uselib),qtlibs)
+			env.append_unique('INCLUDES_'+uselib,qtincludes)
+			env.append_unique('INCLUDES_'+uselib,os.path.join(qtincludes,name.replace('Qt5','Qt')))
+			return k
+	return False
+@conf
+def find_qt5_libraries(self):
+	env=self.env
+	qtincludes=self.environ.get('QT5_INCLUDES')or self.cmd_and_log(env.QMAKE+['-query','QT_INSTALL_HEADERS']).strip()
+	force_static=self.environ.get('QT5_FORCE_STATIC')
+	try:
+		if self.environ.get('QT5_XCOMPILE'):
+			self.fatal('QT5_XCOMPILE Disables pkg-config detection')
+		self.check_cfg(atleast_pkgconfig_version='0.1')
+	except self.errors.ConfigurationError:
+		for i in self.qt5_vars:
+			uselib=i.upper()
+			if Utils.unversioned_sys_platform()=='darwin':
+				fwk=i.replace('Qt5','Qt')
+				frameworkName=fwk+'.framework'
+				qtDynamicLib=os.path.join(env.QTLIBS,frameworkName,fwk)
+				if os.path.exists(qtDynamicLib):
+					env.append_unique('FRAMEWORK_'+uselib,fwk)
+					env.append_unique('FRAMEWORKPATH_'+uselib,env.QTLIBS)
+					self.msg('Checking for %s'%i,qtDynamicLib,'GREEN')
+				else:
+					self.msg('Checking for %s'%i,False,'YELLOW')
+				env.append_unique('INCLUDES_'+uselib,os.path.join(env.QTLIBS,frameworkName,'Headers'))
+			else:
+				ret=self.find_single_qt5_lib(i,uselib,env.QTLIBS,qtincludes,force_static)
+				if not force_static and not ret:
+					ret=self.find_single_qt5_lib(i,uselib,env.QTLIBS,qtincludes,True)
+				self.msg('Checking for %s'%i,ret,'GREEN'if ret else'YELLOW')
+	else:
+		path='%s:%s:%s/pkgconfig:/usr/lib/qt5/lib/pkgconfig:/opt/qt5/lib/pkgconfig:/usr/lib/qt5/lib:/opt/qt5/lib'%(self.environ.get('PKG_CONFIG_PATH',''),env.QTLIBS,env.QTLIBS)
+		for i in self.qt5_vars:
+			self.check_cfg(package=i,args='--cflags --libs',mandatory=False,force_static=force_static,pkg_config_path=path)
+@conf
+def simplify_qt5_libs(self):
+	env=self.env
+	def process_lib(vars_,coreval):
+		for d in vars_:
+			var=d.upper()
+			if var=='QTCORE':
+				continue
+			value=env['LIBPATH_'+var]
+			if value:
+				core=env[coreval]
+				accu=[]
+				for lib in value:
+					if lib in core:
+						continue
+					accu.append(lib)
+				env['LIBPATH_'+var]=accu
+	process_lib(self.qt5_vars,'LIBPATH_QTCORE')
+@conf
+def add_qt5_rpath(self):
+	env=self.env
+	if getattr(Options.options,'want_rpath',False):
+		def process_rpath(vars_,coreval):
+			for d in vars_:
+				var=d.upper()
+				value=env['LIBPATH_'+var]
+				if value:
+					core=env[coreval]
+					accu=[]
+					for lib in value:
+						if var!='QTCORE':
+							if lib in core:
+								continue
+						accu.append('-Wl,--rpath='+lib)
+					env['RPATH_'+var]=accu
+		process_rpath(self.qt5_vars,'LIBPATH_QTCORE')
+@conf
+def set_qt5_libs_to_check(self):
+	self.qt5_vars=Utils.to_list(getattr(self,'qt5_vars',[]))
+	if not self.qt5_vars:
+		dirlst=Utils.listdir(self.env.QTLIBS)
+		pat=self.env.cxxshlib_PATTERN
+		if Utils.is_win32:
+			pat=pat.replace('.dll','.lib')
+		if self.environ.get('QT5_FORCE_STATIC'):
+			pat=self.env.cxxstlib_PATTERN
+		if Utils.unversioned_sys_platform()=='darwin':
+			pat=r"%s\.framework"
+		re_qt=re.compile(pat%'Qt5?(?P<name>.*)'+'$')
+		for x in dirlst:
+			m=re_qt.match(x)
+			if m:
+				self.qt5_vars.append("Qt5%s"%m.group('name'))
+		if not self.qt5_vars:
+			self.fatal('cannot find any Qt5 library (%r)'%self.env.QTLIBS)
+	qtextralibs=getattr(Options.options,'qtextralibs',None)
+	if qtextralibs:
+		self.qt5_vars.extend(qtextralibs.split(','))
+@conf
+def set_qt5_defines(self):
+	if sys.platform!='win32':
+		return
+	for x in self.qt5_vars:
+		y=x.replace('Qt5','Qt')[2:].upper()
+		self.env.append_unique('DEFINES_%s'%x.upper(),'QT_%s_LIB'%y)
+def options(opt):
+	opt.add_option('--want-rpath',action='store_true',default=False,dest='want_rpath',help='enable the rpath for qt libraries')
+	for i in'qtdir qtbin qtlibs'.split():
+		opt.add_option('--'+i,type='string',default='',dest=i)
+	opt.add_option('--translate',action='store_true',help='collect translation strings',dest='trans_qt5',default=False)
+	opt.add_option('--qtextralibs',type='string',default='',dest='qtextralibs',help='additional qt libraries on the system to add to default ones, comma separated')
--- pugl-0~svn32+dfsg0.orig/waflib/Tools/ruby.py
+++ pugl-0~svn32+dfsg0/waflib/Tools/ruby.py
@@ -1,11 +1,13 @@
 #! /usr/bin/env python
 # encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
 
 import os
-from waflib import Task,Options,Utils
-from waflib.TaskGen import before_method,feature,after_method,Task,extension
+from waflib import Errors,Options,Task,Utils
+from waflib.TaskGen import before_method,feature,extension
 from waflib.Configure import conf
+@feature('rubyext')
+@before_method('apply_incpaths','process_source','apply_bundle','apply_link')
 def init_rubyext(self):
 	self.install_path='${ARCHDIR_RUBY}'
 	self.uselib=self.to_list(getattr(self,'uselib',''))
@@ -13,31 +15,29 @@ def init_rubyext(self):
 		self.uselib.append('RUBY')
 	if not'RUBYEXT'in self.uselib:
 		self.uselib.append('RUBYEXT')
+@feature('rubyext')
+@before_method('apply_link','propagate_uselib_vars')
 def apply_ruby_so_name(self):
-	self.env['cshlib_PATTERN']=self.env['cxxshlib_PATTERN']=self.env['rubyext_PATTERN']
+	self.env.cshlib_PATTERN=self.env.cxxshlib_PATTERN=self.env.rubyext_PATTERN
+@conf
 def check_ruby_version(self,minver=()):
-	if Options.options.rubybinary:
-		self.env.RUBY=Options.options.rubybinary
-	else:
-		self.find_program('ruby',var='RUBY')
-	ruby=self.env.RUBY
+	ruby=self.find_program('ruby',var='RUBY',value=Options.options.rubybinary)
 	try:
-		version=self.cmd_and_log([ruby,'-e','puts defined?(VERSION) ? VERSION : RUBY_VERSION']).strip()
-	except:
+		version=self.cmd_and_log(ruby+['-e','puts defined?(VERSION) ? VERSION : RUBY_VERSION']).strip()
+	except Errors.WafError:
 		self.fatal('could not determine ruby version')
 	self.env.RUBY_VERSION=version
 	try:
-		ver=tuple(map(int,version.split(".")))
-	except:
+		ver=tuple(map(int,version.split('.')))
+	except Errors.WafError:
 		self.fatal('unsupported ruby version %r'%version)
 	cver=''
 	if minver:
+		cver='> '+'.'.join(str(x)for x in minver)
 		if ver<minver:
 			self.fatal('ruby is too old %r'%ver)
-		cver='.'.join([str(x)for x in minver])
-	else:
-		cver=ver
-	self.msg('Checking for ruby version %s'%str(minver or''),cver)
+	self.msg('Checking for ruby version %s'%cver,version)
+@conf
 def check_ruby_ext_devel(self):
 	if not self.env.RUBY:
 		self.fatal('ruby detection is required first')
@@ -45,17 +45,17 @@ def check_ruby_ext_devel(self):
 		self.fatal('load a c/c++ compiler first')
 	version=tuple(map(int,self.env.RUBY_VERSION.split(".")))
 	def read_out(cmd):
-		return Utils.to_list(self.cmd_and_log([self.env.RUBY,'-rrbconfig','-e',cmd]))
+		return Utils.to_list(self.cmd_and_log(self.env.RUBY+['-rrbconfig','-e',cmd]))
 	def read_config(key):
-		return read_out('puts Config::CONFIG[%r]'%key)
-	ruby=self.env['RUBY']
-	archdir=read_config('archdir')
-	cpppath=archdir
+		return read_out('puts RbConfig::CONFIG[%r]'%key)
+	cpppath=archdir=read_config('archdir')
 	if version>=(1,9,0):
 		ruby_hdrdir=read_config('rubyhdrdir')
 		cpppath+=ruby_hdrdir
+		if version>=(2,0,0):
+			cpppath+=read_config('rubyarchhdrdir')
 		cpppath+=[os.path.join(ruby_hdrdir[0],read_config('arch')[0])]
-	self.check(header_name='ruby.h',includes=cpppath,errmsg='could not find ruby header file')
+	self.check(header_name='ruby.h',includes=cpppath,errmsg='could not find ruby header file',link_header_test=False)
 	self.env.LIBPATH_RUBYEXT=read_config('libdir')
 	self.env.LIBPATH_RUBYEXT+=archdir
 	self.env.INCLUDES_RUBYEXT=cpppath
@@ -77,28 +77,21 @@ def check_ruby_ext_devel(self):
 		self.env.LIBDIR_RUBY=Options.options.rubylibdir
 	else:
 		self.env.LIBDIR_RUBY=read_config('sitelibdir')[0]
+@conf
 def check_ruby_module(self,module_name):
 	self.start_msg('Ruby module %s'%module_name)
 	try:
-		self.cmd_and_log([self.env['RUBY'],'-e','require \'%s\';puts 1'%module_name])
-	except:
+		self.cmd_and_log(self.env.RUBY+['-e','require \'%s\';puts 1'%module_name])
+	except Errors.WafError:
 		self.end_msg(False)
 		self.fatal('Could not find the ruby module %r'%module_name)
 	self.end_msg(True)
+@extension('.rb')
 def process(self,node):
-	tsk=self.create_task('run_ruby',node)
+	return self.create_task('run_ruby',node)
 class run_ruby(Task.Task):
 	run_str='${RUBY} ${RBFLAGS} -I ${SRC[0].parent.abspath()} ${SRC}'
 def options(opt):
 	opt.add_option('--with-ruby-archdir',type='string',dest='rubyarchdir',help='Specify directory where to install arch specific files')
 	opt.add_option('--with-ruby-libdir',type='string',dest='rubylibdir',help='Specify alternate ruby library path')
 	opt.add_option('--with-ruby-binary',type='string',dest='rubybinary',help='Specify alternate ruby binary')
-
-feature('rubyext')(init_rubyext)
-before_method('apply_incpaths','apply_lib_vars','apply_bundle','apply_link')(init_rubyext)
-feature('rubyext')(apply_ruby_so_name)
-before_method('apply_link','propagate_uselib')(apply_ruby_so_name)
-conf(check_ruby_version)
-conf(check_ruby_ext_devel)
-conf(check_ruby_module)
-extension('.rb')(process)
\ No newline at end of file
--- pugl-0~svn32+dfsg0.orig/waflib/Tools/suncc.py
+++ pugl-0~svn32+dfsg0/waflib/Tools/suncc.py
@@ -1,47 +1,44 @@
 #! /usr/bin/env python
 # encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
 
-import os
-from waflib import Utils
+from waflib import Errors
 from waflib.Tools import ccroot,ar
 from waflib.Configure import conf
+@conf
 def find_scc(conf):
 	v=conf.env
-	cc=None
-	if v['CC']:cc=v['CC']
-	elif'CC'in conf.environ:cc=conf.environ['CC']
-	if not cc:cc=conf.find_program('cc',var='CC')
-	if not cc:conf.fatal('Could not find a Sun C compiler')
-	cc=conf.cmd_to_list(cc)
+	cc=conf.find_program('cc',var='CC')
 	try:
 		conf.cmd_and_log(cc+['-flags'])
-	except:
+	except Errors.WafError:
 		conf.fatal('%r is not a Sun compiler'%cc)
-	v['CC']=cc
-	v['CC_NAME']='sun'
+	v.CC_NAME='sun'
+	conf.get_suncc_version(cc)
+@conf
 def scc_common_flags(conf):
 	v=conf.env
-	v['CC_SRC_F']=[]
-	v['CC_TGT_F']=['-c','-o']
-	if not v['LINK_CC']:v['LINK_CC']=v['CC']
-	v['CCLNK_SRC_F']=''
-	v['CCLNK_TGT_F']=['-o']
-	v['CPPPATH_ST']='-I%s'
-	v['DEFINES_ST']='-D%s'
-	v['LIB_ST']='-l%s'
-	v['LIBPATH_ST']='-L%s'
-	v['STLIB_ST']='-l%s'
-	v['STLIBPATH_ST']='-L%s'
-	v['SONAME_ST']='-Wl,-h,%s'
-	v['SHLIB_MARKER']='-Bdynamic'
-	v['STLIB_MARKER']='-Bstatic'
-	v['cprogram_PATTERN']='%s'
-	v['CFLAGS_cshlib']=['-Kpic','-DPIC']
-	v['LINKFLAGS_cshlib']=['-G']
-	v['cshlib_PATTERN']='lib%s.so'
-	v['LINKFLAGS_cstlib']=['-Bstatic']
-	v['cstlib_PATTERN']='lib%s.a'
+	v.CC_SRC_F=[]
+	v.CC_TGT_F=['-c','-o','']
+	if not v.LINK_CC:
+		v.LINK_CC=v.CC
+	v.CCLNK_SRC_F=''
+	v.CCLNK_TGT_F=['-o','']
+	v.CPPPATH_ST='-I%s'
+	v.DEFINES_ST='-D%s'
+	v.LIB_ST='-l%s'
+	v.LIBPATH_ST='-L%s'
+	v.STLIB_ST='-l%s'
+	v.STLIBPATH_ST='-L%s'
+	v.SONAME_ST='-Wl,-h,%s'
+	v.SHLIB_MARKER='-Bdynamic'
+	v.STLIB_MARKER='-Bstatic'
+	v.cprogram_PATTERN='%s'
+	v.CFLAGS_cshlib=['-xcode=pic32','-DPIC']
+	v.LINKFLAGS_cshlib=['-G']
+	v.cshlib_PATTERN='lib%s.so'
+	v.LINKFLAGS_cstlib=['-Bstatic']
+	v.cstlib_PATTERN='lib%s.a'
 def configure(conf):
 	conf.find_scc()
 	conf.find_ar()
@@ -49,6 +46,3 @@ def configure(conf):
 	conf.cc_load_tools()
 	conf.cc_add_flags()
 	conf.link_add_flags()
-
-conf(find_scc)
-conf(scc_common_flags)
\ No newline at end of file
--- pugl-0~svn32+dfsg0.orig/waflib/Tools/suncxx.py
+++ pugl-0~svn32+dfsg0/waflib/Tools/suncxx.py
@@ -1,48 +1,44 @@
 #! /usr/bin/env python
 # encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
 
-import os
-from waflib import Utils
+from waflib import Errors
 from waflib.Tools import ccroot,ar
 from waflib.Configure import conf
+@conf
 def find_sxx(conf):
 	v=conf.env
-	cc=None
-	if v['CXX']:cc=v['CXX']
-	elif'CXX'in conf.environ:cc=conf.environ['CXX']
-	if not cc:cc=conf.find_program('CC',var='CXX')
-	if not cc:cc=conf.find_program('c++',var='CXX')
-	if not cc:conf.fatal('Could not find a Sun C++ compiler')
-	cc=conf.cmd_to_list(cc)
+	cc=conf.find_program(['CC','c++'],var='CXX')
 	try:
 		conf.cmd_and_log(cc+['-flags'])
-	except:
+	except Errors.WafError:
 		conf.fatal('%r is not a Sun compiler'%cc)
-	v['CXX']=cc
-	v['CXX_NAME']='sun'
+	v.CXX_NAME='sun'
+	conf.get_suncc_version(cc)
+@conf
 def sxx_common_flags(conf):
 	v=conf.env
-	v['CXX_SRC_F']=[]
-	v['CXX_TGT_F']=['-c','-o']
-	if not v['LINK_CXX']:v['LINK_CXX']=v['CXX']
-	v['CXXLNK_SRC_F']=[]
-	v['CXXLNK_TGT_F']=['-o']
-	v['CPPPATH_ST']='-I%s'
-	v['DEFINES_ST']='-D%s'
-	v['LIB_ST']='-l%s'
-	v['LIBPATH_ST']='-L%s'
-	v['STLIB_ST']='-l%s'
-	v['STLIBPATH_ST']='-L%s'
-	v['SONAME_ST']='-Wl,-h,%s'
-	v['SHLIB_MARKER']='-Bdynamic'
-	v['STLIB_MARKER']='-Bstatic'
-	v['cxxprogram_PATTERN']='%s'
-	v['CXXFLAGS_cxxshlib']=['-Kpic','-DPIC']
-	v['LINKFLAGS_cxxshlib']=['-G']
-	v['cxxshlib_PATTERN']='lib%s.so'
-	v['LINKFLAGS_cxxstlib']=['-Bstatic']
-	v['cxxstlib_PATTERN']='lib%s.a'
+	v.CXX_SRC_F=[]
+	v.CXX_TGT_F=['-c','-o','']
+	if not v.LINK_CXX:
+		v.LINK_CXX=v.CXX
+	v.CXXLNK_SRC_F=[]
+	v.CXXLNK_TGT_F=['-o','']
+	v.CPPPATH_ST='-I%s'
+	v.DEFINES_ST='-D%s'
+	v.LIB_ST='-l%s'
+	v.LIBPATH_ST='-L%s'
+	v.STLIB_ST='-l%s'
+	v.STLIBPATH_ST='-L%s'
+	v.SONAME_ST='-Wl,-h,%s'
+	v.SHLIB_MARKER='-Bdynamic'
+	v.STLIB_MARKER='-Bstatic'
+	v.cxxprogram_PATTERN='%s'
+	v.CXXFLAGS_cxxshlib=['-xcode=pic32','-DPIC']
+	v.LINKFLAGS_cxxshlib=['-G']
+	v.cxxshlib_PATTERN='lib%s.so'
+	v.LINKFLAGS_cxxstlib=['-Bstatic']
+	v.cxxstlib_PATTERN='lib%s.a'
 def configure(conf):
 	conf.find_sxx()
 	conf.find_ar()
@@ -50,6 +46,3 @@ def configure(conf):
 	conf.cxx_load_tools()
 	conf.cxx_add_flags()
 	conf.link_add_flags()
-
-conf(find_sxx)
-conf(sxx_common_flags)
\ No newline at end of file
--- pugl-0~svn32+dfsg0.orig/waflib/Tools/tex.py
+++ pugl-0~svn32+dfsg0/waflib/Tools/tex.py
@@ -1,33 +1,36 @@
 #! /usr/bin/env python
 # encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
 
 import os,re
-from waflib import Utils,Task,Errors
+from waflib import Utils,Task,Errors,Logs,Node
 from waflib.TaskGen import feature,before_method
-from waflib.Logs import error,warn,debug
 re_bibunit=re.compile(r'\\(?P<type>putbib)\[(?P<file>[^\[\]]*)\]',re.M)
 def bibunitscan(self):
 	node=self.inputs[0]
 	nodes=[]
-	if not node:return nodes
-	code=Utils.readf(node.abspath())
+	if not node:
+		return nodes
+	code=node.read()
 	for match in re_bibunit.finditer(code):
 		path=match.group('file')
 		if path:
-			for k in['','.bib']:
-				debug('tex: trying %s%s'%(path,k))
+			found=None
+			for k in('','.bib'):
+				Logs.debug('tex: trying %s%s',path,k)
 				fi=node.parent.find_resource(path+k)
 				if fi:
+					found=True
 					nodes.append(fi)
-			else:
-				debug('tex: could not find %s'%path)
-	debug("tex: found the following bibunit files: %s"%nodes)
+			if not found:
+				Logs.debug('tex: could not find %s',path)
+	Logs.debug('tex: found the following bibunit files: %s',nodes)
 	return nodes
-exts_deps_tex=['','.ltx','.tex','.bib','.pdf','.png','.eps','.ps']
+exts_deps_tex=['','.ltx','.tex','.bib','.pdf','.png','.eps','.ps','.sty']
 exts_tex=['.ltx','.tex']
-re_tex=re.compile(r'\\(?P<type>include|bibliography|putbib|includegraphics|input|import|bringin|lstinputlisting)(\[[^\[\]]*\])?{(?P<file>[^{}]*)}',re.M)
+re_tex=re.compile(r'\\(?P<type>usepackage|RequirePackage|include|bibliography([^\[\]{}]*)|putbib|includegraphics|input|import|bringin|lstinputlisting)(\[[^\[\]]*\])?{(?P<file>[^{}]*)}',re.M)
 g_bibtex_re=re.compile('bibdata',re.M)
+g_glossaries_re=re.compile('\\@newglossary',re.M)
 class tex(Task.Task):
 	bibtex_fun,_=Task.compile_fun('${BIBTEX} ${BIBTEXFLAGS} ${SRCFILE}',shell=False)
 	bibtex_fun.__doc__="""
@@ -37,6 +40,14 @@ class tex(Task.Task):
 	makeindex_fun.__doc__="""
 	Execute the program **makeindex**
 	"""
+	makeglossaries_fun,_=Task.compile_fun('${MAKEGLOSSARIES} ${SRCFILE}',shell=False)
+	makeglossaries_fun.__doc__="""
+	Execute the program **makeglossaries**
+	"""
+	def exec_command(self,cmd,**kw):
+		if self.env.PROMPT_LATEX:
+			kw['stdout']=kw['stderr']=None
+		return super(tex,self).exec_command(cmd,**kw)
 	def scan_aux(self,node):
 		nodes=[node]
 		re_aux=re.compile(r'\\@input{(?P<file>[^{}]*)}',re.M)
@@ -46,7 +57,7 @@ class tex(Task.Task):
 				path=match.group('file')
 				found=node.parent.find_or_declare(path)
 				if found and found not in nodes:
-					debug('tex: found aux node '+found.abspath())
+					Logs.debug('tex: found aux node %r',found)
 					nodes.append(found)
 					parse_node(found)
 		parse_node(node)
@@ -56,122 +67,171 @@ class tex(Task.Task):
 		nodes=[]
 		names=[]
 		seen=[]
-		if not node:return(nodes,names)
+		if not node:
+			return(nodes,names)
 		def parse_node(node):
 			if node in seen:
 				return
 			seen.append(node)
 			code=node.read()
-			global re_tex
 			for match in re_tex.finditer(code):
+				multibib=match.group('type')
+				if multibib and multibib.startswith('bibliography'):
+					multibib=multibib[len('bibliography'):]
+					if multibib.startswith('style'):
+						continue
+				else:
+					multibib=None
 				for path in match.group('file').split(','):
 					if path:
 						add_name=True
 						found=None
 						for k in exts_deps_tex:
-							debug('tex: trying %s%s'%(path,k))
-							found=node.parent.find_resource(path+k)
-							if found and not found in self.outputs:
+							for up in self.texinputs_nodes:
+								Logs.debug('tex: trying %s%s',path,k)
+								found=up.find_resource(path+k)
+								if found:
+									break
+							for tsk in self.generator.tasks:
+								if not found or found in tsk.outputs:
+									break
+							else:
 								nodes.append(found)
 								add_name=False
 								for ext in exts_tex:
 									if found.name.endswith(ext):
 										parse_node(found)
 										break
+							if found and multibib and found.name.endswith('.bib'):
+								try:
+									self.multibibs.append(found)
+								except AttributeError:
+									self.multibibs=[found]
 						if add_name:
 							names.append(path)
 		parse_node(node)
 		for x in nodes:
 			x.parent.get_bld().mkdir()
-		debug("tex: found the following : %s and names %s"%(nodes,names))
+		Logs.debug("tex: found the following : %s and names %s",nodes,names)
 		return(nodes,names)
 	def check_status(self,msg,retcode):
 		if retcode!=0:
-			raise Errors.WafError("%r command exit status %r"%(msg,retcode))
-	def bibfile(self):
-		need_bibtex=False
+			raise Errors.WafError('%r command exit status %r'%(msg,retcode))
+	def info(self,*k,**kw):
 		try:
-			for aux_node in self.aux_nodes:
+			info=self.generator.bld.conf.logger.info
+		except AttributeError:
+			info=Logs.info
+		info(*k,**kw)
+	def bibfile(self):
+		for aux_node in self.aux_nodes:
+			try:
 				ct=aux_node.read()
-				if g_bibtex_re.findall(ct):
-					need_bibtex=True
-					break
-		except(OSError,IOError):
-			error('error bibtex scan')
-		else:
-			if need_bibtex:
-				warn('calling bibtex')
+			except EnvironmentError:
+				Logs.error('Error reading %s: %r',aux_node.abspath())
+				continue
+			if g_bibtex_re.findall(ct):
+				self.info('calling bibtex')
 				self.env.env={}
 				self.env.env.update(os.environ)
-				self.env.env.update({'BIBINPUTS':self.TEXINPUTS,'BSTINPUTS':self.TEXINPUTS})
-				self.env.SRCFILE=self.aux_nodes[0].name[:-4]
+				self.env.env.update({'BIBINPUTS':self.texinputs(),'BSTINPUTS':self.texinputs()})
+				self.env.SRCFILE=aux_node.name[:-4]
 				self.check_status('error when calling bibtex',self.bibtex_fun())
+		for node in getattr(self,'multibibs',[]):
+			self.env.env={}
+			self.env.env.update(os.environ)
+			self.env.env.update({'BIBINPUTS':self.texinputs(),'BSTINPUTS':self.texinputs()})
+			self.env.SRCFILE=node.name[:-4]
+			self.check_status('error when calling bibtex',self.bibtex_fun())
 	def bibunits(self):
 		try:
 			bibunits=bibunitscan(self)
-		except FSError:
-			error('error bibunitscan')
+		except OSError:
+			Logs.error('error bibunitscan')
 		else:
 			if bibunits:
-				fn=['bu'+str(i)for i in xrange(1,len(bibunits)+1)]
+				fn=['bu'+str(i)for i in range(1,len(bibunits)+1)]
 				if fn:
-					warn('calling bibtex on bibunits')
+					self.info('calling bibtex on bibunits')
 				for f in fn:
-					self.env.env={'BIBINPUTS':self.TEXINPUTS,'BSTINPUTS':self.TEXINPUTS}
+					self.env.env={'BIBINPUTS':self.texinputs(),'BSTINPUTS':self.texinputs()}
 					self.env.SRCFILE=f
 					self.check_status('error when calling bibtex',self.bibtex_fun())
 	def makeindex(self):
+		self.idx_node=self.inputs[0].change_ext('.idx')
 		try:
 			idx_path=self.idx_node.abspath()
 			os.stat(idx_path)
 		except OSError:
-			warn('index file %s absent, not calling makeindex'%idx_path)
+			self.info('index file %s absent, not calling makeindex',idx_path)
 		else:
-			warn('calling makeindex')
+			self.info('calling makeindex')
 			self.env.SRCFILE=self.idx_node.name
 			self.env.env={}
 			self.check_status('error when calling makeindex %s'%idx_path,self.makeindex_fun())
+	def bibtopic(self):
+		p=self.inputs[0].parent.get_bld()
+		if os.path.exists(os.path.join(p.abspath(),'btaux.aux')):
+			self.aux_nodes+=p.ant_glob('*[0-9].aux')
+	def makeglossaries(self):
+		src_file=self.inputs[0].abspath()
+		base_file=os.path.basename(src_file)
+		base,_=os.path.splitext(base_file)
+		for aux_node in self.aux_nodes:
+			try:
+				ct=aux_node.read()
+			except EnvironmentError:
+				Logs.error('Error reading %s: %r',aux_node.abspath())
+				continue
+			if g_glossaries_re.findall(ct):
+				if not self.env.MAKEGLOSSARIES:
+					raise Errors.WafError("The program 'makeglossaries' is missing!")
+				Logs.warn('calling makeglossaries')
+				self.env.SRCFILE=base
+				self.check_status('error when calling makeglossaries %s'%base,self.makeglossaries_fun())
+				return
+	def texinputs(self):
+		return os.pathsep.join([k.abspath()for k in self.texinputs_nodes])+os.pathsep
 	def run(self):
 		env=self.env
-		if not env['PROMPT_LATEX']:
+		if not env.PROMPT_LATEX:
 			env.append_value('LATEXFLAGS','-interaction=batchmode')
 			env.append_value('PDFLATEXFLAGS','-interaction=batchmode')
 			env.append_value('XELATEXFLAGS','-interaction=batchmode')
-		fun=self.texfun
-		node=self.inputs[0]
-		srcfile=node.abspath()
-		texinputs=self.env.TEXINPUTS or''
-		self.TEXINPUTS=node.parent.get_bld().abspath()+os.pathsep+node.parent.get_src().abspath()+os.pathsep+texinputs+os.pathsep
-		self.aux_node=node.change_ext('.aux')
-		self.cwd=self.inputs[0].parent.get_bld().abspath()
-		warn('first pass on %s'%self.__class__.__name__)
-		self.env.env={}
-		self.env.env.update(os.environ)
-		self.env.env.update({'TEXINPUTS':self.TEXINPUTS})
-		self.env.SRCFILE=srcfile
-		self.check_status('error when calling latex',fun())
-		self.aux_nodes=self.scan_aux(node.change_ext('.aux'))
-		self.idx_node=node.change_ext('.idx')
+		self.cwd=self.inputs[0].parent.get_bld()
+		self.info('first pass on %s',self.__class__.__name__)
+		cur_hash=self.hash_aux_nodes()
+		self.call_latex()
+		self.hash_aux_nodes()
+		self.bibtopic()
 		self.bibfile()
 		self.bibunits()
 		self.makeindex()
-		hash=''
+		self.makeglossaries()
 		for i in range(10):
-			prev_hash=hash
-			try:
-				hashes=[Utils.h_file(x.abspath())for x in self.aux_nodes]
-				hash=Utils.h_list(hashes)
-			except(OSError,IOError):
-				error('could not read aux.h')
-				pass
-			if hash and hash==prev_hash:
+			prev_hash=cur_hash
+			cur_hash=self.hash_aux_nodes()
+			if not cur_hash:
+				Logs.error('No aux.h to process')
+			if cur_hash and cur_hash==prev_hash:
 				break
-			warn('calling %s'%self.__class__.__name__)
-			self.env.env={}
-			self.env.env.update(os.environ)
-			self.env.env.update({'TEXINPUTS':self.TEXINPUTS})
-			self.env.SRCFILE=srcfile
-			self.check_status('error when calling %s'%self.__class__.__name__,fun())
+			self.info('calling %s',self.__class__.__name__)
+			self.call_latex()
+	def hash_aux_nodes(self):
+		try:
+			self.aux_nodes
+		except AttributeError:
+			try:
+				self.aux_nodes=self.scan_aux(self.inputs[0].change_ext('.aux'))
+			except IOError:
+				return None
+		return Utils.h_list([Utils.h_file(x.abspath())for x in self.aux_nodes])
+	def call_latex(self):
+		self.env.env={}
+		self.env.env.update(os.environ)
+		self.env.env.update({'TEXINPUTS':self.texinputs()})
+		self.env.SRCFILE=self.inputs[0].abspath()
+		self.check_status('error when calling latex',self.texfun())
 class latex(tex):
 	texfun,vars=Task.compile_fun('${LATEX} ${LATEXFLAGS} ${SRCFILE}',shell=False)
 class pdflatex(tex):
@@ -190,18 +250,31 @@ class pdf2ps(Task.Task):
 	run_str='${PDF2PS} ${PDF2PSFLAGS} ${SRC} ${TGT}'
 	color='BLUE'
 	after=['latex','pdflatex','xelatex']
+@feature('tex')
+@before_method('process_source')
 def apply_tex(self):
-	if not getattr(self,'type',None)in['latex','pdflatex','xelatex']:
+	if not getattr(self,'type',None)in('latex','pdflatex','xelatex'):
 		self.type='pdflatex'
-	tree=self.bld
 	outs=Utils.to_list(getattr(self,'outs',[]))
-	self.env['PROMPT_LATEX']=getattr(self,'prompt',1)
+	try:
+		self.generator.bld.conf
+	except AttributeError:
+		default_prompt=False
+	else:
+		default_prompt=True
+	self.env.PROMPT_LATEX=getattr(self,'prompt',default_prompt)
 	deps_lst=[]
 	if getattr(self,'deps',None):
 		deps=self.to_list(self.deps)
-		for filename in deps:
-			n=self.path.find_resource(filename)
-			if not n in deps_lst:deps_lst.append(n)
+		for dep in deps:
+			if isinstance(dep,str):
+				n=self.path.find_resource(dep)
+				if not n:
+					self.bld.fatal('Could not find %r for %r'%(dep,self))
+				if not n in deps_lst:
+					deps_lst.append(n)
+			elif isinstance(dep,Node.Node):
+				deps_lst.append(dep)
 	for node in self.to_nodes(self.source):
 		if self.type=='latex':
 			task=self.create_task('latex',node,node.change_ext('.dvi'))
@@ -211,32 +284,44 @@ def apply_tex(self):
 			task=self.create_task('xelatex',node,node.change_ext('.pdf'))
 		task.env=self.env
 		if deps_lst:
-			try:
-				lst=tree.node_deps[task.uid()]
-				for n in deps_lst:
-					if not n in lst:
-						lst.append(n)
-			except KeyError:
-				tree.node_deps[task.uid()]=deps_lst
+			for n in deps_lst:
+				if not n in task.dep_nodes:
+					task.dep_nodes.append(n)
+		if hasattr(self,'texinputs_nodes'):
+			task.texinputs_nodes=self.texinputs_nodes
+		else:
+			task.texinputs_nodes=[node.parent,node.parent.get_bld(),self.path,self.path.get_bld()]
+			lst=os.environ.get('TEXINPUTS','')
+			if self.env.TEXINPUTS:
+				lst+=os.pathsep+self.env.TEXINPUTS
+			if lst:
+				lst=lst.split(os.pathsep)
+			for x in lst:
+				if x:
+					if os.path.isabs(x):
+						p=self.bld.root.find_node(x)
+						if p:
+							task.texinputs_nodes.append(p)
+						else:
+							Logs.error('Invalid TEXINPUTS folder %s',x)
+					else:
+						Logs.error('Cannot resolve relative paths in TEXINPUTS %s',x)
 		if self.type=='latex':
 			if'ps'in outs:
 				tsk=self.create_task('dvips',task.outputs,node.change_ext('.ps'))
-				tsk.env.env={'TEXINPUTS':node.parent.abspath()+os.pathsep+self.path.abspath()+os.pathsep+self.path.get_bld().abspath()}
+				tsk.env.env=dict(os.environ)
 			if'pdf'in outs:
 				tsk=self.create_task('dvipdf',task.outputs,node.change_ext('.pdf'))
-				tsk.env.env={'TEXINPUTS':node.parent.abspath()+os.pathsep+self.path.abspath()+os.pathsep+self.path.get_bld().abspath()}
+				tsk.env.env=dict(os.environ)
 		elif self.type=='pdflatex':
 			if'ps'in outs:
 				self.create_task('pdf2ps',task.outputs,node.change_ext('.ps'))
 	self.source=[]
 def configure(self):
 	v=self.env
-	for p in'tex latex pdflatex xelatex bibtex dvips dvipdf ps2pdf makeindex pdf2ps'.split():
+	for p in'tex latex pdflatex xelatex bibtex dvips dvipdf ps2pdf makeindex pdf2ps makeglossaries'.split():
 		try:
 			self.find_program(p,var=p.upper())
 		except self.errors.ConfigurationError:
 			pass
-	v['DVIPSFLAGS']='-Ppdf'
-
-feature('tex')(apply_tex)
-before_method('process_source')(apply_tex)
\ No newline at end of file
+	v.DVIPSFLAGS='-Ppdf'
--- pugl-0~svn32+dfsg0.orig/waflib/Tools/vala.py
+++ pugl-0~svn32+dfsg0/waflib/Tools/vala.py
@@ -1,202 +1,207 @@
 #! /usr/bin/env python
 # encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
 
-import os.path,shutil,re
-from waflib import Context,Task,Utils,Logs,Options,Errors
-from waflib.TaskGen import extension
+import re
+from waflib import Build,Context,Errors,Logs,Node,Options,Task,Utils
+from waflib.TaskGen import extension,taskgen_method
 from waflib.Configure import conf
 class valac(Task.Task):
 	vars=["VALAC","VALAC_VERSION","VALAFLAGS"]
 	ext_out=['.h']
 	def run(self):
-		env=self.env
-		cmd=[env['VALAC'],'-C','--quiet']
-		cmd.extend(Utils.to_list(env['VALAFLAGS']))
-		if self.threading:
-			cmd.append('--thread')
-		if self.profile:
-			cmd.append('--profile=%s'%self.profile)
-		if self.target_glib:
-			cmd.append('--target-glib=%s'%self.target_glib)
-		if self.is_lib:
-			cmd.append('--library='+self.target)
-			for x in self.outputs:
-				if x.name.endswith('.h'):
-					cmd.append('--header='+x.name)
-			if self.gir:
-				cmd.append('--gir=%s.gir'%self.gir)
-		for vapi_dir in self.vapi_dirs:
-			cmd.append('--vapidir=%s'%vapi_dir)
-		for package in self.packages:
-			cmd.append('--pkg=%s'%package)
-		for package in self.packages_private:
-			cmd.append('--pkg=%s'%package)
-		for define in self.vala_defines:
-			cmd.append('--define=%s'%define)
-		cmd.extend([a.abspath()for a in self.inputs])
-		ret=self.exec_command(cmd,cwd=self.outputs[0].parent.abspath())
+		cmd=self.env.VALAC+self.env.VALAFLAGS
+		resources=getattr(self,'vala_exclude',[])
+		cmd.extend([a.abspath()for a in self.inputs if a not in resources])
+		ret=self.exec_command(cmd,cwd=self.vala_dir_node.abspath())
 		if ret:
 			return ret
-		for x in self.outputs:
-			if id(x.parent)!=id(self.outputs[0].parent):
-				shutil.move(self.outputs[0].parent.abspath()+os.sep+x.name,x.abspath())
-		if self.packages and getattr(self,'deps_node',None):
-			self.deps_node.write('\n'.join(self.packages))
+		if self.generator.dump_deps_node:
+			self.generator.dump_deps_node.write('\n'.join(self.generator.packages))
 		return ret
-def vala_file(self,node):
-	valatask=getattr(self,"valatask",None)
-	if not valatask:
-		def _get_api_version():
-			api_version='1.0'
-			if hasattr(Context.g_module,'API_VERSION'):
-				version=Context.g_module.API_VERSION.split(".")
-				if version[0]=="0":
-					api_version="0."+version[1]
-				else:
-					api_version=version[0]+".0"
-			return api_version
-		valatask=self.create_task('valac')
-		self.valatask=valatask
-		self.includes=Utils.to_list(getattr(self,'includes',[]))
-		self.uselib=self.to_list(getattr(self,'uselib',[]))
-		valatask.packages=[]
-		valatask.packages_private=Utils.to_list(getattr(self,'packages_private',[]))
-		valatask.vapi_dirs=[]
-		valatask.target=self.target
-		valatask.threading=False
-		valatask.install_path=getattr(self,'install_path','')
-		valatask.profile=getattr(self,'profile','gobject')
-		valatask.vala_defines=getattr(self,'vala_defines',[])
-		valatask.target_glib=None
-		valatask.gir=getattr(self,'gir',None)
-		valatask.gir_path=getattr(self,'gir_path','${DATAROOTDIR}/gir-1.0')
-		valatask.vapi_path=getattr(self,'vapi_path','${DATAROOTDIR}/vala/vapi')
-		valatask.pkg_name=getattr(self,'pkg_name',self.env['PACKAGE'])
-		valatask.header_path=getattr(self,'header_path','${INCLUDEDIR}/%s-%s'%(valatask.pkg_name,_get_api_version()))
-		valatask.install_binding=getattr(self,'install_binding',True)
-		valatask.is_lib=False
-		if not'cprogram'in self.features:
-			valatask.is_lib=True
-		packages=Utils.to_list(getattr(self,'packages',[]))
-		vapi_dirs=Utils.to_list(getattr(self,'vapi_dirs',[]))
-		includes=[]
-		if hasattr(self,'use'):
-			local_packages=Utils.to_list(self.use)[:]
-			seen=[]
-			while len(local_packages)>0:
-				package=local_packages.pop()
-				if package in seen:
-					continue
-				seen.append(package)
-				try:
-					package_obj=self.bld.get_tgen_by_name(package)
-				except Errors.WafError:
-					continue
-				package_name=package_obj.target
-				package_node=package_obj.path
-				package_dir=package_node.path_from(self.path)
-				for task in package_obj.tasks:
-					for output in task.outputs:
-						if output.name==package_name+".vapi":
-							valatask.set_run_after(task)
-							if package_name not in packages:
-								packages.append(package_name)
-							if package_dir not in vapi_dirs:
-								vapi_dirs.append(package_dir)
-							if package_dir not in includes:
-								includes.append(package_dir)
-				if hasattr(package_obj,'use'):
-					lst=self.to_list(package_obj.use)
-					lst.reverse()
-					local_packages=[pkg for pkg in lst if pkg not in seen]+local_packages
-		valatask.packages=packages
-		for vapi_dir in vapi_dirs:
-			try:
-				valatask.vapi_dirs.append(self.path.find_dir(vapi_dir).abspath())
-				valatask.vapi_dirs.append(self.path.find_dir(vapi_dir).get_bld().abspath())
-			except AttributeError:
-				Logs.warn("Unable to locate Vala API directory: '%s'"%vapi_dir)
-		self.includes.append(self.bld.srcnode.abspath())
-		self.includes.append(self.bld.bldnode.abspath())
-		for include in includes:
+@taskgen_method
+def init_vala_task(self):
+	self.profile=getattr(self,'profile','gobject')
+	self.packages=packages=Utils.to_list(getattr(self,'packages',[]))
+	self.use=Utils.to_list(getattr(self,'use',[]))
+	if packages and not self.use:
+		self.use=packages[:]
+	if self.profile=='gobject':
+		if not'GOBJECT'in self.use:
+			self.use.append('GOBJECT')
+	def addflags(flags):
+		self.env.append_value('VALAFLAGS',flags)
+	if self.profile:
+		addflags('--profile=%s'%self.profile)
+	valatask=self.valatask
+	if hasattr(self,'vala_dir'):
+		if isinstance(self.vala_dir,str):
+			valatask.vala_dir_node=self.path.get_bld().make_node(self.vala_dir)
 			try:
-				self.includes.append(self.path.find_dir(include).abspath())
-				self.includes.append(self.path.find_dir(include).get_bld().abspath())
-			except AttributeError:
-				Logs.warn("Unable to locate include directory: '%s'"%include)
-		if valatask.profile=='gobject':
-			if hasattr(self,'target_glib'):
-				Logs.warn('target_glib on vala tasks is not supported --vala-target-glib=MAJOR.MINOR from the vala tool options')
-			if getattr(Options.options,'vala_target_glib',None):
-				valatask.target_glib=Options.options.vala_target_glib
-			if not'GOBJECT'in self.uselib:
-				self.uselib.append('GOBJECT')
-		if hasattr(self,'threading'):
-			if valatask.profile=='gobject':
-				valatask.threading=self.threading
-				if not'GTHREAD'in self.uselib:
-					self.uselib.append('GTHREAD')
+				valatask.vala_dir_node.mkdir()
+			except OSError:
+				raise self.bld.fatal('Cannot create the vala dir %r'%valatask.vala_dir_node)
+		else:
+			valatask.vala_dir_node=self.vala_dir
+	else:
+		valatask.vala_dir_node=self.path.get_bld()
+	addflags('--directory=%s'%valatask.vala_dir_node.abspath())
+	if hasattr(self,'thread'):
+		if self.profile=='gobject':
+			if not'GTHREAD'in self.use:
+				self.use.append('GTHREAD')
+		else:
+			Logs.warn('Profile %s means no threading support',self.profile)
+			self.thread=False
+		if self.thread:
+			addflags('--thread')
+	self.is_lib='cprogram'not in self.features
+	if self.is_lib:
+		addflags('--library=%s'%self.target)
+		h_node=valatask.vala_dir_node.find_or_declare('%s.h'%self.target)
+		valatask.outputs.append(h_node)
+		addflags('--header=%s'%h_node.name)
+		valatask.outputs.append(valatask.vala_dir_node.find_or_declare('%s.vapi'%self.target))
+		if getattr(self,'gir',None):
+			gir_node=valatask.vala_dir_node.find_or_declare('%s.gir'%self.gir)
+			addflags('--gir=%s'%gir_node.name)
+			valatask.outputs.append(gir_node)
+	self.vala_target_glib=getattr(self,'vala_target_glib',getattr(Options.options,'vala_target_glib',None))
+	if self.vala_target_glib:
+		addflags('--target-glib=%s'%self.vala_target_glib)
+	addflags(['--define=%s'%x for x in Utils.to_list(getattr(self,'vala_defines',[]))])
+	packages_private=Utils.to_list(getattr(self,'packages_private',[]))
+	addflags(['--pkg=%s'%x for x in packages_private])
+	def _get_api_version():
+		api_version='1.0'
+		if hasattr(Context.g_module,'API_VERSION'):
+			version=Context.g_module.API_VERSION.split(".")
+			if version[0]=="0":
+				api_version="0."+version[1]
 			else:
-				Logs.warn("Profile %s does not have threading support"%valatask.profile)
-		if valatask.is_lib:
-			valatask.outputs.append(self.path.find_or_declare('%s.h'%self.target))
-			valatask.outputs.append(self.path.find_or_declare('%s.vapi'%self.target))
-			if valatask.gir:
-				valatask.outputs.append(self.path.find_or_declare('%s.gir'%self.gir))
-			if valatask.packages:
-				d=self.path.find_or_declare('%s.deps'%self.target)
-				valatask.outputs.append(d)
-				valatask.deps_node=d
+				api_version=version[0]+".0"
+		return api_version
+	self.includes=Utils.to_list(getattr(self,'includes',[]))
+	valatask.install_path=getattr(self,'install_path','')
+	valatask.vapi_path=getattr(self,'vapi_path','${DATAROOTDIR}/vala/vapi')
+	valatask.pkg_name=getattr(self,'pkg_name',self.env.PACKAGE)
+	valatask.header_path=getattr(self,'header_path','${INCLUDEDIR}/%s-%s'%(valatask.pkg_name,_get_api_version()))
+	valatask.install_binding=getattr(self,'install_binding',True)
+	self.vapi_dirs=vapi_dirs=Utils.to_list(getattr(self,'vapi_dirs',[]))
+	if hasattr(self,'use'):
+		local_packages=Utils.to_list(self.use)[:]
+		seen=[]
+		while len(local_packages)>0:
+			package=local_packages.pop()
+			if package in seen:
+				continue
+			seen.append(package)
+			try:
+				package_obj=self.bld.get_tgen_by_name(package)
+			except Errors.WafError:
+				continue
+			package_obj.post()
+			package_name=package_obj.target
+			task=getattr(package_obj,'valatask',None)
+			if task:
+				for output in task.outputs:
+					if output.name==package_name+".vapi":
+						valatask.set_run_after(task)
+						if package_name not in packages:
+							packages.append(package_name)
+						if output.parent not in vapi_dirs:
+							vapi_dirs.append(output.parent)
+						if output.parent not in self.includes:
+							self.includes.append(output.parent)
+			if hasattr(package_obj,'use'):
+				lst=self.to_list(package_obj.use)
+				lst.reverse()
+				local_packages=[pkg for pkg in lst if pkg not in seen]+local_packages
+	addflags(['--pkg=%s'%p for p in packages])
+	for vapi_dir in vapi_dirs:
+		if isinstance(vapi_dir,Node.Node):
+			v_node=vapi_dir
+		else:
+			v_node=self.path.find_dir(vapi_dir)
+		if not v_node:
+			Logs.warn('Unable to locate Vala API directory: %r',vapi_dir)
+		else:
+			addflags('--vapidir=%s'%v_node.abspath())
+	self.dump_deps_node=None
+	if self.is_lib and self.packages:
+		self.dump_deps_node=valatask.vala_dir_node.find_or_declare('%s.deps'%self.target)
+		valatask.outputs.append(self.dump_deps_node)
+	if self.is_lib and valatask.install_binding:
+		headers_list=[o for o in valatask.outputs if o.suffix()==".h"]
+		if headers_list:
+			self.install_vheader=self.add_install_files(install_to=valatask.header_path,install_from=headers_list)
+		vapi_list=[o for o in valatask.outputs if(o.suffix()in(".vapi",".deps"))]
+		if vapi_list:
+			self.install_vapi=self.add_install_files(install_to=valatask.vapi_path,install_from=vapi_list)
+		gir_list=[o for o in valatask.outputs if o.suffix()=='.gir']
+		if gir_list:
+			self.install_gir=self.add_install_files(install_to=getattr(self,'gir_path','${DATAROOTDIR}/gir-1.0'),install_from=gir_list)
+	if hasattr(self,'vala_resources'):
+		nodes=self.to_nodes(self.vala_resources)
+		valatask.vala_exclude=getattr(valatask,'vala_exclude',[])+nodes
+		valatask.inputs.extend(nodes)
+		for x in nodes:
+			addflags(['--gresources',x.abspath()])
+@extension('.vala','.gs')
+def vala_file(self,node):
+	try:
+		valatask=self.valatask
+	except AttributeError:
+		valatask=self.valatask=self.create_task('valac')
+		self.init_vala_task()
 	valatask.inputs.append(node)
-	c_node=node.change_ext('.c')
+	name=node.name[:node.name.rfind('.')]+'.c'
+	c_node=valatask.vala_dir_node.find_or_declare(name)
 	valatask.outputs.append(c_node)
 	self.source.append(c_node)
-	if valatask.is_lib and valatask.install_binding:
-		headers_list=[o for o in valatask.outputs if o.suffix()==".h"]
-		try:
-			self.install_vheader.source=headers_list
-		except AttributeError:
-			self.install_vheader=self.bld.install_files(valatask.header_path,headers_list,self.env)
-		vapi_list=[o for o in valatask.outputs if(o.suffix()in(".vapi",".deps"))]
-		try:
-			self.install_vapi.source=vapi_list
-		except AttributeError:
-			self.install_vapi=self.bld.install_files(valatask.vapi_path,vapi_list,self.env)
-		gir_list=[o for o in valatask.outputs if o.suffix()==".gir"]
-		try:
-			self.install_gir.source=gir_list
-		except AttributeError:
-			self.install_gir=self.bld.install_files(valatask.gir_path,gir_list,self.env)
-valac=Task.update_outputs(valac)
+@extension('.vapi')
+def vapi_file(self,node):
+	try:
+		valatask=self.valatask
+	except AttributeError:
+		valatask=self.valatask=self.create_task('valac')
+		self.init_vala_task()
+	valatask.inputs.append(node)
+@conf
 def find_valac(self,valac_name,min_version):
 	valac=self.find_program(valac_name,var='VALAC')
 	try:
-		output=self.cmd_and_log(valac+' --version')
-	except Exception:
+		output=self.cmd_and_log(valac+['--version'])
+	except Errors.WafError:
 		valac_version=None
 	else:
-		ver=re.search(r'\d+.\d+.\d+',output).group(0).split('.')
+		ver=re.search(r'\d+.\d+.\d+',output).group().split('.')
 		valac_version=tuple([int(x)for x in ver])
 	self.msg('Checking for %s version >= %r'%(valac_name,min_version),valac_version,valac_version and valac_version>=min_version)
 	if valac and valac_version<min_version:
 		self.fatal("%s version %r is too old, need >= %r"%(valac_name,valac_version,min_version))
-	self.env['VALAC_VERSION']=valac_version
+	self.env.VALAC_VERSION=valac_version
 	return valac
+@conf
 def check_vala(self,min_version=(0,8,0),branch=None):
+	if self.env.VALA_MINVER:
+		min_version=self.env.VALA_MINVER
+	if self.env.VALA_MINVER_BRANCH:
+		branch=self.env.VALA_MINVER_BRANCH
 	if not branch:
 		branch=min_version[:2]
 	try:
 		find_valac(self,'valac-%d.%d'%(branch[0],branch[1]),min_version)
 	except self.errors.ConfigurationError:
 		find_valac(self,'valac',min_version)
+@conf
 def check_vala_deps(self):
-	if not self.env['HAVE_GOBJECT']:
+	if not self.env.HAVE_GOBJECT:
 		pkg_args={'package':'gobject-2.0','uselib_store':'GOBJECT','args':'--cflags --libs'}
 		if getattr(Options.options,'vala_target_glib',None):
 			pkg_args['atleast_version']=Options.options.vala_target_glib
 		self.check_cfg(**pkg_args)
-	if not self.env['HAVE_GTHREAD']:
+	if not self.env.HAVE_GTHREAD:
 		pkg_args={'package':'gthread-2.0','uselib_store':'GTHREAD','args':'--cflags --libs'}
 		if getattr(Options.options,'vala_target_glib',None):
 			pkg_args['atleast_version']=Options.options.vala_target_glib
@@ -205,12 +210,9 @@ def configure(self):
 	self.load('gnu_dirs')
 	self.check_vala_deps()
 	self.check_vala()
+	self.add_os_flags('VALAFLAGS')
+	self.env.append_unique('VALAFLAGS',['-C'])
 def options(opt):
 	opt.load('gnu_dirs')
 	valaopts=opt.add_option_group('Vala Compiler Options')
 	valaopts.add_option('--vala-target-glib',default=None,dest='vala_target_glib',metavar='MAJOR.MINOR',help='Target version of glib for Vala GObject code generation')
-
-extension('.vala','.gs')(vala_file)
-conf(find_valac)
-conf(check_vala)
-conf(check_vala_deps)
\ No newline at end of file
--- pugl-0~svn32+dfsg0.orig/waflib/Tools/waf_unit_test.py
+++ pugl-0~svn32+dfsg0/waflib/Tools/waf_unit_test.py
@@ -1,79 +1,172 @@
 #! /usr/bin/env python
 # encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
 
-import os,sys
-from waflib.TaskGen import feature,after_method
+import os,shlex,sys
+from waflib.TaskGen import feature,after_method,taskgen_method
 from waflib import Utils,Task,Logs,Options
+from waflib.Tools import ccroot
 testlock=Utils.threading.Lock()
+SCRIPT_TEMPLATE="""#! %(python)s
+import subprocess, sys
+cmd = %(cmd)r
+# if you want to debug with gdb:
+#cmd = ['gdb', '-args'] + cmd
+env = %(env)r
+status = subprocess.call(cmd, env=env, cwd=%(cwd)r, shell=isinstance(cmd, str))
+sys.exit(status)
+"""
+@taskgen_method
+def handle_ut_cwd(self,key):
+	cwd=getattr(self,key,None)
+	if cwd:
+		if isinstance(cwd,str):
+			if os.path.isabs(cwd):
+				self.ut_cwd=self.bld.root.make_node(cwd)
+			else:
+				self.ut_cwd=self.path.make_node(cwd)
+@feature('test_scripts')
+def make_interpreted_test(self):
+	for x in['test_scripts_source','test_scripts_template']:
+		if not hasattr(self,x):
+			Logs.warn('a test_scripts taskgen is missing %s'%x)
+			return
+	self.ut_run,lst=Task.compile_fun(self.test_scripts_template,shell=getattr(self,'test_scripts_shell',False))
+	script_nodes=self.to_nodes(self.test_scripts_source)
+	for script_node in script_nodes:
+		tsk=self.create_task('utest',[script_node])
+		tsk.vars=lst+tsk.vars
+		tsk.env['SCRIPT']=script_node.path_from(tsk.get_cwd())
+	self.handle_ut_cwd('test_scripts_cwd')
+	env=getattr(self,'test_scripts_env',None)
+	if env:
+		self.ut_env=env
+	else:
+		self.ut_env=dict(os.environ)
+	paths=getattr(self,'test_scripts_paths',{})
+	for(k,v)in paths.items():
+		p=self.ut_env.get(k,'').split(os.pathsep)
+		if isinstance(v,str):
+			v=v.split(os.pathsep)
+		self.ut_env[k]=os.pathsep.join(p+v)
+@feature('test')
+@after_method('apply_link','process_use')
 def make_test(self):
-	if getattr(self,'link_task',None):
-		self.create_task('utest',self.link_task.outputs)
+	if not getattr(self,'link_task',None):
+		return
+	tsk=self.create_task('utest',self.link_task.outputs)
+	if getattr(self,'ut_str',None):
+		self.ut_run,lst=Task.compile_fun(self.ut_str,shell=getattr(self,'ut_shell',False))
+		tsk.vars=lst+tsk.vars
+	self.handle_ut_cwd('ut_cwd')
+	if not hasattr(self,'ut_paths'):
+		paths=[]
+		for x in self.tmp_use_sorted:
+			try:
+				y=self.bld.get_tgen_by_name(x).link_task
+			except AttributeError:
+				pass
+			else:
+				if not isinstance(y,ccroot.stlink_task):
+					paths.append(y.outputs[0].parent.abspath())
+		self.ut_paths=os.pathsep.join(paths)+os.pathsep
+	if not hasattr(self,'ut_env'):
+		self.ut_env=dct=dict(os.environ)
+		def add_path(var):
+			dct[var]=self.ut_paths+dct.get(var,'')
+		if Utils.is_win32:
+			add_path('PATH')
+		elif Utils.unversioned_sys_platform()=='darwin':
+			add_path('DYLD_LIBRARY_PATH')
+			add_path('LD_LIBRARY_PATH')
+		else:
+			add_path('LD_LIBRARY_PATH')
+	if not hasattr(self,'ut_cmd'):
+		self.ut_cmd=getattr(Options.options,'testcmd',False)
+@taskgen_method
+def add_test_results(self,tup):
+	Logs.debug("ut: %r",tup)
+	try:
+		self.utest_results.append(tup)
+	except AttributeError:
+		self.utest_results=[tup]
+	try:
+		self.bld.utest_results.append(tup)
+	except AttributeError:
+		self.bld.utest_results=[tup]
+@Task.deep_inputs
 class utest(Task.Task):
 	color='PINK'
 	after=['vnum','inst']
 	vars=[]
 	def runnable_status(self):
+		if getattr(Options.options,'no_tests',False):
+			return Task.SKIP_ME
 		ret=super(utest,self).runnable_status()
 		if ret==Task.SKIP_ME:
 			if getattr(Options.options,'all_tests',False):
 				return Task.RUN_ME
 		return ret
+	def get_test_env(self):
+		return self.generator.ut_env
+	def post_run(self):
+		super(utest,self).post_run()
+		if getattr(Options.options,'clear_failed_tests',False)and self.waf_unit_test_results[1]:
+			self.generator.bld.task_sigs[self.uid()]=None
 	def run(self):
-		filename=self.inputs[0].abspath()
-		self.ut_exec=getattr(self,'ut_exec',[filename])
-		if getattr(self.generator,'ut_fun',None):
-			self.generator.ut_fun(self)
-		try:
-			fu=getattr(self.generator.bld,'all_test_paths')
-		except AttributeError:
-			fu=os.environ.copy()
-			self.generator.bld.all_test_paths=fu
-			lst=[]
-			for g in self.generator.bld.groups:
-				for tg in g:
-					if getattr(tg,'link_task',None):
-						lst.append(tg.link_task.outputs[0].parent.abspath())
-			def add_path(dct,path,var):
-				dct[var]=os.pathsep.join(Utils.to_list(path)+[os.environ.get(var,'')])
-			if Utils.is_win32:
-				add_path(fu,lst,'PATH')
-			elif Utils.unversioned_sys_platform()=='darwin':
-				add_path(fu,lst,'DYLD_LIBRARY_PATH')
-				add_path(fu,lst,'LD_LIBRARY_PATH')
-			else:
-				add_path(fu,lst,'LD_LIBRARY_PATH')
-		cwd=getattr(self.generator,'ut_cwd','')or self.inputs[0].parent.abspath()
-		proc=Utils.subprocess.Popen(self.ut_exec,cwd=cwd,env=fu,stderr=Utils.subprocess.PIPE,stdout=Utils.subprocess.PIPE)
+		if hasattr(self.generator,'ut_run'):
+			return self.generator.ut_run(self)
+		self.ut_exec=getattr(self.generator,'ut_exec',[self.inputs[0].abspath()])
+		ut_cmd=getattr(self.generator,'ut_cmd',False)
+		if ut_cmd:
+			self.ut_exec=shlex.split(ut_cmd%' '.join(self.ut_exec))
+		return self.exec_command(self.ut_exec)
+	def exec_command(self,cmd,**kw):
+		self.generator.bld.log_command(cmd,kw)
+		if getattr(Options.options,'dump_test_scripts',False):
+			script_code=SCRIPT_TEMPLATE%{'python':sys.executable,'env':self.get_test_env(),'cwd':self.get_cwd().abspath(),'cmd':cmd}
+			script_file=self.inputs[0].abspath()+'_run.py'
+			Utils.writef(script_file,script_code,encoding='utf-8')
+			os.chmod(script_file,Utils.O755)
+			if Logs.verbose>1:
+				Logs.info('Test debug file written as %r'%script_file)
+		proc=Utils.subprocess.Popen(cmd,cwd=self.get_cwd().abspath(),env=self.get_test_env(),stderr=Utils.subprocess.PIPE,stdout=Utils.subprocess.PIPE,shell=isinstance(cmd,str))
 		(stdout,stderr)=proc.communicate()
-		tup=(filename,proc.returncode,stdout,stderr)
-		self.generator.utest_result=tup
+		self.waf_unit_test_results=tup=(self.inputs[0].abspath(),proc.returncode,stdout,stderr)
 		testlock.acquire()
 		try:
-			bld=self.generator.bld
-			Logs.debug("ut: %r",tup)
-			try:
-				bld.utest_results.append(tup)
-			except AttributeError:
-				bld.utest_results=[tup]
+			return self.generator.add_test_results(tup)
 		finally:
 			testlock.release()
+	def get_cwd(self):
+		return getattr(self.generator,'ut_cwd',self.inputs[0].parent)
 def summary(bld):
 	lst=getattr(bld,'utest_results',[])
 	if lst:
 		Logs.pprint('CYAN','execution summary')
 		total=len(lst)
 		tfail=len([x for x in lst if x[1]])
-		Logs.pprint('CYAN','  tests that pass %d/%d'%(total-tfail,total))
+		Logs.pprint('GREEN','  tests that pass %d/%d'%(total-tfail,total))
 		for(f,code,out,err)in lst:
 			if not code:
-				Logs.pprint('CYAN','    %s'%f)
-		Logs.pprint('CYAN','  tests that fail %d/%d'%(tfail,total))
+				Logs.pprint('GREEN','    %s'%f)
+		Logs.pprint('GREEN'if tfail==0 else'RED','  tests that fail %d/%d'%(tfail,total))
 		for(f,code,out,err)in lst:
 			if code:
-				Logs.pprint('CYAN','    %s'%f)
+				Logs.pprint('RED','    %s'%f)
+def set_exit_code(bld):
+	lst=getattr(bld,'utest_results',[])
+	for(f,code,out,err)in lst:
+		if code:
+			msg=[]
+			if out:
+				msg.append('stdout:%s%s'%(os.linesep,out.decode('utf-8')))
+			if err:
+				msg.append('stderr:%s%s'%(os.linesep,err.decode('utf-8')))
+			bld.fatal(os.linesep.join(msg))
 def options(opt):
+	opt.add_option('--notests',action='store_true',default=False,help='Exec no unit tests',dest='no_tests')
 	opt.add_option('--alltests',action='store_true',default=False,help='Exec all unit tests',dest='all_tests')
-
-feature('test')(make_test)
-after_method('apply_link')(make_test)
\ No newline at end of file
+	opt.add_option('--clear-failed',action='store_true',default=False,help='Force failed unit tests to run again next time',dest='clear_failed_tests')
+	opt.add_option('--testcmd',action='store',default=False,dest='testcmd',help='Run the unit tests using the test-cmd string example "--testcmd="valgrind --error-exitcode=1 %s" to run under valgrind')
+	opt.add_option('--dump-test-scripts',action='store_true',default=False,help='Create python scripts to help debug tests',dest='dump_test_scripts')
--- pugl-0~svn32+dfsg0.orig/waflib/Tools/winres.py
+++ pugl-0~svn32+dfsg0/waflib/Tools/winres.py
@@ -1,34 +1,52 @@
 #! /usr/bin/env python
 # encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
 
+import re
 from waflib import Task
 from waflib.TaskGen import extension
+from waflib.Tools import c_preproc
+@extension('.rc')
 def rc_file(self,node):
 	obj_ext='.rc.o'
-	if self.env['WINRC_TGT_F']=='/fo':
+	if self.env.WINRC_TGT_F=='/fo':
 		obj_ext='.res'
 	rctask=self.create_task('winrc',node,node.change_ext(obj_ext))
 	try:
 		self.compiled_tasks.append(rctask)
 	except AttributeError:
 		self.compiled_tasks=[rctask]
+re_lines=re.compile(r'(?:^[ \t]*(#|%:)[ \t]*(ifdef|ifndef|if|else|elif|endif|include|import|define|undef|pragma)[ \t]*(.*?)\s*$)|'r'(?:^\w+[ \t]*(ICON|BITMAP|CURSOR|HTML|FONT|MESSAGETABLE|TYPELIB|REGISTRY|D3DFX)[ \t]*(.*?)\s*$)',re.IGNORECASE|re.MULTILINE)
+class rc_parser(c_preproc.c_parser):
+	def filter_comments(self,node):
+		code=node.read()
+		if c_preproc.use_trigraphs:
+			for(a,b)in c_preproc.trig_def:
+				code=code.split(a).join(b)
+		code=c_preproc.re_nl.sub('',code)
+		code=c_preproc.re_cpp.sub(c_preproc.repl,code)
+		ret=[]
+		for m in re.finditer(re_lines,code):
+			if m.group(2):
+				ret.append((m.group(2),m.group(3)))
+			else:
+				ret.append(('include',m.group(5)))
+		return ret
 class winrc(Task.Task):
 	run_str='${WINRC} ${WINRCFLAGS} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${WINRC_TGT_F} ${TGT} ${WINRC_SRC_F} ${SRC}'
 	color='BLUE'
+	def scan(self):
+		tmp=rc_parser(self.generator.includes_nodes)
+		tmp.start(self.inputs[0],self.env)
+		return(tmp.nodes,tmp.names)
 def configure(conf):
 	v=conf.env
-	v['WINRC_TGT_F']='-o'
-	v['WINRC_SRC_F']='-i'
-	if not conf.env.WINRC:
+	if not v.WINRC:
 		if v.CC_NAME=='msvc':
-			conf.find_program('RC',var='WINRC',path_list=v['PATH'])
-			v['WINRC_TGT_F']='/fo'
-			v['WINRC_SRC_F']=''
+			conf.find_program('RC',var='WINRC',path_list=v.PATH)
+			v.WINRC_TGT_F='/fo'
+			v.WINRC_SRC_F=''
 		else:
-			conf.find_program('windres',var='WINRC',path_list=v['PATH'])
-	if not conf.env.WINRC:
-		conf.fatal('winrc was not found!')
-	v['WINRCFLAGS']=[]
-
-extension('.rc')(rc_file)
\ No newline at end of file
+			conf.find_program('windres',var='WINRC',path_list=v.PATH)
+			v.WINRC_TGT_F='-o'
+			v.WINRC_SRC_F='-i'
--- pugl-0~svn32+dfsg0.orig/waflib/Tools/xlc.py
+++ pugl-0~svn32+dfsg0/waflib/Tools/xlc.py
@@ -1,39 +1,40 @@
 #! /usr/bin/env python
 # encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
 
 from waflib.Tools import ccroot,ar
 from waflib.Configure import conf
+@conf
 def find_xlc(conf):
 	cc=conf.find_program(['xlc_r','xlc'],var='CC')
-	cc=conf.cmd_to_list(cc)
 	conf.get_xlc_version(cc)
 	conf.env.CC_NAME='xlc'
-	conf.env.CC=cc
+@conf
 def xlc_common_flags(conf):
 	v=conf.env
-	v['CC_SRC_F']=[]
-	v['CC_TGT_F']=['-c','-o']
-	if not v['LINK_CC']:v['LINK_CC']=v['CC']
-	v['CCLNK_SRC_F']=[]
-	v['CCLNK_TGT_F']=['-o']
-	v['CPPPATH_ST']='-I%s'
-	v['DEFINES_ST']='-D%s'
-	v['LIB_ST']='-l%s'
-	v['LIBPATH_ST']='-L%s'
-	v['STLIB_ST']='-l%s'
-	v['STLIBPATH_ST']='-L%s'
-	v['RPATH_ST']='-Wl,-rpath,%s'
-	v['SONAME_ST']=[]
-	v['SHLIB_MARKER']=[]
-	v['STLIB_MARKER']=[]
-	v['LINKFLAGS_cprogram']=['-Wl,-brtl']
-	v['cprogram_PATTERN']='%s'
-	v['CFLAGS_cshlib']=['-fPIC']
-	v['LINKFLAGS_cshlib']=['-G','-Wl,-brtl,-bexpfull']
-	v['cshlib_PATTERN']='lib%s.so'
-	v['LINKFLAGS_cstlib']=[]
-	v['cstlib_PATTERN']='lib%s.a'
+	v.CC_SRC_F=[]
+	v.CC_TGT_F=['-c','-o']
+	if not v.LINK_CC:
+		v.LINK_CC=v.CC
+	v.CCLNK_SRC_F=[]
+	v.CCLNK_TGT_F=['-o']
+	v.CPPPATH_ST='-I%s'
+	v.DEFINES_ST='-D%s'
+	v.LIB_ST='-l%s'
+	v.LIBPATH_ST='-L%s'
+	v.STLIB_ST='-l%s'
+	v.STLIBPATH_ST='-L%s'
+	v.RPATH_ST='-Wl,-rpath,%s'
+	v.SONAME_ST=[]
+	v.SHLIB_MARKER=[]
+	v.STLIB_MARKER=[]
+	v.LINKFLAGS_cprogram=['-Wl,-brtl']
+	v.cprogram_PATTERN='%s'
+	v.CFLAGS_cshlib=['-fPIC']
+	v.LINKFLAGS_cshlib=['-G','-Wl,-brtl,-bexpfull']
+	v.cshlib_PATTERN='lib%s.so'
+	v.LINKFLAGS_cstlib=[]
+	v.cstlib_PATTERN='lib%s.a'
 def configure(conf):
 	conf.find_xlc()
 	conf.find_ar()
@@ -41,6 +42,3 @@ def configure(conf):
 	conf.cc_load_tools()
 	conf.cc_add_flags()
 	conf.link_add_flags()
-
-conf(find_xlc)
-conf(xlc_common_flags)
\ No newline at end of file
--- pugl-0~svn32+dfsg0.orig/waflib/Tools/xlcxx.py
+++ pugl-0~svn32+dfsg0/waflib/Tools/xlcxx.py
@@ -1,39 +1,40 @@
 #! /usr/bin/env python
 # encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
 
 from waflib.Tools import ccroot,ar
 from waflib.Configure import conf
+@conf
 def find_xlcxx(conf):
 	cxx=conf.find_program(['xlc++_r','xlc++'],var='CXX')
-	cxx=conf.cmd_to_list(cxx)
 	conf.get_xlc_version(cxx)
 	conf.env.CXX_NAME='xlc++'
-	conf.env.CXX=cxx
+@conf
 def xlcxx_common_flags(conf):
 	v=conf.env
-	v['CXX_SRC_F']=[]
-	v['CXX_TGT_F']=['-c','-o']
-	if not v['LINK_CXX']:v['LINK_CXX']=v['CXX']
-	v['CXXLNK_SRC_F']=[]
-	v['CXXLNK_TGT_F']=['-o']
-	v['CPPPATH_ST']='-I%s'
-	v['DEFINES_ST']='-D%s'
-	v['LIB_ST']='-l%s'
-	v['LIBPATH_ST']='-L%s'
-	v['STLIB_ST']='-l%s'
-	v['STLIBPATH_ST']='-L%s'
-	v['RPATH_ST']='-Wl,-rpath,%s'
-	v['SONAME_ST']=[]
-	v['SHLIB_MARKER']=[]
-	v['STLIB_MARKER']=[]
-	v['LINKFLAGS_cxxprogram']=['-Wl,-brtl']
-	v['cxxprogram_PATTERN']='%s'
-	v['CXXFLAGS_cxxshlib']=['-fPIC']
-	v['LINKFLAGS_cxxshlib']=['-G','-Wl,-brtl,-bexpfull']
-	v['cxxshlib_PATTERN']='lib%s.so'
-	v['LINKFLAGS_cxxstlib']=[]
-	v['cxxstlib_PATTERN']='lib%s.a'
+	v.CXX_SRC_F=[]
+	v.CXX_TGT_F=['-c','-o']
+	if not v.LINK_CXX:
+		v.LINK_CXX=v.CXX
+	v.CXXLNK_SRC_F=[]
+	v.CXXLNK_TGT_F=['-o']
+	v.CPPPATH_ST='-I%s'
+	v.DEFINES_ST='-D%s'
+	v.LIB_ST='-l%s'
+	v.LIBPATH_ST='-L%s'
+	v.STLIB_ST='-l%s'
+	v.STLIBPATH_ST='-L%s'
+	v.RPATH_ST='-Wl,-rpath,%s'
+	v.SONAME_ST=[]
+	v.SHLIB_MARKER=[]
+	v.STLIB_MARKER=[]
+	v.LINKFLAGS_cxxprogram=['-Wl,-brtl']
+	v.cxxprogram_PATTERN='%s'
+	v.CXXFLAGS_cxxshlib=['-fPIC']
+	v.LINKFLAGS_cxxshlib=['-G','-Wl,-brtl,-bexpfull']
+	v.cxxshlib_PATTERN='lib%s.so'
+	v.LINKFLAGS_cxxstlib=[]
+	v.cxxstlib_PATTERN='lib%s.a'
 def configure(conf):
 	conf.find_xlcxx()
 	conf.find_ar()
@@ -41,6 +42,3 @@ def configure(conf):
 	conf.cxx_load_tools()
 	conf.cxx_add_flags()
 	conf.link_add_flags()
-
-conf(find_xlcxx)
-conf(xlcxx_common_flags)
\ No newline at end of file
--- pugl-0~svn32+dfsg0.orig/waflib/Utils.py
+++ pugl-0~svn32+dfsg0/waflib/Utils.py
@@ -1,43 +1,51 @@
 #! /usr/bin/env python
 # encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
 
-import os,sys,errno,traceback,inspect,re,shutil,datetime,gc
+from __future__ import with_statement
+import atexit,os,sys,errno,inspect,re,datetime,platform,base64,signal,functools,time
 try:
-	import subprocess
-except:
+	import cPickle
+except ImportError:
+	import pickle as cPickle
+if os.name=='posix'and sys.version_info[0]<3:
 	try:
-		import waflib.extras.subprocess as subprocess
-	except:
-		print("The subprocess module is missing (python2.3?):\n try calling 'waf update --files=subprocess'\n or add a copy of subprocess.py to the python libraries")
+		import subprocess32 as subprocess
+	except ImportError:
+		import subprocess
+else:
+	import subprocess
 try:
-	from collections import deque
-except ImportError:
-	class deque(list):
-		def popleft(self):
-			return self.pop(0)
+	TimeoutExpired=subprocess.TimeoutExpired
+except AttributeError:
+	class TimeoutExpired(Exception):
+		pass
+from collections import deque,defaultdict
 try:
 	import _winreg as winreg
-except:
+except ImportError:
 	try:
 		import winreg
-	except:
+	except ImportError:
 		winreg=None
 from waflib import Errors
 try:
-	from collections import UserDict
-except:
-	from UserDict import UserDict
-try:
 	from hashlib import md5
-except:
+except ImportError:
 	try:
-		from md5 import md5
-	except:
+		from hashlib import sha1 as md5
+	except ImportError:
 		pass
+else:
+	try:
+		md5().digest()
+	except ValueError:
+		from hashlib import sha1 as md5
 try:
 	import threading
-except:
+except ImportError:
+	if not'JOBS'in os.environ:
+		os.environ['JOBS']='1'
 	class threading(object):
 		pass
 	class Lock(object):
@@ -46,59 +54,176 @@ except:
 		def release(self):
 			pass
 	threading.Lock=threading.Thread=Lock
-else:
-	run_old=threading.Thread.run
-	def run(*args,**kwargs):
-		try:
-			run_old(*args,**kwargs)
-		except(KeyboardInterrupt,SystemExit):
-			raise
-		except:
-			sys.excepthook(*sys.exc_info())
-	threading.Thread.run=run
-SIG_NIL='iluvcuteoverload'
+SIG_NIL='SIG_NIL_SIG_NIL_'.encode()
 O644=420
 O755=493
 rot_chr=['\\','|','/','-']
 rot_idx=0
-try:
-	from collections import defaultdict
-except ImportError:
-	class defaultdict(dict):
-		def __init__(self,default_factory):
-			super(defaultdict,self).__init__()
-			self.default_factory=default_factory
-		def __getitem__(self,key):
-			try:
-				return super(defaultdict,self).__getitem__(key)
-			except KeyError:
-				value=self.default_factory()
-				self[key]=value
-				return value
-is_win32=sys.platform in('win32','cli')
-indicator='\x1b[K%s%s%s\r'
-if is_win32 and'NOCOLOR'in os.environ:
-	indicator='%s%s%s\r'
-def readf(fname,m='r'):
-	f=open(fname,m)
-	try:
-		txt=f.read()
-	finally:
-		f.close()
+class ordered_iter_dict(dict):
+	def __init__(self,*k,**kw):
+		self.lst=deque()
+		dict.__init__(self,*k,**kw)
+	def clear(self):
+		dict.clear(self)
+		self.lst=deque()
+	def __setitem__(self,key,value):
+		if key in dict.keys(self):
+			self.lst.remove(key)
+		dict.__setitem__(self,key,value)
+		self.lst.append(key)
+	def __delitem__(self,key):
+		dict.__delitem__(self,key)
+		try:
+			self.lst.remove(key)
+		except ValueError:
+			pass
+	def __iter__(self):
+		return reversed(self.lst)
+	def keys(self):
+		return reversed(self.lst)
+class lru_node(object):
+	__slots__=('next','prev','key','val')
+	def __init__(self):
+		self.next=self
+		self.prev=self
+		self.key=None
+		self.val=None
+class lru_cache(object):
+	__slots__=('maxlen','table','head')
+	def __init__(self,maxlen=100):
+		self.maxlen=maxlen
+		self.table={}
+		self.head=lru_node()
+		self.head.next=self.head
+		self.head.prev=self.head
+	def __getitem__(self,key):
+		node=self.table[key]
+		if node is self.head:
+			return node.val
+		node.prev.next=node.next
+		node.next.prev=node.prev
+		node.next=self.head.next
+		node.prev=self.head
+		self.head=node.next.prev=node.prev.next=node
+		return node.val
+	def __setitem__(self,key,val):
+		if key in self.table:
+			node=self.table[key]
+			node.val=val
+			self.__getitem__(key)
+		else:
+			if len(self.table)<self.maxlen:
+				node=lru_node()
+				node.prev=self.head
+				node.next=self.head.next
+				node.prev.next=node.next.prev=node
+			else:
+				node=self.head=self.head.next
+				try:
+					del self.table[node.key]
+				except KeyError:
+					pass
+			node.key=key
+			node.val=val
+			self.table[key]=node
+class lazy_generator(object):
+	def __init__(self,fun,params):
+		self.fun=fun
+		self.params=params
+	def __iter__(self):
+		return self
+	def __next__(self):
+		try:
+			it=self.it
+		except AttributeError:
+			it=self.it=self.fun(*self.params)
+		return next(it)
+	next=__next__
+is_win32=os.sep=='\\'or sys.platform=='win32'or os.name=='nt'
+def readf(fname,m='r',encoding='latin-1'):
+	if sys.hexversion>0x3000000 and not'b'in m:
+		m+='b'
+		with open(fname,m)as f:
+			txt=f.read()
+		if encoding:
+			txt=txt.decode(encoding)
+		else:
+			txt=txt.decode()
+	else:
+		with open(fname,m)as f:
+			txt=f.read()
 	return txt
-def h_file(filename):
-	f=open(filename,'rb')
+def writef(fname,data,m='w',encoding='latin-1'):
+	if sys.hexversion>0x3000000 and not'b'in m:
+		data=data.encode(encoding)
+		m+='b'
+	with open(fname,m)as f:
+		f.write(data)
+def h_file(fname):
 	m=md5()
+	with open(fname,'rb')as f:
+		while fname:
+			fname=f.read(200000)
+			m.update(fname)
+	return m.digest()
+def readf_win32(f,m='r',encoding='latin-1'):
+	flags=os.O_NOINHERIT|os.O_RDONLY
+	if'b'in m:
+		flags|=os.O_BINARY
+	if'+'in m:
+		flags|=os.O_RDWR
+	try:
+		fd=os.open(f,flags)
+	except OSError:
+		raise IOError('Cannot read from %r'%f)
+	if sys.hexversion>0x3000000 and not'b'in m:
+		m+='b'
+		with os.fdopen(fd,m)as f:
+			txt=f.read()
+		if encoding:
+			txt=txt.decode(encoding)
+		else:
+			txt=txt.decode()
+	else:
+		with os.fdopen(fd,m)as f:
+			txt=f.read()
+	return txt
+def writef_win32(f,data,m='w',encoding='latin-1'):
+	if sys.hexversion>0x3000000 and not'b'in m:
+		data=data.encode(encoding)
+		m+='b'
+	flags=os.O_CREAT|os.O_TRUNC|os.O_WRONLY|os.O_NOINHERIT
+	if'b'in m:
+		flags|=os.O_BINARY
+	if'+'in m:
+		flags|=os.O_RDWR
 	try:
-		while filename:
-			filename=f.read(100000)
-			m.update(filename)
-	finally:
-		f.close()
+		fd=os.open(f,flags)
+	except OSError:
+		raise OSError('Cannot write to %r'%f)
+	with os.fdopen(fd,m)as f:
+		f.write(data)
+def h_file_win32(fname):
+	try:
+		fd=os.open(fname,os.O_BINARY|os.O_RDONLY|os.O_NOINHERIT)
+	except OSError:
+		raise OSError('Cannot read from %r'%fname)
+	m=md5()
+	with os.fdopen(fd,'rb')as f:
+		while fname:
+			fname=f.read(200000)
+			m.update(fname)
 	return m.digest()
+readf_unix=readf
+writef_unix=writef
+h_file_unix=h_file
+if hasattr(os,'O_NOINHERIT')and sys.hexversion<0x3040000:
+	readf=readf_win32
+	writef=writef_win32
+	h_file=h_file_win32
 try:
 	x=''.encode('hex')
-except:
+except LookupError:
 	import binascii
 	def to_hex(s):
 		ret=binascii.hexlify(s)
@@ -114,27 +239,27 @@ Return the hexadecimal representation of
 :param s: string to convert
 :type s: string
 """
+def listdir_win32(s):
+	if not s:
+		try:
+			import ctypes
+		except ImportError:
+			return[x+':\\'for x in'ABCDEFGHIJKLMNOPQRSTUVWXYZ']
+		else:
+			dlen=4
+			maxdrives=26
+			buf=ctypes.create_string_buffer(maxdrives*dlen)
+			ndrives=ctypes.windll.kernel32.GetLogicalDriveStringsA(maxdrives*dlen,ctypes.byref(buf))
+			return[str(buf.raw[4*i:4*i+2].decode('ascii'))for i in range(int(ndrives/dlen))]
+	if len(s)==2 and s[1]==":":
+		s+=os.sep
+	if not os.path.isdir(s):
+		e=OSError('%s is not a directory'%s)
+		e.errno=errno.ENOENT
+		raise e
+	return os.listdir(s)
 listdir=os.listdir
 if is_win32:
-	def listdir_win32(s):
-		if not s:
-			try:
-				import ctypes
-			except:
-				return[x+':\\'for x in list('ABCDEFGHIJKLMNOPQRSTUVWXYZ')]
-			else:
-				dlen=4
-				maxdrives=26
-				buf=ctypes.create_string_buffer(maxdrives*dlen)
-				ndrives=ctypes.windll.kernel32.GetLogicalDriveStringsA(maxdrives,ctypes.byref(buf))
-				return[buf.raw[4*i:4*i+3].decode('ascii')for i in range(int(ndrives/dlen))]
-		if len(s)==2 and s[1]==":":
-			s+=os.sep
-		if not os.path.isdir(s):
-			e=OSError()
-			e.errno=errno.ENOENT
-			raise e
-		return os.listdir(s)
 	listdir=listdir_win32
 def num2ver(ver):
 	if isinstance(ver,str):
@@ -146,27 +271,26 @@ def num2ver(ver):
 				ret+=256**(3-i)*int(ver[i])
 		return ret
 	return ver
-def ex_stack():
-	exc_type,exc_value,tb=sys.exc_info()
-	exc_lines=traceback.format_exception(exc_type,exc_value,tb)
-	return''.join(exc_lines)
-def to_list(sth):
-	if isinstance(sth,str):
-		return sth.split()
-	else:
-		return sth
-re_nl=re.compile('\r*\n',re.M)
-def str_to_dict(txt):
-	tbl={}
-	lines=re_nl.split(txt)
-	for x in lines:
-		x=x.strip()
-		if not x or x.startswith('#')or x.find('=')<0:
-			continue
-		tmp=x.split('=')
-		tbl[tmp[0].strip()]='='.join(tmp[1:]).strip()
-	return tbl
-def split_path(path):
+def to_list(val):
+	if isinstance(val,str):
+		return val.split()
+	else:
+		return val
+def console_encoding():
+	try:
+		import ctypes
+	except ImportError:
+		pass
+	else:
+		try:
+			codepage=ctypes.windll.kernel32.GetConsoleCP()
+		except AttributeError:
+			pass
+		else:
+			if codepage:
+				return'cp%d'%codepage
+	return sys.stdout.encoding or('cp1252'if is_win32 else'latin-1')
+def split_path_unix(path):
 	return path.split('/')
 def split_path_cygwin(path):
 	if path.startswith('//'):
@@ -174,56 +298,113 @@ def split_path_cygwin(path):
 		ret[0]='/'+ret[0]
 		return ret
 	return path.split('/')
-re_sp=re.compile('[/\\\\]')
+re_sp=re.compile('[/\\\\]+')
 def split_path_win32(path):
 	if path.startswith('\\\\'):
-		ret=re.split(re_sp,path)[2:]
-		ret[0]='\\'+ret[0]
+		ret=re_sp.split(path)[1:]
+		ret[0]='\\\\'+ret[0]
+		if ret[0]=='\\\\?':
+			return ret[1:]
 		return ret
-	return re.split(re_sp,path)
+	return re_sp.split(path)
+msysroot=None
+def split_path_msys(path):
+	if path.startswith(('/','\\'))and not path.startswith(('//','\\\\')):
+		global msysroot
+		if not msysroot:
+			msysroot=subprocess.check_output(['cygpath','-w','/']).decode(sys.stdout.encoding or'latin-1')
+			msysroot=msysroot.strip()
+		path=os.path.normpath(msysroot+os.sep+path)
+	return split_path_win32(path)
 if sys.platform=='cygwin':
 	split_path=split_path_cygwin
 elif is_win32:
-	split_path=split_path_win32
+	if os.environ.get('MSYSTEM')and sys.executable.startswith('/'):
+		split_path=split_path_msys
+	else:
+		split_path=split_path_win32
+else:
+	split_path=split_path_unix
 split_path.__doc__="""
-Split a path by / or \\. This function is not like os.path.split
Splits a path by / or \\; do not confuse this function with ``os.path.split``
 
 :type  path: string
 :param path: path to split
-:return:     list of strings
+:return:     list of string
 """
 def check_dir(path):
 	if not os.path.isdir(path):
 		try:
 			os.makedirs(path)
-		except OSError ,e:
+		except OSError as e:
 			if not os.path.isdir(path):
 				raise Errors.WafError('Cannot create the folder %r'%path,ex=e)
+def check_exe(name,env=None):
+	if not name:
+		raise ValueError('Cannot execute an empty string!')
+	def is_exe(fpath):
+		return os.path.isfile(fpath)and os.access(fpath,os.X_OK)
+	fpath,fname=os.path.split(name)
+	if fpath and is_exe(name):
+		return os.path.abspath(name)
+	else:
+		env=env or os.environ
+		for path in env['PATH'].split(os.pathsep):
+			path=path.strip('"')
+			exe_file=os.path.join(path,name)
+			if is_exe(exe_file):
+				return os.path.abspath(exe_file)
+	return None
 def def_attrs(cls,**kw):
 	for k,v in kw.items():
 		if not hasattr(cls,k):
 			setattr(cls,k,v)
 def quote_define_name(s):
-	fu=re.compile("[^a-zA-Z0-9]").sub("_",s)
+	fu=re.sub('[^a-zA-Z0-9]','_',s)
+	fu=re.sub('_+','_',fu)
 	fu=fu.upper()
 	return fu
+re_sh=re.compile('\\s|\'|"')
+def shell_escape(cmd):
+	if isinstance(cmd,str):
+		return cmd
+	return' '.join(repr(x)if re_sh.search(x)else x for x in cmd)
 def h_list(lst):
-	m=md5()
-	m.update(str(lst))
-	return m.digest()
+	return md5(repr(lst).encode()).digest()
+if sys.hexversion<0x3000000:
+	def h_list_python2(lst):
+		return md5(repr(lst)).digest()
+	h_list_python2.__doc__=h_list.__doc__
+	h_list=h_list_python2
 def h_fun(fun):
 	try:
 		return fun.code
 	except AttributeError:
+		if isinstance(fun,functools.partial):
+			code=list(fun.args)
+			code.extend(sorted(fun.keywords.items()))
+			code.append(h_fun(fun.func))
+			fun.code=h_list(code)
+			return fun.code
 		try:
 			h=inspect.getsource(fun)
-		except IOError:
-			h="nocode"
+		except EnvironmentError:
+			h='nocode'
 		try:
 			fun.code=h
 		except AttributeError:
 			pass
 		return h
+def h_cmd(ins):
+	if isinstance(ins,str):
+		ret=ins
+	elif isinstance(ins,list)or isinstance(ins,tuple):
+		ret=str([h_cmd(x)for x in ins])
+	else:
+		ret=str(h_fun(ins))
+	if sys.hexversion>0x3000000:
+		ret=ret.encode('latin-1','xmlcharrefreplace')
+	return ret
 reg_subst=re.compile(r"(\\\\)|(\$\$)|\$\{([^}]+)\}")
 def subst_vars(expr,params):
 	def repl_var(m):
@@ -244,7 +425,7 @@ def destos_to_binfmt(key):
 	return'elf'
 def unversioned_sys_platform():
 	s=sys.platform
-	if s=='java':
+	if s.startswith('java'):
 		from java.lang import System
 		s=System.getProperty('os.name')
 		if s=='Mac OS X':
@@ -254,25 +435,30 @@ def unversioned_sys_platform():
 		elif s=='OS/2':
 			return'os2'
 		elif s=='HP-UX':
-			return'hpux'
+			return'hp-ux'
 		elif s in('SunOS','Solaris'):
 			return'sunos'
 		else:s=s.lower()
 	if s=='powerpc':
 		return'darwin'
-	if s=='win32'or s.endswith('os2')and s!='sunos2':return s
-	return re.split('\d+$',s)[0]
+	if s=='win32'or s=='os2':
+		return s
+	if s=='cli'and os.name=='nt':
+		return'win32'
+	return re.split(r'\d+$',s)[0]
 def nada(*k,**kw):
 	pass
 class Timer(object):
 	def __init__(self):
-		self.start_time=datetime.datetime.utcnow()
+		self.start_time=self.now()
 	def __str__(self):
-		delta=datetime.datetime.utcnow()-self.start_time
-		days=int(delta.days)
-		hours=delta.seconds//3600
-		minutes=(delta.seconds-hours*3600)//60
-		seconds=delta.seconds-hours*3600-minutes*60+float(delta.microseconds)/1000/1000
+		delta=self.now()-self.start_time
+		if not isinstance(delta,datetime.timedelta):
+			delta=datetime.timedelta(seconds=delta)
+		days=delta.days
+		hours,rem=divmod(delta.seconds,3600)
+		minutes,seconds=divmod(rem,60)
+		seconds+=delta.microseconds*1e-6
 		result=''
 		if days:
 			result+='%dd'%days
@@ -281,18 +467,11 @@ class Timer(object):
 		if days or hours or minutes:
 			result+='%dm'%minutes
 		return'%s%.3fs'%(result,seconds)
-if is_win32:
-	old=shutil.copy2
-	def copy2(src,dst):
-		old(src,dst)
-		shutil.copystat(src,dst)
-	setattr(shutil,'copy2',copy2)
-if os.name=='java':
-	try:
-		gc.disable()
-		gc.enable()
-	except NotImplementedError:
-		gc.disable=gc.enable
+	def now(self):
+		return datetime.datetime.utcnow()
+	if hasattr(time,'perf_counter'):
+		def now(self):
+			return time.perf_counter()
 def read_la_file(path):
 	sp=re.compile(r'^([^=]+)=\'(.*)\'$')
 	dc={}
@@ -303,34 +482,144 @@ def read_la_file(path):
 		except ValueError:
 			pass
 	return dc
-def nogc(fun):
-	def f(*k,**kw):
-		try:
-			gc.disable()
-			ret=fun(*k,**kw)
-		finally:
-			gc.enable()
-		return ret
-	f.__doc__=fun.__doc__
-	return f
 def run_once(fun):
 	cache={}
-	def wrap(k):
+	def wrap(*k):
 		try:
 			return cache[k]
 		except KeyError:
-			ret=fun(k)
+			ret=fun(*k)
 			cache[k]=ret
 			return ret
 	wrap.__cache__=cache
+	wrap.__name__=fun.__name__
 	return wrap
 def get_registry_app_path(key,filename):
 	if not winreg:
 		return None
 	try:
 		result=winreg.QueryValue(key,"Software\\Microsoft\\Windows\\CurrentVersion\\App Paths\\%s.exe"%filename[0])
-	except WindowsError:
+	except OSError:
 		pass
 	else:
 		if os.path.isfile(result):
 			return result
+def lib64():
+	if os.sep=='/':
+		if platform.architecture()[0]=='64bit':
+			if os.path.exists('/usr/lib64')and not os.path.exists('/usr/lib32'):
+				return'64'
+	return''
+def sane_path(p):
+	return os.path.abspath(os.path.expanduser(p))
+process_pool=[]
+def get_process():
+	try:
+		return process_pool.pop()
+	except IndexError:
+		filepath=os.path.dirname(os.path.abspath(__file__))+os.sep+'processor.py'
+		cmd=[sys.executable,'-c',readf(filepath)]
+		return subprocess.Popen(cmd,stdout=subprocess.PIPE,stdin=subprocess.PIPE,bufsize=0,close_fds=not is_win32)
+def run_prefork_process(cmd,kwargs,cargs):
+	if not kwargs.get('env'):
+		kwargs['env']=dict(os.environ)
+	try:
+		obj=base64.b64encode(cPickle.dumps([cmd,kwargs,cargs]))
+	except(TypeError,AttributeError):
+		return run_regular_process(cmd,kwargs,cargs)
+	proc=get_process()
+	if not proc:
+		return run_regular_process(cmd,kwargs,cargs)
+	proc.stdin.write(obj)
+	proc.stdin.write('\n'.encode())
+	proc.stdin.flush()
+	obj=proc.stdout.readline()
+	if not obj:
+		raise OSError('Preforked sub-process %r died'%proc.pid)
+	process_pool.append(proc)
+	lst=cPickle.loads(base64.b64decode(obj))
+	assert len(lst)==5
+	ret,out,err,ex,trace=lst
+	if ex:
+		if ex=='OSError':
+			raise OSError(trace)
+		elif ex=='ValueError':
+			raise ValueError(trace)
+		elif ex=='TimeoutExpired':
+			exc=TimeoutExpired(cmd,timeout=cargs['timeout'],output=out)
+			exc.stderr=err
+			raise exc
+		else:
+			raise Exception(trace)
+	return ret,out,err
+def lchown(path,user=-1,group=-1):
+	if isinstance(user,str):
+		import pwd
+		entry=pwd.getpwnam(user)
+		if not entry:
+			raise OSError('Unknown user %r'%user)
+		user=entry[2]
+	if isinstance(group,str):
+		import grp
+		entry=grp.getgrnam(group)
+		if not entry:
+			raise OSError('Unknown group %r'%group)
+		group=entry[2]
+	return os.lchown(path,user,group)
+def run_regular_process(cmd,kwargs,cargs={}):
+	proc=subprocess.Popen(cmd,**kwargs)
+	if kwargs.get('stdout')or kwargs.get('stderr'):
+		try:
+			out,err=proc.communicate(**cargs)
+		except TimeoutExpired:
+			if kwargs.get('start_new_session')and hasattr(os,'killpg'):
+				os.killpg(proc.pid,signal.SIGKILL)
+			else:
+				proc.kill()
+			out,err=proc.communicate()
+			exc=TimeoutExpired(proc.args,timeout=cargs['timeout'],output=out)
+			exc.stderr=err
+			raise exc
+		status=proc.returncode
+	else:
+		out,err=(None,None)
+		try:
+			status=proc.wait(**cargs)
+		except TimeoutExpired as e:
+			if kwargs.get('start_new_session')and hasattr(os,'killpg'):
+				os.killpg(proc.pid,signal.SIGKILL)
+			else:
+				proc.kill()
+			proc.wait()
+			raise e
+	return status,out,err
+def run_process(cmd,kwargs,cargs={}):
+	if kwargs.get('stdout')and kwargs.get('stderr'):
+		return run_prefork_process(cmd,kwargs,cargs)
+	else:
+		return run_regular_process(cmd,kwargs,cargs)
+def alloc_process_pool(n,force=False):
+	global run_process,get_process,alloc_process_pool
+	if not force:
+		n=max(n-len(process_pool),0)
+	try:
+		lst=[get_process()for x in range(n)]
+	except OSError:
+		run_process=run_regular_process
+		get_process=alloc_process_pool=nada
+	else:
+		for x in lst:
+			process_pool.append(x)
+def atexit_pool():
+	for k in process_pool:
+		try:
+			os.kill(k.pid,9)
+		except OSError:
+			pass
+		else:
+			k.wait()
+if(sys.hexversion<0x207000f and not is_win32)or sys.hexversion>=0x306000f:
+	atexit.register(atexit_pool)
+if os.environ.get('WAF_NO_PREFORK')or sys.platform=='cli'or not sys.executable:
+	run_process=run_regular_process
+	get_process=alloc_process_pool=nada
--- pugl-0~svn32+dfsg0.orig/waflib/__init__.py
+++ pugl-0~svn32+dfsg0/waflib/__init__.py
@@ -1,4 +1,4 @@
 #! /usr/bin/env python
 # encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
 
--- pugl-0~svn32+dfsg0.orig/waflib/ansiterm.py
+++ pugl-0~svn32+dfsg0/waflib/ansiterm.py
@@ -1,52 +1,84 @@
 #! /usr/bin/env python
 # encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
 
-import sys,os
+import os,re,sys
+from waflib import Utils
+wlock=Utils.threading.Lock()
 try:
-	if not(sys.stderr.isatty()and sys.stdout.isatty()):
-		raise ValueError('not a tty')
-	from ctypes import*
+	from ctypes import Structure,windll,c_short,c_ushort,c_ulong,c_int,byref,c_wchar,POINTER,c_long
+except ImportError:
+	class AnsiTerm(object):
+		def __init__(self,stream):
+			self.stream=stream
+			try:
+				self.errors=self.stream.errors
+			except AttributeError:
+				pass
+			self.encoding=self.stream.encoding
+		def write(self,txt):
+			try:
+				wlock.acquire()
+				self.stream.write(txt)
+				self.stream.flush()
+			finally:
+				wlock.release()
+		def fileno(self):
+			return self.stream.fileno()
+		def flush(self):
+			self.stream.flush()
+		def isatty(self):
+			return self.stream.isatty()
+else:
 	class COORD(Structure):
 		_fields_=[("X",c_short),("Y",c_short)]
 	class SMALL_RECT(Structure):
 		_fields_=[("Left",c_short),("Top",c_short),("Right",c_short),("Bottom",c_short)]
 	class CONSOLE_SCREEN_BUFFER_INFO(Structure):
-		_fields_=[("Size",COORD),("CursorPosition",COORD),("Attributes",c_short),("Window",SMALL_RECT),("MaximumWindowSize",COORD)]
+		_fields_=[("Size",COORD),("CursorPosition",COORD),("Attributes",c_ushort),("Window",SMALL_RECT),("MaximumWindowSize",COORD)]
 	class CONSOLE_CURSOR_INFO(Structure):
 		_fields_=[('dwSize',c_ulong),('bVisible',c_int)]
-	sbinfo=CONSOLE_SCREEN_BUFFER_INFO()
-	csinfo=CONSOLE_CURSOR_INFO()
-	hconsole=windll.kernel32.GetStdHandle(-11)
-	windll.kernel32.GetConsoleScreenBufferInfo(hconsole,byref(sbinfo))
-	if sbinfo.Size.X<9 or sbinfo.Size.Y<9:raise ValueError('small console')
-	windll.kernel32.GetConsoleCursorInfo(hconsole,byref(csinfo))
-except Exception:
-	pass
-else:
-	import re,threading
-	is_vista=getattr(sys,"getwindowsversion",None)and sys.getwindowsversion()[0]>=6
 	try:
 		_type=unicode
-	except:
+	except NameError:
 		_type=str
 	to_int=lambda number,default:number and int(number)or default
-	wlock=threading.Lock()
 	STD_OUTPUT_HANDLE=-11
 	STD_ERROR_HANDLE=-12
+	windll.kernel32.GetStdHandle.argtypes=[c_ulong]
+	windll.kernel32.GetStdHandle.restype=c_ulong
+	windll.kernel32.GetConsoleScreenBufferInfo.argtypes=[c_ulong,POINTER(CONSOLE_SCREEN_BUFFER_INFO)]
+	windll.kernel32.GetConsoleScreenBufferInfo.restype=c_long
+	windll.kernel32.SetConsoleTextAttribute.argtypes=[c_ulong,c_ushort]
+	windll.kernel32.SetConsoleTextAttribute.restype=c_long
+	windll.kernel32.FillConsoleOutputCharacterW.argtypes=[c_ulong,c_wchar,c_ulong,POINTER(COORD),POINTER(c_ulong)]
+	windll.kernel32.FillConsoleOutputCharacterW.restype=c_long
+	windll.kernel32.FillConsoleOutputAttribute.argtypes=[c_ulong,c_ushort,c_ulong,POINTER(COORD),POINTER(c_ulong)]
+	windll.kernel32.FillConsoleOutputAttribute.restype=c_long
+	windll.kernel32.SetConsoleCursorPosition.argtypes=[c_ulong,POINTER(COORD)]
+	windll.kernel32.SetConsoleCursorPosition.restype=c_long
+	windll.kernel32.SetConsoleCursorInfo.argtypes=[c_ulong,POINTER(CONSOLE_CURSOR_INFO)]
+	windll.kernel32.SetConsoleCursorInfo.restype=c_long
 	class AnsiTerm(object):
-		def __init__(self):
-			self.encoding=sys.stdout.encoding
-			self.hconsole=windll.kernel32.GetStdHandle(STD_OUTPUT_HANDLE)
+		def __init__(self,s):
+			self.stream=s
+			try:
+				self.errors=s.errors
+			except AttributeError:
+				pass
+			self.encoding=s.encoding
 			self.cursor_history=[]
-			self.orig_sbinfo=CONSOLE_SCREEN_BUFFER_INFO()
-			self.orig_csinfo=CONSOLE_CURSOR_INFO()
-			windll.kernel32.GetConsoleScreenBufferInfo(self.hconsole,byref(self.orig_sbinfo))
-			windll.kernel32.GetConsoleCursorInfo(hconsole,byref(self.orig_csinfo))
+			handle=(s.fileno()==2)and STD_ERROR_HANDLE or STD_OUTPUT_HANDLE
+			self.hconsole=windll.kernel32.GetStdHandle(handle)
+			self._sbinfo=CONSOLE_SCREEN_BUFFER_INFO()
+			self._csinfo=CONSOLE_CURSOR_INFO()
+			windll.kernel32.GetConsoleCursorInfo(self.hconsole,byref(self._csinfo))
+			self._orig_sbinfo=CONSOLE_SCREEN_BUFFER_INFO()
+			r=windll.kernel32.GetConsoleScreenBufferInfo(self.hconsole,byref(self._orig_sbinfo))
+			self._isatty=r==1
 		def screen_buffer_info(self):
-			sbinfo=CONSOLE_SCREEN_BUFFER_INFO()
-			windll.kernel32.GetConsoleScreenBufferInfo(self.hconsole,byref(sbinfo))
-			return sbinfo
+			windll.kernel32.GetConsoleScreenBufferInfo(self.hconsole,byref(self._sbinfo))
+			return self._sbinfo
 		def clear_line(self,param):
 			mode=param and int(param)or 0
 			sbinfo=self.screen_buffer_info()
@@ -59,8 +91,8 @@ else:
 			else:
 				line_start=sbinfo.CursorPosition
 				line_length=sbinfo.Size.X-sbinfo.CursorPosition.X
-			chars_written=c_int()
-			windll.kernel32.FillConsoleOutputCharacterA(self.hconsole,c_wchar(' '),line_length,line_start,byref(chars_written))
+			chars_written=c_ulong()
+			windll.kernel32.FillConsoleOutputCharacterW(self.hconsole,c_wchar(' '),line_length,line_start,byref(chars_written))
 			windll.kernel32.FillConsoleOutputAttribute(self.hconsole,sbinfo.Attributes,line_length,line_start,byref(chars_written))
 		def clear_screen(self,param):
 			mode=to_int(param,0)
@@ -75,8 +107,8 @@ else:
 			else:
 				clear_start=sbinfo.CursorPosition
 				clear_length=((sbinfo.Size.X-sbinfo.CursorPosition.X)+sbinfo.Size.X*(sbinfo.Size.Y-sbinfo.CursorPosition.Y))
-			chars_written=c_int()
-			windll.kernel32.FillConsoleOutputCharacterA(self.hconsole,c_wchar(' '),clear_length,clear_start,byref(chars_written))
+			chars_written=c_ulong()
+			windll.kernel32.FillConsoleOutputCharacterW(self.hconsole,c_wchar(' '),clear_length,clear_start,byref(chars_written))
 			windll.kernel32.FillConsoleOutputAttribute(self.hconsole,sbinfo.Attributes,clear_length,clear_start,byref(chars_written))
 		def push_cursor(self,param):
 			sbinfo=self.screen_buffer_info()
@@ -119,20 +151,16 @@ else:
 			return((c&1)<<2)|(c&2)|((c&4)>>2)
 		def set_color(self,param):
 			cols=param.split(';')
-			sbinfo=CONSOLE_SCREEN_BUFFER_INFO()
-			windll.kernel32.GetConsoleScreenBufferInfo(self.hconsole,byref(sbinfo))
+			sbinfo=self.screen_buffer_info()
 			attr=sbinfo.Attributes
 			for c in cols:
-				if is_vista:
-					c=int(c)
-				else:
-					c=to_int(c,0)
-				if c in range(30,38):
+				c=to_int(c,0)
+				if 29<c<38:
 					attr=(attr&0xfff0)|self.rgb2bgr(c-30)
-				elif c in range(40,48):
+				elif 39<c<48:
 					attr=(attr&0xff0f)|(self.rgb2bgr(c-40)<<4)
 				elif c==0:
-					attr=self.orig_sbinfo.Attributes
+					attr=self._orig_sbinfo.Attributes
 				elif c==1:
 					attr|=0x08
 				elif c==4:
@@ -141,37 +169,70 @@ else:
 					attr=(attr&0xff88)|((attr&0x70)>>4)|((attr&0x07)<<4)
 			windll.kernel32.SetConsoleTextAttribute(self.hconsole,attr)
 		def show_cursor(self,param):
-			csinfo.bVisible=1
-			windll.kernel32.SetConsoleCursorInfo(self.hconsole,byref(csinfo))
+			self._csinfo.bVisible=1
+			windll.kernel32.SetConsoleCursorInfo(self.hconsole,byref(self._csinfo))
 		def hide_cursor(self,param):
-			csinfo.bVisible=0
-			windll.kernel32.SetConsoleCursorInfo(self.hconsole,byref(csinfo))
+			self._csinfo.bVisible=0
+			windll.kernel32.SetConsoleCursorInfo(self.hconsole,byref(self._csinfo))
 		ansi_command_table={'A':move_up,'B':move_down,'C':move_right,'D':move_left,'E':next_line,'F':prev_line,'G':set_column,'H':set_cursor,'f':set_cursor,'J':clear_screen,'K':clear_line,'h':show_cursor,'l':hide_cursor,'m':set_color,'s':push_cursor,'u':pop_cursor,}
-		ansi_tokens=re.compile('(?:\x1b\[([0-9?;]*)([a-zA-Z])|([^\x1b]+))')
+		ansi_tokens=re.compile(r'(?:\x1b\[([0-9?;]*)([a-zA-Z])|([^\x1b]+))')
 		def write(self,text):
 			try:
 				wlock.acquire()
-				for param,cmd,txt in self.ansi_tokens.findall(text):
-					if cmd:
-						cmd_func=self.ansi_command_table.get(cmd)
-						if cmd_func:
-							cmd_func(self,param)
-					else:
-						self.writeconsole(txt)
+				if self._isatty:
+					for param,cmd,txt in self.ansi_tokens.findall(text):
+						if cmd:
+							cmd_func=self.ansi_command_table.get(cmd)
+							if cmd_func:
+								cmd_func(self,param)
+						else:
+							self.writeconsole(txt)
+				else:
+					self.stream.write(text)
 			finally:
 				wlock.release()
 		def writeconsole(self,txt):
-			chars_written=c_int()
+			chars_written=c_ulong()
 			writeconsole=windll.kernel32.WriteConsoleA
 			if isinstance(txt,_type):
 				writeconsole=windll.kernel32.WriteConsoleW
-			TINY_STEP=3000
-			for x in range(0,len(txt),TINY_STEP):
-				tiny=txt[x:x+TINY_STEP]
-				writeconsole(self.hconsole,tiny,len(tiny),byref(chars_written),None)
+			done=0
+			todo=len(txt)
+			chunk=32<<10
+			while todo!=0:
+				doing=min(chunk,todo)
+				buf=txt[done:done+doing]
+				r=writeconsole(self.hconsole,buf,doing,byref(chars_written),None)
+				if r==0:
+					chunk>>=1
+					continue
+				done+=doing
+				todo-=doing
+		def fileno(self):
+			return self.stream.fileno()
 		def flush(self):
 			pass
 		def isatty(self):
-			return True
-	sys.stderr=sys.stdout=AnsiTerm()
-	os.environ['TERM']='vt100'
+			return self._isatty
+	if sys.stdout.isatty()or sys.stderr.isatty():
+		handle=sys.stdout.isatty()and STD_OUTPUT_HANDLE or STD_ERROR_HANDLE
+		console=windll.kernel32.GetStdHandle(handle)
+		sbinfo=CONSOLE_SCREEN_BUFFER_INFO()
+		def get_term_cols():
+			windll.kernel32.GetConsoleScreenBufferInfo(console,byref(sbinfo))
+			return sbinfo.Size.X-1
+try:
+	import struct,fcntl,termios
+except ImportError:
+	pass
+else:
+	if(sys.stdout.isatty()or sys.stderr.isatty())and os.environ.get('TERM','')not in('dumb','emacs'):
+		FD=sys.stdout.isatty()and sys.stdout.fileno()or sys.stderr.fileno()
+		def fun():
+			return struct.unpack("HHHH",fcntl.ioctl(FD,termios.TIOCGWINSZ,struct.pack("HHHH",0,0,0,0)))[1]
+		try:
+			fun()
+		except Exception as e:
+			pass
+		else:
+			get_term_cols=fun
--- pugl-0~svn32+dfsg0.orig/waflib/extras/__init__.py
+++ pugl-0~svn32+dfsg0/waflib/extras/__init__.py
@@ -1,4 +1,4 @@
 #! /usr/bin/env python
 # encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
 
--- /dev/null
+++ pugl-0~svn32+dfsg0/waflib/extras/compat15.py
@@ -0,0 +1,305 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+import sys
+from waflib import ConfigSet,Logs,Options,Scripting,Task,Build,Configure,Node,Runner,TaskGen,Utils,Errors,Context
+sys.modules['Environment']=ConfigSet
+ConfigSet.Environment=ConfigSet.ConfigSet
+sys.modules['Logs']=Logs
+sys.modules['Options']=Options
+sys.modules['Scripting']=Scripting
+sys.modules['Task']=Task
+sys.modules['Build']=Build
+sys.modules['Configure']=Configure
+sys.modules['Node']=Node
+sys.modules['Runner']=Runner
+sys.modules['TaskGen']=TaskGen
+sys.modules['Utils']=Utils
+sys.modules['Constants']=Context
+Context.SRCDIR=''
+Context.BLDDIR=''
+from waflib.Tools import c_preproc
+sys.modules['preproc']=c_preproc
+from waflib.Tools import c_config
+sys.modules['config_c']=c_config
+ConfigSet.ConfigSet.copy=ConfigSet.ConfigSet.derive
+ConfigSet.ConfigSet.set_variant=Utils.nada
+Utils.pproc=Utils.subprocess
+Build.BuildContext.add_subdirs=Build.BuildContext.recurse
+Build.BuildContext.new_task_gen=Build.BuildContext.__call__
+Build.BuildContext.is_install=0
+Node.Node.relpath_gen=Node.Node.path_from
+Utils.pproc=Utils.subprocess
+Utils.get_term_cols=Logs.get_term_cols
+def cmd_output(cmd,**kw):
+	silent=False
+	if'silent'in kw:
+		silent=kw['silent']
+		del(kw['silent'])
+	if'e'in kw:
+		tmp=kw['e']
+		del(kw['e'])
+		kw['env']=tmp
+	kw['shell']=isinstance(cmd,str)
+	kw['stdout']=Utils.subprocess.PIPE
+	if silent:
+		kw['stderr']=Utils.subprocess.PIPE
+	try:
+		p=Utils.subprocess.Popen(cmd,**kw)
+		output=p.communicate()[0]
+	except OSError as e:
+		raise ValueError(str(e))
+	if p.returncode:
+		if not silent:
+			msg="command execution failed: %s -> %r"%(cmd,str(output))
+			raise ValueError(msg)
+		output=''
+	return output
+Utils.cmd_output=cmd_output
+def name_to_obj(self,s,env=None):
+	if Logs.verbose:
+		Logs.warn('compat: change "name_to_obj(name, env)" by "get_tgen_by_name(name)"')
+	return self.get_tgen_by_name(s)
+Build.BuildContext.name_to_obj=name_to_obj
+def env_of_name(self,name):
+	try:
+		return self.all_envs[name]
+	except KeyError:
+		Logs.error('no such environment: '+name)
+		return None
+Build.BuildContext.env_of_name=env_of_name
+def set_env_name(self,name,env):
+	self.all_envs[name]=env
+	return env
+Configure.ConfigurationContext.set_env_name=set_env_name
+def retrieve(self,name,fromenv=None):
+	try:
+		env=self.all_envs[name]
+	except KeyError:
+		env=ConfigSet.ConfigSet()
+		self.prepare_env(env)
+		self.all_envs[name]=env
+	else:
+		if fromenv:
+			Logs.warn('The environment %s may have been configured already',name)
+	return env
+Configure.ConfigurationContext.retrieve=retrieve
+Configure.ConfigurationContext.sub_config=Configure.ConfigurationContext.recurse
+Configure.ConfigurationContext.check_tool=Configure.ConfigurationContext.load
+Configure.conftest=Configure.conf
+Configure.ConfigurationError=Errors.ConfigurationError
+Utils.WafError=Errors.WafError
+Options.OptionsContext.sub_options=Options.OptionsContext.recurse
+Options.OptionsContext.tool_options=Context.Context.load
+Options.Handler=Options.OptionsContext
+Task.simple_task_type=Task.task_type_from_func=Task.task_factory
+Task.Task.classes=Task.classes
+def setitem(self,key,value):
+	if key.startswith('CCFLAGS'):
+		key=key[1:]
+	self.table[key]=value
+ConfigSet.ConfigSet.__setitem__=setitem
+@TaskGen.feature('d')
+@TaskGen.before('apply_incpaths')
+def old_importpaths(self):
+	if getattr(self,'importpaths',[]):
+		self.includes=self.importpaths
+from waflib import Context
+eld=Context.load_tool
+def load_tool(*k,**kw):
+	ret=eld(*k,**kw)
+	if'set_options'in ret.__dict__:
+		if Logs.verbose:
+			Logs.warn('compat: rename "set_options" to options')
+		ret.options=ret.set_options
+	if'detect'in ret.__dict__:
+		if Logs.verbose:
+			Logs.warn('compat: rename "detect" to "configure"')
+		ret.configure=ret.detect
+	return ret
+Context.load_tool=load_tool
+def get_curdir(self):
+	return self.path.abspath()
+Context.Context.curdir=property(get_curdir,Utils.nada)
+def get_srcdir(self):
+	return self.srcnode.abspath()
+Configure.ConfigurationContext.srcdir=property(get_srcdir,Utils.nada)
+def get_blddir(self):
+	return self.bldnode.abspath()
+Configure.ConfigurationContext.blddir=property(get_blddir,Utils.nada)
+Configure.ConfigurationContext.check_message_1=Configure.ConfigurationContext.start_msg
+Configure.ConfigurationContext.check_message_2=Configure.ConfigurationContext.end_msg
+rev=Context.load_module
+def load_module(path,encoding=None):
+	ret=rev(path,encoding)
+	if'set_options'in ret.__dict__:
+		if Logs.verbose:
+			Logs.warn('compat: rename "set_options" to "options" (%r)',path)
+		ret.options=ret.set_options
+	if'srcdir'in ret.__dict__:
+		if Logs.verbose:
+			Logs.warn('compat: rename "srcdir" to "top" (%r)',path)
+		ret.top=ret.srcdir
+	if'blddir'in ret.__dict__:
+		if Logs.verbose:
+			Logs.warn('compat: rename "blddir" to "out" (%r)',path)
+		ret.out=ret.blddir
+	Utils.g_module=Context.g_module
+	Options.launch_dir=Context.launch_dir
+	return ret
+Context.load_module=load_module
+old_post=TaskGen.task_gen.post
+def post(self):
+	self.features=self.to_list(self.features)
+	if'cc'in self.features:
+		if Logs.verbose:
+			Logs.warn('compat: the feature cc does not exist anymore (use "c")')
+		self.features.remove('cc')
+		self.features.append('c')
+	if'cstaticlib'in self.features:
+		if Logs.verbose:
+			Logs.warn('compat: the feature cstaticlib does not exist anymore (use "cstlib" or "cxxstlib")')
+		self.features.remove('cstaticlib')
+		self.features.append(('cxx'in self.features)and'cxxstlib'or'cstlib')
+	if getattr(self,'ccflags',None):
+		if Logs.verbose:
+			Logs.warn('compat: "ccflags" was renamed to "cflags"')
+		self.cflags=self.ccflags
+	return old_post(self)
+TaskGen.task_gen.post=post
+def waf_version(*k,**kw):
+	Logs.warn('wrong version (waf_version was removed in waf 1.6)')
+Utils.waf_version=waf_version
+import os
+@TaskGen.feature('c','cxx','d')
+@TaskGen.before('apply_incpaths','propagate_uselib_vars')
+@TaskGen.after('apply_link','process_source')
+def apply_uselib_local(self):
+	env=self.env
+	from waflib.Tools.ccroot import stlink_task
+	self.uselib=self.to_list(getattr(self,'uselib',[]))
+	self.includes=self.to_list(getattr(self,'includes',[]))
+	names=self.to_list(getattr(self,'uselib_local',[]))
+	get=self.bld.get_tgen_by_name
+	seen=set()
+	seen_uselib=set()
+	tmp=Utils.deque(names)
+	if tmp:
+		if Logs.verbose:
+			Logs.warn('compat: "uselib_local" is deprecated, replace by "use"')
+	while tmp:
+		lib_name=tmp.popleft()
+		if lib_name in seen:
+			continue
+		y=get(lib_name)
+		y.post()
+		seen.add(lib_name)
+		if getattr(y,'uselib_local',None):
+			for x in self.to_list(getattr(y,'uselib_local',[])):
+				obj=get(x)
+				obj.post()
+				if getattr(obj,'link_task',None):
+					if not isinstance(obj.link_task,stlink_task):
+						tmp.append(x)
+		if getattr(y,'link_task',None):
+			link_name=y.target[y.target.rfind(os.sep)+1:]
+			if isinstance(y.link_task,stlink_task):
+				env.append_value('STLIB',[link_name])
+			else:
+				env.append_value('LIB',[link_name])
+			self.link_task.set_run_after(y.link_task)
+			self.link_task.dep_nodes+=y.link_task.outputs
+			tmp_path=y.link_task.outputs[0].parent.bldpath()
+			if not tmp_path in env['LIBPATH']:
+				env.prepend_value('LIBPATH',[tmp_path])
+		for v in self.to_list(getattr(y,'uselib',[])):
+			if v not in seen_uselib:
+				seen_uselib.add(v)
+				if not env['STLIB_'+v]:
+					if not v in self.uselib:
+						self.uselib.insert(0,v)
+		if getattr(y,'export_includes',None):
+			self.includes.extend(y.to_incnodes(y.export_includes))
+@TaskGen.feature('cprogram','cxxprogram','cstlib','cxxstlib','cshlib','cxxshlib','dprogram','dstlib','dshlib')
+@TaskGen.after('apply_link')
+def apply_objdeps(self):
+	names=getattr(self,'add_objects',[])
+	if not names:
+		return
+	names=self.to_list(names)
+	get=self.bld.get_tgen_by_name
+	seen=[]
+	while names:
+		x=names[0]
+		if x in seen:
+			names=names[1:]
+			continue
+		y=get(x)
+		if getattr(y,'add_objects',None):
+			added=0
+			lst=y.to_list(y.add_objects)
+			lst.reverse()
+			for u in lst:
+				if u in seen:
+					continue
+				added=1
+				names=[u]+names
+			if added:
+				continue
+		y.post()
+		seen.append(x)
+		for t in getattr(y,'compiled_tasks',[]):
+			self.link_task.inputs.extend(t.outputs)
+@TaskGen.after('apply_link')
+def process_obj_files(self):
+	if not hasattr(self,'obj_files'):
+		return
+	for x in self.obj_files:
+		node=self.path.find_resource(x)
+		self.link_task.inputs.append(node)
+@TaskGen.taskgen_method
+def add_obj_file(self,file):
+	if not hasattr(self,'obj_files'):
+		self.obj_files=[]
+	if not'process_obj_files'in self.meths:
+		self.meths.append('process_obj_files')
+	self.obj_files.append(file)
+old_define=Configure.ConfigurationContext.__dict__['define']
+@Configure.conf
+def define(self,key,val,quote=True,comment=''):
+	old_define(self,key,val,quote,comment)
+	if key.startswith('HAVE_'):
+		self.env[key]=1
+old_undefine=Configure.ConfigurationContext.__dict__['undefine']
+@Configure.conf
+def undefine(self,key,comment=''):
+	old_undefine(self,key,comment)
+	if key.startswith('HAVE_'):
+		self.env[key]=0
+def set_incdirs(self,val):
+	Logs.warn('compat: change "export_incdirs" by "export_includes"')
+	self.export_includes=val
+TaskGen.task_gen.export_incdirs=property(None,set_incdirs)
+def install_dir(self,path):
+	if not path:
+		return[]
+	destpath=Utils.subst_vars(path,self.env)
+	if self.is_install>0:
+		Logs.info('* creating %s',destpath)
+		Utils.check_dir(destpath)
+	elif self.is_install<0:
+		Logs.info('* removing %s',destpath)
+		try:
+			os.remove(destpath)
+		except OSError:
+			pass
+Build.BuildContext.install_dir=install_dir
+repl={'apply_core':'process_source','apply_lib_vars':'process_source','apply_obj_vars':'propagate_uselib_vars','exec_rule':'process_rule'}
+def after(*k):
+	k=[repl.get(key,key)for key in k]
+	return TaskGen.after_method(*k)
+def before(*k):
+	k=[repl.get(key,key)for key in k]
+	return TaskGen.before_method(*k)
+TaskGen.before=before
--- pugl-0~svn32+dfsg0.orig/waflib/fixpy2.py
+++ pugl-0~svn32+dfsg0/waflib/fixpy2.py
@@ -1,11 +1,11 @@
 #! /usr/bin/env python
 # encoding: utf-8
-# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
 
+from __future__ import with_statement
 import os
 all_modifs={}
 def fixdir(dir):
-	global all_modifs
 	for k in all_modifs:
 		for v in all_modifs[k]:
 			modif(os.path.join(dir,'waflib'),k,v)
@@ -20,16 +20,13 @@ def modif(dir,name,fun):
 			modif(dir,x,fun)
 		return
 	filename=os.path.join(dir,name)
-	f=open(filename,'r')
-	txt=f.read()
-	f.close()
+	with open(filename,'r')as f:
+		txt=f.read()
 	txt=fun(txt)
-	f=open(filename,'w')
-	f.write(txt)
-	f.close()
+	with open(filename,'w')as f:
+		f.write(txt)
 def subst(*k):
 	def do_subst(fun):
-		global all_modifs
 		for x in k:
 			try:
 				all_modifs[x].append(fun)
@@ -37,14 +34,14 @@ def subst(*k):
 				all_modifs[x]=[fun]
 		return fun
 	return do_subst
+@subst('*')
 def r1(code):
-	code=code.replace(',e:',',e:')
-	code=code.replace("",'')
-	code=code.replace('','')
-	return code
+	code=code.replace('as e:',',e:')
+	code=code.replace(".decode(sys.stdout.encoding or'latin-1',errors='replace')",'')
+	return code.replace('.encode()','')
+@subst('Runner.py')
 def r4(code):
-	code=code.replace('next(self.biter)','self.biter.next()')
-	return code
-
-subst('*')(r1)
-subst('Runner.py')(r4)
\ No newline at end of file
+	return code.replace('next(self.biter)','self.biter.next()')
+@subst('Context.py')
+def r5(code):
+	return code.replace("('Execution failure: %s'%str(e),ex=e)","('Execution failure: %s'%str(e),ex=e),None,sys.exc_info()[2]")
--- /dev/null
+++ pugl-0~svn32+dfsg0/waflib/processor.py
@@ -0,0 +1,57 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+import os,sys,traceback,base64,signal
+try:
+	import cPickle
+except ImportError:
+	import pickle as cPickle
+try:
+	import subprocess32 as subprocess
+except ImportError:
+	import subprocess
+try:
+	TimeoutExpired=subprocess.TimeoutExpired
+except AttributeError:
+	class TimeoutExpired(Exception):
+		pass
+def run():
+	txt=sys.stdin.readline().strip()
+	if not txt:
+		sys.exit(1)
+	[cmd,kwargs,cargs]=cPickle.loads(base64.b64decode(txt))
+	cargs=cargs or{}
+	if not'close_fds'in kwargs:
+		kwargs['close_fds']=False
+	ret=1
+	out,err,ex,trace=(None,None,None,None)
+	try:
+		proc=subprocess.Popen(cmd,**kwargs)
+		try:
+			out,err=proc.communicate(**cargs)
+		except TimeoutExpired:
+			if kwargs.get('start_new_session')and hasattr(os,'killpg'):
+				os.killpg(proc.pid,signal.SIGKILL)
+			else:
+				proc.kill()
+			out,err=proc.communicate()
+			exc=TimeoutExpired(proc.args,timeout=cargs['timeout'],output=out)
+			exc.stderr=err
+			raise exc
+		ret=proc.returncode
+	except Exception as e:
+		exc_type,exc_value,tb=sys.exc_info()
+		exc_lines=traceback.format_exception(exc_type,exc_value,tb)
+		trace=str(cmd)+'\n'+''.join(exc_lines)
+		ex=e.__class__.__name__
+	tmp=[ret,out,err,ex,trace]
+	obj=base64.b64encode(cPickle.dumps(tmp))
+	sys.stdout.write(obj.decode())
+	sys.stdout.write('\n')
+	sys.stdout.flush()
+while 1:
+	try:
+		run()
+	except KeyboardInterrupt:
+		break
--- pugl-0~svn32+dfsg0.orig/wscript
+++ pugl-0~svn32+dfsg0/wscript
@@ -27,8 +27,6 @@ out = 'build'
 
 def options(opt):
     opt.load('compiler_c')
-    if Options.platform == 'win32':
-        opt.load('compiler_cxx')
     autowaf.set_options(opt)
     opt.add_option('--test', action='store_true', default=False, dest='build_tests',
                    help="Build unit tests")
@@ -39,8 +37,6 @@ def options(opt):
 
 def configure(conf):
     conf.load('compiler_c')
-    if Options.platform == 'win32':
-        conf.load('compiler_cxx')
     autowaf.configure(conf)
     autowaf.display_header('Pugl Configuration')
 
@@ -51,7 +47,7 @@ def configure(conf):
 
     # Shared library building is broken on win32 for some reason
     conf.env['BUILD_TESTS']  = Options.options.build_tests
-    conf.env['BUILD_SHARED'] = (Options.platform != 'win32' or
+    conf.env['BUILD_SHARED'] = (True or
                                 Options.options.shared)
     conf.env['BUILD_STATIC'] = (Options.options.build_tests or
                                 Options.options.static)
@@ -80,21 +76,10 @@ def build(bld):
     libflags  = [ '-fvisibility=hidden' ]
     framework = []
     libs      = []
-    if Options.platform == 'win32':
-        lang       = 'cxx'
-        lib_source = ['pugl/pugl_win.cpp']
-        libs       = ['opengl32', 'glu32', 'gdi32', 'user32']
-        defines    = []
-    elif Options.platform == 'darwin':
-        lang       = 'c'  # Objective C, actually
-        lib_source = ['pugl/pugl_osx.m']
-        framework  = ['Cocoa', 'OpenGL']
-        defines    = []
-    else:
-        lang       = 'c'
-        lib_source = ['pugl/pugl_x11.c']
-        libs       = ['X11', 'GL', 'GLU']
-        defines    = []
+    lang       = 'c'
+    lib_source = ['pugl/pugl_x11.c']
+    libs       = ['X11', 'GL', 'GLU']
+    defines    = []
     if bld.env['MSVC_COMPILER']:
         libflags = []
 
