# file: projecthamster_hamster/waflib/extras/swig.py
#! /usr/bin/env python
# encoding: UTF-8
# Petar Forai
# Thomas Nagy 2008-2010 (ita)
import re
from waflib import Task, Logs
from waflib.TaskGen import extension, feature, after_method
from waflib.Configure import conf
from waflib.Tools import c_preproc
"""
tasks have to be added dynamically:
- swig interface files may be created at runtime
- the module name may be unknown in advance
"""
SWIG_EXTS = ['.swig', '.i']
re_module = re.compile(r'%module(?:\s*\(.*\))?\s+([^\r\n]+)', re.M)
re_1 = re.compile(r'^%module.*?\s+([\w]+)\s*?$', re.M)
re_2 = re.compile(r'[#%](?:include|import(?:\(module=".*"\))+|python(?:begin|code)) [<"](.*)[">]', re.M)
class swig(Task.Task):
color = 'BLUE'
run_str = '${SWIG} ${SWIGFLAGS} ${SWIGPATH_ST:INCPATHS} ${SWIGDEF_ST:DEFINES} ${SRC}'
ext_out = ['.h'] # might produce .h files although it is not mandatory
vars = ['SWIG_VERSION', 'SWIGDEPS']
def runnable_status(self):
for t in self.run_after:
if not t.hasrun:
return Task.ASK_LATER
if not getattr(self, 'init_outputs', None):
self.init_outputs = True
if not getattr(self, 'module', None):
# search the module name
txt = self.inputs[0].read()
m = re_module.search(txt)
if not m:
raise ValueError("could not find the swig module name")
self.module = m.group(1)
swig_c(self)
# add the language-specific output files as nodes
# call funs in the dict swig_langs
for x in self.env['SWIGFLAGS']:
# obtain the language
x = x[1:]
try:
fun = swig_langs[x]
except KeyError:
pass
else:
fun(self)
return super(swig, self).runnable_status()
def scan(self):
"scan for swig dependencies, climb the .i files"
lst_src = []
seen = []
missing = []
to_see = [self.inputs[0]]
while to_see:
node = to_see.pop(0)
if node in seen:
continue
seen.append(node)
lst_src.append(node)
# read the file
code = node.read()
code = c_preproc.re_nl.sub('', code)
code = c_preproc.re_cpp.sub(c_preproc.repl, code)
# find .i files and project headers
names = re_2.findall(code)
for n in names:
for d in self.generator.includes_nodes + [node.parent]:
u = d.find_resource(n)
if u:
to_see.append(u)
break
else:
missing.append(n)
return (lst_src, missing)
# provide additional language processing
swig_langs = {}
def swigf(fun):
swig_langs[fun.__name__.replace('swig_', '')] = fun
return fun
swig.swigf = swigf
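# A handler for an extra target language can be registered with the decorator
# above, following the same pattern as swig_python/swig_ocaml below; a sketch
# for a hypothetical lua wrapper (output file name assumed):
#
#   @swigf
#   def swig_lua(tsk):
#       tsk.set_outputs(tsk.inputs[0].parent.find_or_declare(tsk.module + '.lua'))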
def swig_c(self):
ext = '.swigwrap_%d.c' % self.generator.idx
flags = self.env['SWIGFLAGS']
if '-c++' in flags:
ext += 'xx'
out_node = self.inputs[0].parent.find_or_declare(self.module + ext)
if '-c++' in flags:
c_tsk = self.generator.cxx_hook(out_node)
else:
c_tsk = self.generator.c_hook(out_node)
c_tsk.set_run_after(self)
# transfer weights from swig task to c task
if getattr(self, 'weight', None):
c_tsk.weight = self.weight
if getattr(self, 'tree_weight', None):
c_tsk.tree_weight = self.tree_weight
try:
self.more_tasks.append(c_tsk)
except AttributeError:
self.more_tasks = [c_tsk]
try:
ltask = self.generator.link_task
except AttributeError:
pass
else:
ltask.set_run_after(c_tsk)
# setting input nodes does not declare the build order
# because the build already started, but it sets
# the dependency to enable rebuilds
ltask.inputs.append(c_tsk.outputs[0])
self.outputs.append(out_node)
if not '-o' in self.env['SWIGFLAGS']:
self.env.append_value('SWIGFLAGS', ['-o', self.outputs[0].abspath()])
@swigf
def swig_python(tsk):
node = tsk.inputs[0].parent
if tsk.outdir:
node = tsk.outdir
tsk.set_outputs(node.find_or_declare(tsk.module+'.py'))
@swigf
def swig_ocaml(tsk):
node = tsk.inputs[0].parent
if tsk.outdir:
node = tsk.outdir
tsk.set_outputs(node.find_or_declare(tsk.module+'.ml'))
tsk.set_outputs(node.find_or_declare(tsk.module+'.mli'))
@extension(*SWIG_EXTS)
def i_file(self, node):
# the task instance
tsk = self.create_task('swig')
tsk.set_inputs(node)
tsk.module = getattr(self, 'swig_module', None)
flags = self.to_list(getattr(self, 'swig_flags', []))
tsk.env.append_value('SWIGFLAGS', flags)
tsk.outdir = None
if '-outdir' in flags:
outdir = flags[flags.index('-outdir')+1]
outdir = tsk.generator.bld.bldnode.make_node(outdir)
outdir.mkdir()
tsk.outdir = outdir
@feature('c', 'cxx', 'd', 'fc', 'asm')
@after_method('apply_link', 'process_source')
def enforce_swig_before_link(self):
try:
link_task = self.link_task
except AttributeError:
pass
else:
for x in self.tasks:
if x.__class__.__name__ == 'swig':
link_task.run_after.add(x)
@conf
def check_swig_version(conf, minver=None):
"""
Check if the swig tool is found matching a given minimum version.
	minver should be a tuple, e.g. to check for swig >= 1.3.28 pass (1, 3, 28) as minver.
	If successful, SWIG_VERSION is defined as the 'MAJOR.MINOR' part
	(e.g. '1.3') of the actual swig version found.
:param minver: minimum version
:type minver: tuple of int
:return: swig version
:rtype: tuple of int
"""
assert minver is None or isinstance(minver, tuple)
swigbin = conf.env['SWIG']
if not swigbin:
conf.fatal('could not find the swig executable')
# Get swig version string
cmd = swigbin + ['-version']
Logs.debug('swig: Running swig command %r', cmd)
reg_swig = re.compile(r'SWIG Version\s(.*)', re.M)
swig_out = conf.cmd_and_log(cmd)
swigver_tuple = tuple([int(s) for s in reg_swig.findall(swig_out)[0].split('.')])
# Compare swig version with the minimum required
result = (minver is None) or (swigver_tuple >= minver)
if result:
# Define useful environment variables
swigver = '.'.join([str(x) for x in swigver_tuple[:2]])
conf.env['SWIG_VERSION'] = swigver
# Feedback
swigver_full = '.'.join(map(str, swigver_tuple[:3]))
if minver is None:
conf.msg('Checking for swig version', swigver_full)
else:
minver_str = '.'.join(map(str, minver))
conf.msg('Checking for swig version >= %s' % (minver_str,), swigver_full, color=result and 'GREEN' or 'YELLOW')
if not result:
conf.fatal('The swig version is too old, expecting %r' % (minver,))
return swigver_tuple
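# For example, from a wscript configure function (minimum version assumed):
#   conf.load('swig')
#   conf.check_swig_version((2, 0, 0))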
def configure(conf):
conf.find_program('swig', var='SWIG')
conf.env.SWIGPATH_ST = '-I%s'
conf.env.SWIGDEF_ST = '-D%s'

# file: projecthamster_hamster/waflib/extras/run_r_script.py
#!/usr/bin/env python
# encoding: utf-8
# Hans-Martin von Gaudecker, 2012
"""
Run an R script in the directory specified by **ctx.bldnode**.
For error-catching purposes, the task keeps its own log file, which is deleted
if the task finishes without error. Otherwise, it shows up as rscript_[index].log
in the bldnode directory.
Usage::
ctx(features='run_r_script',
source='some_script.r',
target=['some_table.tex', 'some_figure.eps'],
deps='some_data.csv')
"""
import os, sys
from waflib import Task, TaskGen, Logs
R_COMMANDS = ['RTerm', 'R', 'r']
def configure(ctx):
ctx.find_program(R_COMMANDS, var='RCMD', errmsg = """\n
No R executable found!\n\n
If R is needed:\n
1) Check the settings of your system path.
2) Note we are looking for R executables called: %s
If yours has a different name, please report to hmgaudecker [at] gmail\n
Else:\n
Do not load the 'run_r_script' tool in the main wscript.\n\n""" % R_COMMANDS)
ctx.env.RFLAGS = 'CMD BATCH --slave'
class run_r_script_base(Task.Task):
"""Run a R script."""
run_str = '"${RCMD}" ${RFLAGS} "${SRC[0].abspath()}" "${LOGFILEPATH}"'
shell = True
class run_r_script(run_r_script_base):
"""Erase the R overall log file if everything went okay, else raise an
error and print its 10 last lines.
"""
def run(self):
ret = run_r_script_base.run(self)
logfile = self.env.LOGFILEPATH
		if ret:
			mode = 'r'
			if sys.version_info.major >= 3:
				mode = 'rb'
			with open(logfile, mode=mode) as f:
				tail = f.readlines()[-10:]
			# on python 3 the lines read are bytes; decode them before joining below
			tail = [t if isinstance(t, str) else t.decode('utf-8', 'replace') for t in tail]
			Logs.error("""Running R on %r returned the error %r\n\nCheck the log file %s, last 10 lines\n\n%s\n\n\n""",
				self.inputs[0], ret, logfile, '\n'.join(tail))
else:
os.remove(logfile)
return ret
@TaskGen.feature('run_r_script')
@TaskGen.before_method('process_source')
def apply_run_r_script(tg):
"""Task generator customising the options etc. to call R in batch
	mode for running an R script.
"""
# Convert sources and targets to nodes
src_node = tg.path.find_resource(tg.source)
tgt_nodes = [tg.path.find_or_declare(t) for t in tg.to_list(tg.target)]
tsk = tg.create_task('run_r_script', src=src_node, tgt=tgt_nodes)
tsk.env.LOGFILEPATH = os.path.join(tg.bld.bldnode.abspath(), '%s_%d.log' % (os.path.splitext(src_node.name)[0], tg.idx))
# dependencies (if the attribute 'deps' changes, trigger a recompilation)
for x in tg.to_list(getattr(tg, 'deps', [])):
node = tg.path.find_resource(x)
if not node:
tg.bld.fatal('Could not find dependency %r for running %r' % (x, src_node.abspath()))
tsk.dep_nodes.append(node)
Logs.debug('deps: found dependencies %r for running %r', tsk.dep_nodes, src_node.abspath())
# Bypass the execution of process_source by setting the source to an empty list
tg.source = []

# file: projecthamster_hamster/waflib/extras/fsb.py
#! /usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2011 (ita)
"""
Fully sequential builds
The previous tasks from task generators are re-processed, and this may lead to speed issues.
Yet, if you are using this, speed is probably a minor concern.
"""
from waflib import Build
def options(opt):
pass
def configure(conf):
pass
class FSBContext(Build.BuildContext):
def __call__(self, *k, **kw):
ret = Build.BuildContext.__call__(self, *k, **kw)
# evaluate the results immediately
Build.BuildContext.compile(self)
return ret
def compile(self):
pass

# file: projecthamster_hamster/waflib/extras/stale.py
#! /usr/bin/env python
# encoding: UTF-8
# Thomas Nagy, 2006-2015 (ita)
"""
Add a pre-build hook that removes build files (declared in the system)
which no longer have a corresponding target.
This can be used, for example, to remove targets
that have changed names without performing
a full 'waf clean'.
Of course, it will only work if there are no dynamically generated
nodes/tasks, in which case the method will have to be modified,
for example to exclude some folders.
Make sure to set bld.post_mode = waflib.Build.POST_AT_ONCE
"""
from waflib import Logs, Build
from waflib.Runner import Parallel
DYNAMIC_EXT = [] # add your non-cleanable files/extensions here
MOC_H_EXTS = '.cpp .cxx .hpp .hxx .h'.split()
def can_delete(node):
"""Imperfect moc cleanup which does not look for a Q_OBJECT macro in the files"""
if not node.name.endswith('.moc'):
return True
base = node.name[:-4]
p1 = node.parent.get_src()
p2 = node.parent.get_bld()
for k in MOC_H_EXTS:
h_name = base + k
n = p1.search_node(h_name)
if n:
return False
n = p2.search_node(h_name)
if n:
return False
# foo.cpp.moc, foo.h.moc, etc.
if base.endswith(k):
return False
return True
# recursion over the nodes to find the stale files
def stale_rec(node, nodes):
if node.abspath() in node.ctx.env[Build.CFG_FILES]:
return
if getattr(node, 'children', []):
for x in node.children.values():
if x.name != "c4che":
stale_rec(x, nodes)
else:
for ext in DYNAMIC_EXT:
if node.name.endswith(ext):
break
else:
if not node in nodes:
if can_delete(node):
Logs.warn('Removing stale file -> %r', node)
node.delete()
old = Parallel.refill_task_list
def refill_task_list(self):
iit = old(self)
bld = self.bld
# execute this operation only once
if getattr(self, 'stale_done', False):
return iit
self.stale_done = True
# this does not work in partial builds
if bld.targets != '*':
return iit
# this does not work in dynamic builds
if getattr(bld, 'post_mode') == Build.POST_AT_ONCE:
return iit
# obtain the nodes to use during the build
nodes = []
for tasks in bld.groups:
for x in tasks:
try:
nodes.extend(x.outputs)
except AttributeError:
pass
stale_rec(bld.bldnode, nodes)
return iit
Parallel.refill_task_list = refill_task_list

# file: projecthamster_hamster/waflib/extras/buildcopy.py
#! /usr/bin/env python
# encoding: utf-8
# Calle Rosenquist, 2017 (xbreak)
"""
Create task that copies source files to the associated build node.
This is useful to e.g. construct a complete Python package so it can be unit tested
without installation.
Source files to be copied can be specified either in `buildcopy_source` attribute, or
`source` attribute. If both are specified, `buildcopy_source` has priority.
Examples::
def build(bld):
bld(name = 'bar',
features = 'py buildcopy',
source = bld.path.ant_glob('src/bar/*.py'))
bld(name = 'py baz',
features = 'buildcopy',
buildcopy_source = bld.path.ant_glob('src/bar/*.py') + ['src/bar/resource.txt'])
"""
import os, shutil
from waflib import Errors, Task, TaskGen, Utils, Node, Logs
@TaskGen.before_method('process_source')
@TaskGen.feature('buildcopy')
def make_buildcopy(self):
"""
Creates the buildcopy task.
"""
def to_src_nodes(lst):
"""Find file nodes only in src, TaskGen.to_nodes will not work for this since it gives
preference to nodes in build.
"""
if isinstance(lst, Node.Node):
if not lst.is_src():
raise Errors.WafError('buildcopy: node %s is not in src'%lst)
if not os.path.isfile(lst.abspath()):
raise Errors.WafError('buildcopy: Cannot copy directory %s (unsupported action)'%lst)
return lst
if isinstance(lst, str):
lst = [x for x in Utils.split_path(lst) if x and x != '.']
node = self.bld.path.get_src().search_node(lst)
if node:
if not os.path.isfile(node.abspath()):
raise Errors.WafError('buildcopy: Cannot copy directory %s (unsupported action)'%node)
return node
node = self.bld.path.get_src().find_node(lst)
if node:
if not os.path.isfile(node.abspath()):
raise Errors.WafError('buildcopy: Cannot copy directory %s (unsupported action)'%node)
return node
raise Errors.WafError('buildcopy: File not found in src: %s'%os.path.join(*lst))
nodes = [ to_src_nodes(n) for n in getattr(self, 'buildcopy_source', getattr(self, 'source', [])) ]
if not nodes:
Logs.warn('buildcopy: No source files provided to buildcopy in %s (set `buildcopy_source` or `source`)',
self)
return
node_pairs = [(n, n.get_bld()) for n in nodes]
self.create_task('buildcopy', [n[0] for n in node_pairs], [n[1] for n in node_pairs], node_pairs=node_pairs)
class buildcopy(Task.Task):
"""
Copy for each pair `n` in `node_pairs`: n[0] -> n[1].
Attribute `node_pairs` should contain a list of tuples describing source and target:
node_pairs = [(in, out), ...]
"""
color = 'PINK'
def keyword(self):
return 'Copying'
def run(self):
for f,t in self.node_pairs:
t.parent.mkdir()
shutil.copy2(f.abspath(), t.abspath())

# file: projecthamster_hamster/waflib/extras/pgicc.py
#!/usr/bin/env python
# encoding: utf-8
# Antoine Dechaume 2011
"""
Detect the PGI C compiler
"""
import sys, re
from waflib import Errors
from waflib.Configure import conf
from waflib.Tools.compiler_c import c_compiler
c_compiler['linux'].append('pgicc')
@conf
def find_pgi_compiler(conf, var, name):
"""
Find the program name, and execute it to ensure it really is itself.
"""
if sys.platform == 'cygwin':
conf.fatal('The PGI compiler does not work on Cygwin')
v = conf.env
cc = None
if v[var]:
cc = v[var]
elif var in conf.environ:
cc = conf.environ[var]
if not cc:
cc = conf.find_program(name, var=var)
if not cc:
conf.fatal('PGI Compiler (%s) was not found' % name)
v[var + '_VERSION'] = conf.get_pgi_version(cc)
v[var] = cc
v[var + '_NAME'] = 'pgi'
@conf
def get_pgi_version(conf, cc):
"""Find the version of a pgi compiler."""
version_re = re.compile(r"The Portland Group", re.I).search
cmd = cc + ['-V', '-E'] # Issue 1078, prevent wrappers from linking
try:
out, err = conf.cmd_and_log(cmd, output=0)
except Errors.WafError:
conf.fatal('Could not find pgi compiler %r' % cmd)
if out:
match = version_re(out)
else:
match = version_re(err)
if not match:
conf.fatal('Could not verify PGI signature')
cmd = cc + ['-help=variable']
try:
out, err = conf.cmd_and_log(cmd, output=0)
except Errors.WafError:
conf.fatal('Could not find pgi compiler %r' % cmd)
version = re.findall(r'^COMPVER\s*=(.*)', out, re.M)
if len(version) != 1:
conf.fatal('Could not determine the compiler version')
return version[0]
def configure(conf):
conf.find_pgi_compiler('CC', 'pgcc')
conf.find_ar()
conf.gcc_common_flags()
conf.cc_load_tools()
conf.cc_add_flags()
conf.link_add_flags()

# file: projecthamster_hamster/waflib/extras/color_gcc.py
#!/usr/bin/env python
# encoding: utf-8
# Replaces the default formatter by one which understands GCC output and colorizes it.
__author__ = __maintainer__ = "Jérôme Carretero <cJ-waf@zougloub.eu>"
__copyright__ = "Jérôme Carretero, 2012"
import sys
from waflib import Logs
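# Usage sketch: load the tool early, e.g. during the options phase; it replaces
# the default log formatter globally:
#
#   def options(opt):
#       opt.load('color_gcc')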
class ColorGCCFormatter(Logs.formatter):
def __init__(self, colors):
self.colors = colors
Logs.formatter.__init__(self)
def format(self, rec):
frame = sys._getframe()
while frame:
func = frame.f_code.co_name
if func == 'exec_command':
cmd = frame.f_locals.get('cmd')
if isinstance(cmd, list) and (len(cmd) > 0) and ('gcc' in cmd[0] or 'g++' in cmd[0]):
lines = []
for line in rec.msg.splitlines():
if 'warning: ' in line:
lines.append(self.colors.YELLOW + line)
elif 'error: ' in line:
lines.append(self.colors.RED + line)
elif 'note: ' in line:
lines.append(self.colors.CYAN + line)
else:
lines.append(line)
rec.msg = "\n".join(lines)
frame = frame.f_back
return Logs.formatter.format(self, rec)
def options(opt):
Logs.log.handlers[0].setFormatter(ColorGCCFormatter(Logs.colors))

# file: projecthamster_hamster/waflib/extras/doxygen.py
#! /usr/bin/env python
# encoding: UTF-8
# Thomas Nagy 2008-2010 (ita)
"""
Doxygen support
Variables passed to bld():
* doxyfile -- the Doxyfile to use
* doxy_tar -- destination archive for generated documentation (if desired)
* install_path -- where to install the documentation
* pars -- dictionary overriding doxygen configuration settings
When using this tool, the wscript will look like:
def options(opt):
opt.load('doxygen')
def configure(conf):
conf.load('doxygen')
# check conf.env.DOXYGEN, if it is mandatory
def build(bld):
if bld.env.DOXYGEN:
bld(features="doxygen", doxyfile='Doxyfile', ...)
"""
import os, os.path, re
from collections import OrderedDict
from waflib import Task, Utils, Node
from waflib.TaskGen import feature
DOXY_STR = '"${DOXYGEN}" - '
DOXY_FMTS = 'html latex man rtf xml'.split()
DOXY_FILE_PATTERNS = '*.' + ' *.'.join('''
c cc cxx cpp c++ java ii ixx ipp i++ inl h hh hxx hpp h++ idl odl cs php php3
inc m mm py f90c cc cxx cpp c++ java ii ixx ipp i++ inl h hh hxx
'''.split())
re_rl = re.compile('\\\\\r*\n', re.MULTILINE)
re_nl = re.compile('\r*\n', re.M)
def parse_doxy(txt):
'''
Parses a doxygen file.
Returns an ordered dictionary. We cannot return a default dictionary, as the
order in which the entries are reported does matter, especially for the
'@INCLUDE' lines.
'''
tbl = OrderedDict()
txt = re_rl.sub('', txt)
lines = re_nl.split(txt)
for x in lines:
x = x.strip()
if not x or x.startswith('#') or x.find('=') < 0:
continue
if x.find('+=') >= 0:
tmp = x.split('+=')
key = tmp[0].strip()
if key in tbl:
tbl[key] += ' ' + '+='.join(tmp[1:]).strip()
else:
tbl[key] = '+='.join(tmp[1:]).strip()
else:
tmp = x.split('=')
tbl[tmp[0].strip()] = '='.join(tmp[1:]).strip()
return tbl
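# Rough illustration of parse_doxy (input values assumed):
#   parse_doxy('PROJECT_NAME = demo\nINPUT = src\nINPUT += include\n')
#   -> OrderedDict([('PROJECT_NAME', 'demo'), ('INPUT', 'src include')])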
class doxygen(Task.Task):
vars = ['DOXYGEN', 'DOXYFLAGS']
color = 'BLUE'
ext_in = [ '.py', '.c', '.h', '.java', '.pb.cc' ]
def runnable_status(self):
		'''
		self.pars is populated here in runnable_status, because this function
		runs *before* both of its "consumers", scan() and run().
		It also sets output_dir (a node) for the output.
		'''
for x in self.run_after:
if not x.hasrun:
return Task.ASK_LATER
if not getattr(self, 'pars', None):
txt = self.inputs[0].read()
self.pars = parse_doxy(txt)
# Override with any parameters passed to the task generator
if getattr(self.generator, 'pars', None):
for k, v in self.generator.pars.items():
self.pars[k] = v
if self.pars.get('OUTPUT_DIRECTORY'):
# Use the path parsed from the Doxyfile as an absolute path
output_node = self.inputs[0].parent.get_bld().make_node(self.pars['OUTPUT_DIRECTORY'])
else:
				# If no OUTPUT_DIRECTORY was specified in the Doxyfile, build the path from the Doxyfile name + '.doxy'
output_node = self.inputs[0].parent.get_bld().make_node(self.inputs[0].name + '.doxy')
output_node.mkdir()
self.pars['OUTPUT_DIRECTORY'] = output_node.abspath()
self.doxy_inputs = getattr(self, 'doxy_inputs', [])
if not self.pars.get('INPUT'):
self.doxy_inputs.append(self.inputs[0].parent)
else:
for i in self.pars.get('INPUT').split():
if os.path.isabs(i):
node = self.generator.bld.root.find_node(i)
else:
node = self.inputs[0].parent.find_node(i)
if not node:
self.generator.bld.fatal('Could not find the doxygen input %r' % i)
self.doxy_inputs.append(node)
if not getattr(self, 'output_dir', None):
bld = self.generator.bld
# Output path is always an absolute path as it was transformed above.
self.output_dir = bld.root.find_dir(self.pars['OUTPUT_DIRECTORY'])
self.signature()
ret = Task.Task.runnable_status(self)
if ret == Task.SKIP_ME:
# in case the files were removed
self.add_install()
return ret
def scan(self):
exclude_patterns = self.pars.get('EXCLUDE_PATTERNS','').split()
exclude_patterns = [pattern.replace('*/', '**/') for pattern in exclude_patterns]
file_patterns = self.pars.get('FILE_PATTERNS','').split()
if not file_patterns:
file_patterns = DOXY_FILE_PATTERNS.split()
if self.pars.get('RECURSIVE') == 'YES':
file_patterns = ["**/%s" % pattern for pattern in file_patterns]
nodes = []
names = []
for node in self.doxy_inputs:
if os.path.isdir(node.abspath()):
for m in node.ant_glob(incl=file_patterns, excl=exclude_patterns):
nodes.append(m)
else:
nodes.append(node)
return (nodes, names)
def run(self):
dct = self.pars.copy()
code = '\n'.join(['%s = %s' % (x, dct[x]) for x in self.pars])
code = code.encode() # for python 3
#fmt = DOXY_STR % (self.inputs[0].parent.abspath())
cmd = Utils.subst_vars(DOXY_STR, self.env)
env = self.env.env or None
proc = Utils.subprocess.Popen(cmd, shell=True, stdin=Utils.subprocess.PIPE, env=env, cwd=self.inputs[0].parent.abspath())
proc.communicate(code)
return proc.returncode
def post_run(self):
nodes = self.output_dir.ant_glob('**/*', quiet=True)
for x in nodes:
self.generator.bld.node_sigs[x] = self.uid()
self.add_install()
return Task.Task.post_run(self)
def add_install(self):
nodes = self.output_dir.ant_glob('**/*', quiet=True)
self.outputs += nodes
if getattr(self.generator, 'install_path', None):
if not getattr(self.generator, 'doxy_tar', None):
self.generator.add_install_files(install_to=self.generator.install_path,
install_from=self.outputs,
postpone=False,
cwd=self.output_dir,
relative_trick=True)
class tar(Task.Task):
"quick tar creation"
run_str = '${TAR} ${TAROPTS} ${TGT} ${SRC}'
color = 'RED'
after = ['doxygen']
def runnable_status(self):
for x in getattr(self, 'input_tasks', []):
if not x.hasrun:
return Task.ASK_LATER
if not getattr(self, 'tar_done_adding', None):
# execute this only once
self.tar_done_adding = True
for x in getattr(self, 'input_tasks', []):
self.set_inputs(x.outputs)
if not self.inputs:
return Task.SKIP_ME
return Task.Task.runnable_status(self)
def __str__(self):
tgt_str = ' '.join([a.path_from(a.ctx.launch_node()) for a in self.outputs])
return '%s: %s\n' % (self.__class__.__name__, tgt_str)
@feature('doxygen')
def process_doxy(self):
if not getattr(self, 'doxyfile', None):
self.bld.fatal('no doxyfile variable specified??')
node = self.doxyfile
if not isinstance(node, Node.Node):
node = self.path.find_resource(node)
if not node:
self.bld.fatal('doxygen file %s not found' % self.doxyfile)
# the task instance
dsk = self.create_task('doxygen', node, always_run=getattr(self, 'always', False))
if getattr(self, 'doxy_tar', None):
tsk = self.create_task('tar', always_run=getattr(self, 'always', False))
tsk.input_tasks = [dsk]
tsk.set_outputs(self.path.find_or_declare(self.doxy_tar))
if self.doxy_tar.endswith('bz2'):
tsk.env['TAROPTS'] = ['cjf']
elif self.doxy_tar.endswith('gz'):
tsk.env['TAROPTS'] = ['czf']
else:
tsk.env['TAROPTS'] = ['cf']
if getattr(self, 'install_path', None):
self.add_install_files(install_to=self.install_path, install_from=tsk.outputs)
def configure(conf):
'''
Check if doxygen and tar commands are present in the system
If the commands are present, then conf.env.DOXYGEN and conf.env.TAR
variables will be set. Detection can be controlled by setting DOXYGEN and
TAR environmental variables.
'''
conf.find_program('doxygen', var='DOXYGEN', mandatory=False)
conf.find_program('tar', var='TAR', mandatory=False)

# file: projecthamster_hamster/waflib/extras/qnxnto.py
#!/usr/bin/env python
# encoding: utf-8
# Jérôme Carretero 2011 (zougloub)
# QNX neutrino compatibility functions
import sys, os
from waflib import Utils
class Popen(object):
"""
Popen cannot work on QNX from a threaded program:
Forking in threads is not implemented in neutrino.
Python's os.popen / spawn / fork won't work when running in threads (they will if in the main program thread)
In waf, this happens mostly in build.
And the use cases can be replaced by os.system() calls.
"""
__slots__ = ["prog", "kw", "popen", "verbose"]
verbose = 0
def __init__(self, prog, **kw):
try:
self.prog = prog
self.kw = kw
self.popen = None
if Popen.verbose:
sys.stdout.write("Popen created: %r, kw=%r..." % (prog, kw))
do_delegate = kw.get('stdout') == -1 and kw.get('stderr') == -1
if do_delegate:
if Popen.verbose:
print("Delegating to real Popen")
self.popen = self.real_Popen(prog, **kw)
else:
if Popen.verbose:
print("Emulating")
except Exception as e:
if Popen.verbose:
print("Exception: %s" % e)
raise
def __getattr__(self, name):
if Popen.verbose:
sys.stdout.write("Getattr: %s..." % name)
if name in Popen.__slots__:
return object.__getattribute__(self, name)
else:
if self.popen is not None:
if Popen.verbose:
print("from Popen")
return getattr(self.popen, name)
else:
if name == "wait":
return self.emu_wait
else:
raise Exception("subprocess emulation: not implemented: %s" % name)
def emu_wait(self):
if Popen.verbose:
print("emulated wait (%r kw=%r)" % (self.prog, self.kw))
if isinstance(self.prog, str):
cmd = self.prog
else:
cmd = " ".join(self.prog)
if 'cwd' in self.kw:
cmd = 'cd "%s" && %s' % (self.kw['cwd'], cmd)
return os.system(cmd)
if sys.platform == "qnx6":
Popen.real_Popen = Utils.subprocess.Popen
Utils.subprocess.Popen = Popen

# file: projecthamster_hamster/waflib/extras/dcc.py
#!/usr/bin/env python
# encoding: utf-8
# Jérôme Carretero, 2011 (zougloub)
from waflib import Options
from waflib.Tools import ccroot
from waflib.Configure import conf
@conf
def find_dcc(conf):
conf.find_program(['dcc'], var='CC', path_list=getattr(Options.options, 'diabbindir', ""))
conf.env.CC_NAME = 'dcc'
@conf
def find_dld(conf):
conf.find_program(['dld'], var='LINK_CC', path_list=getattr(Options.options, 'diabbindir', ""))
conf.env.LINK_CC_NAME = 'dld'
@conf
def find_dar(conf):
conf.find_program(['dar'], var='AR', path_list=getattr(Options.options, 'diabbindir', ""))
conf.env.AR_NAME = 'dar'
conf.env.ARFLAGS = 'rcs'
@conf
def find_ddump(conf):
conf.find_program(['ddump'], var='DDUMP', path_list=getattr(Options.options, 'diabbindir', ""))
@conf
def dcc_common_flags(conf):
v = conf.env
v['CC_SRC_F'] = []
v['CC_TGT_F'] = ['-c', '-o']
# linker
if not v['LINK_CC']:
v['LINK_CC'] = v['CC']
v['CCLNK_SRC_F'] = []
v['CCLNK_TGT_F'] = ['-o']
v['CPPPATH_ST'] = '-I%s'
v['DEFINES_ST'] = '-D%s'
v['LIB_ST'] = '-l:%s' # template for adding libs
v['LIBPATH_ST'] = '-L%s' # template for adding libpaths
v['STLIB_ST'] = '-l:%s'
v['STLIBPATH_ST'] = '-L%s'
v['RPATH_ST'] = '-Wl,-rpath,%s'
#v['STLIB_MARKER'] = '-Wl,-Bstatic'
# program
v['cprogram_PATTERN'] = '%s.elf'
# static lib
v['LINKFLAGS_cstlib'] = ['-Wl,-Bstatic']
v['cstlib_PATTERN'] = 'lib%s.a'
def configure(conf):
conf.find_dcc()
conf.find_dar()
conf.find_dld()
conf.find_ddump()
conf.dcc_common_flags()
conf.cc_load_tools()
conf.cc_add_flags()
conf.link_add_flags()
def options(opt):
"""
Add the ``--with-diab-bindir`` command-line options.
"""
opt.add_option('--with-diab-bindir', type='string', dest='diabbindir', help = 'Specify alternate diab bin folder', default="")
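# Configuration sketch (bin folder path assumed):
#   $ waf configure --with-diab-bindir=/opt/diab/bin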

# file: projecthamster_hamster/waflib/extras/sphinx.py
"""Support for Sphinx documentation
This is a wrapper for the sphinx-build program. Please note that sphinx-build supports only
one output format at a time, but the tool can create multiple tasks to handle more.
The output formats can be passed via sphinx_output_format, which is an array of
strings. For backwards compatibility, if only one output is needed, it can be passed
as a single string.
The default output format is html.
Specific formats can be installed in different directories by specifying the
install_path_<FORMAT> attribute. If not defined, the standard install_path
will be used instead.
Example wscript:
def configure(cnf):
conf.load('sphinx')
def build(bld):
bld(
features='sphinx',
sphinx_source='sources', # path to source directory
sphinx_options='-a -v', # sphinx-build program additional options
sphinx_output_format=['html', 'man'], # output format of sphinx documentation
install_path_man='${DOCDIR}/man' # put man pages in a specific directory
)
"""
from waflib.Node import Node
from waflib import Utils
from waflib import Task
from waflib.TaskGen import feature, after_method
def configure(cnf):
"""Check if sphinx-build program is available and loads gnu_dirs tool."""
cnf.find_program('sphinx-build', var='SPHINX_BUILD', mandatory=False)
cnf.load('gnu_dirs')
@feature('sphinx')
def build_sphinx(self):
"""Builds sphinx sources.
"""
if not self.env.SPHINX_BUILD:
self.bld.fatal('Program SPHINX_BUILD not defined.')
if not getattr(self, 'sphinx_source', None):
self.bld.fatal('Attribute sphinx_source not defined.')
if not isinstance(self.sphinx_source, Node):
self.sphinx_source = self.path.find_node(self.sphinx_source)
if not self.sphinx_source:
self.bld.fatal('Can\'t find sphinx_source: %r' % self.sphinx_source)
# In the taskgen we have the complete list of formats
Utils.def_attrs(self, sphinx_output_format='html')
self.sphinx_output_format = Utils.to_list(self.sphinx_output_format)
self.env.SPHINX_OPTIONS = getattr(self, 'sphinx_options', [])
for source_file in self.sphinx_source.ant_glob('**/*'):
self.bld.add_manual_dependency(self.sphinx_source, source_file)
for cfmt in self.sphinx_output_format:
sphinx_build_task = self.create_task('SphinxBuildingTask')
sphinx_build_task.set_inputs(self.sphinx_source)
# In task we keep the specific format this task is generating
sphinx_build_task.env.SPHINX_OUTPUT_FORMAT = cfmt
# the sphinx-build results are in <build + output_format> directory
sphinx_build_task.sphinx_output_directory = self.path.get_bld().make_node(cfmt)
sphinx_build_task.set_outputs(sphinx_build_task.sphinx_output_directory)
sphinx_build_task.sphinx_output_directory.mkdir()
Utils.def_attrs(sphinx_build_task, install_path=getattr(self, 'install_path_' + cfmt, getattr(self, 'install_path', get_install_path(sphinx_build_task))))
def get_install_path(task):
	if task.env.SPHINX_OUTPUT_FORMAT == 'man':
		return task.env.MANDIR
	elif task.env.SPHINX_OUTPUT_FORMAT == 'info':
		return task.env.INFODIR
	else:
		return task.env.DOCDIR
class SphinxBuildingTask(Task.Task):
color = 'BOLD'
run_str = '${SPHINX_BUILD} -M ${SPHINX_OUTPUT_FORMAT} ${SRC} ${TGT} -d ${TGT[0].bld_dir()}/doctrees-${SPHINX_OUTPUT_FORMAT} ${SPHINX_OPTIONS}'
def keyword(self):
return 'Compiling (%s)' % self.env.SPHINX_OUTPUT_FORMAT
def runnable_status(self):
for x in self.run_after:
if not x.hasrun:
return Task.ASK_LATER
self.signature()
ret = Task.Task.runnable_status(self)
if ret == Task.SKIP_ME:
# in case the files were removed
self.add_install()
return ret
def post_run(self):
self.add_install()
return Task.Task.post_run(self)
def add_install(self):
nodes = self.sphinx_output_directory.ant_glob('**/*', quiet=True)
self.outputs += nodes
self.generator.add_install_files(install_to=self.install_path,
install_from=nodes,
postpone=False,
cwd=self.sphinx_output_directory.make_node(self.env.SPHINX_OUTPUT_FORMAT),
relative_trick=True)

# file: projecthamster_hamster/waflib/extras/fc_fujitsu.py
#! /usr/bin/env python
# encoding: utf-8
# Detection of the Fujitsu Fortran compiler for ARM64FX
import re
from waflib.Tools import fc,fc_config,fc_scan
from waflib.Configure import conf
from waflib.Tools.compiler_fc import fc_compiler
fc_compiler['linux'].append('fc_fujitsu')
@conf
def find_fujitsu(conf):
fc=conf.find_program(['frtpx'],var='FC')
conf.get_fujitsu_version(fc)
conf.env.FC_NAME='FUJITSU'
conf.env.FC_MOD_CAPITALIZATION='lower'
@conf
def fujitsu_flags(conf):
v=conf.env
v['_FCMODOUTFLAGS']=[]
v['FCFLAGS_DEBUG']=[]
v['FCFLAGS_fcshlib']=[]
v['LINKFLAGS_fcshlib']=[]
v['FCSTLIB_MARKER']=''
v['FCSHLIB_MARKER']=''
@conf
def get_fujitsu_version(conf,fc):
version_re=re.compile(r"frtpx\s*\(FRT\)\s*(?P<major>\d+)\.(?P<minor>\d+)\.",re.I).search
cmd=fc+['--version']
out,err=fc_config.getoutput(conf,cmd,stdin=False)
if out:
match=version_re(out)
else:
match=version_re(err)
	if not match:
		conf.fatal('Could not determine the Fujitsu FRT Fortran compiler version.')
	k=match.groupdict()
	conf.env['FC_VERSION']=(k['major'],k['minor'])
def configure(conf):
conf.find_fujitsu()
conf.find_program('ar',var='AR')
conf.add_os_flags('ARFLAGS')
if not conf.env.ARFLAGS:
conf.env.ARFLAGS=['rcs']
conf.fc_flags()
conf.fc_add_flags()
conf.fujitsu_flags()

# file: projecthamster_hamster/waflib/extras/fc_nec.py
#! /usr/bin/env python
# encoding: utf-8
# harald at klimachs.de
import re
from waflib.Tools import fc, fc_config, fc_scan
from waflib.Configure import conf
from waflib.Tools.compiler_fc import fc_compiler
fc_compiler['linux'].append('fc_nec')
@conf
def find_sxfc(conf):
"""Find the NEC fortran compiler (will look in the environment variable 'FC')"""
fc = conf.find_program(['sxf90','sxf03'], var='FC')
conf.get_sxfc_version(fc)
conf.env.FC_NAME = 'NEC'
conf.env.FC_MOD_CAPITALIZATION = 'lower'
@conf
def sxfc_flags(conf):
v = conf.env
v['_FCMODOUTFLAGS'] = [] # enable module files and put them in the current directory
v['FCFLAGS_DEBUG'] = [] # more verbose compiler warnings
v['FCFLAGS_fcshlib'] = []
v['LINKFLAGS_fcshlib'] = []
v['FCSTLIB_MARKER'] = ''
v['FCSHLIB_MARKER'] = ''
@conf
def get_sxfc_version(conf, fc):
version_re = re.compile(r"FORTRAN90/SX\s*Version\s*(?P<major>\d*)\.(?P<minor>\d*)", re.I).search
cmd = fc + ['-V']
out,err = fc_config.getoutput(conf, cmd, stdin=False)
if out:
match = version_re(out)
else:
match = version_re(err)
if not match:
version_re=re.compile(r"NEC Fortran 2003 Compiler for\s*(?P<major>\S*)\s*\(c\)\s*(?P<minor>\d*)",re.I).search
if out:
match = version_re(out)
else:
match = version_re(err)
if not match:
conf.fatal('Could not determine the NEC Fortran compiler version.')
k = match.groupdict()
conf.env['FC_VERSION'] = (k['major'], k['minor'])
def configure(conf):
conf.find_sxfc()
conf.find_program('sxar',var='AR')
conf.add_os_flags('ARFLAGS')
if not conf.env.ARFLAGS:
conf.env.ARFLAGS=['rcs']
conf.fc_flags()
conf.fc_add_flags()
conf.sxfc_flags()

# file: projecthamster_hamster/waflib/extras/use_config.py
#!/usr/bin/env python
# coding=utf-8
# Mathieu Courtois - EDF R&D, 2013 - http://www.code-aster.org
"""
When a project has a lot of options, the 'waf configure' command line can become
very long and error-prone.
This tool provides a convenient way to load a set of configuration parameters
from a local file or from a remote url.
The configuration parameters are stored in a Python file that is imported as
an extra waf tool can be.
Example:
$ waf configure --use-config-dir=http://www.anywhere.org --use-config=myconf1 ...
The file 'myconf1' will be downloaded from 'http://www.anywhere.org'
(or 'http://www.anywhere.org/wafcfg').
If the files are available locally, it could be:
$ waf configure --use-config-dir=/somewhere/myconfigurations --use-config=myconf1 ...
The configuration of 'myconf1.py' is automatically loaded by calling
its 'configure' function. In this example, it defines environment variables and
set options:
def configure(self):
self.env['CC'] = 'gcc-4.8'
self.env.append_value('LIBPATH', [...])
self.options.perlbinary = '/usr/local/bin/perl'
self.options.pyc = False
The corresponding command line should have been:
$ CC=gcc-4.8 LIBPATH=... waf configure --nopyc --with-perl-binary=/usr/local/bin/perl
This is an extra tool, not bundled with the default waf binary.
To add the use_config tool to the waf file:
$ ./waf-light --tools=use_config
When using this tool, the wscript will look like:
def options(opt):
opt.load('use_config')
def configure(conf):
conf.load('use_config')
"""
import sys
import os.path as osp
import os
local_repo = ''
"""Local repository containing additional Waf tools (plugins)"""
remote_repo = 'https://gitlab.com/ita1024/waf/raw/master/'
"""
Remote directory containing downloadable waf tools. The missing tools can be downloaded by using::
$ waf configure --download
"""
remote_locs = ['waflib/extras', 'waflib/Tools']
"""
Remote directories for use with :py:const:`waflib.extras.use_config.remote_repo`
"""
try:
from urllib import request
except ImportError:
from urllib import urlopen
else:
urlopen = request.urlopen
from waflib import Errors, Context, Logs, Utils, Options, Configure
try:
from urllib.parse import urlparse
except ImportError:
from urlparse import urlparse
DEFAULT_DIR = 'wafcfg'
# add first the current wafcfg subdirectory
sys.path.append(osp.abspath(DEFAULT_DIR))
def options(self):
group = self.add_option_group('configure options')
group.add_option('--download', dest='download', default=False, action='store_true', help='try to download the tools if missing')
group.add_option('--use-config', action='store', default=None,
metavar='CFG', dest='use_config',
help='force the configuration parameters by importing '
'CFG.py. Several modules may be provided (comma '
'separated).')
group.add_option('--use-config-dir', action='store', default=DEFAULT_DIR,
metavar='CFG_DIR', dest='use_config_dir',
help='path or url where to find the configuration file')
def download_check(node):
"""
Hook to check for the tools which are downloaded. Replace with your function if necessary.
"""
pass
def download_tool(tool, force=False, ctx=None):
"""
Download a Waf tool from the remote repository defined in :py:const:`waflib.extras.use_config.remote_repo`::
$ waf configure --download
"""
for x in Utils.to_list(remote_repo):
for sub in Utils.to_list(remote_locs):
url = '/'.join((x, sub, tool + '.py'))
try:
web = urlopen(url)
try:
if web.getcode() != 200:
continue
except AttributeError:
pass
except Exception:
# on python3 urlopen throws an exception
# python 2.3 does not have getcode and throws an exception to fail
continue
else:
tmp = ctx.root.make_node(os.sep.join((Context.waf_dir, 'waflib', 'extras', tool + '.py')))
tmp.write(web.read(), 'wb')
Logs.warn('Downloaded %s from %s', tool, url)
download_check(tmp)
try:
module = Context.load_tool(tool)
except Exception:
Logs.warn('The tool %s from %s is unusable', tool, url)
try:
tmp.delete()
except Exception:
pass
continue
return module
raise Errors.WafError('Could not load the Waf tool')
def load_tool(tool, tooldir=None, ctx=None, with_sys_path=True):
try:
module = Context.load_tool_default(tool, tooldir, ctx, with_sys_path)
except ImportError as e:
if not ctx or not hasattr(Options.options, 'download'):
Logs.error('Could not load %r during options phase (download unavailable at this point)' % tool)
raise
if Options.options.download:
module = download_tool(tool, ctx=ctx)
if not module:
ctx.fatal('Could not load the Waf tool %r or download a suitable replacement from the repository (sys.path %r)\n%s' % (tool, sys.path, e))
else:
ctx.fatal('Could not load the Waf tool %r from %r (try the --download option?):\n%s' % (tool, sys.path, e))
return module
Context.load_tool_default = Context.load_tool
Context.load_tool = load_tool
Configure.download_tool = download_tool
def configure(self):
opts = self.options
use_cfg = opts.use_config
if use_cfg is None:
return
url = urlparse(opts.use_config_dir)
kwargs = {}
if url.scheme:
kwargs['download'] = True
kwargs['remote_url'] = url.geturl()
# search first with the exact url, else try with +'/wafcfg'
kwargs['remote_locs'] = ['', DEFAULT_DIR]
tooldir = url.geturl() + ' ' + DEFAULT_DIR
for cfg in use_cfg.split(','):
Logs.pprint('NORMAL', "Searching configuration '%s'..." % cfg)
self.load(cfg, tooldir=tooldir, **kwargs)
self.start_msg('Checking for configuration')
self.end_msg(use_cfg)

# file: projecthamster_hamster/waflib/extras/run_m_script.py
#!/usr/bin/env python
# encoding: utf-8
# Hans-Martin von Gaudecker, 2012
"""
Run a Matlab script.
Note that the script is run in the directory where it lives -- Matlab won't
allow it any other way.
For error-catching purposes, the task keeps its own log file, which is deleted
if the task finishes without error. Otherwise, it shows up as mscript_[index].log
in the bldnode directory.
Usage::
ctx(features='run_m_script',
source='some_script.m',
target=['some_table.tex', 'some_figure.eps'],
deps='some_data.mat')
"""
import os, sys
from waflib import Task, TaskGen, Logs
MATLAB_COMMANDS = ['matlab']
def configure(ctx):
ctx.find_program(MATLAB_COMMANDS, var='MATLABCMD', errmsg = """\n
No Matlab executable found!\n\n
If Matlab is needed:\n
1) Check the settings of your system path.
2) Note we are looking for Matlab executables called: %s
If yours has a different name, please report to hmgaudecker [at] gmail\n
Else:\n
Do not load the 'run_m_script' tool in the main wscript.\n\n""" % MATLAB_COMMANDS)
ctx.env.MATLABFLAGS = '-wait -nojvm -nosplash -minimize'
class run_m_script_base(Task.Task):
"""Run a Matlab script."""
run_str = '"${MATLABCMD}" ${MATLABFLAGS} -logfile "${LOGFILEPATH}" -r "try, ${MSCRIPTTRUNK}, exit(0), catch err, disp(err.getReport()), exit(1), end"'
shell = True
class run_m_script(run_m_script_base):
"""Erase the Matlab overall log file if everything went okay, else raise an
error and print its 10 last lines.
"""
def run(self):
ret = run_m_script_base.run(self)
logfile = self.env.LOGFILEPATH
		if ret:
			mode = 'r'
			if sys.version_info.major >= 3:
				mode = 'rb'
			with open(logfile, mode=mode) as f:
				tail = f.readlines()[-10:]
			# on python 3 the lines read are bytes; decode them before joining below
			tail = [t if isinstance(t, str) else t.decode('utf-8', 'replace') for t in tail]
			Logs.error("""Running Matlab on %r returned the error %r\n\nCheck the log file %s, last 10 lines\n\n%s\n\n\n""",
				self.inputs[0], ret, logfile, '\n'.join(tail))
else:
os.remove(logfile)
return ret
@TaskGen.feature('run_m_script')
@TaskGen.before_method('process_source')
def apply_run_m_script(tg):
"""Task generator customising the options etc. to call Matlab in batch
	mode for running an m-script.
"""
# Convert sources and targets to nodes
src_node = tg.path.find_resource(tg.source)
tgt_nodes = [tg.path.find_or_declare(t) for t in tg.to_list(tg.target)]
tsk = tg.create_task('run_m_script', src=src_node, tgt=tgt_nodes)
tsk.cwd = src_node.parent.abspath()
tsk.env.MSCRIPTTRUNK = os.path.splitext(src_node.name)[0]
tsk.env.LOGFILEPATH = os.path.join(tg.bld.bldnode.abspath(), '%s_%d.log' % (tsk.env.MSCRIPTTRUNK, tg.idx))
# dependencies (if the attribute 'deps' changes, trigger a recompilation)
for x in tg.to_list(getattr(tg, 'deps', [])):
node = tg.path.find_resource(x)
if not node:
tg.bld.fatal('Could not find dependency %r for running %r' % (x, src_node.abspath()))
tsk.dep_nodes.append(node)
Logs.debug('deps: found dependencies %r for running %r', tsk.dep_nodes, src_node.abspath())
# Bypass the execution of process_source by setting the source to an empty list
tg.source = []

# file: projecthamster_hamster/waflib/extras/pch.py
#! /usr/bin/env python
# encoding: utf-8
# Alexander Afanasyev (UCLA), 2014
"""
Enable precompiled C++ header support (currently only clang++ and g++ are supported)
To use this tool, wscript should look like:
def options(opt):
opt.load('pch')
# This will add `--with-pch` configure option.
# Unless --with-pch during configure stage specified, the precompiled header support is disabled
def configure(conf):
conf.load('pch')
# this will set conf.env.WITH_PCH if --with-pch is specified and the supported compiler is used
# Unless conf.env.WITH_PCH is set, the precompiled header support is disabled
def build(bld):
bld(features='cxx pch',
target='precompiled-headers',
name='precompiled-headers',
headers='a.h b.h c.h', # headers to pre-compile into `precompiled-headers`
# Other parameters to compile precompiled headers
# includes=...,
# export_includes=...,
# use=...,
# ...
# Exported parameters will be propagated even if precompiled headers are disabled
)
bld(
target='test',
features='cxx cxxprogram',
source='a.cpp b.cpp d.cpp main.cpp',
use='precompiled-headers',
)
# or
bld(
target='test',
features='pch cxx cxxprogram',
source='a.cpp b.cpp d.cpp main.cpp',
headers='a.h b.h c.h',
)
Note that the precompiled header must have guards against multiple inclusion. If the guards are missing, any benefit of the precompiled header will be voided and compilation may fail in some cases.
"""
import os
from waflib import Task, TaskGen, Utils
from waflib.Tools import c_preproc, cxx
PCH_COMPILER_OPTIONS = {
'clang++': [['-include'], '.pch', ['-x', 'c++-header']],
'g++': [['-include'], '.gch', ['-x', 'c++-header']],
}
def options(opt):
opt.add_option('--without-pch', action='store_false', default=True, dest='with_pch', help='''Try to use precompiled header to speed up compilation (only g++ and clang++)''')
def configure(conf):
if (conf.options.with_pch and conf.env['COMPILER_CXX'] in PCH_COMPILER_OPTIONS.keys()):
conf.env.WITH_PCH = True
flags = PCH_COMPILER_OPTIONS[conf.env['COMPILER_CXX']]
conf.env.CXXPCH_F = flags[0]
conf.env.CXXPCH_EXT = flags[1]
conf.env.CXXPCH_FLAGS = flags[2]
@TaskGen.feature('pch')
@TaskGen.before('process_source')
def apply_pch(self):
if not self.env.WITH_PCH:
return
if getattr(self.bld, 'pch_tasks', None) is None:
self.bld.pch_tasks = {}
if getattr(self, 'headers', None) is None:
return
self.headers = self.to_nodes(self.headers)
if getattr(self, 'name', None):
try:
task = self.bld.pch_tasks[self.name]
self.bld.fatal("Duplicated 'pch' task with name %r" % "%s.%s" % (self.name, self.idx))
except KeyError:
pass
out = '%s.%d%s' % (self.target, self.idx, self.env['CXXPCH_EXT'])
out = self.path.find_or_declare(out)
task = self.create_task('gchx', self.headers, out)
# target should be an absolute path of `out`, but without precompiled header extension
task.target = out.abspath()[:-len(out.suffix())]
self.pch_task = task
if getattr(self, 'name', None):
self.bld.pch_tasks[self.name] = task
@TaskGen.feature('cxx')
@TaskGen.after_method('process_source', 'propagate_uselib_vars')
def add_pch(self):
if not (self.env['WITH_PCH'] and getattr(self, 'use', None) and getattr(self, 'compiled_tasks', None) and getattr(self.bld, 'pch_tasks', None)):
return
pch = None
# find pch task, if any
if getattr(self, 'pch_task', None):
pch = self.pch_task
else:
for use in Utils.to_list(self.use):
try:
pch = self.bld.pch_tasks[use]
except KeyError:
pass
if pch:
for x in self.compiled_tasks:
x.env.append_value('CXXFLAGS', self.env['CXXPCH_F'] + [pch.target])
class gchx(Task.Task):
run_str = '${CXX} ${ARCH_ST:ARCH} ${CXXFLAGS} ${CXXPCH_FLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${CXXPCH_F:SRC} ${CXX_SRC_F}${SRC[0].abspath()} ${CXX_TGT_F}${TGT[0].abspath()} ${CPPFLAGS}'
scan = c_preproc.scan
color = 'BLUE'
ext_out=['.h']
def runnable_status(self):
try:
node_deps = self.generator.bld.node_deps[self.uid()]
except KeyError:
node_deps = []
ret = Task.Task.runnable_status(self)
if ret == Task.SKIP_ME and self.env.CXX_NAME == 'clang':
t = os.stat(self.outputs[0].abspath()).st_mtime
for n in self.inputs + node_deps:
if os.stat(n.abspath()).st_mtime > t:
return Task.RUN_ME
return ret

# file: projecthamster_hamster/waflib/extras/netcache_client.py
#! /usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2011-2015 (ita)
"""
A client for the network cache (playground/netcache/). Launch the server with:
./netcache_server, then use it for the builds by adding the following:
def build(bld):
bld.load('netcache_client')
The parameters should be present in the environment in the form:
NETCACHE=host:port waf configure build
Or in a more detailed way:
NETCACHE_PUSH=host:port NETCACHE_PULL=host:port waf configure build
where:
host: host where the server resides, by default localhost
port: by default push on 11001 and pull on 12001
Use the server provided in playground/netcache/Netcache.java
"""
import os, socket, time, atexit, sys
from waflib import Task, Logs, Utils, Build, Runner
from waflib.Configure import conf
BUF = 8192 * 16
HEADER_SIZE = 128
MODES = ['PUSH', 'PULL', 'PUSH_PULL']
STALE_TIME = 30 # seconds
GET = 'GET'
PUT = 'PUT'
LST = 'LST'
BYE = 'BYE'
all_sigs_in_cache = (0.0, [])
def put_data(conn, data):
if sys.hexversion > 0x3000000:
data = data.encode('latin-1')
cnt = 0
while cnt < len(data):
sent = conn.send(data[cnt:])
if sent == 0:
raise RuntimeError('connection ended')
cnt += sent
push_connections = Runner.Queue(0)
pull_connections = Runner.Queue(0)
def get_connection(push=False):
# return a new connection... do not forget to release it!
try:
if push:
ret = push_connections.get(block=False)
else:
ret = pull_connections.get(block=False)
except Exception:
ret = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
if push:
ret.connect(Task.push_addr)
else:
ret.connect(Task.pull_addr)
return ret
def release_connection(conn, msg='', push=False):
if conn:
if push:
push_connections.put(conn)
else:
pull_connections.put(conn)
def close_connection(conn, msg=''):
if conn:
data = '%s,%s' % (BYE, msg)
try:
put_data(conn, data.ljust(HEADER_SIZE))
except:
pass
try:
conn.close()
except:
pass
def close_all():
for q in (push_connections, pull_connections):
while q.qsize():
conn = q.get()
try:
close_connection(conn)
except:
# ignore errors when cleaning up
pass
atexit.register(close_all)
def read_header(conn):
cnt = 0
buf = []
while cnt < HEADER_SIZE:
data = conn.recv(HEADER_SIZE - cnt)
if not data:
#import traceback
#traceback.print_stack()
raise ValueError('connection ended when reading a header %r' % buf)
buf.append(data)
cnt += len(data)
if sys.hexversion > 0x3000000:
ret = ''.encode('latin-1').join(buf)
ret = ret.decode('latin-1')
else:
ret = ''.join(buf)
return ret
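# Wire format sketch: every exchange starts with a fixed-width, comma-separated,
# space-padded header of HEADER_SIZE bytes, e.g. 'GET,<hex signature>,0'.ljust(128),
# optionally followed by a payload whose size is announced in the reply header.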
def check_cache(conn, ssig):
"""
	List the files on the server; this is an optimization, as it assumes that
	concurrent builds are rare.
"""
global all_sigs_in_cache
if not STALE_TIME:
return
if time.time() - all_sigs_in_cache[0] > STALE_TIME:
params = (LST,'')
put_data(conn, ','.join(params).ljust(HEADER_SIZE))
# read what is coming back
ret = read_header(conn)
size = int(ret.split(',')[0])
buf = []
cnt = 0
while cnt < size:
data = conn.recv(min(BUF, size-cnt))
if not data:
raise ValueError('connection ended %r %r' % (cnt, size))
buf.append(data)
cnt += len(data)
if sys.hexversion > 0x3000000:
ret = ''.encode('latin-1').join(buf)
ret = ret.decode('latin-1')
else:
ret = ''.join(buf)
all_sigs_in_cache = (time.time(), ret.splitlines())
Logs.debug('netcache: server cache has %r entries', len(all_sigs_in_cache[1]))
if not ssig in all_sigs_in_cache[1]:
raise ValueError('no file %s in cache' % ssig)
class MissingFile(Exception):
pass
def recv_file(conn, ssig, count, p):
check_cache(conn, ssig)
params = (GET, ssig, str(count))
put_data(conn, ','.join(params).ljust(HEADER_SIZE))
data = read_header(conn)
size = int(data.split(',')[0])
if size == -1:
raise MissingFile('no file %s - %s in cache' % (ssig, count))
# get the file, writing immediately
# TODO a tmp file would be better
f = open(p, 'wb')
cnt = 0
while cnt < size:
data = conn.recv(min(BUF, size-cnt))
if not data:
raise ValueError('connection ended %r %r' % (cnt, size))
f.write(data)
cnt += len(data)
f.close()
def sock_send(conn, ssig, cnt, p):
#print "pushing %r %r %r" % (ssig, cnt, p)
size = os.stat(p).st_size
params = (PUT, ssig, str(cnt), str(size))
put_data(conn, ','.join(params).ljust(HEADER_SIZE))
f = open(p, 'rb')
cnt = 0
while cnt < size:
r = f.read(min(BUF, size-cnt))
while r:
k = conn.send(r)
if not k:
raise ValueError('connection ended')
cnt += k
r = r[k:]
def can_retrieve_cache(self):
if not Task.pull_addr:
return False
if not self.outputs:
return False
self.cached = False
cnt = 0
sig = self.signature()
ssig = Utils.to_hex(self.uid() + sig)
conn = None
err = False
try:
try:
conn = get_connection()
for node in self.outputs:
p = node.abspath()
recv_file(conn, ssig, cnt, p)
cnt += 1
except MissingFile as e:
Logs.debug('netcache: file is not in the cache %r', e)
err = True
except Exception as e:
Logs.debug('netcache: could not get the files %r', self.outputs)
if Logs.verbose > 1:
Logs.debug('netcache: exception %r', e)
err = True
# broken connection? remove this one
close_connection(conn)
conn = None
else:
Logs.debug('netcache: obtained %r from cache', self.outputs)
finally:
release_connection(conn)
if err:
return False
self.cached = True
return True
@Utils.run_once
def put_files_cache(self):
if not Task.push_addr:
return
if not self.outputs:
return
if getattr(self, 'cached', None):
return
#print "called put_files_cache", id(self)
bld = self.generator.bld
sig = self.signature()
ssig = Utils.to_hex(self.uid() + sig)
conn = None
cnt = 0
try:
for node in self.outputs:
# We could re-create the signature of the task with the signature of the outputs
# in practice, this means hashing the output files
# this is unnecessary
try:
if not conn:
conn = get_connection(push=True)
sock_send(conn, ssig, cnt, node.abspath())
Logs.debug('netcache: sent %r', node)
except Exception as e:
Logs.debug('netcache: could not push the files %r', e)
# broken connection? remove this one
close_connection(conn)
conn = None
cnt += 1
finally:
release_connection(conn, push=True)
bld.task_sigs[self.uid()] = self.cache_sig
def hash_env_vars(self, env, vars_lst):
# reimplement so that the resulting hash does not depend on local paths
if not env.table:
env = env.parent
if not env:
return Utils.SIG_NIL
idx = str(id(env)) + str(vars_lst)
try:
cache = self.cache_env
except AttributeError:
cache = self.cache_env = {}
else:
try:
return self.cache_env[idx]
except KeyError:
pass
v = str([env[a] for a in vars_lst])
v = v.replace(self.srcnode.abspath().__repr__()[:-1], '')
m = Utils.md5()
m.update(v.encode())
ret = m.digest()
Logs.debug('envhash: %r %r', ret, v)
cache[idx] = ret
return ret
def uid(self):
# reimplement so that the signature does not depend on local paths
try:
return self.uid_
except AttributeError:
m = Utils.md5()
src = self.generator.bld.srcnode
up = m.update
up(self.__class__.__name__.encode())
for x in self.inputs + self.outputs:
up(x.path_from(src).encode())
self.uid_ = m.digest()
return self.uid_
def make_cached(cls):
if getattr(cls, 'nocache', None):
return
m1 = cls.run
def run(self):
if getattr(self, 'nocache', False):
return m1(self)
if self.can_retrieve_cache():
return 0
return m1(self)
cls.run = run
m2 = cls.post_run
def post_run(self):
if getattr(self, 'nocache', False):
return m2(self)
bld = self.generator.bld
ret = m2(self)
if bld.cache_global:
self.put_files_cache()
if hasattr(self, 'chmod'):
for node in self.outputs:
os.chmod(node.abspath(), self.chmod)
return ret
cls.post_run = post_run
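# A minimal sketch of how the wrapper above is applied (the same loop runs in
# setup_netcache below):
#   cls = Task.classes['cxx']
#   make_cached(cls)   # cls.run now consults the cache first,
#                      # cls.post_run uploads fresh build results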
@conf
def setup_netcache(ctx, push_addr, pull_addr):
Task.Task.can_retrieve_cache = can_retrieve_cache
Task.Task.put_files_cache = put_files_cache
Task.Task.uid = uid
Task.push_addr = push_addr
Task.pull_addr = pull_addr
Build.BuildContext.hash_env_vars = hash_env_vars
ctx.cache_global = True
for x in Task.classes.values():
make_cached(x)
def build(bld):
if not 'NETCACHE' in os.environ and not 'NETCACHE_PULL' in os.environ and not 'NETCACHE_PUSH' in os.environ:
		Logs.warn('Setting NETCACHE_PULL=127.0.0.1:12001 and NETCACHE_PUSH=127.0.0.1:11001')
os.environ['NETCACHE_PULL'] = '127.0.0.1:12001'
os.environ['NETCACHE_PUSH'] = '127.0.0.1:11001'
if 'NETCACHE' in os.environ:
if not 'NETCACHE_PUSH' in os.environ:
os.environ['NETCACHE_PUSH'] = os.environ['NETCACHE']
if not 'NETCACHE_PULL' in os.environ:
os.environ['NETCACHE_PULL'] = os.environ['NETCACHE']
v = os.environ['NETCACHE_PULL']
if v:
h, p = v.split(':')
pull_addr = (h, int(p))
else:
pull_addr = None
v = os.environ['NETCACHE_PUSH']
if v:
h, p = v.split(':')
push_addr = (h, int(p))
else:
push_addr = None
setup_netcache(bld, push_addr, pull_addr)
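# Usage sketch (assumptions: a compatible cache server is reachable at the
# given host:port; the exact loading convention depends on the project):
#   # in the project wscript
#   def build(bld):
#       bld.load('netcache_client')
#   # then: NETCACHE=server:11001 waf build (or set NETCACHE_PULL/NETCACHE_PUSH)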
| 9,095 | Python | .py | 330 | 24.715152 | 109 | 0.690867 | projecthamster/hamster | 1,069 | 250 | 128 | GPL-3.0 | 9/5/2024, 5:12:46 PM (Europe/Amsterdam) |
20,617 | resx.py | projecthamster_hamster/waflib/extras/resx.py | #! /usr/bin/env python
# encoding: utf-8
import os
from waflib import Task
from waflib.TaskGen import extension
def configure(conf):
conf.find_program(['resgen'], var='RESGEN')
conf.env.RESGENFLAGS = '/useSourcePath'
@extension('.resx')
def resx_file(self, node):
"""
Bind the .resx extension to a resgen task
"""
if not getattr(self, 'cs_task', None):
self.bld.fatal('resx_file has no link task for use %r' % self)
# Given assembly 'Foo' and file 'Sub/Dir/File.resx', create 'Foo.Sub.Dir.File.resources'
assembly = getattr(self, 'namespace', os.path.splitext(self.gen)[0])
res = os.path.splitext(node.path_from(self.path))[0].replace('/', '.').replace('\\', '.')
out = self.path.find_or_declare(assembly + '.' + res + '.resources')
tsk = self.create_task('resgen', node, out)
self.cs_task.dep_nodes.extend(tsk.outputs) # dependency
self.env.append_value('RESOURCES', tsk.outputs[0].bldpath())
class resgen(Task.Task):
"""
Compile C# resource files
"""
color = 'YELLOW'
run_str = '${RESGEN} ${RESGENFLAGS} ${SRC} ${TGT}'
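# Usage sketch (assumes the 'cs' tool is loaded and creates cs_task before
# process_source dispatches the .resx extension handler above):
#   def build(bld):
#       bld(features='cs', source='Main.cs Sub/Dir/File.resx', gen='Foo.exe')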
| 1,050 | Python | .py | 28 | 35.535714 | 90 | 0.698522 | projecthamster/hamster | 1,069 | 250 | 128 | GPL-3.0 | 9/5/2024, 5:12:46 PM (Europe/Amsterdam) |
20,618 | classic_runner.py | projecthamster_hamster/waflib/extras/classic_runner.py | #!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2021 (ita)
from waflib import Utils, Runner
"""
Re-enable the classic threading system from waf 1.x
def configure(conf):
conf.load('classic_runner')
"""
class TaskConsumer(Utils.threading.Thread):
"""
Task consumers belong to a pool of workers
They wait for tasks in the queue and then use ``task.process(...)``
"""
def __init__(self, spawner):
Utils.threading.Thread.__init__(self)
"""
Obtain :py:class:`waflib.Task.TaskBase` instances from this queue.
"""
self.spawner = spawner
self.daemon = True
self.start()
def run(self):
"""
Loop over the tasks to execute
"""
try:
self.loop()
except Exception:
pass
def loop(self):
"""
Obtain tasks from :py:attr:`waflib.Runner.TaskConsumer.ready` and call
:py:meth:`waflib.Task.TaskBase.process`. If the object is a function, execute it.
"""
master = self.spawner.master
while 1:
if not master.stop:
try:
tsk = master.ready.get()
if tsk:
tsk.log_display(tsk.generator.bld)
master.process_task(tsk)
else:
break
finally:
master.out.put(tsk)
class Spawner(object):
"""
Daemon thread that consumes tasks from :py:class:`waflib.Runner.Parallel` producer and
spawns a consuming thread :py:class:`waflib.Runner.Consumer` for each
:py:class:`waflib.Task.Task` instance.
"""
def __init__(self, master):
self.master = master
""":py:class:`waflib.Runner.Parallel` producer instance"""
self.pool = [TaskConsumer(self) for i in range(master.numjobs)]
Runner.Spawner = Spawner
| 1,581 | Python | .py | 58 | 24.172414 | 87 | 0.707204 | projecthamster/hamster | 1,069 | 250 | 128 | GPL-3.0 | 9/5/2024, 5:12:46 PM (Europe/Amsterdam) |
20,619 | cfg_altoptions.py | projecthamster_hamster/waflib/extras/cfg_altoptions.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Tool to extend c_config.check_cfg()
__author__ = __maintainer__ = "Jérôme Carretero <cJ-waf@zougloub.eu>"
__copyright__ = "Jérôme Carretero, 2014"
"""
This tool makes it possible to work around the absence of ``*-config``
programs on systems, by keeping the same clean configuration syntax but
inferring values or permitting their modification via the options interface.

Note that pkg-config also honours the ``PKG_CONFIG_PATH`` environment variable,
so custom .pc files can be placed in a dedicated folder; this tool could also
be implemented by taking advantage of that fact.

Usage::

	def options(opt):
		opt.load('cfg_altoptions')
		opt.add_package_option('package')

	def configure(conf):
		conf.load('cfg_altoptions')
		conf.check_cfg(...)

Known issues:
- Behavior with different build contexts...
"""
import os
import functools
from waflib import Configure, Options, Errors
def name_to_dest(x):
return x.lower().replace('-', '_')
def options(opt):
def x(opt, param):
dest = name_to_dest(param)
gr = opt.get_option_group("configure options")
gr.add_option('--%s-root' % dest,
help="path containing include and lib subfolders for %s" \
% param,
)
opt.add_package_option = functools.partial(x, opt)
check_cfg_old = getattr(Configure.ConfigurationContext, 'check_cfg')
@Configure.conf
def check_cfg(conf, *k, **kw):
if k:
lst = k[0].split()
kw['package'] = lst[0]
kw['args'] = ' '.join(lst[1:])
if not 'package' in kw:
return check_cfg_old(conf, **kw)
package = kw['package']
package_lo = name_to_dest(package)
package_hi = package.upper().replace('-', '_') # TODO FIXME
package_hi = kw.get('uselib_store', package_hi)
	def check_folder(path, name):
		# raise explicitly: assert statements are stripped under 'python -O'
		if not os.path.isdir(path):
			raise Errors.ConfigurationError(
				"%s_%s (%s) is not a folder!" \
				% (package_lo, name, path))
		return path
root = getattr(Options.options, '%s_root' % package_lo, None)
if root is None:
return check_cfg_old(conf, **kw)
else:
def add_manual_var(k, v):
			conf.start_msg('Adding manual variable for %s' % package)
conf.env["%s_%s" % (k, package_hi)] = v
conf.end_msg("%s = %s" % (k, v))
check_folder(root, 'root')
pkg_inc = check_folder(os.path.join(root, "include"), 'inc')
add_manual_var('INCLUDES', [pkg_inc])
pkg_lib = check_folder(os.path.join(root, "lib"), 'libpath')
add_manual_var('LIBPATH', [pkg_lib])
add_manual_var('LIB', [package])
for x in kw.get('manual_deps', []):
for k, v in sorted(conf.env.get_merged_dict().items()):
if k.endswith('_%s' % x):
k = k.replace('_%s' % x, '')
					conf.start_msg('Adding manual dependency for %s' % package)
conf.env["%s_%s" % (k, package_hi)] += v
conf.end_msg('%s += %s' % (k, v))
return True
| 2,825 | Python | .py | 80 | 32.0875 | 72 | 0.668388 | projecthamster/hamster | 1,069 | 250 | 128 | GPL-3.0 | 9/5/2024, 5:12:46 PM (Europe/Amsterdam) |
20,620 | biber.py | projecthamster_hamster/waflib/extras/biber.py | #!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2011 (ita)
"""
Latex processing using "biber"
"""
import os
from waflib import Task, Logs
from waflib.Tools import tex as texmodule
class tex(texmodule.tex):
biber_fun, _ = Task.compile_fun('${BIBER} ${BIBERFLAGS} ${SRCFILE}',shell=False)
biber_fun.__doc__ = """
Execute the program **biber**
"""
def bibfile(self):
return None
def bibunits(self):
self.env.env = {}
self.env.env.update(os.environ)
self.env.env.update({'BIBINPUTS': self.texinputs(), 'BSTINPUTS': self.texinputs()})
self.env.SRCFILE = self.aux_nodes[0].name[:-4]
if not self.env['PROMPT_LATEX']:
self.env.append_unique('BIBERFLAGS', '--quiet')
path = self.aux_nodes[0].abspath()[:-4] + '.bcf'
if os.path.isfile(path):
Logs.warn('calling biber')
self.check_status('error when calling biber, check %s.blg for errors' % (self.env.SRCFILE), self.biber_fun())
else:
super(tex, self).bibfile()
super(tex, self).bibunits()
class latex(tex):
texfun, vars = Task.compile_fun('${LATEX} ${LATEXFLAGS} ${SRCFILE}', shell=False)
class pdflatex(tex):
texfun, vars = Task.compile_fun('${PDFLATEX} ${PDFLATEXFLAGS} ${SRCFILE}', shell=False)
class xelatex(tex):
texfun, vars = Task.compile_fun('${XELATEX} ${XELATEXFLAGS} ${SRCFILE}', shell=False)
def configure(self):
"""
Almost the same as in tex.py, but try to detect 'biber'
"""
v = self.env
	for p in 'biber tex latex pdflatex xelatex bibtex dvips dvipdf ps2pdf makeindex pdf2ps'.split():
try:
self.find_program(p, var=p.upper())
except self.errors.ConfigurationError:
pass
v['DVIPSFLAGS'] = '-Ppdf'
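# Usage sketch (assumes 'doc.tex' loads biblatex with backend=biber so that a
# .bcf file is produced and the biber branch above is taken; load this tool in
# place of plain 'tex'):
#   def configure(conf):
#       conf.load('biber')
#   def build(bld):
#       bld(features='tex', source='doc.tex')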
| 1,629 | Python | .py | 47 | 32.191489 | 112 | 0.700191 | projecthamster/hamster | 1,069 | 250 | 128 | GPL-3.0 | 9/5/2024, 5:12:46 PM (Europe/Amsterdam) |
20,621 | softlink_libs.py | projecthamster_hamster/waflib/extras/softlink_libs.py | #! /usr/bin/env python
# per rosengren 2011
from waflib.TaskGen import feature, after_method
from waflib.Task import Task, always_run
from os.path import basename, isabs
from os import linesep
from tempfile import TemporaryFile # os.tmpfile() no longer exists on Python 3
def options(opt):
grp = opt.add_option_group('Softlink Libraries Options')
grp.add_option('--exclude', default='/usr/lib,/lib', help='No symbolic links are created for libs within [%default]')
def configure(cnf):
cnf.find_program('ldd')
if not cnf.env.SOFTLINK_EXCLUDE:
cnf.env.SOFTLINK_EXCLUDE = cnf.options.exclude.split(',')
@feature('softlink_libs')
@after_method('process_rule')
def add_finder(self):
tgt = self.path.find_or_declare(self.target)
self.create_task('sll_finder', tgt=tgt)
self.create_task('sll_installer', tgt=tgt)
class sll_finder(Task):
ext_out = 'softlink_libs'
def run(self):
bld = self.generator.bld
linked=[]
target_paths = []
for g in bld.groups:
for tgen in g:
# FIXME it might be better to check if there is a link_task (getattr?)
target_paths += [tgen.path.get_bld().bldpath()]
linked += [t.outputs[0].bldpath()
for t in getattr(tgen, 'tasks', [])
if t.__class__.__name__ in
['cprogram', 'cshlib', 'cxxprogram', 'cxxshlib']]
lib_list = []
if len(linked):
cmd = [self.env.LDD] + linked
# FIXME add DYLD_LIBRARY_PATH+PATH for osx+win32
ldd_env = {'LD_LIBRARY_PATH': ':'.join(target_paths + self.env.LIBPATH)}
			# text mode, so the ldd output can be parsed as str on Python 3 as well
			with TemporaryFile(mode='w+') as result:
self.exec_command(cmd, env=ldd_env, stdout=result)
result.seek(0)
for line in result.readlines():
words = line.split()
if len(words) < 3 or words[1] != '=>':
continue
lib = words[2]
if lib == 'not':
continue
if any([lib.startswith(p) for p in
[bld.bldnode.abspath(), '('] +
self.env.SOFTLINK_EXCLUDE]):
continue
if not isabs(lib):
continue
lib_list.append(lib)
lib_list = sorted(set(lib_list))
self.outputs[0].write(linesep.join(lib_list + self.env.DYNAMIC_LIBS))
return 0
class sll_installer(Task):
ext_in = 'softlink_libs'
def run(self):
tgt = self.outputs[0]
self.generator.bld.install_files('${LIBDIR}', tgt, postpone=False)
lib_list=tgt.read().split()
for lib in lib_list:
self.generator.bld.symlink_as('${LIBDIR}/'+basename(lib), lib, postpone=False)
		return 0

# the installer task must run on every build (registered after the class definition)
always_run(sll_installer)
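# Usage sketch (assumption: 'target' names the list file that receives the
# resolved library paths):
#   def build(bld):
#       bld.program(source='main.c', target='app', use='SOMELIB')
#       bld(features='softlink_libs', target='needed_libs.txt')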
| 2,389 | Python | .py | 69 | 30.971014 | 118 | 0.683096 | projecthamster/hamster | 1,069 | 250 | 128 | GPL-3.0 | 9/5/2024, 5:12:46 PM (Europe/Amsterdam) |
20,622 | clang_cross.py | projecthamster_hamster/waflib/extras/clang_cross.py | #!/usr/bin/env python
# encoding: utf-8
# Krzysztof Kosiński 2014
# DragoonX6 2018
"""
Detect the Clang C compiler
This version is an attempt at supporting the -target and -sysroot flag of Clang.
"""
from waflib.Tools import ccroot, ar, gcc
from waflib.Configure import conf
import waflib.Context
import waflib.extras.clang_cross_common
def options(opt):
"""
Target triplet for clang::
$ waf configure --clang-target-triple=x86_64-pc-linux-gnu
"""
cc_compiler_opts = opt.add_option_group('Configuration options')
cc_compiler_opts.add_option('--clang-target-triple', default=None,
help='Target triple for clang',
dest='clang_target_triple')
cc_compiler_opts.add_option('--clang-sysroot', default=None,
help='Sysroot for clang',
dest='clang_sysroot')
@conf
def find_clang(conf):
"""
Finds the program clang and executes it to ensure it really is clang
"""
import os
cc = conf.find_program('clang', var='CC')
	if conf.options.clang_target_triple is not None:
conf.env.append_value('CC', ['-target', conf.options.clang_target_triple])
	if conf.options.clang_sysroot is not None:
		if os.path.isabs(conf.options.clang_sysroot):
			sysroot = conf.options.clang_sysroot
		else:
			sysroot = os.path.normpath(os.path.join(os.getcwd(), conf.options.clang_sysroot))
conf.env.append_value('CC', ['--sysroot', sysroot])
conf.get_cc_version(cc, clang=True)
conf.env.CC_NAME = 'clang'
@conf
def clang_modifier_x86_64_w64_mingw32(conf):
conf.gcc_modifier_win32()
@conf
def clang_modifier_i386_w64_mingw32(conf):
conf.gcc_modifier_win32()
@conf
def clang_modifier_x86_64_windows_msvc(conf):
conf.clang_modifier_msvc()
# Allow the user to override any flags if they so desire.
clang_modifier_user_func = getattr(conf, 'clang_modifier_x86_64_windows_msvc_user', None)
if clang_modifier_user_func:
clang_modifier_user_func()
@conf
def clang_modifier_i386_windows_msvc(conf):
conf.clang_modifier_msvc()
# Allow the user to override any flags if they so desire.
clang_modifier_user_func = getattr(conf, 'clang_modifier_i386_windows_msvc_user', None)
if clang_modifier_user_func:
clang_modifier_user_func()
def configure(conf):
conf.find_clang()
conf.find_program(['llvm-ar', 'ar'], var='AR')
conf.find_ar()
conf.gcc_common_flags()
# Allow the user to provide flags for the target platform.
conf.gcc_modifier_platform()
# And allow more fine grained control based on the compiler's triplet.
conf.clang_modifier_target_triple()
conf.cc_load_tools()
conf.cc_add_flags()
conf.link_add_flags()
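# Example configure invocation (sketch; the triple and sysroot path are
# placeholders, not defaults):
#   waf configure --clang-target-triple=aarch64-linux-gnu \
#       --clang-sysroot=/opt/sysroots/aarch64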
| 2,550 | Python | .py | 74 | 32.310811 | 90 | 0.753458 | projecthamster/hamster | 1,069 | 250 | 128 | GPL-3.0 | 9/5/2024, 5:12:46 PM (Europe/Amsterdam) |
20,623 | fsc.py | projecthamster_hamster/waflib/extras/fsc.py | #!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2011 (ita)
"""
Experimental F# stuff
FSC="mono /path/to/fsc.exe" waf configure build
"""
from waflib import Utils, Task
from waflib.TaskGen import before_method, after_method, feature
from waflib.Tools import ccroot, cs
ccroot.USELIB_VARS['fsc'] = set(['CSFLAGS', 'ASSEMBLIES', 'RESOURCES'])
@feature('fs')
@before_method('process_source')
def apply_fsc(self):
cs_nodes = []
no_nodes = []
for x in self.to_nodes(self.source):
if x.name.endswith('.fs'):
cs_nodes.append(x)
else:
no_nodes.append(x)
self.source = no_nodes
bintype = getattr(self, 'type', self.gen.endswith('.dll') and 'library' or 'exe')
self.cs_task = tsk = self.create_task('fsc', cs_nodes, self.path.find_or_declare(self.gen))
tsk.env.CSTYPE = '/target:%s' % bintype
tsk.env.OUT = '/out:%s' % tsk.outputs[0].abspath()
inst_to = getattr(self, 'install_path', bintype=='exe' and '${BINDIR}' or '${LIBDIR}')
if inst_to:
# note: we are making a copy, so the files added to cs_task.outputs won't be installed automatically
mod = getattr(self, 'chmod', bintype=='exe' and Utils.O755 or Utils.O644)
self.install_task = self.add_install_files(install_to=inst_to, install_from=self.cs_task.outputs[:], chmod=mod)
feature('fs')(cs.use_cs)
after_method('apply_fsc')(cs.use_cs)
feature('fs')(cs.debug_cs)
after_method('apply_fsc', 'use_cs')(cs.debug_cs)
class fsc(Task.Task):
"""
Compile F# files
"""
color = 'YELLOW'
run_str = '${FSC} ${CSTYPE} ${CSFLAGS} ${ASS_ST:ASSEMBLIES} ${RES_ST:RESOURCES} ${OUT} ${SRC}'
def configure(conf):
"""
Find a F# compiler, set the variable FSC for the compiler and FS_NAME (mono or fsc)
"""
conf.find_program(['fsc.exe', 'fsharpc'], var='FSC')
conf.env.ASS_ST = '/r:%s'
conf.env.RES_ST = '/resource:%s'
conf.env.FS_NAME = 'fsc'
	if 'fsharpc' in str(conf.env.FSC).lower():
conf.env.FS_NAME = 'mono'
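# Usage sketch (a 'gen' name ending in '.dll' yields a library, anything else
# an executable, as derived in apply_fsc above):
#   def build(bld):
#       bld(features='fs', source='prog.fs', gen='prog.exe')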
| 1,909 | Python | .py | 51 | 35.372549 | 113 | 0.686721 | projecthamster/hamster | 1,069 | 250 | 128 | GPL-3.0 | 9/5/2024, 5:12:46 PM (Europe/Amsterdam) |
20,624 | compat15.py | projecthamster_hamster/waflib/extras/compat15.py | #! /usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2010 (ita)
"""
This file is provided to enable compatibility with waf 1.5
It was enabled by default in waf 1.6, but it is not used in waf 1.7
"""
import sys
from waflib import ConfigSet, Logs, Options, Scripting, Task, Build, Configure, Node, Runner, TaskGen, Utils, Errors, Context
# the following is to bring some compatibility with waf 1.5 "import waflib.Configure → import Configure"
sys.modules['Environment'] = ConfigSet
ConfigSet.Environment = ConfigSet.ConfigSet
sys.modules['Logs'] = Logs
sys.modules['Options'] = Options
sys.modules['Scripting'] = Scripting
sys.modules['Task'] = Task
sys.modules['Build'] = Build
sys.modules['Configure'] = Configure
sys.modules['Node'] = Node
sys.modules['Runner'] = Runner
sys.modules['TaskGen'] = TaskGen
sys.modules['Utils'] = Utils
sys.modules['Constants'] = Context
Context.SRCDIR = ''
Context.BLDDIR = ''
from waflib.Tools import c_preproc
sys.modules['preproc'] = c_preproc
from waflib.Tools import c_config
sys.modules['config_c'] = c_config
ConfigSet.ConfigSet.copy = ConfigSet.ConfigSet.derive
ConfigSet.ConfigSet.set_variant = Utils.nada
Utils.pproc = Utils.subprocess
Build.BuildContext.add_subdirs = Build.BuildContext.recurse
Build.BuildContext.new_task_gen = Build.BuildContext.__call__
Build.BuildContext.is_install = 0
Node.Node.relpath_gen = Node.Node.path_from
Utils.pproc = Utils.subprocess
Utils.get_term_cols = Logs.get_term_cols
def cmd_output(cmd, **kw):
silent = False
if 'silent' in kw:
silent = kw['silent']
del(kw['silent'])
if 'e' in kw:
tmp = kw['e']
del(kw['e'])
kw['env'] = tmp
kw['shell'] = isinstance(cmd, str)
kw['stdout'] = Utils.subprocess.PIPE
if silent:
kw['stderr'] = Utils.subprocess.PIPE
try:
p = Utils.subprocess.Popen(cmd, **kw)
output = p.communicate()[0]
except OSError as e:
raise ValueError(str(e))
if p.returncode:
if not silent:
msg = "command execution failed: %s -> %r" % (cmd, str(output))
raise ValueError(msg)
output = ''
return output
Utils.cmd_output = cmd_output
def name_to_obj(self, s, env=None):
if Logs.verbose:
Logs.warn('compat: change "name_to_obj(name, env)" by "get_tgen_by_name(name)"')
return self.get_tgen_by_name(s)
Build.BuildContext.name_to_obj = name_to_obj
def env_of_name(self, name):
try:
return self.all_envs[name]
except KeyError:
Logs.error('no such environment: '+name)
return None
Build.BuildContext.env_of_name = env_of_name
def set_env_name(self, name, env):
self.all_envs[name] = env
return env
Configure.ConfigurationContext.set_env_name = set_env_name
def retrieve(self, name, fromenv=None):
try:
env = self.all_envs[name]
except KeyError:
env = ConfigSet.ConfigSet()
self.prepare_env(env)
self.all_envs[name] = env
else:
if fromenv:
Logs.warn('The environment %s may have been configured already', name)
return env
Configure.ConfigurationContext.retrieve = retrieve
Configure.ConfigurationContext.sub_config = Configure.ConfigurationContext.recurse
Configure.ConfigurationContext.check_tool = Configure.ConfigurationContext.load
Configure.conftest = Configure.conf
Configure.ConfigurationError = Errors.ConfigurationError
Utils.WafError = Errors.WafError
Options.OptionsContext.sub_options = Options.OptionsContext.recurse
Options.OptionsContext.tool_options = Context.Context.load
Options.Handler = Options.OptionsContext
Task.simple_task_type = Task.task_type_from_func = Task.task_factory
Task.Task.classes = Task.classes
def setitem(self, key, value):
if key.startswith('CCFLAGS'):
key = key[1:]
self.table[key] = value
ConfigSet.ConfigSet.__setitem__ = setitem
@TaskGen.feature('d')
@TaskGen.before('apply_incpaths')
def old_importpaths(self):
if getattr(self, 'importpaths', []):
self.includes = self.importpaths
from waflib import Context
eld = Context.load_tool
def load_tool(*k, **kw):
ret = eld(*k, **kw)
if 'set_options' in ret.__dict__:
if Logs.verbose:
Logs.warn('compat: rename "set_options" to options')
ret.options = ret.set_options
if 'detect' in ret.__dict__:
if Logs.verbose:
Logs.warn('compat: rename "detect" to "configure"')
ret.configure = ret.detect
return ret
Context.load_tool = load_tool
def get_curdir(self):
return self.path.abspath()
Context.Context.curdir = property(get_curdir, Utils.nada)
def get_srcdir(self):
return self.srcnode.abspath()
Configure.ConfigurationContext.srcdir = property(get_srcdir, Utils.nada)
def get_blddir(self):
return self.bldnode.abspath()
Configure.ConfigurationContext.blddir = property(get_blddir, Utils.nada)
Configure.ConfigurationContext.check_message_1 = Configure.ConfigurationContext.start_msg
Configure.ConfigurationContext.check_message_2 = Configure.ConfigurationContext.end_msg
rev = Context.load_module
def load_module(path, encoding=None):
ret = rev(path, encoding)
if 'set_options' in ret.__dict__:
if Logs.verbose:
Logs.warn('compat: rename "set_options" to "options" (%r)', path)
ret.options = ret.set_options
if 'srcdir' in ret.__dict__:
if Logs.verbose:
Logs.warn('compat: rename "srcdir" to "top" (%r)', path)
ret.top = ret.srcdir
if 'blddir' in ret.__dict__:
if Logs.verbose:
Logs.warn('compat: rename "blddir" to "out" (%r)', path)
ret.out = ret.blddir
Utils.g_module = Context.g_module
Options.launch_dir = Context.launch_dir
return ret
Context.load_module = load_module
old_post = TaskGen.task_gen.post
def post(self):
self.features = self.to_list(self.features)
if 'cc' in self.features:
if Logs.verbose:
Logs.warn('compat: the feature cc does not exist anymore (use "c")')
self.features.remove('cc')
self.features.append('c')
if 'cstaticlib' in self.features:
if Logs.verbose:
Logs.warn('compat: the feature cstaticlib does not exist anymore (use "cstlib" or "cxxstlib")')
self.features.remove('cstaticlib')
self.features.append(('cxx' in self.features) and 'cxxstlib' or 'cstlib')
if getattr(self, 'ccflags', None):
if Logs.verbose:
Logs.warn('compat: "ccflags" was renamed to "cflags"')
self.cflags = self.ccflags
return old_post(self)
TaskGen.task_gen.post = post
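# With the renames above, a waf 1.5 style declaration keeps working (sketch):
#   bld.new_task_gen(features='cc cprogram', source='main.c', target='app',
#       ccflags=['-O2'])   # becomes features 'c cprogram' with cflags=['-O2']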
def waf_version(*k, **kw):
Logs.warn('wrong version (waf_version was removed in waf 1.6)')
Utils.waf_version = waf_version
import os
@TaskGen.feature('c', 'cxx', 'd')
@TaskGen.before('apply_incpaths', 'propagate_uselib_vars')
@TaskGen.after('apply_link', 'process_source')
def apply_uselib_local(self):
"""
process the uselib_local attribute
execute after apply_link because of the execution order set on 'link_task'
"""
env = self.env
from waflib.Tools.ccroot import stlink_task
# 1. the case of the libs defined in the project (visit ancestors first)
# the ancestors external libraries (uselib) will be prepended
self.uselib = self.to_list(getattr(self, 'uselib', []))
self.includes = self.to_list(getattr(self, 'includes', []))
names = self.to_list(getattr(self, 'uselib_local', []))
get = self.bld.get_tgen_by_name
seen = set()
seen_uselib = set()
tmp = Utils.deque(names) # consume a copy of the list of names
if tmp:
if Logs.verbose:
Logs.warn('compat: "uselib_local" is deprecated, replace by "use"')
while tmp:
lib_name = tmp.popleft()
# visit dependencies only once
if lib_name in seen:
continue
y = get(lib_name)
y.post()
seen.add(lib_name)
# object has ancestors to process (shared libraries): add them to the end of the list
if getattr(y, 'uselib_local', None):
for x in self.to_list(getattr(y, 'uselib_local', [])):
obj = get(x)
obj.post()
if getattr(obj, 'link_task', None):
if not isinstance(obj.link_task, stlink_task):
tmp.append(x)
# link task and flags
if getattr(y, 'link_task', None):
link_name = y.target[y.target.rfind(os.sep) + 1:]
if isinstance(y.link_task, stlink_task):
env.append_value('STLIB', [link_name])
else:
# some linkers can link against programs
env.append_value('LIB', [link_name])
# the order
self.link_task.set_run_after(y.link_task)
# for the recompilation
self.link_task.dep_nodes += y.link_task.outputs
# add the link path too
tmp_path = y.link_task.outputs[0].parent.bldpath()
if not tmp_path in env['LIBPATH']:
env.prepend_value('LIBPATH', [tmp_path])
# add ancestors uselib too - but only propagate those that have no staticlib defined
for v in self.to_list(getattr(y, 'uselib', [])):
if v not in seen_uselib:
seen_uselib.add(v)
if not env['STLIB_' + v]:
if not v in self.uselib:
self.uselib.insert(0, v)
# if the library task generator provides 'export_includes', add to the include path
# the export_includes must be a list of paths relative to the other library
if getattr(y, 'export_includes', None):
self.includes.extend(y.to_incnodes(y.export_includes))
@TaskGen.feature('cprogram', 'cxxprogram', 'cstlib', 'cxxstlib', 'cshlib', 'cxxshlib', 'dprogram', 'dstlib', 'dshlib')
@TaskGen.after('apply_link')
def apply_objdeps(self):
"add the .o files produced by some other object files in the same manner as uselib_local"
names = getattr(self, 'add_objects', [])
if not names:
return
names = self.to_list(names)
get = self.bld.get_tgen_by_name
seen = []
while names:
x = names[0]
# visit dependencies only once
if x in seen:
names = names[1:]
continue
# object does not exist ?
y = get(x)
# object has ancestors to process first ? update the list of names
if getattr(y, 'add_objects', None):
added = 0
lst = y.to_list(y.add_objects)
lst.reverse()
for u in lst:
if u in seen:
continue
added = 1
names = [u]+names
if added:
continue # list of names modified, loop
# safe to process the current object
y.post()
seen.append(x)
for t in getattr(y, 'compiled_tasks', []):
self.link_task.inputs.extend(t.outputs)
@TaskGen.after('apply_link')
def process_obj_files(self):
if not hasattr(self, 'obj_files'):
return
for x in self.obj_files:
node = self.path.find_resource(x)
self.link_task.inputs.append(node)
@TaskGen.taskgen_method
def add_obj_file(self, file):
"""Small example on how to link object files as if they were source
obj = bld.create_obj('cc')
obj.add_obj_file('foo.o')"""
if not hasattr(self, 'obj_files'):
self.obj_files = []
if not 'process_obj_files' in self.meths:
self.meths.append('process_obj_files')
self.obj_files.append(file)
old_define = Configure.ConfigurationContext.__dict__['define']
@Configure.conf
def define(self, key, val, quote=True, comment=''):
old_define(self, key, val, quote, comment)
if key.startswith('HAVE_'):
self.env[key] = 1
old_undefine = Configure.ConfigurationContext.__dict__['undefine']
@Configure.conf
def undefine(self, key, comment=''):
old_undefine(self, key, comment)
if key.startswith('HAVE_'):
self.env[key] = 0
# some people might want to use export_incdirs, but it was renamed
def set_incdirs(self, val):
Logs.warn('compat: change "export_incdirs" by "export_includes"')
self.export_includes = val
TaskGen.task_gen.export_incdirs = property(None, set_incdirs)
def install_dir(self, path):
if not path:
return []
destpath = Utils.subst_vars(path, self.env)
if self.is_install > 0:
Logs.info('* creating %s', destpath)
Utils.check_dir(destpath)
elif self.is_install < 0:
Logs.info('* removing %s', destpath)
try:
os.remove(destpath)
except OSError:
pass
Build.BuildContext.install_dir = install_dir
# before/after names
repl = {'apply_core': 'process_source',
'apply_lib_vars': 'process_source',
'apply_obj_vars': 'propagate_uselib_vars',
'exec_rule': 'process_rule'
}
def after(*k):
k = [repl.get(key, key) for key in k]
return TaskGen.after_method(*k)
def before(*k):
k = [repl.get(key, key) for key in k]
return TaskGen.before_method(*k)
TaskGen.before = before
TaskGen.after = after
| 11,813 | Python | .py | 339 | 32.348083 | 125 | 0.730166 | projecthamster/hamster | 1,069 | 250 | 128 | GPL-3.0 | 9/5/2024, 5:12:46 PM (Europe/Amsterdam) |
20,625 | pgicxx.py | projecthamster_hamster/waflib/extras/pgicxx.py | #!/usr/bin/env python
# encoding: utf-8
# Antoine Dechaume 2011
"""
Detect the PGI C++ compiler
"""
from waflib.Tools.compiler_cxx import cxx_compiler
cxx_compiler['linux'].append('pgicxx')
from waflib.extras import pgicc
def configure(conf):
conf.find_pgi_compiler('CXX', 'pgCC')
conf.find_ar()
conf.gxx_common_flags()
conf.cxx_load_tools()
conf.cxx_add_flags()
conf.link_add_flags()
| 395 | Python | .py | 16 | 23.0625 | 50 | 0.752 | projecthamster/hamster | 1,069 | 250 | 128 | GPL-3.0 | 9/5/2024, 5:12:46 PM (Europe/Amsterdam) |
20,626 | clang_cross_common.py | projecthamster_hamster/waflib/extras/clang_cross_common.py | #!/usr/bin/env python
# encoding: utf-8
# DragoonX6 2018
"""
Common routines for cross_clang.py and cross_clangxx.py
"""
from waflib.Configure import conf
import waflib.Context
def normalize_target_triple(target_triple):
	target_triple = target_triple[:-1] # drop the trailing newline from '-dumpmachine' output
normalized_triple = target_triple.replace('--', '-unknown-')
if normalized_triple.startswith('-'):
normalized_triple = 'unknown' + normalized_triple
if normalized_triple.endswith('-'):
normalized_triple += 'unknown'
# Normalize MinGW builds to *arch*-w64-mingw32
if normalized_triple.endswith('windows-gnu'):
normalized_triple = normalized_triple[:normalized_triple.index('-')] + '-w64-mingw32'
# Strip the vendor when doing msvc builds, since it's unused anyway.
if normalized_triple.endswith('windows-msvc'):
normalized_triple = normalized_triple[:normalized_triple.index('-')] + '-windows-msvc'
return normalized_triple.replace('-', '_')
@conf
def clang_modifier_msvc(conf):
	"""
	Really basic setup to use clang in msvc mode.
	We actually don't really want to do a lot; even though clang is msvc compatible
	in this mode, that doesn't mean we're actually using msvc.
	It's probably best to leave it to the user: we can assume msvc mode if the user
	uses the clang-cl frontend, but this module only concerns itself with the gcc-like frontend.
	"""
v = conf.env
v.cprogram_PATTERN = '%s.exe'
v.cshlib_PATTERN = '%s.dll'
v.implib_PATTERN = '%s.lib'
v.IMPLIB_ST = '-Wl,-IMPLIB:%s'
v.SHLIB_MARKER = []
v.CFLAGS_cshlib = []
v.LINKFLAGS_cshlib = ['-Wl,-DLL']
v.cstlib_PATTERN = '%s.lib'
v.STLIB_MARKER = []
del(v.AR)
conf.find_program(['llvm-lib', 'lib'], var='AR')
v.ARFLAGS = ['-nologo']
v.AR_TGT_F = ['-out:']
# Default to the linker supplied with llvm instead of link.exe or ld
v.LINK_CC = v.CC + ['-fuse-ld=lld', '-nostdlib']
v.CCLNK_TGT_F = ['-o']
v.def_PATTERN = '-Wl,-def:%s'
v.LINKFLAGS = []
v.LIB_ST = '-l%s'
v.LIBPATH_ST = '-Wl,-LIBPATH:%s'
v.STLIB_ST = '-l%s'
v.STLIBPATH_ST = '-Wl,-LIBPATH:%s'
CFLAGS_CRT_COMMON = [
'-Xclang', '--dependent-lib=oldnames',
'-Xclang', '-fno-rtti-data',
'-D_MT'
]
v.CFLAGS_CRT_MULTITHREADED = CFLAGS_CRT_COMMON + [
'-Xclang', '-flto-visibility-public-std',
'-Xclang', '--dependent-lib=libcmt',
]
v.CXXFLAGS_CRT_MULTITHREADED = v.CFLAGS_CRT_MULTITHREADED
v.CFLAGS_CRT_MULTITHREADED_DBG = CFLAGS_CRT_COMMON + [
'-D_DEBUG',
'-Xclang', '-flto-visibility-public-std',
'-Xclang', '--dependent-lib=libcmtd',
]
v.CXXFLAGS_CRT_MULTITHREADED_DBG = v.CFLAGS_CRT_MULTITHREADED_DBG
v.CFLAGS_CRT_MULTITHREADED_DLL = CFLAGS_CRT_COMMON + [
'-D_DLL',
'-Xclang', '--dependent-lib=msvcrt'
]
v.CXXFLAGS_CRT_MULTITHREADED_DLL = v.CFLAGS_CRT_MULTITHREADED_DLL
v.CFLAGS_CRT_MULTITHREADED_DLL_DBG = CFLAGS_CRT_COMMON + [
'-D_DLL',
'-D_DEBUG',
'-Xclang', '--dependent-lib=msvcrtd',
]
v.CXXFLAGS_CRT_MULTITHREADED_DLL_DBG = v.CFLAGS_CRT_MULTITHREADED_DLL_DBG
@conf
def clang_modifier_target_triple(conf, cpp=False):
compiler = conf.env.CXX if cpp else conf.env.CC
output = conf.cmd_and_log(compiler + ['-dumpmachine'], output=waflib.Context.STDOUT)
modifier = ('clangxx' if cpp else 'clang') + '_modifier_'
clang_modifier_func = getattr(conf, modifier + normalize_target_triple(output), None)
if clang_modifier_func:
clang_modifier_func()
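# Sketch: a project can add its own per-triple tweaks by registering a conf
# method whose name embeds the normalized triple (hypothetical example):
#   from waflib.Configure import conf
#   @conf
#   def clang_modifier_aarch64_unknown_linux_gnu(conf):
#       conf.env.append_value('LINKFLAGS', ['-fuse-ld=lld'])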
| 3,426 | Python | .py | 90 | 35.755556 | 93 | 0.690311 | projecthamster/hamster | 1,069 | 250 | 128 | GPL-3.0 | 9/5/2024, 5:12:46 PM (Europe/Amsterdam) |
20,627 | distnet.py | projecthamster_hamster/waflib/extras/distnet.py | #! /usr/bin/env python
# encoding: utf-8
"""
waf-powered distributed network builds, with a network cache.
Caching files from a server has advantages over a NFS/Samba shared folder:
- builds are much faster because they use local files
- builds just continue to work in case of a network glitch
- permissions are much simpler to manage
"""
import os, urllib, tarfile, re, shutil, tempfile, sys
from collections import OrderedDict
from waflib import Context, Utils, Logs
try:
from urllib.parse import urlencode
except ImportError:
urlencode = urllib.urlencode
def safe_urlencode(data):
x = urlencode(data)
try:
x = x.encode('utf-8')
except Exception:
pass
return x
try:
from urllib.error import URLError
except ImportError:
from urllib2 import URLError
try:
from urllib.request import Request, urlopen
except ImportError:
from urllib2 import Request, urlopen
DISTNETCACHE = os.environ.get('DISTNETCACHE', '/tmp/distnetcache')
DISTNETSERVER = os.environ.get('DISTNETSERVER', 'http://localhost:8000/cgi-bin/')
TARFORMAT = 'w:bz2'
TIMEOUT = 60
REQUIRES = 'requires.txt'
re_com = re.compile(r'\s*#.*', re.M)
def total_version_order(num):
lst = num.split('.')
template = '%10s' * len(lst)
ret = template % tuple(lst)
return ret
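# e.g. total_version_order('1.10') -> '         1        10', which a plain
# string sort ranks above total_version_order('1.9') -> '         1         9';
# a raw compare of the strings '1.10' and '1.9' would order them the wrong way.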
def get_distnet_cache():
return getattr(Context.g_module, 'DISTNETCACHE', DISTNETCACHE)
def get_server_url():
return getattr(Context.g_module, 'DISTNETSERVER', DISTNETSERVER)
def get_download_url():
return '%s/download.py' % get_server_url()
def get_upload_url():
return '%s/upload.py' % get_server_url()
def get_resolve_url():
return '%s/resolve.py' % get_server_url()
def send_package_name():
out = getattr(Context.g_module, 'out', 'build')
pkgfile = '%s/package_to_upload.tarfile' % out
return pkgfile
class package(Context.Context):
fun = 'package'
cmd = 'package'
def execute(self):
try:
files = self.files
except AttributeError:
files = self.files = []
Context.Context.execute(self)
pkgfile = send_package_name()
if not pkgfile in files:
if not REQUIRES in files:
files.append(REQUIRES)
self.make_tarfile(pkgfile, files, add_to_package=False)
def make_tarfile(self, filename, files, **kw):
if kw.get('add_to_package', True):
self.files.append(filename)
with tarfile.open(filename, TARFORMAT) as tar:
endname = os.path.split(filename)[-1]
endname = endname.split('.')[0] + '/'
for x in files:
tarinfo = tar.gettarinfo(x, x)
tarinfo.uid = tarinfo.gid = 0
tarinfo.uname = tarinfo.gname = 'root'
tarinfo.size = os.stat(x).st_size
if os.environ.get('SOURCE_DATE_EPOCH'):
tarinfo.mtime = int(os.environ.get('SOURCE_DATE_EPOCH'))
# TODO - more archive creation options?
if kw.get('bare', True):
tarinfo.name = os.path.split(x)[1]
else:
tarinfo.name = endname + x # todo, if tuple, then..
Logs.debug('distnet: adding %r to %s', tarinfo.name, filename)
with open(x, 'rb') as f:
tar.addfile(tarinfo, f)
Logs.info('Created %s', filename)
class publish(Context.Context):
fun = 'publish'
cmd = 'publish'
def execute(self):
if hasattr(Context.g_module, 'publish'):
Context.Context.execute(self)
mod = Context.g_module
rfile = getattr(self, 'rfile', send_package_name())
if not os.path.isfile(rfile):
self.fatal('Create the release file with "waf release" first! %r' % rfile)
fdata = Utils.readf(rfile, m='rb')
data = safe_urlencode([('pkgdata', fdata), ('pkgname', mod.APPNAME), ('pkgver', mod.VERSION)])
req = Request(get_upload_url(), data)
response = urlopen(req, timeout=TIMEOUT)
data = response.read().strip()
if sys.hexversion>0x300000f:
data = data.decode('utf-8')
if data != 'ok':
self.fatal('Could not publish the package %r' % data)
class constraint(object):
def __init__(self, line=''):
self.required_line = line
self.info = []
line = line.strip()
if not line:
return
lst = line.split(',')
if lst:
self.pkgname = lst[0]
self.required_version = lst[1]
for k in lst:
a, b, c = k.partition('=')
if a and c:
self.info.append((a, c))
def __str__(self):
buf = []
buf.append(self.pkgname)
buf.append(self.required_version)
for k in self.info:
buf.append('%s=%s' % k)
return ','.join(buf)
def __repr__(self):
return "requires %s-%s" % (self.pkgname, self.required_version)
def human_display(self, pkgname, pkgver):
return '%s-%s requires %s-%s' % (pkgname, pkgver, self.pkgname, self.required_version)
def why(self):
ret = []
for x in self.info:
if x[0] == 'reason':
ret.append(x[1])
return ret
def add_reason(self, reason):
self.info.append(('reason', reason))
def parse_constraints(text):
assert(text is not None)
constraints = []
text = re.sub(re_com, '', text)
lines = text.splitlines()
for line in lines:
line = line.strip()
if not line:
continue
constraints.append(constraint(line))
return constraints
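# requires.txt sketch, as accepted by the parser above (comma-separated fields:
# package name, version pattern for apply_constraint, optional key=value pairs;
# '#' starts a comment):
#   # build dependencies
#   somelib,1.0*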
def list_package_versions(cachedir, pkgname):
pkgdir = os.path.join(cachedir, pkgname)
try:
versions = os.listdir(pkgdir)
except OSError:
return []
versions.sort(key=total_version_order)
versions.reverse()
return versions
class package_reader(Context.Context):
cmd = 'solver'
fun = 'solver'
def __init__(self, **kw):
Context.Context.__init__(self, **kw)
self.myproject = getattr(Context.g_module, 'APPNAME', 'project')
self.myversion = getattr(Context.g_module, 'VERSION', '1.0')
self.cache_constraints = {}
self.constraints = []
def compute_dependencies(self, filename=REQUIRES):
text = Utils.readf(filename)
data = safe_urlencode([('text', text)])
if '--offline' in sys.argv:
self.constraints = self.local_resolve(text)
else:
req = Request(get_resolve_url(), data)
try:
response = urlopen(req, timeout=TIMEOUT)
except URLError as e:
Logs.warn('The package server is down! %r', e)
self.constraints = self.local_resolve(text)
else:
ret = response.read()
try:
ret = ret.decode('utf-8')
except Exception:
pass
self.trace(ret)
self.constraints = parse_constraints(ret)
self.check_errors()
def check_errors(self):
errors = False
for c in self.constraints:
if not c.required_version:
errors = True
reasons = c.why()
if len(reasons) == 1:
Logs.error('%s but no matching package could be found in this repository', reasons[0])
else:
Logs.error('Conflicts on package %r:', c.pkgname)
for r in reasons:
Logs.error(' %s', r)
if errors:
self.fatal('The package requirements cannot be satisfied!')
def load_constraints(self, pkgname, pkgver, requires=REQUIRES):
try:
return self.cache_constraints[(pkgname, pkgver)]
except KeyError:
text = Utils.readf(os.path.join(get_distnet_cache(), pkgname, pkgver, requires))
ret = parse_constraints(text)
self.cache_constraints[(pkgname, pkgver)] = ret
return ret
def apply_constraint(self, domain, constraint):
vname = constraint.required_version.replace('*', '.*')
rev = re.compile(vname, re.M)
ret = [x for x in domain if rev.match(x)]
return ret
def trace(self, *k):
if getattr(self, 'debug', None):
Logs.error(*k)
def solve(self, packages_to_versions={}, packages_to_constraints={}, pkgname='', pkgver='', todo=[], done=[]):
# breadth first search
n_packages_to_versions = dict(packages_to_versions)
n_packages_to_constraints = dict(packages_to_constraints)
self.trace("calling solve with %r %r %r" % (packages_to_versions, todo, done))
done = done + [pkgname]
constraints = self.load_constraints(pkgname, pkgver)
self.trace("constraints %r" % constraints)
for k in constraints:
try:
domain = n_packages_to_versions[k.pkgname]
except KeyError:
domain = list_package_versions(get_distnet_cache(), k.pkgname)
self.trace("constraints?")
if not k.pkgname in done:
todo = todo + [k.pkgname]
self.trace("domain before %s -> %s, %r" % (pkgname, k.pkgname, domain))
# apply the constraint
domain = self.apply_constraint(domain, k)
self.trace("domain after %s -> %s, %r" % (pkgname, k.pkgname, domain))
n_packages_to_versions[k.pkgname] = domain
# then store the constraint applied
constraints = list(packages_to_constraints.get(k.pkgname, []))
constraints.append((pkgname, pkgver, k))
n_packages_to_constraints[k.pkgname] = constraints
if not domain:
self.trace("no domain while processing constraint %r from %r %r" % (domain, pkgname, pkgver))
return (n_packages_to_versions, n_packages_to_constraints)
# next package on the todo list
if not todo:
return (n_packages_to_versions, n_packages_to_constraints)
n_pkgname = todo[0]
n_pkgver = n_packages_to_versions[n_pkgname][0]
tmp = dict(n_packages_to_versions)
tmp[n_pkgname] = [n_pkgver]
self.trace("fixed point %s" % n_pkgname)
return self.solve(tmp, n_packages_to_constraints, n_pkgname, n_pkgver, todo[1:], done)
def get_results(self):
return '\n'.join([str(c) for c in self.constraints])
def solution_to_constraints(self, versions, constraints):
solution = []
for p in versions:
c = constraint()
solution.append(c)
c.pkgname = p
if versions[p]:
c.required_version = versions[p][0]
else:
c.required_version = ''
			for (from_pkgname, from_pkgver, c2) in constraints.get(p, []):
c.add_reason(c2.human_display(from_pkgname, from_pkgver))
return solution
def local_resolve(self, text):
self.cache_constraints[(self.myproject, self.myversion)] = parse_constraints(text)
p2v = OrderedDict({self.myproject: [self.myversion]})
(versions, constraints) = self.solve(p2v, {}, self.myproject, self.myversion, [])
return self.solution_to_constraints(versions, constraints)
def download_to_file(self, pkgname, pkgver, subdir, tmp):
data = safe_urlencode([('pkgname', pkgname), ('pkgver', pkgver), ('pkgfile', subdir)])
req = urlopen(get_download_url(), data, timeout=TIMEOUT)
with open(tmp, 'wb') as f:
while True:
buf = req.read(8192)
if not buf:
break
f.write(buf)
def extract_tar(self, subdir, pkgdir, tmpfile):
with tarfile.open(tmpfile) as f:
temp = tempfile.mkdtemp(dir=pkgdir)
try:
f.extractall(temp)
os.rename(temp, os.path.join(pkgdir, subdir))
finally:
try:
shutil.rmtree(temp)
except Exception:
pass
def get_pkg_dir(self, pkgname, pkgver, subdir):
pkgdir = os.path.join(get_distnet_cache(), pkgname, pkgver)
if not os.path.isdir(pkgdir):
os.makedirs(pkgdir)
target = os.path.join(pkgdir, subdir)
if os.path.exists(target):
return target
(fd, tmp) = tempfile.mkstemp(dir=pkgdir)
try:
os.close(fd)
self.download_to_file(pkgname, pkgver, subdir, tmp)
if subdir == REQUIRES:
os.rename(tmp, target)
else:
self.extract_tar(subdir, pkgdir, tmp)
finally:
try:
os.remove(tmp)
except OSError:
pass
return target
def __iter__(self):
if not self.constraints:
self.compute_dependencies()
for x in self.constraints:
if x.pkgname == self.myproject:
continue
yield x
def execute(self):
self.compute_dependencies()
packages = package_reader()
def load_tools(ctx, extra):
global packages
for c in packages:
packages.get_pkg_dir(c.pkgname, c.required_version, extra)
noarchdir = packages.get_pkg_dir(c.pkgname, c.required_version, 'noarch')
for x in os.listdir(noarchdir):
if x.startswith('waf_') and x.endswith('.py'):
ctx.load([x.rstrip('.py')], tooldir=[noarchdir])
def options(opt):
opt.add_option('--offline', action='store_true')
packages.execute()
load_tools(opt, REQUIRES)
def configure(conf):
load_tools(conf, conf.variant)
def build(bld):
load_tools(bld, bld.variant)
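# Typical flow (sketch): the provider runs 'waf package publish' to upload the
# tarball built by the package context above; consumers list constraints in
# requires.txt and run 'waf configure build' (add --offline to resolve locally).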
| 11,695 | Python | .py | 349 | 30.131805 | 111 | 0.701589 | projecthamster/hamster | 1,069 | 250 | 128 | GPL-3.0 | 9/5/2024, 5:12:46 PM (Europe/Amsterdam) |
20,628 | javatest.py | projecthamster_hamster/waflib/extras/javatest.py | #! /usr/bin/env python
# encoding: utf-8
# Federico Pellegrin, 2019 (fedepell)
"""
Provides Java Unit test support using :py:class:`waflib.Tools.waf_unit_test.utest`
task via the **javatest** feature.
This makes it possible to run unit tests and have them integrated into the
standard waf unit test environment. It has been tested with TestNG and JUnit,
but should be easily extendable to other frameworks given the flexibility of
ut_str provided by the standard waf unit test environment.
This extra also takes care of managing non-Java dependencies (i.e. C/C++
libraries using JNI, or Python modules via JEP) and of setting up the
environment needed to run them.
Example usage:
def options(opt):
opt.load('java waf_unit_test javatest')
def configure(conf):
conf.load('java javatest')
def build(bld):
[ ... mainprog is built here ... ]
bld(features = 'javac javatest',
srcdir = 'test/',
outdir = 'test',
sourcepath = ['test'],
classpath = [ 'src' ],
basedir = 'test',
use = ['JAVATEST', 'mainprog'], # mainprog is the program being tested in src/
ut_str = 'java -cp ${CLASSPATH} ${JTRUNNER} ${SRC}',
jtest_source = bld.path.ant_glob('test/*.xml'),
)
At command line the CLASSPATH where to find the testing environment and the
test runner (default TestNG) that will then be seen in the environment as
CLASSPATH_JAVATEST (then used for use) and JTRUNNER and can be used for
dependencies and ut_str generation.
Example configure for TestNG:
waf configure --jtpath=/tmp/testng-6.12.jar:/tmp/jcommander-1.71.jar --jtrunner=org.testng.TestNG
or as default runner is TestNG:
waf configure --jtpath=/tmp/testng-6.12.jar:/tmp/jcommander-1.71.jar
Example configure for JUnit:
waf configure --jtpath=/tmp/junit.jar --jtrunner=org.junit.runner.JUnitCore
The runner class presence on the system is checked for at configuration stage.
"""
import os
from waflib import Task, TaskGen, Options, Errors, Utils, Logs
from waflib.Tools import ccroot
JAR_RE = '**/*'
def _process_use_rec(self, name):
"""
	Recursively process ``use`` for the task generator named ``name``.
Used by javatest_process_use.
"""
if name in self.javatest_use_not or name in self.javatest_use_seen:
return
try:
tg = self.bld.get_tgen_by_name(name)
except Errors.WafError:
self.javatest_use_not.add(name)
return
self.javatest_use_seen.append(name)
tg.post()
for n in self.to_list(getattr(tg, 'use', [])):
_process_use_rec(self, n)
@TaskGen.feature('javatest')
@TaskGen.after_method('process_source', 'apply_link', 'use_javac_files')
def javatest_process_use(self):
"""
Process the ``use`` attribute which contains a list of task generator names and store
paths that later is used to populate the unit test runtime environment.
"""
self.javatest_use_not = set()
self.javatest_use_seen = []
self.javatest_libpaths = [] # strings or Nodes
self.javatest_pypaths = [] # strings or Nodes
self.javatest_dep_nodes = []
names = self.to_list(getattr(self, 'use', []))
for name in names:
_process_use_rec(self, name)
def extend_unique(lst, varlst):
ext = []
for x in varlst:
if x not in lst:
ext.append(x)
lst.extend(ext)
# Collect type specific info needed to construct a valid runtime environment
# for the test.
for name in self.javatest_use_seen:
tg = self.bld.get_tgen_by_name(name)
# Python-Java embedding crosstools such as JEP
if 'py' in tg.features:
# Python dependencies are added to PYTHONPATH
pypath = getattr(tg, 'install_from', tg.path)
if 'buildcopy' in tg.features:
# Since buildcopy is used we assume that PYTHONPATH in build should be used,
# not source
extend_unique(self.javatest_pypaths, [pypath.get_bld().abspath()])
# Add buildcopy output nodes to dependencies
extend_unique(self.javatest_dep_nodes, [o for task in getattr(tg, 'tasks', []) for o in getattr(task, 'outputs', [])])
else:
# If buildcopy is not used, depend on sources instead
extend_unique(self.javatest_dep_nodes, tg.source)
extend_unique(self.javatest_pypaths, [pypath.abspath()])
if getattr(tg, 'link_task', None):
			# For tasks with a link_task (C, C++, D, etc.) include their library paths:
if not isinstance(tg.link_task, ccroot.stlink_task):
extend_unique(self.javatest_dep_nodes, tg.link_task.outputs)
extend_unique(self.javatest_libpaths, tg.link_task.env.LIBPATH)
if 'pyext' in tg.features:
# If the taskgen is extending Python we also want to add the interpreter libpath.
extend_unique(self.javatest_libpaths, tg.link_task.env.LIBPATH_PYEXT)
else:
# Only add to libpath if the link task is not a Python extension
extend_unique(self.javatest_libpaths, [tg.link_task.outputs[0].parent.abspath()])
if 'javac' in tg.features or 'jar' in tg.features:
if hasattr(tg, 'jar_task'):
# For Java JAR tasks depend on generated JAR
extend_unique(self.javatest_dep_nodes, tg.jar_task.outputs)
else:
# For Java non-JAR ones we need to glob generated files (Java output files are not predictable)
if hasattr(tg, 'outdir'):
base_node = tg.outdir
else:
base_node = tg.path.get_bld()
self.javatest_dep_nodes.extend([dx for dx in base_node.ant_glob(JAR_RE, remove=False, quiet=True)])
@TaskGen.feature('javatest')
@TaskGen.after_method('apply_java', 'use_javac_files', 'set_classpath', 'javatest_process_use')
def make_javatest(self):
"""
Creates a ``utest`` task with a populated environment for Java Unit test execution
"""
tsk = self.create_task('utest')
tsk.set_run_after(self.javac_task)
# Dependencies from recursive use analysis
tsk.dep_nodes.extend(self.javatest_dep_nodes)
# Put test input files as waf_unit_test relies on that for some prints and log generation
# If jtest_source is there, this is specially useful for passing XML for TestNG
# that contain test specification, use that as inputs, otherwise test sources
if getattr(self, 'jtest_source', None):
tsk.inputs = self.to_nodes(self.jtest_source)
else:
if self.javac_task.srcdir[0].exists():
tsk.inputs = self.javac_task.srcdir[0].ant_glob('**/*.java', remove=False)
if getattr(self, 'ut_str', None):
self.ut_run, lst = Task.compile_fun(self.ut_str, shell=getattr(self, 'ut_shell', False))
tsk.vars = lst + tsk.vars
if getattr(self, 'ut_cwd', None):
if isinstance(self.ut_cwd, str):
# we want a Node instance
if os.path.isabs(self.ut_cwd):
self.ut_cwd = self.bld.root.make_node(self.ut_cwd)
else:
self.ut_cwd = self.path.make_node(self.ut_cwd)
else:
self.ut_cwd = self.bld.bldnode
# Get parent CLASSPATH and add output dir of test, we run from wscript dir
# We have to change it from list to the standard java -cp format (: separated)
tsk.env.CLASSPATH = ':'.join(self.env.CLASSPATH) + ':' + self.outdir.abspath()
if not self.ut_cwd.exists():
self.ut_cwd.mkdir()
if not hasattr(self, 'ut_env'):
self.ut_env = dict(os.environ)
def add_paths(var, lst):
# Add list of paths to a variable, lst can contain strings or nodes
lst = [ str(n) for n in lst ]
Logs.debug("ut: %s: Adding paths %s=%s", self, var, lst)
self.ut_env[var] = os.pathsep.join(lst) + os.pathsep + self.ut_env.get(var, '')
add_paths('PYTHONPATH', self.javatest_pypaths)
if Utils.is_win32:
add_paths('PATH', self.javatest_libpaths)
elif Utils.unversioned_sys_platform() == 'darwin':
add_paths('DYLD_LIBRARY_PATH', self.javatest_libpaths)
add_paths('LD_LIBRARY_PATH', self.javatest_libpaths)
else:
add_paths('LD_LIBRARY_PATH', self.javatest_libpaths)
def configure(ctx):
cp = ctx.env.CLASSPATH or '.'
if getattr(Options.options, 'jtpath', None):
ctx.env.CLASSPATH_JAVATEST = getattr(Options.options, 'jtpath').split(':')
cp += ':' + getattr(Options.options, 'jtpath')
if getattr(Options.options, 'jtrunner', None):
ctx.env.JTRUNNER = getattr(Options.options, 'jtrunner')
if ctx.check_java_class(ctx.env.JTRUNNER, with_classpath=cp):
ctx.fatal('Could not run test class %r' % ctx.env.JTRUNNER)
def options(opt):
opt.add_option('--jtpath', action='store', default='', dest='jtpath',
help='Path to jar(s) needed for javatest execution, colon separated, if not in the system CLASSPATH')
opt.add_option('--jtrunner', action='store', default='org.testng.TestNG', dest='jtrunner',
help='Class to run javatest test [default: org.testng.TestNG]')
| 8,367 | Python | .py | 187 | 41.759358 | 122 | 0.726322 | projecthamster/hamster | 1,069 | 250 | 128 | GPL-3.0 | 9/5/2024, 5:12:46 PM (Europe/Amsterdam) |
20,629 | genpybind.py | projecthamster_hamster/waflib/extras/genpybind.py | import os
import pipes
import subprocess
import sys
from waflib import Logs, Task, Context
from waflib.Tools.c_preproc import scan as scan_impl
# ^-- Note: waflib.extras.gccdeps.scan does not work for us,
# due to its current implementation:
# The -MD flag is injected into the {C,CXX}FLAGS environment variable and
# dependencies are read out in a separate step after compiling by reading
# the .d file saved alongside the object file.
# As the genpybind task refers to a header file that is never compiled itself,
# gccdeps will not be able to extract the list of dependencies.
from waflib.TaskGen import feature, before_method
def join_args(args):
return " ".join(pipes.quote(arg) for arg in args)
def configure(cfg):
cfg.load("compiler_cxx")
cfg.load("python")
cfg.check_python_version(minver=(2, 7))
if not cfg.env.LLVM_CONFIG:
cfg.find_program("llvm-config", var="LLVM_CONFIG")
if not cfg.env.GENPYBIND:
cfg.find_program("genpybind", var="GENPYBIND")
	# find the clang resource dir for builtin headers
cfg.env.GENPYBIND_RESOURCE_DIR = os.path.join(
cfg.cmd_and_log(cfg.env.LLVM_CONFIG + ["--libdir"]).strip(),
"clang",
cfg.cmd_and_log(cfg.env.LLVM_CONFIG + ["--version"]).strip())
if os.path.exists(cfg.env.GENPYBIND_RESOURCE_DIR):
cfg.msg("Checking clang resource dir", cfg.env.GENPYBIND_RESOURCE_DIR)
else:
cfg.fatal("Clang resource dir not found")
@feature("genpybind")
@before_method("process_source")
def generate_genpybind_source(self):
"""
Run genpybind on the headers provided in `source` and compile/link the
generated code instead. This works by generating the code on the fly and
swapping the source node before `process_source` is run.
"""
# name of module defaults to name of target
module = getattr(self, "module", self.target)
# create temporary source file in build directory to hold generated code
out = "genpybind-%s.%d.cpp" % (module, self.idx)
out = self.path.get_bld().find_or_declare(out)
task = self.create_task("genpybind", self.to_nodes(self.source), out)
# used to detect whether CFLAGS or CXXFLAGS should be passed to genpybind
task.features = self.features
task.module = module
# can be used to select definitions to include in the current module
# (when header files are shared by more than one module)
task.genpybind_tags = self.to_list(getattr(self, "genpybind_tags", []))
# additional include directories
task.includes = self.to_list(getattr(self, "includes", []))
task.genpybind = self.env.GENPYBIND
# Tell waf to compile/link the generated code instead of the headers
# originally passed-in via the `source` parameter. (see `process_source`)
self.source = [out]
class genpybind(Task.Task): # pylint: disable=invalid-name
"""
Runs genpybind on headers provided as input to this task.
Generated code will be written to the first (and only) output node.
"""
quiet = True
color = "PINK"
scan = scan_impl
@staticmethod
def keyword():
return "Analyzing"
def run(self):
if not self.inputs:
return
args = self.find_genpybind() + self._arguments(
resource_dir=self.env.GENPYBIND_RESOURCE_DIR)
output = self.run_genpybind(args)
# For debugging / log output
pasteable_command = join_args(args)
# write generated code to file in build directory
# (will be compiled during process_source stage)
(output_node,) = self.outputs
output_node.write("// {}\n{}\n".format(
pasteable_command.replace("\n", "\n// "), output))
def find_genpybind(self):
return self.genpybind
def run_genpybind(self, args):
bld = self.generator.bld
kwargs = dict(cwd=bld.variant_dir)
if hasattr(bld, "log_command"):
bld.log_command(args, kwargs)
else:
Logs.debug("runner: {!r}".format(args))
proc = subprocess.Popen(
args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, **kwargs)
stdout, stderr = proc.communicate()
if not isinstance(stdout, str):
stdout = stdout.decode(sys.stdout.encoding, errors="replace")
if not isinstance(stderr, str):
stderr = stderr.decode(sys.stderr.encoding, errors="replace")
if proc.returncode != 0:
bld.fatal(
"genpybind returned {code} during the following call:"
"\n{command}\n\n{stdout}\n\n{stderr}".format(
code=proc.returncode,
command=join_args(args),
stdout=stdout,
stderr=stderr,
))
if stderr.strip():
Logs.debug("non-fatal warnings during genpybind run:\n{}".format(stderr))
return stdout
def _include_paths(self):
return self.generator.to_incnodes(self.includes + self.env.INCLUDES)
def _inputs_as_relative_includes(self):
include_paths = self._include_paths()
relative_includes = []
for node in self.inputs:
for inc in include_paths:
if node.is_child_of(inc):
relative_includes.append(node.path_from(inc))
break
else:
self.generator.bld.fatal("could not resolve {}".format(node))
return relative_includes
def _arguments(self, genpybind_parse=None, resource_dir=None):
args = []
relative_includes = self._inputs_as_relative_includes()
is_cxx = "cxx" in self.features
# options for genpybind
args.extend(["--genpybind-module", self.module])
if self.genpybind_tags:
args.extend(["--genpybind-tag"] + self.genpybind_tags)
if relative_includes:
args.extend(["--genpybind-include"] + relative_includes)
if genpybind_parse:
args.extend(["--genpybind-parse", genpybind_parse])
args.append("--")
# headers to be processed by genpybind
args.extend(node.abspath() for node in self.inputs)
args.append("--")
# options for clang/genpybind-parse
args.append("-D__GENPYBIND__")
args.append("-xc++" if is_cxx else "-xc")
has_std_argument = False
for flag in self.env["CXXFLAGS" if is_cxx else "CFLAGS"]:
flag = flag.replace("-std=gnu", "-std=c")
if flag.startswith("-std=c"):
has_std_argument = True
args.append(flag)
if not has_std_argument:
args.append("-std=c++14")
args.extend("-I{}".format(n.abspath()) for n in self._include_paths())
args.extend("-D{}".format(p) for p in self.env.DEFINES)
# point to clang resource dir, if specified
if resource_dir:
args.append("-resource-dir={}".format(resource_dir))
return args
# === projecthamster_hamster/waflib/extras/scala.py ===
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2010 (ita)
"""
Scala support
scalac outputs files a bit where it wants to
"""
import os
from waflib import Task, Utils, Node
from waflib.TaskGen import feature, before_method, after_method
from waflib.Tools import ccroot
ccroot.USELIB_VARS['scalac'] = set(['CLASSPATH', 'SCALACFLAGS'])
from waflib.Tools import javaw
@feature('scalac')
@before_method('process_source')
def apply_scalac(self):
Utils.def_attrs(self, jarname='', classpath='',
sourcepath='.', srcdir='.',
jar_mf_attributes={}, jar_mf_classpath=[])
outdir = getattr(self, 'outdir', None)
if outdir:
if not isinstance(outdir, Node.Node):
outdir = self.path.get_bld().make_node(self.outdir)
else:
outdir = self.path.get_bld()
outdir.mkdir()
self.env['OUTDIR'] = outdir.abspath()
self.scalac_task = tsk = self.create_task('scalac')
tmp = []
srcdir = getattr(self, 'srcdir', '')
if isinstance(srcdir, Node.Node):
srcdir = [srcdir]
for x in Utils.to_list(srcdir):
if isinstance(x, Node.Node):
y = x
else:
y = self.path.find_dir(x)
if not y:
self.bld.fatal('Could not find the folder %s from %s' % (x, self.path))
tmp.append(y)
tsk.srcdir = tmp
# reuse some code
feature('scalac')(javaw.use_javac_files)
after_method('apply_scalac')(javaw.use_javac_files)
feature('scalac')(javaw.set_classpath)
after_method('apply_scalac', 'use_scalac_files')(javaw.set_classpath)
SOURCE_RE = '**/*.scala'
class scalac(javaw.javac):
color = 'GREEN'
vars = ['CLASSPATH', 'SCALACFLAGS', 'SCALAC', 'OUTDIR']
def runnable_status(self):
"""
Wait for dependent tasks to be complete, then read the file system to find the input nodes.
"""
for t in self.run_after:
if not t.hasrun:
return Task.ASK_LATER
if not self.inputs:
global SOURCE_RE
self.inputs = []
for x in self.srcdir:
self.inputs.extend(x.ant_glob(SOURCE_RE, remove=False))
return super(javaw.javac, self).runnable_status()
def run(self):
"""
Execute the scalac compiler
"""
env = self.env
gen = self.generator
bld = gen.bld
wd = bld.bldnode.abspath()
def to_list(xx):
if isinstance(xx, str):
return [xx]
return xx
self.last_cmd = lst = []
lst.extend(to_list(env['SCALAC']))
lst.extend(['-classpath'])
lst.extend(to_list(env['CLASSPATH']))
lst.extend(['-d'])
lst.extend(to_list(env['OUTDIR']))
lst.extend(to_list(env['SCALACFLAGS']))
lst.extend([a.abspath() for a in self.inputs])
lst = [x for x in lst if x]
try:
self.out = self.generator.bld.cmd_and_log(lst, cwd=wd, env=env.env or None, output=0, quiet=0)[1]
except Exception:
self.generator.bld.cmd_and_log(lst, cwd=wd, env=env.env or None)
def configure(self):
"""
Detect the scalac program
"""
# If SCALA_HOME is set, we prepend it to the path list
java_path = self.environ['PATH'].split(os.pathsep)
v = self.env
if 'SCALA_HOME' in self.environ:
java_path = [os.path.join(self.environ['SCALA_HOME'], 'bin')] + java_path
self.env['SCALA_HOME'] = [self.environ['SCALA_HOME']]
for x in 'scalac scala'.split():
self.find_program(x, var=x.upper(), path_list=java_path)
if 'CLASSPATH' in self.environ:
v['CLASSPATH'] = self.environ['CLASSPATH']
v.SCALACFLAGS = ['-verbose']
if not v['SCALAC']:
self.fatal('scalac is required for compiling scala classes')
# === projecthamster_hamster/waflib/extras/kde4.py ===
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006-2010 (ita)
"""
Support for the KDE4 libraries and msgfmt
"""
import os, re
from waflib import Task, Utils
from waflib.TaskGen import feature
@feature('msgfmt')
def apply_msgfmt(self):
"""
Process all languages to create .mo files and to install them::
def build(bld):
bld(features='msgfmt', langs='es de fr', appname='myapp', install_path='${KDE4_LOCALE_INSTALL_DIR}')
"""
for lang in self.to_list(self.langs):
node = self.path.find_resource(lang+'.po')
task = self.create_task('msgfmt', node, node.change_ext('.mo'))
langname = lang.split('/')
langname = langname[-1]
inst = getattr(self, 'install_path', '${KDE4_LOCALE_INSTALL_DIR}')
self.add_install_as(
inst_to = inst + os.sep + langname + os.sep + 'LC_MESSAGES' + os.sep + getattr(self, 'appname', 'set_your_appname') + '.mo',
inst_from = task.outputs[0],
chmod = getattr(self, 'chmod', Utils.O644))
class msgfmt(Task.Task):
"""
Transform .po files into .mo files
"""
color = 'BLUE'
run_str = '${MSGFMT} ${SRC} -o ${TGT}'
def configure(self):
"""
Detect kde4-config and set various variables for the *use* system::
def options(opt):
opt.load('compiler_cxx kde4')
def configure(conf):
conf.load('compiler_cxx kde4')
def build(bld):
bld.program(source='main.c', target='app', use='KDECORE KIO KHTML')
"""
kdeconfig = self.find_program('kde4-config')
prefix = self.cmd_and_log(kdeconfig + ['--prefix']).strip()
fname = '%s/share/apps/cmake/modules/KDELibsDependencies.cmake' % prefix
try:
os.stat(fname)
except OSError:
fname = '%s/share/kde4/apps/cmake/modules/KDELibsDependencies.cmake' % prefix
try:
os.stat(fname)
except OSError:
self.fatal('could not open %s' % fname)
try:
txt = Utils.readf(fname)
except EnvironmentError:
self.fatal('could not read %s' % fname)
txt = txt.replace('\\\n', '\n')
fu = re.compile('#(.*)\n')
txt = fu.sub('', txt)
setregexp = re.compile(r'([sS][eE][tT]\s*\()\s*([^\s]+)\s+\"([^"]+)\"\)')
found = setregexp.findall(txt)
for (_, key, val) in found:
#print key, val
self.env[key] = val
# well well, i could just write an interpreter for cmake files
self.env['LIB_KDECORE']= ['kdecore']
self.env['LIB_KDEUI'] = ['kdeui']
self.env['LIB_KIO'] = ['kio']
self.env['LIB_KHTML'] = ['khtml']
self.env['LIB_KPARTS'] = ['kparts']
self.env['LIBPATH_KDECORE'] = [os.path.join(self.env.KDE4_LIB_INSTALL_DIR, 'kde4', 'devel'), self.env.KDE4_LIB_INSTALL_DIR]
self.env['INCLUDES_KDECORE'] = [self.env['KDE4_INCLUDE_INSTALL_DIR']]
self.env.append_value('INCLUDES_KDECORE', [self.env['KDE4_INCLUDE_INSTALL_DIR']+ os.sep + 'KDE'])
self.find_program('msgfmt', var='MSGFMT')
# === projecthamster_hamster/waflib/extras/dpapi.py ===
#! /usr/bin/env python
# encoding: utf-8
# Matt Clarkson, 2012
'''
DPAPI access library (http://msdn.microsoft.com/en-us/library/ms995355.aspx)
This file uses code originally created by Crusher Joe:
http://article.gmane.org/gmane.comp.python.ctypes/420
And modified by Wayne Koorts:
http://stackoverflow.com/questions/463832/using-dpapi-with-python
'''
from ctypes import windll, byref, cdll, Structure, POINTER, c_char, c_buffer
from ctypes.wintypes import DWORD
from waflib.Configure import conf
LocalFree = windll.kernel32.LocalFree
memcpy = cdll.msvcrt.memcpy
CryptProtectData = windll.crypt32.CryptProtectData
CryptUnprotectData = windll.crypt32.CryptUnprotectData
CRYPTPROTECT_UI_FORBIDDEN = 0x01
try:
extra_entropy = 'cl;ad13 \0al;323kjd #(adl;k$#ajsd'.encode('ascii')
except AttributeError:
extra_entropy = 'cl;ad13 \0al;323kjd #(adl;k$#ajsd'
class DATA_BLOB(Structure):
_fields_ = [
('cbData', DWORD),
('pbData', POINTER(c_char))
]
def get_data(blob_out):
cbData = int(blob_out.cbData)
pbData = blob_out.pbData
buffer = c_buffer(cbData)
memcpy(buffer, pbData, cbData)
LocalFree(pbData)
return buffer.raw
@conf
def dpapi_encrypt_data(self, input_bytes, entropy = extra_entropy):
'''
Encrypts data and returns byte string
:param input_bytes: The data to be encrypted
:type input_bytes: String or Bytes
:param entropy: Extra entropy to add to the encryption process (optional)
:type entropy: String or Bytes
'''
if not isinstance(input_bytes, bytes) or not isinstance(entropy, bytes):
self.fatal('The inputs to dpapi must be bytes')
buffer_in = c_buffer(input_bytes, len(input_bytes))
buffer_entropy = c_buffer(entropy, len(entropy))
blob_in = DATA_BLOB(len(input_bytes), buffer_in)
blob_entropy = DATA_BLOB(len(entropy), buffer_entropy)
blob_out = DATA_BLOB()
if CryptProtectData(byref(blob_in), 'python_data', byref(blob_entropy),
None, None, CRYPTPROTECT_UI_FORBIDDEN, byref(blob_out)):
return get_data(blob_out)
else:
self.fatal('Failed to encrypt data')
@conf
def dpapi_decrypt_data(self, encrypted_bytes, entropy = extra_entropy):
'''
Decrypts data and returns byte string
:param encrypted_bytes: The encrypted data
:type encrypted_bytes: Bytes
:param entropy: Extra entropy that was used in the encryption process (optional)
:type entropy: String or Bytes
'''
if not isinstance(encrypted_bytes, bytes) or not isinstance(entropy, bytes):
self.fatal('The inputs to dpapi must be bytes')
buffer_in = c_buffer(encrypted_bytes, len(encrypted_bytes))
buffer_entropy = c_buffer(entropy, len(entropy))
blob_in = DATA_BLOB(len(encrypted_bytes), buffer_in)
blob_entropy = DATA_BLOB(len(entropy), buffer_entropy)
blob_out = DATA_BLOB()
if CryptUnprotectData(byref(blob_in), None, byref(blob_entropy), None,
None, CRYPTPROTECT_UI_FORBIDDEN, byref(blob_out)):
return get_data(blob_out)
else:
self.fatal('Failed to decrypt data')
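# A round-trip usage sketch from a configure() function (Windows only;
# the payload below is illustrative):
#
#   def configure(conf):
#       conf.load('dpapi')
#       token = conf.dpapi_encrypt_data(b'my secret')
#       assert conf.dpapi_decrypt_data(token) == b'my secret'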
# === projecthamster_hamster/waflib/extras/erlang.py ===
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2010 (ita)
# Przemyslaw Rzepecki, 2016
"""
Erlang support
"""
import re
from waflib import Task, TaskGen
from waflib.TaskGen import feature, after_method, before_method
# to load the method "to_incnodes" below
from waflib.Tools import ccroot
# Those flags are required by the Erlang VM to execute/evaluate code in
# non-interactive mode. It is used in this tool to create Erlang modules
# documentation and run unit tests. The user can pass additional arguments to the
# 'erl' command with ERL_FLAGS environment variable.
EXEC_NON_INTERACTIVE = ['-noshell', '-noinput', '-eval']
def configure(conf):
conf.find_program('erlc', var='ERLC')
conf.find_program('erl', var='ERL')
conf.add_os_flags('ERLC_FLAGS')
conf.add_os_flags('ERL_FLAGS')
conf.env.ERLC_DEF_PATTERN = '-D%s'
conf.env.ERLC_INC_PATTERN = '-I%s'
@TaskGen.extension('.erl')
def process_erl_node(self, node):
tsk = self.create_task('erl', node, node.change_ext('.beam'))
tsk.erlc_incnodes = [tsk.outputs[0].parent] + self.to_incnodes(self.includes)
tsk.env.append_value('ERLC_INCPATHS', [x.abspath() for x in tsk.erlc_incnodes])
tsk.env.append_value('ERLC_DEFINES', self.to_list(getattr(self, 'defines', [])))
tsk.env.append_value('ERLC_FLAGS', self.to_list(getattr(self, 'flags', [])))
tsk.cwd = tsk.outputs[0].parent
class erl(Task.Task):
color = 'GREEN'
run_str = '${ERLC} ${ERL_FLAGS} ${ERLC_INC_PATTERN:ERLC_INCPATHS} ${ERLC_DEF_PATTERN:ERLC_DEFINES} ${SRC}'
def scan(task):
node = task.inputs[0]
deps = []
scanned = set([])
nodes_to_scan = [node]
for n in nodes_to_scan:
if n.abspath() in scanned:
continue
for i in re.findall(r'-include\("(.*)"\)\.', n.read()):
for d in task.erlc_incnodes:
r = d.find_node(i)
if r:
deps.append(r)
nodes_to_scan.append(r)
break
scanned.add(n.abspath())
return (deps, [])
@TaskGen.extension('.beam')
def process(self, node):
pass
class erl_test(Task.Task):
color = 'BLUE'
run_str = '${ERL} ${ERL_FLAGS} ${ERL_TEST_FLAGS}'
@feature('eunit')
@after_method('process_source')
def add_erl_test_run(self):
test_modules = [t.outputs[0] for t in self.tasks]
test_task = self.create_task('erl_test')
test_task.set_inputs(self.source + test_modules)
test_task.cwd = test_modules[0].parent
test_task.env.append_value('ERL_FLAGS', self.to_list(getattr(self, 'flags', [])))
test_list = ", ".join([m.change_ext("").path_from(test_task.cwd)+":test()" for m in test_modules])
test_flag = 'halt(case lists:all(fun(Elem) -> Elem == ok end, [%s]) of true -> 0; false -> 1 end).' % test_list
test_task.env.append_value('ERL_TEST_FLAGS', EXEC_NON_INTERACTIVE)
test_task.env.append_value('ERL_TEST_FLAGS', test_flag)
class edoc(Task.Task):
color = 'BLUE'
run_str = "${ERL} ${ERL_FLAGS} ${ERL_DOC_FLAGS}"
def keyword(self):
return 'Generating edoc'
@feature('edoc')
@before_method('process_source')
def add_edoc_task(self):
# do not process source, it would create double erl->beam task
self.meths.remove('process_source')
e = self.path.find_resource(self.source)
t = e.change_ext('.html')
png = t.parent.make_node('erlang.png')
css = t.parent.make_node('stylesheet.css')
tsk = self.create_task('edoc', e, [t, png, css])
tsk.cwd = tsk.outputs[0].parent
tsk.env.append_value('ERL_DOC_FLAGS', EXEC_NON_INTERACTIVE)
tsk.env.append_value('ERL_DOC_FLAGS', 'edoc:files(["%s"]), halt(0).' % tsk.inputs[0].abspath())
# TODO the above can break if a file path contains '"'
# === projecthamster_hamster/waflib/extras/gob2.py ===
#!/usr/bin/env python
# encoding: utf-8
# Ali Sabil, 2007
from waflib import TaskGen
TaskGen.declare_chain(
name = 'gob2',
rule = '${GOB2} -o ${TGT[0].bld_dir()} ${GOB2FLAGS} ${SRC}',
ext_in = '.gob',
ext_out = '.c'
)
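# Usage sketch: a .gob file listed among the sources of a C task
# generator is routed through the chain above (names are hypothetical):
#
#   def build(bld):
#       bld.program(source='main.c hello.gob', target='app')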
def configure(conf):
conf.find_program('gob2', var='GOB2')
conf.env['GOB2FLAGS'] = ''
# === projecthamster_hamster/waflib/extras/color_msvc.py ===
#!/usr/bin/env python
# encoding: utf-8
# Replaces the default formatter by one which understands MSVC output and colorizes it.
# Modified from color_gcc.py
__author__ = __maintainer__ = "Alibek Omarov <a1ba.omarov@gmail.com>"
__copyright__ = "Alibek Omarov, 2019"
import sys
from waflib import Logs
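# Usage sketch: load this tool from options() so the formatter below is
# installed before any command output is logged:
#
#   def options(opt):
#       opt.load('color_msvc')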
class ColorMSVCFormatter(Logs.formatter):
def __init__(self, colors):
self.colors = colors
Logs.formatter.__init__(self)
def parseMessage(self, line, color):
# Split message from 'disk:filepath: type: message'
arr = line.split(':', 3)
if len(arr) < 4:
return line
colored = self.colors.BOLD + arr[0] + ':' + arr[1] + ':' + self.colors.NORMAL
colored += color + arr[2] + ':' + self.colors.NORMAL
colored += arr[3]
return colored
def format(self, rec):
frame = sys._getframe()
while frame:
func = frame.f_code.co_name
if func == 'exec_command':
cmd = frame.f_locals.get('cmd')
if isinstance(cmd, list):
# Fix file case, it may be CL.EXE or cl.exe
argv0 = cmd[0].lower()
if 'cl.exe' in argv0:
lines = []
# This will not work with "localized" versions
# of MSVC
for line in rec.msg.splitlines():
if ': warning ' in line:
lines.append(self.parseMessage(line, self.colors.YELLOW))
elif ': error ' in line:
lines.append(self.parseMessage(line, self.colors.RED))
elif ': fatal error ' in line:
lines.append(self.parseMessage(line, self.colors.RED + self.colors.BOLD))
elif ': note: ' in line:
lines.append(self.parseMessage(line, self.colors.CYAN))
else:
lines.append(line)
rec.msg = "\n".join(lines)
frame = frame.f_back
return Logs.formatter.format(self, rec)
def options(opt):
Logs.log.handlers[0].setFormatter(ColorMSVCFormatter(Logs.colors))
# === projecthamster_hamster/waflib/extras/__init__.py ===
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2005-2010 (ita)
# === projecthamster_hamster/waflib/extras/satellite_assembly.py ===
#!/usr/bin/python
# encoding: utf-8
# vim: tabstop=4 noexpandtab
"""
Create a satellite assembly from "*.??.txt" files. ?? stands for a language code.
The projects Resources subfolder contains resources.??.txt string files for several languages.
The build folder will hold the satellite assemblies as ./??/ExeName.resources.dll
#gen becomes template (It is called gen because it also uses resx.py).
bld(source='Resources/resources.de.txt',gen=ExeName)
"""
import os, re
from waflib import Task
from waflib.TaskGen import feature,before_method
class al(Task.Task):
run_str = '${AL} ${ALFLAGS}'
@feature('satellite_assembly')
@before_method('process_source')
def satellite_assembly(self):
if not getattr(self, 'gen', None):
self.bld.fatal('satellite_assembly needs a template assembly provided with the "gen" parameter')
res_lang = re.compile(r'(.*)\.(\w\w)\.(?:resx|txt)',flags=re.I)
# self.source can contain node objects, so this will break in one way or another
self.source = self.to_list(self.source)
for i, x in enumerate(self.source):
#x = 'resources/resources.de.resx'
#x = 'resources/resources.de.txt'
mo = res_lang.match(x)
if mo:
template = os.path.splitext(self.gen)[0]
templatedir, templatename = os.path.split(template)
res = mo.group(1)
lang = mo.group(2)
#./Resources/resources.de.resources
resources = self.path.find_or_declare(res+ '.' + lang + '.resources')
self.create_task('resgen', self.to_nodes(x), [resources])
#./de/Exename.resources.dll
satellite = self.path.find_or_declare(os.path.join(templatedir,lang,templatename) + '.resources.dll')
tsk = self.create_task('al',[resources],[satellite])
tsk.env.append_value('ALFLAGS','/template:'+os.path.join(self.path.relpath(),self.gen))
tsk.env.append_value('ALFLAGS','/embed:'+resources.relpath())
tsk.env.append_value('ALFLAGS','/culture:'+lang)
tsk.env.append_value('ALFLAGS','/out:'+satellite.relpath())
self.source[i] = None
# remove the None elements that we just substituted
self.source = list(filter(lambda x:x, self.source))
def configure(ctx):
ctx.find_program('al', var='AL', mandatory=True)
ctx.load('resx')
# === projecthamster_hamster/waflib/extras/run_py_script.py ===
#!/usr/bin/env python
# encoding: utf-8
# Hans-Martin von Gaudecker, 2012
"""
Run a Python script in the directory specified by **ctx.bldnode**.
Select a Python version by specifying the **version** keyword for
the task generator instance as integer 2 or 3. Default is 3.
If the build environment has an attribute "PROJECT_PATHS" with
a key "PROJECT_ROOT", its value will be appended to the PYTHONPATH.
The same applies to a string passed to the optional **add_to_pythonpath**
keyword (appended after the PROJECT_ROOT).
Usage::
ctx(features='run_py_script', version=3,
source='some_script.py',
target=['some_table.tex', 'some_figure.eps'],
deps='some_data.csv',
add_to_pythonpath='src/some/library')
"""
import os, re
from waflib import Task, TaskGen, Logs
def configure(conf):
"""TODO: Might need to be updated for Windows once
"PEP 397":http://www.python.org/dev/peps/pep-0397/ is settled.
"""
conf.find_program('python', var='PY2CMD', mandatory=False)
conf.find_program('python3', var='PY3CMD', mandatory=False)
if not conf.env.PY2CMD and not conf.env.PY3CMD:
conf.fatal("No Python interpreter found!")
class run_py_2_script(Task.Task):
"""Run a Python 2 script."""
run_str = '${PY2CMD} ${SRC[0].abspath()}'
shell=True
class run_py_3_script(Task.Task):
"""Run a Python 3 script."""
run_str = '${PY3CMD} ${SRC[0].abspath()}'
shell=True
@TaskGen.feature('run_py_script')
@TaskGen.before_method('process_source')
def apply_run_py_script(tg):
"""Task generator for running either Python 2 or Python 3 on a single
script.
Attributes:
* source -- A **single** source node or string. (required)
* target -- A single target or list of targets (nodes or strings)
* deps -- A single dependency or list of dependencies (nodes or strings)
* add_to_pythonpath -- A string that will be appended to the PYTHONPATH environment variable
If the build environment has an attribute "PROJECT_PATHS" with
a key "PROJECT_ROOT", its value will be appended to the PYTHONPATH.
"""
# Set the Python version to use, default to 3.
v = getattr(tg, 'version', 3)
if v not in (2, 3):
raise ValueError("Specify the 'version' attribute for run_py_script task generator as integer 2 or 3.\n Got: %s" %v)
# Convert sources and targets to nodes
src_node = tg.path.find_resource(tg.source)
tgt_nodes = [tg.path.find_or_declare(t) for t in tg.to_list(tg.target)]
# Create the task.
tsk = tg.create_task('run_py_%d_script' %v, src=src_node, tgt=tgt_nodes)
# custom execution environment
# TODO use a list and os.pathsep.join(lst) at the end instead of concatenating strings
tsk.env.env = dict(os.environ)
tsk.env.env['PYTHONPATH'] = tsk.env.env.get('PYTHONPATH', '')
project_paths = getattr(tsk.env, 'PROJECT_PATHS', None)
if project_paths and 'PROJECT_ROOT' in project_paths:
tsk.env.env['PYTHONPATH'] += os.pathsep + project_paths['PROJECT_ROOT'].abspath()
if getattr(tg, 'add_to_pythonpath', None):
tsk.env.env['PYTHONPATH'] += os.pathsep + tg.add_to_pythonpath
# Clean up the PYTHONPATH -- replace double occurrences of path separator
tsk.env.env['PYTHONPATH'] = re.sub(os.pathsep + '+', os.pathsep, tsk.env.env['PYTHONPATH'])
# Clean up the PYTHONPATH -- doesn't like starting with path separator
if tsk.env.env['PYTHONPATH'].startswith(os.pathsep):
tsk.env.env['PYTHONPATH'] = tsk.env.env['PYTHONPATH'][1:]
# dependencies (if the attribute 'deps' changes, trigger a recompilation)
for x in tg.to_list(getattr(tg, 'deps', [])):
node = tg.path.find_resource(x)
if not node:
tg.bld.fatal('Could not find dependency %r for running %r' % (x, src_node.abspath()))
tsk.dep_nodes.append(node)
Logs.debug('deps: found dependencies %r for running %r', tsk.dep_nodes, src_node.abspath())
# Bypass the execution of process_source by setting the source to an empty list
tg.source = []
# === projecthamster_hamster/waflib/extras/make.py ===
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2011 (ita)
"""
A make-like way of executing the build, following the relationships between inputs/outputs
This algorithm will lead to slower builds, will not be as flexible as "waf build", but
it might be useful for building data files (?)
It is likely to break in the following cases:
- files are created dynamically (no inputs or outputs)
- headers
- building two files from different groups
"""
import re
from waflib import Options, Task
from waflib.Build import BuildContext
class MakeContext(BuildContext):
'''executes tasks in a step-by-step manner, following dependencies between inputs/outputs'''
cmd = 'make'
fun = 'build'
def __init__(self, **kw):
super(MakeContext, self).__init__(**kw)
self.files = Options.options.files
def get_build_iterator(self):
if not self.files:
while 1:
yield super(MakeContext, self).get_build_iterator()
for g in self.groups:
for tg in g:
try:
f = tg.post
except AttributeError:
pass
else:
f()
provides = {}
uses = {}
all_tasks = []
tasks = []
for pat in self.files.split(','):
matcher = self.get_matcher(pat)
for tg in g:
if isinstance(tg, Task.Task):
lst = [tg]
else:
lst = tg.tasks
for tsk in lst:
all_tasks.append(tsk)
do_exec = False
for node in tsk.inputs:
try:
uses[node].append(tsk)
except KeyError:
uses[node] = [tsk]
if matcher(node, output=False):
do_exec = True
break
for node in tsk.outputs:
try:
provides[node].append(tsk)
except KeyError:
provides[node] = [tsk]
if matcher(node, output=True):
do_exec = True
break
if do_exec:
tasks.append(tsk)
# so we have the tasks that we need to process, the list of all tasks,
# the map of the tasks providing nodes, and the map of tasks using nodes
if not tasks:
# if there are no tasks matching, return everything in the current group
result = all_tasks
else:
# this is like a big filter...
result = set()
seen = set()
cur = set(tasks)
while cur:
result |= cur
tosee = set()
for tsk in cur:
for node in tsk.inputs:
if node in seen:
continue
seen.add(node)
tosee |= set(provides.get(node, []))
cur = tosee
result = list(result)
Task.set_file_constraints(result)
Task.set_precedence_constraints(result)
yield result
while 1:
yield []
def get_matcher(self, pat):
# this returns a function
inn = True
out = True
if pat.startswith('in:'):
out = False
pat = pat.replace('in:', '')
elif pat.startswith('out:'):
inn = False
pat = pat.replace('out:', '')
anode = self.root.find_node(pat)
pattern = None
if not anode:
if not pat.startswith('^'):
pat = '^.+?%s' % pat
if not pat.endswith('$'):
pat = '%s$' % pat
pattern = re.compile(pat)
def match(node, output):
if output and not out:
return False
if not output and not inn:
return False
if anode:
return anode == node
else:
return pattern.match(node.abspath())
return match
# === projecthamster_hamster/waflib/extras/package.py ===
#! /usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2011
"""
Obtain packages, unpack them in a location, and add associated uselib variables
(CFLAGS_pkgname, LIBPATH_pkgname, etc).
The default is to use a Dependencies.txt file in the source directory.
This is a work in progress.
Usage:
def options(opt):
opt.load('package')
def configure(conf):
conf.load_packages()
"""
from waflib import Logs
from waflib.Configure import conf
try:
from urllib import request
except ImportError:
from urllib import urlopen
else:
urlopen = request.urlopen
CACHEVAR = 'WAFCACHE_PACKAGE'
@conf
def get_package_cache_dir(self):
cache = None
if CACHEVAR in self.environ:
cache = self.environ[CACHEVAR]
cache = self.root.make_node(cache)
elif self.env[CACHEVAR]:
cache = self.env[CACHEVAR]
cache = self.root.make_node(cache)
else:
cache = self.srcnode.make_node('.wafcache_package')
cache.mkdir()
return cache
@conf
def download_archive(self, src, dst):
for x in self.env.PACKAGE_REPO:
url = '/'.join((x, src))
try:
web = urlopen(url)
try:
if web.getcode() != 200:
continue
except AttributeError:
pass
except Exception:
# on python3 urlopen throws an exception
# python 2.3 does not have getcode and throws an exception to fail
continue
else:
tmp = self.root.make_node(dst)
tmp.write(web.read())
Logs.warn('Downloaded %s from %s', tmp.abspath(), url)
break
else:
self.fatal('Could not get the package %s' % src)
@conf
def load_packages(self):
self.get_package_cache_dir()
# read the dependencies, get the archives, ..
# === projecthamster_hamster/waflib/extras/gccdeps.py ===
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2008-2010 (ita)
"""
Execute the tasks with gcc -MD, read the dependencies from the .d file
and prepare the dependency calculation for the next run.
This affects the cxx class, so make sure to load Qt5 after this tool.
Usage::
def options(opt):
opt.load('compiler_cxx')
def configure(conf):
conf.load('compiler_cxx gccdeps')
"""
import os, re, threading
from waflib import Task, Logs, Utils, Errors
from waflib.Tools import asm, c, c_preproc, cxx
from waflib.TaskGen import before_method, feature
lock = threading.Lock()
gccdeps_flags = ['-MD']
if not c_preproc.go_absolute:
gccdeps_flags = ['-MMD']
# Third-party tools are allowed to add extra names in here with append()
supported_compilers = ['gas', 'gcc', 'icc', 'clang']
re_o = re.compile(r"\.o$")
re_splitter = re.compile(r'(?<!\\)\s+') # split by space, except when spaces are escaped
def remove_makefile_rule_lhs(line):
# Splitting on a plain colon would accidentally match inside a
# Windows absolute-path filename, so we must search for a colon
# followed by whitespace to find the divider between LHS and RHS
# of the Makefile rule.
rulesep = ': '
sep_idx = line.find(rulesep)
if sep_idx >= 0:
return line[sep_idx + 2:]
else:
return line
def path_to_node(base_node, path, cached_nodes):
# Take the base node and the path and return a node
# Results are cached because searching the node tree is expensive
# The following code is executed by threads, it is not safe, so a lock is needed...
if getattr(path, '__hash__'):
node_lookup_key = (base_node, path)
else:
# Not hashable, assume it is a list and join into a string
node_lookup_key = (base_node, os.path.sep.join(path))
try:
node = cached_nodes[node_lookup_key]
except KeyError:
# retry with lock on cache miss
with lock:
try:
node = cached_nodes[node_lookup_key]
except KeyError:
node = cached_nodes[node_lookup_key] = base_node.find_resource(path)
return node
def post_run(self):
if not self.__class__.__name__ in self.env.ENABLE_GCCDEPS:
return super(self.derived_gccdeps, self).post_run()
deps_filename = self.outputs[0].abspath()
deps_filename = re_o.sub('.d', deps_filename)
try:
deps_txt = Utils.readf(deps_filename)
except EnvironmentError:
Logs.error('Could not find a .d dependency file, are cflags/cxxflags overwritten?')
raise
# Compilers have the choice to either output the file's dependencies
# as one large Makefile rule:
#
# /path/to/file.o: /path/to/dep1.h \
# /path/to/dep2.h \
# /path/to/dep3.h \
# ...
#
# or as many individual rules:
#
# /path/to/file.o: /path/to/dep1.h
# /path/to/file.o: /path/to/dep2.h
# /path/to/file.o: /path/to/dep3.h
# ...
#
# So the first step is to sanitize the input by stripping out the left-
# hand side of all these lines. After that, whatever remains are the
# implicit dependencies of task.outputs[0]
deps_txt = '\n'.join([remove_makefile_rule_lhs(line) for line in deps_txt.splitlines()])
# Now join all the lines together
deps_txt = deps_txt.replace('\\\n', '')
dep_paths = deps_txt.strip()
dep_paths = [x.replace('\\ ', ' ') for x in re_splitter.split(dep_paths) if x]
resolved_nodes = []
unresolved_names = []
bld = self.generator.bld
# Dynamically bind to the cache
try:
cached_nodes = bld.cached_nodes
except AttributeError:
cached_nodes = bld.cached_nodes = {}
for path in dep_paths:
node = None
if os.path.isabs(path):
node = path_to_node(bld.root, path, cached_nodes)
else:
# TODO waf 1.9 - single cwd value
base_node = getattr(bld, 'cwdx', bld.bldnode)
# when calling find_resource, make sure the path does not contain '..'
path = [k for k in Utils.split_path(path) if k and k != '.']
while '..' in path:
idx = path.index('..')
if idx == 0:
path = path[1:]
base_node = base_node.parent
else:
del path[idx]
del path[idx-1]
node = path_to_node(base_node, path, cached_nodes)
if not node:
raise ValueError('could not find %r for %r' % (path, self))
if id(node) == id(self.inputs[0]):
# ignore the source file, it is already in the dependencies
# this way, successful config tests may be retrieved from the cache
continue
resolved_nodes.append(node)
Logs.debug('deps: gccdeps for %s returned %s', self, resolved_nodes)
bld.node_deps[self.uid()] = resolved_nodes
bld.raw_deps[self.uid()] = unresolved_names
try:
del self.cache_sig
except AttributeError:
pass
Task.Task.post_run(self)
def scan(self):
if not self.__class__.__name__ in self.env.ENABLE_GCCDEPS:
return super(self.derived_gccdeps, self).scan()
resolved_nodes = self.generator.bld.node_deps.get(self.uid(), [])
unresolved_names = []
return (resolved_nodes, unresolved_names)
def sig_implicit_deps(self):
if not self.__class__.__name__ in self.env.ENABLE_GCCDEPS:
return super(self.derived_gccdeps, self).sig_implicit_deps()
bld = self.generator.bld
try:
return self.compute_sig_implicit_deps()
except Errors.TaskNotReady:
raise ValueError("Please specify the build order precisely with gccdeps (asm/c/c++ tasks)")
except EnvironmentError:
# If a file is renamed, assume the dependencies are stale and must be recalculated
for x in bld.node_deps.get(self.uid(), []):
if not x.is_bld() and not x.exists():
try:
del x.parent.children[x.name]
except KeyError:
pass
key = self.uid()
bld.node_deps[key] = []
bld.raw_deps[key] = []
return Utils.SIG_NIL
def wrap_compiled_task(classname):
derived_class = type(classname, (Task.classes[classname],), {})
derived_class.derived_gccdeps = derived_class
derived_class.post_run = post_run
derived_class.scan = scan
derived_class.sig_implicit_deps = sig_implicit_deps
for k in ('asm', 'c', 'cxx'):
if k in Task.classes:
wrap_compiled_task(k)
@before_method('process_source')
@feature('force_gccdeps')
def force_gccdeps(self):
self.env.ENABLE_GCCDEPS = ['asm', 'c', 'cxx']
def configure(conf):
# in case someone provides a --enable-gccdeps command-line option
if not getattr(conf.options, 'enable_gccdeps', True):
return
global gccdeps_flags
flags = conf.env.GCCDEPS_FLAGS or gccdeps_flags
if conf.env.ASM_NAME in supported_compilers:
try:
conf.check(fragment='', features='asm force_gccdeps', asflags=flags, compile_filename='test.S', msg='Checking for asm flags %r' % ''.join(flags))
except Errors.ConfigurationError:
pass
else:
conf.env.append_value('ASFLAGS', flags)
conf.env.append_unique('ENABLE_GCCDEPS', 'asm')
if conf.env.CC_NAME in supported_compilers:
try:
conf.check(fragment='int main() { return 0; }', features='c force_gccdeps', cflags=flags, msg='Checking for c flags %r' % ''.join(flags))
except Errors.ConfigurationError:
pass
else:
conf.env.append_value('CFLAGS', flags)
conf.env.append_unique('ENABLE_GCCDEPS', 'c')
if conf.env.CXX_NAME in supported_compilers:
try:
conf.check(fragment='int main() { return 0; }', features='cxx force_gccdeps', cxxflags=flags, msg='Checking for cxx flags %r' % ''.join(flags))
except Errors.ConfigurationError:
pass
else:
conf.env.append_value('CXXFLAGS', flags)
conf.env.append_unique('ENABLE_GCCDEPS', 'cxx')
def options(opt):
raise ValueError('Do not load gccdeps options')
# === projecthamster_hamster/waflib/extras/clangxx_cross.py ===
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy 2009-2018 (ita)
# DragoonX6 2018
"""
Detect the Clang++ C++ compiler
This version is an attempt at supporting the -target and -sysroot flag of Clang++.
"""
from waflib.Tools import ccroot, ar, gxx
from waflib.Configure import conf
import waflib.extras.clang_cross_common
def options(opt):
"""
Target triplet for clang++::
$ waf configure --clangxx-target-triple=x86_64-pc-linux-gnu
"""
cxx_compiler_opts = opt.add_option_group('Configuration options')
cxx_compiler_opts.add_option('--clangxx-target-triple', default=None,
help='Target triple for clang++',
dest='clangxx_target_triple')
cxx_compiler_opts.add_option('--clangxx-sysroot', default=None,
help='Sysroot for clang++',
dest='clangxx_sysroot')
@conf
def find_clangxx(conf):
"""
Finds the program clang++, and executes it to ensure it really is clang++
"""
import os
cxx = conf.find_program('clang++', var='CXX')
if conf.options.clangxx_target_triple != None:
conf.env.append_value('CXX', ['-target', conf.options.clangxx_target_triple])
if conf.options.clangxx_sysroot != None:
sysroot = str()
if os.path.isabs(conf.options.clangxx_sysroot):
sysroot = conf.options.clangxx_sysroot
else:
sysroot = os.path.normpath(os.path.join(os.getcwd(), conf.options.clangxx_sysroot))
conf.env.append_value('CXX', ['--sysroot', sysroot])
conf.get_cc_version(cxx, clang=True)
conf.env.CXX_NAME = 'clang'
@conf
def clangxx_modifier_x86_64_w64_mingw32(conf):
conf.gcc_modifier_win32()
@conf
def clangxx_modifier_i386_w64_mingw32(conf):
conf.gcc_modifier_win32()
@conf
def clangxx_modifier_msvc(conf):
v = conf.env
v.cxxprogram_PATTERN = v.cprogram_PATTERN
v.cxxshlib_PATTERN = v.cshlib_PATTERN
v.CXXFLAGS_cxxshlib = []
v.LINKFLAGS_cxxshlib = v.LINKFLAGS_cshlib
v.cxxstlib_PATTERN = v.cstlib_PATTERN
v.LINK_CXX = v.CXX + ['-fuse-ld=lld', '-nostdlib']
v.CXXLNK_TGT_F = v.CCLNK_TGT_F
@conf
def clangxx_modifier_x86_64_windows_msvc(conf):
conf.clang_modifier_msvc()
conf.clangxx_modifier_msvc()
# Allow the user to override any flags if they so desire.
clang_modifier_user_func = getattr(conf, 'clangxx_modifier_x86_64_windows_msvc_user', None)
if clang_modifier_user_func:
clang_modifier_user_func()
@conf
def clangxx_modifier_i386_windows_msvc(conf):
conf.clang_modifier_msvc()
conf.clangxx_modifier_msvc()
# Allow the user to override any flags if they so desire.
clang_modifier_user_func = getattr(conf, 'clangxx_modifier_i386_windows_msvc_user', None)
if clang_modifier_user_func:
clang_modifier_user_func()
def configure(conf):
conf.find_clangxx()
conf.find_program(['llvm-ar', 'ar'], var='AR')
conf.find_ar()
conf.gxx_common_flags()
# Allow the user to provide flags for the target platform.
conf.gxx_modifier_platform()
# And allow more fine grained control based on the compiler's triplet.
conf.clang_modifier_target_triple(cpp=True)
conf.cxx_load_tools()
conf.cxx_add_flags()
conf.link_add_flags()
# === projecthamster_hamster/waflib/extras/rst.py ===
#!/usr/bin/env python
# encoding: utf-8
# Jérôme Carretero, 2013 (zougloub)
"""
reStructuredText support (experimental)
Example::
def configure(conf):
conf.load('rst')
if not conf.env.RST2HTML:
conf.fatal('The program rst2html is required')
def build(bld):
bld(
features = 'rst',
type = 'rst2html', # rst2html, rst2pdf, ...
source = 'index.rst', # mandatory, the source
deps = 'image.png', # to give additional non-trivial dependencies
)
By default the tool looks for a set of programs in PATH.
The tools are defined in `rst_progs`.
To configure with a special program use::
$ RST2HTML=/path/to/rst2html waf configure
This tool is experimental; don't hesitate to contribute to it.
"""
import re
from waflib import Node, Utils, Task, Errors, Logs
from waflib.TaskGen import feature, before_method
rst_progs = "rst2html rst2xetex rst2latex rst2xml rst2pdf rst2s5 rst2man rst2odt rst2rtf".split()
def parse_rst_node(task, node, nodes, names, seen, dirs=None):
# TODO add extensibility, to handle custom rst include tags...
if dirs is None:
dirs = (node.parent,node.get_bld().parent)
if node in seen:
return
seen.append(node)
code = node.read()
re_rst = re.compile(r'^\s*\.\. ((?P<subst>\|\S+\|) )?(?P<type>include|image|figure):: (?P<file>.*)$', re.M)
for match in re_rst.finditer(code):
ipath = match.group('file')
itype = match.group('type')
Logs.debug('rst: visiting %s: %s', itype, ipath)
found = False
for d in dirs:
Logs.debug('rst: looking for %s in %s', ipath, d.abspath())
found = d.find_node(ipath)
if found:
Logs.debug('rst: found %s as %s', ipath, found.abspath())
nodes.append((itype, found))
if itype == 'include':
parse_rst_node(task, found, nodes, names, seen)
break
if not found:
names.append((itype, ipath))
class docutils(Task.Task):
"""
Compile a rst file.
"""
def scan(self):
"""
A recursive regex-based scanner that finds rst dependencies.
"""
nodes = []
names = []
seen = []
node = self.inputs[0]
if not node:
return (nodes, names)
parse_rst_node(self, node, nodes, names, seen)
Logs.debug('rst: %r: found the following file deps: %r', self, nodes)
if names:
Logs.warn('rst: %r: could not find the following file deps: %r', self, names)
return ([v for (t,v) in nodes], [v for (t,v) in names])
def check_status(self, msg, retcode):
"""
Check an exit status and raise an error with a particular message
:param msg: message to display if the code is non-zero
:type msg: string
:param retcode: condition
:type retcode: boolean
"""
if retcode != 0:
raise Errors.WafError('%r command exit status %r' % (msg, retcode))
def run(self):
"""
Runs the rst compilation using docutils
"""
raise NotImplementedError()
class rst2html(docutils):
color = 'BLUE'
def __init__(self, *args, **kw):
docutils.__init__(self, *args, **kw)
self.command = self.generator.env.RST2HTML
self.attributes = ['stylesheet']
def scan(self):
nodes, names = docutils.scan(self)
for attribute in self.attributes:
stylesheet = getattr(self.generator, attribute, None)
if stylesheet is not None:
ssnode = self.generator.to_nodes(stylesheet)[0]
nodes.append(ssnode)
Logs.debug('rst: adding dep to %s %s', attribute, stylesheet)
return nodes, names
def run(self):
cwdn = self.outputs[0].parent
src = self.inputs[0].path_from(cwdn)
dst = self.outputs[0].path_from(cwdn)
cmd = self.command + [src, dst]
cmd += Utils.to_list(getattr(self.generator, 'options', []))
for attribute in self.attributes:
stylesheet = getattr(self.generator, attribute, None)
if stylesheet is not None:
stylesheet = self.generator.to_nodes(stylesheet)[0]
cmd += ['--%s' % attribute, stylesheet.path_from(cwdn)]
return self.exec_command(cmd, cwd=cwdn.abspath())
class rst2s5(rst2html):
def __init__(self, *args, **kw):
rst2html.__init__(self, *args, **kw)
self.command = self.generator.env.RST2S5
self.attributes = ['stylesheet']
class rst2latex(rst2html):
def __init__(self, *args, **kw):
rst2html.__init__(self, *args, **kw)
self.command = self.generator.env.RST2LATEX
self.attributes = ['stylesheet']
class rst2xetex(rst2html):
def __init__(self, *args, **kw):
rst2html.__init__(self, *args, **kw)
self.command = self.generator.env.RST2XETEX
self.attributes = ['stylesheet']
class rst2pdf(docutils):
color = 'BLUE'
def run(self):
cwdn = self.outputs[0].parent
src = self.inputs[0].path_from(cwdn)
dst = self.outputs[0].path_from(cwdn)
cmd = self.generator.env.RST2PDF + [src, '-o', dst]
cmd += Utils.to_list(getattr(self.generator, 'options', []))
return self.exec_command(cmd, cwd=cwdn.abspath())
@feature('rst')
@before_method('process_source')
def apply_rst(self):
"""
Create :py:class:`rst` or other rst-related task objects
"""
if self.target:
if isinstance(self.target, Node.Node):
tgt = self.target
elif isinstance(self.target, str):
tgt = self.path.get_bld().make_node(self.target)
else:
self.bld.fatal("rst: Don't know how to build target name %s which is not a string or Node for %s" % (self.target, self))
else:
tgt = None
tsk_type = getattr(self, 'type', None)
src = self.to_nodes(self.source)
assert len(src) == 1
src = src[0]
if tsk_type is not None and tgt is None:
if tsk_type.startswith('rst2'):
ext = tsk_type[4:]
else:
self.bld.fatal("rst: Could not detect the output file extension for %s" % self)
tgt = src.change_ext('.%s' % ext)
elif tsk_type is None and tgt is not None:
out = tgt.name
ext = out[out.rfind('.')+1:]
self.type = 'rst2' + ext
elif tsk_type is not None and tgt is not None:
# the user knows what he wants
pass
else:
self.bld.fatal("rst: Need to indicate task type or target name for %s" % self)
deps_lst = []
if getattr(self, 'deps', None):
deps = self.to_list(self.deps)
for filename in deps:
n = self.path.find_resource(filename)
if not n:
self.bld.fatal('Could not find %r for %r' % (filename, self))
if not n in deps_lst:
deps_lst.append(n)
try:
task = self.create_task(self.type, src, tgt)
except KeyError:
self.bld.fatal("rst: Task of type %s not implemented (created by %s)" % (self.type, self))
task.env = self.env
# add the manual dependencies
if deps_lst:
try:
lst = self.bld.node_deps[task.uid()]
for n in deps_lst:
if not n in lst:
lst.append(n)
except KeyError:
self.bld.node_deps[task.uid()] = deps_lst
inst_to = getattr(self, 'install_path', None)
if inst_to:
self.install_task = self.add_install_files(install_to=inst_to, install_from=task.outputs[:])
self.source = []
def configure(self):
"""
Try to find the rst programs.
Do not raise any error if they are not found.
You'll have to use additional code in configure() to die
if programs were not found.
"""
for p in rst_progs:
self.find_program(p, mandatory=False)
# === projecthamster_hamster/waflib/extras/haxe.py ===
import re
from waflib import Utils, Task, Errors, Logs
from waflib.Configure import conf
from waflib.TaskGen import extension, taskgen_method
HAXE_COMPILERS = {
'JS': {'tgt': '--js', 'ext_out': ['.js']},
'LUA': {'tgt': '--lua', 'ext_out': ['.lua']},
'SWF': {'tgt': '--swf', 'ext_out': ['.swf']},
'NEKO': {'tgt': '--neko', 'ext_out': ['.n']},
'PHP': {'tgt': '--php', 'ext_out': ['.php']},
'CPP': {'tgt': '--cpp', 'ext_out': ['.h', '.cpp']},
'CPPIA': {'tgt': '--cppia', 'ext_out': ['.cppia']},
'CS': {'tgt': '--cs', 'ext_out': ['.cs']},
'JAVA': {'tgt': '--java', 'ext_out': ['.java']},
'JVM': {'tgt': '--jvm', 'ext_out': ['.jar']},
'PYTHON': {'tgt': '--python', 'ext_out': ['.py']},
'HL': {'tgt': '--hl', 'ext_out': ['.hl']},
'HLC': {'tgt': '--hl', 'ext_out': ['.h', '.c']},
}
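# A minimal usage sketch (library, file and target names are
# hypothetical; `compiler` selects one of the HAXE_COMPILERS entries
# above):
#
#   def configure(conf):
#       conf.load('haxe')
#       conf.check_haxe(mini='4.0.0')
#       conf.check_haxe_pkg(libs='hxnodejs', fetch=True)
#   def build(bld):
#       bld(source='Main.hx', target='main.js', compiler='JS', use='HXNODEJS')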
@conf
def check_haxe_pkg(self, **kw):
self.find_program('haxelib')
libs = kw.get('libs')
if not libs or not (type(libs) == str or (type(libs) == list and all(isinstance(s, str) for s in libs))):
self.fatal('Specify correct libs value in ensure call')
return
fetch = kw.get('fetch')
if not fetch is None and not type(fetch) == bool:
self.fatal('Specify correct fetch value in ensure call')
libs = [libs] if type(libs) == str else libs
halt = False
for lib in libs:
try:
self.start_msg('Checking for library %s' % lib)
output = self.cmd_and_log(self.env.HAXELIB + ['list', lib])
except Errors.WafError:
self.end_msg(False)
self.fatal('Can\'t run haxelib list, ensuring halted')
return
if lib in output:
self.end_msg(lib in output)
else:
if not fetch:
self.end_msg(False)
halt = True
continue
try:
status = self.exec_command(self.env.HAXELIB + ['install', lib])
if status:
self.end_msg(False)
self.fatal('Can\'t get %s with haxelib, ensuring halted' % lib)
return
else:
self.end_msg('downloaded', color='YELLOW')
except Errors.WafError:
self.end_msg(False)
self.fatal('Can\'t run haxelib install, ensuring halted')
return
postfix = kw.get('uselib_store') or lib.upper()
self.env.append_unique('LIB_' + postfix, lib)
if halt:
self.fatal('Can\'t find libraries in haxelib list, ensuring halted')
return
class haxe(Task.Task):
vars = ['HAXE_VERSION', 'HAXE_FLAGS']
ext_in = ['.hx']
def run(self):
cmd = self.env.HAXE + self.env.HAXE_FLAGS_DEFAULT + self.env.HAXE_FLAGS
return self.exec_command(cmd)
for COMP in HAXE_COMPILERS:
# create runners for each compile target
type("haxe_" + COMP, (haxe,), {'ext_out': HAXE_COMPILERS[COMP]['ext_out']})
@taskgen_method
def init_haxe(self):
errmsg = '%s not found, specify correct value'
try:
compiler = HAXE_COMPILERS[self.compiler]
comp_tgt = compiler['tgt']
comp_mod = '/main.c' if self.compiler == 'HLC' else ''
except (AttributeError, KeyError):
self.bld.fatal(errmsg % 'COMPILER' + ': ' + ', '.join(HAXE_COMPILERS.keys()))
return
self.env.append_value(
'HAXE_FLAGS',
[comp_tgt, self.path.get_bld().make_node(self.target + comp_mod).abspath()])
if hasattr(self, 'use'):
if not (type(self.use) == str or type(self.use) == list):
self.bld.fatal(errmsg % 'USE')
return
self.use = [self.use] if type(self.use) == str else self.use
for dep in self.use:
if self.env['LIB_' + dep]:
for lib in self.env['LIB_' + dep]:
self.env.append_value('HAXE_FLAGS', ['-lib', lib])
if hasattr(self, 'res'):
if not type(self.res) == str:
self.bld.fatal(errmsg % 'RES')
return
self.env.append_value('HAXE_FLAGS', ['-D', 'resourcesPath=%s' % self.res])
@extension('.hx')
def haxe_hook(self, node):
if len(self.source) > 1:
self.bld.fatal('Use separate task generators for multiple files')
return
src = node
tgt = self.path.get_bld().find_or_declare(self.target)
self.init_haxe()
self.create_task('haxe_' + self.compiler, src, tgt)
@conf
def check_haxe(self, mini=None, maxi=None):
self.start_msg('Checking for haxe version')
try:
curr = re.search(
r'(\d+\.?)+',
self.cmd_and_log(self.env.HAXE + ['-version'])).group()
except Errors.WafError:
self.end_msg(False)
self.fatal('Can\'t get haxe version')
return
if mini and Utils.num2ver(curr) < Utils.num2ver(mini):
self.end_msg('wrong', color='RED')
self.fatal('%s is too old, need >= %s' % (curr, mini))
return
if maxi and Utils.num2ver(curr) > Utils.num2ver(maxi):
self.end_msg('wrong', color='RED')
self.fatal('%s is too new, need <= %s' % (curr, maxi))
return
self.end_msg(curr, color='GREEN')
self.env.HAXE_VERSION = curr
def configure(self):
self.env.append_value(
'HAXE_FLAGS_DEFAULT',
['-D', 'no-compilation', '-cp', self.path.abspath()])
Logs.warn('Default flags: %s' % ' '.join(self.env.HAXE_FLAGS_DEFAULT))
self.find_program('haxe')
# === projecthamster_hamster/waflib/extras/fc_bgxlf.py ===
#! /usr/bin/env python
# encoding: utf-8
# harald at klimachs.de
from waflib.Tools import fc, fc_config, fc_scan
from waflib.Configure import conf
from waflib.Tools.compiler_fc import fc_compiler
fc_compiler['linux'].insert(0, 'fc_bgxlf')
@conf
def find_bgxlf(conf):
fc = conf.find_program(['bgxlf2003_r','bgxlf2003'], var='FC')
conf.get_xlf_version(fc)
conf.env.FC_NAME = 'BGXLF'
@conf
def bg_flags(self):
self.env.SONAME_ST = ''
self.env.FCSHLIB_MARKER = ''
self.env.FCSTLIB_MARKER = ''
self.env.FCFLAGS_fcshlib = ['-fPIC']
self.env.LINKFLAGS_fcshlib = ['-G', '-Wl,-bexpfull']
def configure(conf):
conf.find_bgxlf()
conf.find_ar()
conf.fc_flags()
conf.fc_add_flags()
conf.xlf_flags()
conf.bg_flags()
# === projecthamster_hamster/waflib/extras/fc_nfort.py ===
#! /usr/bin/env python
# encoding: utf-8
# Detection of the NEC Fortran compiler for Aurora Tsubasa
import re
from waflib.Tools import fc,fc_config,fc_scan
from waflib.Configure import conf
from waflib.Tools.compiler_fc import fc_compiler
fc_compiler['linux'].append('fc_nfort')
@conf
def find_nfort(conf):
fc=conf.find_program(['nfort'],var='FC')
conf.get_nfort_version(fc)
conf.env.FC_NAME='NFORT'
conf.env.FC_MOD_CAPITALIZATION='lower'
@conf
def nfort_flags(conf):
v=conf.env
v['_FCMODOUTFLAGS']=[]
v['FCFLAGS_DEBUG']=[]
v['FCFLAGS_fcshlib']=[]
v['LINKFLAGS_fcshlib']=[]
v['FCSTLIB_MARKER']=''
v['FCSHLIB_MARKER']=''
@conf
def get_nfort_version(conf,fc):
version_re=re.compile(r"nfort\s*\(NFORT\)\s*(?P<major>\d+)\.(?P<minor>\d+)\.",re.I).search
cmd=fc+['--version']
out,err=fc_config.getoutput(conf,cmd,stdin=False)
if out:
match=version_re(out)
else:
match=version_re(err)
if not match:
conf.fatal('Could not determine the NEC NFORT Fortran compiler version.')
else:
k=match.groupdict()
conf.env['FC_VERSION']=(k['major'],k['minor'])
def configure(conf):
conf.find_nfort()
conf.find_program('nar',var='AR')
conf.add_os_flags('ARFLAGS')
if not conf.env.ARFLAGS:
conf.env.ARFLAGS=['rcs']
conf.fc_flags()
conf.fc_add_flags()
conf.nfort_flags()
# === projecthamster_hamster/waflib/extras/c_dumbpreproc.py ===
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006-2010 (ita)
"""
Dumb C/C++ preprocessor for finding dependencies
It will look at all include files it can find after removing the comments, so the following
will always add the dependency on both "a.h" and "b.h"::
#include "a.h"
#ifdef B
#include "b.h"
#endif
int main() {
return 0;
}
To use::
def configure(conf):
conf.load('compiler_c')
conf.load('c_dumbpreproc')
"""
import re
from waflib.Tools import c_preproc
re_inc = re.compile(
'^[ \t]*(#|%:)[ \t]*(include)[ \t]*[<"](.*)[>"]\r*$',
re.IGNORECASE | re.MULTILINE)
def lines_includes(node):
code = node.read()
if c_preproc.use_trigraphs:
for (a, b) in c_preproc.trig_def:
code = code.replace(a, b)
code = c_preproc.re_nl.sub('', code)
code = c_preproc.re_cpp.sub(c_preproc.repl, code)
return [(m.group(2), m.group(3)) for m in re.finditer(re_inc, code)]
parser = c_preproc.c_parser
class dumb_parser(parser):
def addlines(self, node):
if node in self.nodes[:-1]:
return
self.currentnode_stack.append(node.parent)
# Avoid reading the same files again
try:
lines = self.parse_cache[node]
except KeyError:
lines = self.parse_cache[node] = lines_includes(node)
self.lines = lines + [(c_preproc.POPFILE, '')] + self.lines
def start(self, node, env):
try:
self.parse_cache = node.ctx.parse_cache
except AttributeError:
self.parse_cache = node.ctx.parse_cache = {}
self.addlines(node)
while self.lines:
(x, y) = self.lines.pop(0)
if x == c_preproc.POPFILE:
self.currentnode_stack.pop()
continue
self.tryfind(y, env=env)
c_preproc.c_parser = dumb_parser
# === projecthamster_hamster/waflib/extras/cppcheck.py ===
#! /usr/bin/env python
# -*- encoding: utf-8 -*-
# Michel Mooij, michel.mooij7@gmail.com
"""
Tool Description
================
This module provides a waf wrapper (i.e. waftool) around the C/C++ source code
checking tool 'cppcheck'.
See http://cppcheck.sourceforge.net/ for more information on the cppcheck tool
itself.
Note that many linux distributions already provide a ready to install version
of cppcheck. On fedora, for instance, it can be installed using yum:
'sudo yum install cppcheck'
Usage
=====
In order to use this waftool simply add it to the 'options' and 'configure'
functions of your main waf script as shown in the example below:
def options(opt):
opt.load('cppcheck', tooldir='./waftools')
def configure(conf):
conf.load('cppcheck')
Note that the example shown above assumes that the cppcheck waftool is located
in the subdirectory named 'waftools'.
When configured as shown in the example above, cppcheck will automatically
perform a source code analysis on all C/C++ build tasks that have been
defined in your waf build system.
The example shown below for a C program will be used as input for cppcheck when
building the task.
def build(bld):
bld.program(name='foo', src='foobar.c')
The result of the source code analysis will be stored both as xml and html
files in the build location for the task. Should any error be detected by
cppcheck the build will be aborted and a link to the html report will be shown.
By default, one index.html file is created for each task generator. A global
index.html file can be obtained by setting the following variable
in the configuration section:
conf.env.CPPCHECK_SINGLE_HTML = False
When needed, source code checking by cppcheck can be disabled per task, or per
detected error or warning for a particular task. It can also be disabled for
all tasks.
In order to exclude a task from source code checking add the skip option to the
task as shown below:
def build(bld):
bld.program(
name='foo',
src='foobar.c'
cppcheck_skip=True
)
When needed, problems detected by cppcheck may be suppressed using a file
containing a list of suppression rules. The relative or absolute path to this
file can be added to the build task as shown in the example below:
bld.program(
name='bar',
src='foobar.c',
cppcheck_suppress='bar.suppress'
)
A cppcheck suppress file should contain one suppress rule per line. Each of
these rules will be passed as an '--suppress=<rule>' argument to cppcheck.
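An illustrative suppress file might look like this (the rule ids and the
path are hypothetical; each line follows cppcheck's '<id>[:<file>[:<line>]]'
form):
nullPointer:src/foobar.c:45
unusedFunction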
Dependencies
================
This waftool depends on the python pygments module, it is used for source code
syntax highlighting when creating the html reports. see http://pygments.org/ for
more information on this package.
Remarks
================
The generation of the html report is originally based on the cppcheck-htmlreport.py
script that comes shipped with the cppcheck tool.
"""
import sys
import xml.etree.ElementTree as ElementTree
from waflib import Task, TaskGen, Logs, Context, Options
PYGMENTS_EXC_MSG= '''
The required module 'pygments' could not be found. Please install it using your
platform package manager (e.g. apt-get or yum), using 'pip' or 'easy_install',
see 'http://pygments.org/download/' for installation instructions.
'''
try:
import pygments
from pygments import formatters, lexers
except ImportError as e:
Logs.warn(PYGMENTS_EXC_MSG)
raise e
def options(opt):
opt.add_option('--cppcheck-skip', dest='cppcheck_skip',
default=False, action='store_true',
help='do not check C/C++ sources (default=False)')
opt.add_option('--cppcheck-err-resume', dest='cppcheck_err_resume',
default=False, action='store_true',
help='continue in case of errors (default=False)')
opt.add_option('--cppcheck-bin-enable', dest='cppcheck_bin_enable',
default='warning,performance,portability,style,unusedFunction', action='store',
help="cppcheck option '--enable=' for binaries (default=warning,performance,portability,style,unusedFunction)")
opt.add_option('--cppcheck-lib-enable', dest='cppcheck_lib_enable',
default='warning,performance,portability,style', action='store',
help="cppcheck option '--enable=' for libraries (default=warning,performance,portability,style)")
opt.add_option('--cppcheck-std-c', dest='cppcheck_std_c',
default='c99', action='store',
help='cppcheck standard to use when checking C (default=c99)')
opt.add_option('--cppcheck-std-cxx', dest='cppcheck_std_cxx',
default='c++03', action='store',
help='cppcheck standard to use when checking C++ (default=c++03)')
opt.add_option('--cppcheck-check-config', dest='cppcheck_check_config',
default=False, action='store_true',
		help='forced check for missing builtin include files, e.g. stdio.h (default=False)')
opt.add_option('--cppcheck-max-configs', dest='cppcheck_max_configs',
default='20', action='store',
help='maximum preprocessor (--max-configs) define iterations (default=20)')
opt.add_option('--cppcheck-jobs', dest='cppcheck_jobs',
default='1', action='store',
help='number of jobs (-j) to do the checking work (default=1)')
def configure(conf):
if conf.options.cppcheck_skip:
conf.env.CPPCHECK_SKIP = [True]
conf.env.CPPCHECK_STD_C = conf.options.cppcheck_std_c
conf.env.CPPCHECK_STD_CXX = conf.options.cppcheck_std_cxx
conf.env.CPPCHECK_MAX_CONFIGS = conf.options.cppcheck_max_configs
conf.env.CPPCHECK_BIN_ENABLE = conf.options.cppcheck_bin_enable
conf.env.CPPCHECK_LIB_ENABLE = conf.options.cppcheck_lib_enable
conf.env.CPPCHECK_JOBS = conf.options.cppcheck_jobs
if conf.options.cppcheck_jobs != '1' and ('unusedFunction' in conf.options.cppcheck_bin_enable or 'unusedFunction' in conf.options.cppcheck_lib_enable or 'all' in conf.options.cppcheck_bin_enable or 'all' in conf.options.cppcheck_lib_enable):
Logs.warn('cppcheck: unusedFunction cannot be used with multiple threads, cppcheck will disable it automatically')
conf.find_program('cppcheck', var='CPPCHECK')
# set to True to get a single index.html file
conf.env.CPPCHECK_SINGLE_HTML = False
@TaskGen.feature('c')
@TaskGen.feature('cxx')
def cppcheck_execute(self):
if hasattr(self.bld, 'conf'):
return
if len(self.env.CPPCHECK_SKIP) or Options.options.cppcheck_skip:
return
if getattr(self, 'cppcheck_skip', False):
return
task = self.create_task('cppcheck')
task.cmd = _tgen_create_cmd(self)
task.fatal = []
if not Options.options.cppcheck_err_resume:
task.fatal.append('error')
def _tgen_create_cmd(self):
features = getattr(self, 'features', [])
std_c = self.env.CPPCHECK_STD_C
std_cxx = self.env.CPPCHECK_STD_CXX
max_configs = self.env.CPPCHECK_MAX_CONFIGS
bin_enable = self.env.CPPCHECK_BIN_ENABLE
lib_enable = self.env.CPPCHECK_LIB_ENABLE
jobs = self.env.CPPCHECK_JOBS
cmd = self.env.CPPCHECK
args = ['--inconclusive','--report-progress','--verbose','--xml','--xml-version=2']
args.append('--max-configs=%s' % max_configs)
args.append('-j %s' % jobs)
if 'cxx' in features:
args.append('--language=c++')
args.append('--std=%s' % std_cxx)
else:
args.append('--language=c')
args.append('--std=%s' % std_c)
if Options.options.cppcheck_check_config:
args.append('--check-config')
if set(['cprogram','cxxprogram']) & set(features):
args.append('--enable=%s' % bin_enable)
else:
args.append('--enable=%s' % lib_enable)
for src in self.to_list(getattr(self, 'source', [])):
if not isinstance(src, str):
src = repr(src)
args.append(src)
for inc in self.to_incnodes(self.to_list(getattr(self, 'includes', []))):
if not isinstance(inc, str):
inc = repr(inc)
args.append('-I%s' % inc)
for inc in self.to_incnodes(self.to_list(self.env.INCLUDES)):
if not isinstance(inc, str):
inc = repr(inc)
args.append('-I%s' % inc)
return cmd + args
class cppcheck(Task.Task):
quiet = True
def run(self):
stderr = self.generator.bld.cmd_and_log(self.cmd, quiet=Context.STDERR, output=Context.STDERR)
self._save_xml_report(stderr)
defects = self._get_defects(stderr)
index = self._create_html_report(defects)
self._errors_evaluate(defects, index)
return 0
def _save_xml_report(self, s):
'''use cppcheck xml result string, add the command string used to invoke cppcheck
and save as xml file.
'''
header = '%s\n' % s.splitlines()[0]
root = ElementTree.fromstring(s)
cmd = ElementTree.SubElement(root.find('cppcheck'), 'cmd')
cmd.text = str(self.cmd)
body = ElementTree.tostring(root).decode('us-ascii')
body_html_name = 'cppcheck-%s.xml' % self.generator.get_name()
if self.env.CPPCHECK_SINGLE_HTML:
body_html_name = 'cppcheck.xml'
node = self.generator.path.get_bld().find_or_declare(body_html_name)
node.write(header + body)
def _get_defects(self, xml_string):
		'''evaluate the xml string returned by cppcheck (on stderr) and use it to create
a list of defects.
'''
defects = []
for error in ElementTree.fromstring(xml_string).iter('error'):
defect = {}
defect['id'] = error.get('id')
defect['severity'] = error.get('severity')
defect['msg'] = str(error.get('msg')).replace('<','<')
defect['verbose'] = error.get('verbose')
for location in error.findall('location'):
defect['file'] = location.get('file')
defect['line'] = str(int(location.get('line')) - 1)
defects.append(defect)
return defects
def _create_html_report(self, defects):
files, css_style_defs = self._create_html_files(defects)
index = self._create_html_index(files)
self._create_css_file(css_style_defs)
return index
def _create_html_files(self, defects):
sources = {}
defects = [defect for defect in defects if 'file' in defect]
for defect in defects:
name = defect['file']
if not name in sources:
sources[name] = [defect]
else:
sources[name].append(defect)
files = {}
css_style_defs = None
bpath = self.generator.path.get_bld().abspath()
names = list(sources.keys())
for i in range(0,len(names)):
name = names[i]
if self.env.CPPCHECK_SINGLE_HTML:
htmlfile = 'cppcheck/%i.html' % (i)
else:
htmlfile = 'cppcheck/%s%i.html' % (self.generator.get_name(),i)
errors = sources[name]
files[name] = { 'htmlfile': '%s/%s' % (bpath, htmlfile), 'errors': errors }
css_style_defs = self._create_html_file(name, htmlfile, errors)
return files, css_style_defs
def _create_html_file(self, sourcefile, htmlfile, errors):
name = self.generator.get_name()
root = ElementTree.fromstring(CPPCHECK_HTML_FILE)
title = root.find('head/title')
title.text = 'cppcheck - report - %s' % name
body = root.find('body')
for div in body.findall('div'):
if div.get('id') == 'page':
page = div
break
for div in page.findall('div'):
if div.get('id') == 'header':
h1 = div.find('h1')
h1.text = 'cppcheck report - %s' % name
if div.get('id') == 'menu':
indexlink = div.find('a')
if self.env.CPPCHECK_SINGLE_HTML:
indexlink.attrib['href'] = 'index.html'
else:
indexlink.attrib['href'] = 'index-%s.html' % name
if div.get('id') == 'content':
content = div
srcnode = self.generator.bld.root.find_node(sourcefile)
hl_lines = [e['line'] for e in errors if 'line' in e]
formatter = CppcheckHtmlFormatter(linenos=True, style='colorful', hl_lines=hl_lines, lineanchors='line')
formatter.errors = [e for e in errors if 'line' in e]
css_style_defs = formatter.get_style_defs('.highlight')
lexer = pygments.lexers.guess_lexer_for_filename(sourcefile, "")
s = pygments.highlight(srcnode.read(), lexer, formatter)
table = ElementTree.fromstring(s)
content.append(table)
s = ElementTree.tostring(root, method='html').decode('us-ascii')
		s = CPPCHECK_HTML_TYPE + s
node = self.generator.path.get_bld().find_or_declare(htmlfile)
node.write(s)
return css_style_defs
def _create_html_index(self, files):
name = self.generator.get_name()
root = ElementTree.fromstring(CPPCHECK_HTML_FILE)
title = root.find('head/title')
title.text = 'cppcheck - report - %s' % name
body = root.find('body')
for div in body.findall('div'):
if div.get('id') == 'page':
page = div
break
for div in page.findall('div'):
if div.get('id') == 'header':
h1 = div.find('h1')
h1.text = 'cppcheck report - %s' % name
if div.get('id') == 'content':
content = div
self._create_html_table(content, files)
if div.get('id') == 'menu':
indexlink = div.find('a')
if self.env.CPPCHECK_SINGLE_HTML:
indexlink.attrib['href'] = 'index.html'
else:
indexlink.attrib['href'] = 'index-%s.html' % name
s = ElementTree.tostring(root, method='html').decode('us-ascii')
		s = CPPCHECK_HTML_TYPE + s
index_html_name = 'cppcheck/index-%s.html' % name
if self.env.CPPCHECK_SINGLE_HTML:
index_html_name = 'cppcheck/index.html'
node = self.generator.path.get_bld().find_or_declare(index_html_name)
node.write(s)
return node
def _create_html_table(self, content, files):
table = ElementTree.fromstring(CPPCHECK_HTML_TABLE)
for name, val in files.items():
f = val['htmlfile']
s = '<tr><td colspan="4"><a href="%s">%s</a></td></tr>\n' % (f,name)
row = ElementTree.fromstring(s)
table.append(row)
			errors = sorted(val['errors'], key=lambda e: int(e['line']) if 'line' in e else sys.maxsize)
for e in errors:
if not 'line' in e:
s = '<tr><td></td><td>%s</td><td>%s</td><td>%s</td></tr>\n' % (e['id'], e['severity'], e['msg'])
else:
attr = ''
if e['severity'] == 'error':
attr = 'class="error"'
s = '<tr><td><a href="%s#line-%s">%s</a></td>' % (f, e['line'], e['line'])
s+= '<td>%s</td><td>%s</td><td %s>%s</td></tr>\n' % (e['id'], e['severity'], attr, e['msg'])
row = ElementTree.fromstring(s)
table.append(row)
content.append(table)
def _create_css_file(self, css_style_defs):
css = str(CPPCHECK_CSS_FILE)
if css_style_defs:
css = "%s\n%s\n" % (css, css_style_defs)
node = self.generator.path.get_bld().find_or_declare('cppcheck/style.css')
node.write(css)
def _errors_evaluate(self, errors, http_index):
name = self.generator.get_name()
fatal = self.fatal
severity = [err['severity'] for err in errors]
problems = [err for err in errors if err['severity'] != 'information']
if set(fatal) & set(severity):
exc = "\n"
exc += "\nccpcheck detected fatal error(s) in task '%s', see report for details:" % name
exc += "\n file://%r" % (http_index)
exc += "\n"
self.generator.bld.fatal(exc)
elif len(problems):
msg = "\nccpcheck detected (possible) problem(s) in task '%s', see report for details:" % name
msg += "\n file://%r" % http_index
msg += "\n"
Logs.error(msg)
class CppcheckHtmlFormatter(pygments.formatters.HtmlFormatter):
errors = []
def wrap(self, source, outfile):
line_no = 1
for i, t in super(CppcheckHtmlFormatter, self).wrap(source, outfile):
# If this is a source code line we want to add a span tag at the end.
if i == 1:
for error in self.errors:
if int(error['line']) == line_no:
t = t.replace('\n', CPPCHECK_HTML_ERROR % error['msg'])
line_no += 1
yield i, t
CPPCHECK_HTML_TYPE = \
'<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN" "http://www.w3.org/TR/html4/strict.dtd">\n'
CPPCHECK_HTML_FILE = """
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN" "http://www.w3.org/TR/html4/strict.dtd" [<!ENTITY nbsp " ">]>
<html>
<head>
<title>cppcheck - report - XXX</title>
<link href="style.css" rel="stylesheet" type="text/css" />
<style type="text/css">
</style>
</head>
<body class="body">
<div id="page-header"> </div>
<div id="page">
<div id="header">
<h1>cppcheck report - XXX</h1>
</div>
<div id="menu">
<a href="index.html">Defect list</a>
</div>
<div id="content">
</div>
<div id="footer">
<div>cppcheck - a tool for static C/C++ code analysis</div>
<div>
Internet: <a href="http://cppcheck.sourceforge.net">http://cppcheck.sourceforge.net</a><br/>
Forum: <a href="http://apps.sourceforge.net/phpbb/cppcheck/">http://apps.sourceforge.net/phpbb/cppcheck/</a><br/>
IRC: #cppcheck at irc.freenode.net
</div>
</div>
</div>
<div id="page-footer"> </div>
</body>
</html>
"""
CPPCHECK_HTML_TABLE = """
<table>
<tr>
<th>Line</th>
<th>Id</th>
<th>Severity</th>
<th>Message</th>
</tr>
</table>
"""
CPPCHECK_HTML_ERROR = \
'<span style="background: #ffaaaa;padding: 3px;"><--- %s</span>\n'
CPPCHECK_CSS_FILE = """
body.body {
font-family: Arial;
font-size: 13px;
background-color: black;
padding: 0px;
margin: 0px;
}
.error {
font-family: Arial;
font-size: 13px;
background-color: #ffb7b7;
padding: 0px;
margin: 0px;
}
th, td {
min-width: 100px;
text-align: left;
}
#page-header {
clear: both;
width: 1200px;
margin: 20px auto 0px auto;
height: 10px;
border-bottom-width: 2px;
border-bottom-style: solid;
border-bottom-color: #aaaaaa;
}
#page {
width: 1160px;
margin: auto;
border-left-width: 2px;
border-left-style: solid;
border-left-color: #aaaaaa;
border-right-width: 2px;
border-right-style: solid;
border-right-color: #aaaaaa;
background-color: White;
padding: 20px;
}
#page-footer {
clear: both;
width: 1200px;
margin: auto;
height: 10px;
border-top-width: 2px;
border-top-style: solid;
border-top-color: #aaaaaa;
}
#header {
width: 100%;
height: 70px;
background-image: url(logo.png);
background-repeat: no-repeat;
background-position: left top;
border-bottom-style: solid;
border-bottom-width: thin;
border-bottom-color: #aaaaaa;
}
#menu {
margin-top: 5px;
text-align: left;
float: left;
width: 100px;
height: 300px;
}
#menu > a {
margin-left: 10px;
display: block;
}
#content {
float: left;
width: 1020px;
margin: 5px;
padding: 0px 10px 10px 10px;
border-left-style: solid;
border-left-width: thin;
border-left-color: #aaaaaa;
}
#footer {
padding-bottom: 5px;
padding-top: 5px;
border-top-style: solid;
border-top-width: thin;
border-top-color: #aaaaaa;
clear: both;
font-size: 10px;
}
#footer > div {
float: left;
width: 33%;
}
"""
| 18,052 | Python | .py | 505 | 32.867327 | 243 | 0.700418 | projecthamster/hamster | 1,069 | 250 | 128 | GPL-3.0 | 9/5/2024, 5:12:46 PM (Europe/Amsterdam) |
20,649 | objcopy.py | projecthamster_hamster/waflib/extras/objcopy.py | #!/usr/bin/python
# Grygoriy Fuchedzhy 2010
"""
Support for converting linked targets to ihex, srec or binary files using
objcopy. Use the 'objcopy' feature in conjunction with the 'cc' or 'cxx'
feature. The 'objcopy' feature uses the following attributes:
objcopy_bfdname Target object format name (eg. ihex, srec, binary).
Defaults to ihex.
objcopy_target File name used for objcopy output. This defaults to the
target name with objcopy_bfdname as extension.
	objcopy_install_path	Install path for objcopy_target file. Defaults to ${PREFIX}/firmware.
objcopy_flags Additional flags passed to objcopy.
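A minimal usage sketch (file and target names are illustrative):
	def build(bld):
		bld(features='c cprogram objcopy',
			source='firmware.c',
			target='firmware',
			objcopy_bfdname='binary',
			objcopy_target='firmware.bin')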
"""
from waflib.Utils import def_attrs
from waflib import Task, Options
from waflib.TaskGen import feature, after_method
class objcopy(Task.Task):
run_str = '${OBJCOPY} -O ${TARGET_BFDNAME} ${OBJCOPYFLAGS} ${SRC} ${TGT}'
color = 'CYAN'
@feature('objcopy')
@after_method('apply_link')
def map_objcopy(self):
def_attrs(self,
objcopy_bfdname = 'ihex',
objcopy_target = None,
objcopy_install_path = "${PREFIX}/firmware",
objcopy_flags = '')
link_output = self.link_task.outputs[0]
if not self.objcopy_target:
self.objcopy_target = link_output.change_ext('.' + self.objcopy_bfdname).name
task = self.create_task('objcopy', src=link_output, tgt=self.path.find_or_declare(self.objcopy_target))
task.env.append_unique('TARGET_BFDNAME', self.objcopy_bfdname)
try:
task.env.append_unique('OBJCOPYFLAGS', getattr(self, 'objcopy_flags'))
except AttributeError:
pass
if self.objcopy_install_path:
self.add_install_files(install_to=self.objcopy_install_path, install_from=task.outputs[0])
def configure(ctx):
program_name = 'objcopy'
prefix = getattr(Options.options, 'cross_prefix', None)
if prefix:
program_name = '{}-{}'.format(prefix, program_name)
ctx.find_program(program_name, var='OBJCOPY', mandatory=True)
| 1,872 | Python | .py | 44 | 40.068182 | 104 | 0.750412 | projecthamster/hamster | 1,069 | 250 | 128 | GPL-3.0 | 9/5/2024, 5:12:46 PM (Europe/Amsterdam) |
20,650 | syms.py | projecthamster_hamster/waflib/extras/syms.py | #! /usr/bin/env python
# encoding: utf-8
"""
This tool supports the export_symbols_regex attribute to control which symbols
are exported from a shared library. By default, gcc exports all symbols and
msvc exports none. To use the tool, do something like:
def build(ctx):
ctx(features='c cshlib syms', source='a.c b.c', export_symbols_regex='mylib_.*', target='testlib')
only the symbols starting with 'mylib_' will be exported.
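On ELF platforms the generated .def file is a linker version script of roughly
this form (symbol names are illustrative):
	{ global:
	mylib_bar;
	mylib_foo;
	local: *; };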
"""
import re
from waflib.Context import STDOUT
from waflib.Task import Task
from waflib.Errors import WafError
from waflib.TaskGen import feature, after_method
class gen_sym(Task):
def run(self):
obj = self.inputs[0]
kw = {}
reg = getattr(self.generator, 'export_symbols_regex', '.+?')
if 'msvc' in (self.env.CC_NAME, self.env.CXX_NAME):
re_nm = re.compile(r'External\s+\|\s+_(?P<symbol>%s)\b' % reg)
cmd = (self.env.DUMPBIN or ['dumpbin']) + ['/symbols', obj.abspath()]
else:
if self.env.DEST_BINFMT == 'pe': #gcc uses nm, and has a preceding _ on windows
re_nm = re.compile(r'(T|D)\s+_(?P<symbol>%s)\b' % reg)
elif self.env.DEST_BINFMT=='mac-o':
re_nm=re.compile(r'(T|D)\s+(?P<symbol>_?(%s))\b' % reg)
else:
re_nm = re.compile(r'(T|D)\s+(?P<symbol>%s)\b' % reg)
cmd = (self.env.NM or ['nm']) + ['-g', obj.abspath()]
syms = [m.group('symbol') for m in re_nm.finditer(self.generator.bld.cmd_and_log(cmd, quiet=STDOUT, **kw))]
self.outputs[0].write('%r' % syms)
class compile_sym(Task):
def run(self):
syms = {}
for x in self.inputs:
slist = eval(x.read())
for s in slist:
syms[s] = 1
lsyms = list(syms.keys())
lsyms.sort()
if self.env.DEST_BINFMT == 'pe':
self.outputs[0].write('EXPORTS\n' + '\n'.join(lsyms))
elif self.env.DEST_BINFMT == 'elf':
self.outputs[0].write('{ global:\n' + ';\n'.join(lsyms) + ";\nlocal: *; };\n")
elif self.env.DEST_BINFMT=='mac-o':
self.outputs[0].write('\n'.join(lsyms) + '\n')
else:
raise WafError('NotImplemented')
@feature('syms')
@after_method('process_source', 'process_use', 'apply_link', 'process_uselib_local', 'propagate_uselib_vars')
def do_the_symbol_stuff(self):
def_node = self.path.find_or_declare(getattr(self, 'sym_file', self.target + '.def'))
compiled_tasks = getattr(self, 'compiled_tasks', None)
if compiled_tasks:
ins = [x.outputs[0] for x in compiled_tasks]
self.gen_sym_tasks = [self.create_task('gen_sym', x, x.change_ext('.%d.sym' % self.idx)) for x in ins]
self.create_task('compile_sym', [x.outputs[0] for x in self.gen_sym_tasks], def_node)
link_task = getattr(self, 'link_task', None)
if link_task:
self.link_task.dep_nodes.append(def_node)
if 'msvc' in (self.env.CC_NAME, self.env.CXX_NAME):
self.link_task.env.append_value('LINKFLAGS', ['/def:' + def_node.bldpath()])
elif self.env.DEST_BINFMT == 'pe':
# gcc on windows takes *.def as an additional input
self.link_task.inputs.append(def_node)
elif self.env.DEST_BINFMT == 'elf':
self.link_task.env.append_value('LINKFLAGS', ['-Wl,-version-script', '-Wl,' + def_node.bldpath()])
elif self.env.DEST_BINFMT=='mac-o':
self.link_task.env.append_value('LINKFLAGS',['-Wl,-exported_symbols_list,' + def_node.bldpath()])
else:
raise WafError('NotImplemented')
| 3,208 | Python | .py | 73 | 41.068493 | 109 | 0.670615 | projecthamster/hamster | 1,069 | 250 | 128 | GPL-3.0 | 9/5/2024, 5:12:46 PM (Europe/Amsterdam) |
20,651 | run_do_script.py | projecthamster_hamster/waflib/extras/run_do_script.py | #!/usr/bin/env python
# encoding: utf-8
# Hans-Martin von Gaudecker, 2012
"""
Run a Stata do-script in the directory specified by **ctx.bldnode**. The
first and only argument will be the name of the do-script (no extension),
which can be accessed inside the do-script by the local macro `1'. Useful
for keeping a log file.
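For instance, the first line of some_script.do could read (illustrative):
	display "Running do-file `1'"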
The tool uses the log file that is automatically kept by Stata only
for error-catching purposes, it will be destroyed if the task finished
without error. In case of an error in **some_script.do**, you can inspect
it as **some_script.log** in the **ctx.bldnode** directory.
Note that Stata will not return an error code if it exits abnormally --
catching errors relies on parsing the log file mentioned before. Should
the parser behave incorrectly please send an email to hmgaudecker [at] gmail.
**WARNING**
The tool will not work if multiple do-scripts of the same name---but in
different directories---are run at the same time! Avoid this situation.
Usage::
ctx(features='run_do_script',
source='some_script.do',
target=['some_table.tex', 'some_figure.eps'],
deps='some_data.csv')
"""
import os, re, sys
from waflib import Task, TaskGen, Logs
if sys.platform == 'darwin':
STATA_COMMANDS = ['Stata64MP', 'StataMP',
'Stata64SE', 'StataSE',
'Stata64', 'Stata']
STATAFLAGS = '-e -q do'
STATAENCODING = 'MacRoman'
elif sys.platform.startswith('linux'):
STATA_COMMANDS = ['stata-mp', 'stata-se', 'stata']
STATAFLAGS = '-b -q do'
# Not sure whether this is correct...
STATAENCODING = 'Latin-1'
elif sys.platform.lower().startswith('win'):
STATA_COMMANDS = ['StataMP-64', 'StataMP-ia',
'StataMP', 'StataSE-64',
'StataSE-ia', 'StataSE',
'Stata-64', 'Stata-ia',
'Stata.e', 'WMPSTATA',
'WSESTATA', 'WSTATA']
STATAFLAGS = '/e do'
STATAENCODING = 'Latin-1'
else:
raise Exception("Unknown sys.platform: %s " % sys.platform)
def configure(ctx):
ctx.find_program(STATA_COMMANDS, var='STATACMD', errmsg="""\n
No Stata executable found!\n\n
If Stata is needed:\n
1) Check the settings of your system path.
2) Note we are looking for Stata executables called: %s
If yours has a different name, please report to hmgaudecker [at] gmail\n
Else:\n
Do not load the 'run_do_script' tool in the main wscript.\n\n""" % STATA_COMMANDS)
ctx.env.STATAFLAGS = STATAFLAGS
ctx.env.STATAENCODING = STATAENCODING
class run_do_script_base(Task.Task):
"""Run a Stata do-script from the bldnode directory."""
run_str = '"${STATACMD}" ${STATAFLAGS} "${SRC[0].abspath()}" "${DOFILETRUNK}"'
shell = True
class run_do_script(run_do_script_base):
"""Use the log file automatically kept by Stata for error-catching.
Erase it if the task finished without error. If not, it will show
up as do_script.log in the bldnode directory.
"""
def run(self):
run_do_script_base.run(self)
ret, log_tail = self.check_erase_log_file()
if ret:
Logs.error("""Running Stata on %r failed with code %r.\n\nCheck the log file %s, last 10 lines\n\n%s\n\n\n""",
self.inputs[0], ret, self.env.LOGFILEPATH, log_tail)
return ret
def check_erase_log_file(self):
"""Parse Stata's default log file and erase it if everything okay.
Parser is based on Brendan Halpin's shell script found here:
http://teaching.sociology.ul.ie/bhalpin/wordpress/?p=122
"""
if sys.version_info.major >= 3:
kwargs = {'file': self.env.LOGFILEPATH, 'mode': 'r', 'encoding': self.env.STATAENCODING}
else:
kwargs = {'name': self.env.LOGFILEPATH, 'mode': 'r'}
with open(**kwargs) as log:
log_tail = log.readlines()[-10:]
for line in log_tail:
error_found = re.match(r"r\(([0-9]+)\)", line)
if error_found:
return error_found.group(1), ''.join(log_tail)
else:
pass
# Only end up here if the parser did not identify an error.
os.remove(self.env.LOGFILEPATH)
return None, None
@TaskGen.feature('run_do_script')
@TaskGen.before_method('process_source')
def apply_run_do_script(tg):
"""Task generator customising the options etc. to call Stata in batch
mode for running a do-script.
"""
# Convert sources and targets to nodes
src_node = tg.path.find_resource(tg.source)
tgt_nodes = [tg.path.find_or_declare(t) for t in tg.to_list(tg.target)]
tsk = tg.create_task('run_do_script', src=src_node, tgt=tgt_nodes)
tsk.env.DOFILETRUNK = os.path.splitext(src_node.name)[0]
tsk.env.LOGFILEPATH = os.path.join(tg.bld.bldnode.abspath(), '%s.log' % (tsk.env.DOFILETRUNK))
# dependencies (if the attribute 'deps' changes, trigger a recompilation)
for x in tg.to_list(getattr(tg, 'deps', [])):
node = tg.path.find_resource(x)
if not node:
tg.bld.fatal('Could not find dependency %r for running %r' % (x, src_node.abspath()))
tsk.dep_nodes.append(node)
Logs.debug('deps: found dependencies %r for running %r', tsk.dep_nodes, src_node.abspath())
# Bypass the execution of process_source by setting the source to an empty list
tg.source = []
| 4,989 | Python | .py | 116 | 39.913793 | 113 | 0.709485 | projecthamster/hamster | 1,069 | 250 | 128 | GPL-3.0 | 9/5/2024, 5:12:46 PM (Europe/Amsterdam) |
20,652 | c_nec.py | projecthamster_hamster/waflib/extras/c_nec.py | #! /usr/bin/env python
# encoding: utf-8
# harald at klimachs.de
"""
NEC SX Compiler for SX vector systems
"""
import re
from waflib import Utils
from waflib.Tools import ccroot,ar
from waflib.Configure import conf
from waflib.Tools import xlc # method xlc_common_flags
from waflib.Tools.compiler_c import c_compiler
c_compiler['linux'].append('c_nec')
@conf
def find_sxc(conf):
cc = conf.find_program(['sxcc'], var='CC')
conf.get_sxc_version(cc)
conf.env.CC = cc
conf.env.CC_NAME = 'sxcc'
@conf
def get_sxc_version(conf, fc):
version_re = re.compile(r"C\+\+/SX\s*Version\s*(?P<major>\d*)\.(?P<minor>\d*)", re.I).search
cmd = fc + ['-V']
p = Utils.subprocess.Popen(cmd, stdin=False, stdout=Utils.subprocess.PIPE, stderr=Utils.subprocess.PIPE, env=None)
out, err = p.communicate()
if out:
match = version_re(out)
else:
match = version_re(err)
if not match:
conf.fatal('Could not determine the NEC C compiler version.')
k = match.groupdict()
conf.env['C_VERSION'] = (k['major'], k['minor'])
@conf
def sxc_common_flags(conf):
v=conf.env
v['CC_SRC_F']=[]
v['CC_TGT_F']=['-c','-o']
if not v['LINK_CC']:
v['LINK_CC']=v['CC']
v['CCLNK_SRC_F']=[]
v['CCLNK_TGT_F']=['-o']
v['CPPPATH_ST']='-I%s'
v['DEFINES_ST']='-D%s'
v['LIB_ST']='-l%s'
v['LIBPATH_ST']='-L%s'
v['STLIB_ST']='-l%s'
v['STLIBPATH_ST']='-L%s'
v['RPATH_ST']=''
v['SONAME_ST']=[]
v['SHLIB_MARKER']=[]
v['STLIB_MARKER']=[]
v['LINKFLAGS_cprogram']=['']
v['cprogram_PATTERN']='%s'
v['CFLAGS_cshlib']=['-fPIC']
v['LINKFLAGS_cshlib']=['']
v['cshlib_PATTERN']='lib%s.so'
v['LINKFLAGS_cstlib']=[]
v['cstlib_PATTERN']='lib%s.a'
def configure(conf):
conf.find_sxc()
	conf.find_program('sxar', var='AR')
conf.sxc_common_flags()
conf.cc_load_tools()
conf.cc_add_flags()
conf.link_add_flags()
| 1,789 | Python | .py | 66 | 25.227273 | 115 | 0.662974 | projecthamster/hamster | 1,069 | 250 | 128 | GPL-3.0 | 9/5/2024, 5:12:46 PM (Europe/Amsterdam) |
20,653 | file_to_object.py | projecthamster_hamster/waflib/extras/file_to_object.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Tool to embed file into objects
__author__ = __maintainer__ = "Jérôme Carretero <cJ-waf@zougloub.eu>"
__copyright__ = "Jérôme Carretero, 2014"
"""
This tool allows embedding file contents in object files (.o).
It is not exactly portable, and the file contents are reachable
in various non-portable ways.
The goal here is to provide a functional interface to the embedding
of file data in objects.
See the ``playground/embedded_resources`` directory for a full example.
Usage::
bld(
name='pipeline',
# ^ Reference this in use="..." for things using the generated code
features='file_to_object',
source='some.file',
# ^ Name of the file to embed in binary section.
)
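The generated object exposes three symbols derived from the source file name,
with non-alphanumeric characters replaced by underscores; for 'some.file' these
would be (illustrative, platform conventions may add a leading underscore):
	_binary_some_file_start
	_binary_some_file_end
	_binary_some_file_size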
Known issues:
- Destination is named like source, with extension renamed to .o
eg. some.file -> some.o
"""
import os, sys
from waflib import Task, TaskGen, Errors
def filename_c_escape(x):
return x.replace("\\", "\\\\")
class file_to_object_s(Task.Task):
color = 'CYAN'
vars = ['DEST_CPU', 'DEST_BINFMT']
def run(self):
name = []
for i, x in enumerate(self.inputs[0].name):
if x.isalnum():
name.append(x)
else:
name.append('_')
file = self.inputs[0].abspath()
size = os.path.getsize(file)
if self.env.DEST_CPU in ('x86_64', 'ia', 'aarch64'):
unit = 'quad'
align = 8
elif self.env.DEST_CPU in ('x86','arm', 'thumb', 'm68k'):
unit = 'long'
align = 4
else:
raise Errors.WafError("Unsupported DEST_CPU, please report bug!")
file = filename_c_escape(file)
name = "_binary_" + "".join(name)
rodata = ".section .rodata"
if self.env.DEST_BINFMT == "mac-o":
name = "_" + name
rodata = ".section __TEXT,__const"
with open(self.outputs[0].abspath(), 'w') as f:
f.write(\
"""
.global %(name)s_start
.global %(name)s_end
.global %(name)s_size
%(rodata)s
%(name)s_start:
.incbin "%(file)s"
%(name)s_end:
.align %(align)d
%(name)s_size:
.%(unit)s 0x%(size)x
""" % locals())
class file_to_object_c(Task.Task):
color = 'CYAN'
def run(self):
name = []
for i, x in enumerate(self.inputs[0].name):
if x.isalnum():
name.append(x)
else:
name.append('_')
file = self.inputs[0].abspath()
size = os.path.getsize(file)
name = "_binary_" + "".join(name)
def char_to_num(ch):
if sys.version_info[0] < 3:
return ord(ch)
return ch
data = self.inputs[0].read('rb')
lines, line = [], []
for idx_byte, byte in enumerate(data):
line.append(byte)
if len(line) > 15 or idx_byte == size-1:
lines.append(", ".join(("0x%02x" % char_to_num(x)) for x in line))
line = []
data = ",\n ".join(lines)
self.outputs[0].write(\
"""
unsigned long %(name)s_size = %(size)dL;
char const %(name)s_start[] = {
%(data)s
};
char const %(name)s_end[] = {};
""" % locals())
@TaskGen.feature('file_to_object')
@TaskGen.before_method('process_source')
def tg_file_to_object(self):
bld = self.bld
sources = self.to_nodes(self.source)
targets = []
for src in sources:
if bld.env.F2O_METHOD == ["asm"]:
tgt = src.parent.find_or_declare(src.name + '.f2o.s')
tsk = self.create_task('file_to_object_s', src, tgt)
tsk.cwd = src.parent.abspath() # verify
else:
tgt = src.parent.find_or_declare(src.name + '.f2o.c')
tsk = self.create_task('file_to_object_c', src, tgt)
tsk.cwd = src.parent.abspath() # verify
targets.append(tgt)
self.source = targets
def configure(conf):
conf.load('gas')
conf.env.F2O_METHOD = ["c"]
| 3,481 | Python | .py | 120 | 26.15 | 72 | 0.654573 | projecthamster/hamster | 1,069 | 250 | 128 | GPL-3.0 | 9/5/2024, 5:12:46 PM (Europe/Amsterdam) |
20,654 | local_rpath.py | projecthamster_hamster/waflib/extras/local_rpath.py | #! /usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2011 (ita)
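"""
Appends the output directory of each library listed (transitively) in a task
generator's 'use' attribute to the RPATH of the linked binary, so that locally
built programs and shared libraries can find their dependencies without being
installed first. A minimal sketch, no extra build attributes are needed:
	def configure(conf):
		conf.load('local_rpath')
"""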
import copy
from waflib.TaskGen import after_method, feature
@after_method('propagate_uselib_vars')
@feature('cprogram', 'cshlib', 'cxxprogram', 'cxxshlib', 'fcprogram', 'fcshlib')
def add_rpath_stuff(self):
all = copy.copy(self.to_list(getattr(self, 'use', [])))
while all:
name = all.pop()
try:
tg = self.bld.get_tgen_by_name(name)
		except Exception:
continue
if hasattr(tg, 'link_task'):
self.env.append_value('RPATH', tg.link_task.outputs[0].parent.abspath())
all.extend(self.to_list(getattr(tg, 'use', [])))
| 594 | Python | .py | 18 | 30.611111 | 80 | 0.69808 | projecthamster/hamster | 1,069 | 250 | 128 | GPL-3.0 | 9/5/2024, 5:12:46 PM (Europe/Amsterdam) |
20,655 | msvcdeps.py | projecthamster_hamster/waflib/extras/msvcdeps.py | #!/usr/bin/env python
# encoding: utf-8
# Copyright Garmin International or its subsidiaries, 2012-2013
'''
Off-load dependency scanning from Python code to MSVC compiler
This tool is safe to load in any environment; it will only activate the
MSVC exploits when it finds that a particular taskgen uses MSVC to
compile.
Empirical testing shows about a 10% execution time savings from using
this tool as compared to c_preproc.
The technique of gutting scan() and pushing the dependency calculation
down to post_run() is cribbed from gccdeps.py.
This affects the cxx class, so make sure to load Qt5 after this tool.
Usage::
def options(opt):
opt.load('compiler_cxx')
def configure(conf):
conf.load('compiler_cxx msvcdeps')
'''
import os, sys, tempfile, threading
from waflib import Context, Errors, Logs, Task, Utils
from waflib.Tools import c_preproc, c, cxx, msvc
from waflib.TaskGen import feature, before_method
lock = threading.Lock()
PREPROCESSOR_FLAG = '/showIncludes'
INCLUDE_PATTERN = 'Note: including file:'
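# With /showIncludes, cl.exe prints one line per included header, e.g.
# (path illustrative, nesting depth is indicated by extra spaces):
#   Note: including file:  C:\project\include\foo.h
# Localized compilers translate this prefix, which this simple pattern
# does not handle.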
# Extensible by outside tools
supported_compilers = ['msvc']
@feature('c', 'cxx')
@before_method('process_source')
def apply_msvcdeps_flags(taskgen):
if taskgen.env.CC_NAME not in supported_compilers:
return
for flag in ('CFLAGS', 'CXXFLAGS'):
if taskgen.env.get_flat(flag).find(PREPROCESSOR_FLAG) < 0:
taskgen.env.append_value(flag, PREPROCESSOR_FLAG)
def get_correct_path_case(base_path, path):
'''
Return a case-corrected version of ``path`` by searching the filesystem for
``path``, relative to ``base_path``, using the case returned by the filesystem.
'''
components = Utils.split_path(path)
corrected_path = ''
if os.path.isabs(path):
corrected_path = components.pop(0).upper() + os.sep
for part in components:
part = part.lower()
search_path = os.path.join(base_path, corrected_path)
if part == '..':
corrected_path = os.path.join(corrected_path, part)
search_path = os.path.normpath(search_path)
continue
for item in sorted(os.listdir(search_path)):
if item.lower() == part:
corrected_path = os.path.join(corrected_path, item)
break
else:
raise ValueError("Can't find %r in %r" % (part, search_path))
return corrected_path
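# For example (illustrative; the result depends on the on-disk case):
#   get_correct_path_case('C:\\project', r'INCLUDE\Foo.H')
# might return 'Include\\foo.h' if that is how the path is stored on disk.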
def path_to_node(base_node, path, cached_nodes):
'''
Take the base node and the path and return a node
Results are cached because searching the node tree is expensive
The following code is executed by threads, it is not safe, so a lock is needed...
'''
# normalize the path to remove parent path components (..)
path = os.path.normpath(path)
# normalize the path case to increase likelihood of a cache hit
node_lookup_key = (base_node, os.path.normcase(path))
try:
node = cached_nodes[node_lookup_key]
except KeyError:
# retry with lock on cache miss
with lock:
try:
node = cached_nodes[node_lookup_key]
except KeyError:
path = get_correct_path_case(base_node.abspath(), path)
node = cached_nodes[node_lookup_key] = base_node.find_node(path)
return node
def post_run(self):
if self.env.CC_NAME not in supported_compilers:
return super(self.derived_msvcdeps, self).post_run()
# TODO this is unlikely to work with netcache
if getattr(self, 'cached', None):
return Task.Task.post_run(self)
resolved_nodes = []
unresolved_names = []
bld = self.generator.bld
# Dynamically bind to the cache
try:
cached_nodes = bld.cached_nodes
except AttributeError:
cached_nodes = bld.cached_nodes = {}
for path in self.msvcdeps_paths:
node = None
if os.path.isabs(path):
node = path_to_node(bld.root, path, cached_nodes)
else:
# when calling find_resource, make sure the path does not begin with '..'
base_node = bld.bldnode
path = [k for k in Utils.split_path(path) if k and k != '.']
while path[0] == '..':
path.pop(0)
base_node = base_node.parent
path = os.sep.join(path)
node = path_to_node(base_node, path, cached_nodes)
if not node:
raise ValueError('could not find %r for %r' % (path, self))
else:
if not c_preproc.go_absolute:
if not (node.is_child_of(bld.srcnode) or node.is_child_of(bld.bldnode)):
# System library
Logs.debug('msvcdeps: Ignoring system include %r', node)
continue
if id(node) == id(self.inputs[0]):
# ignore the source file, it is already in the dependencies
# this way, successful config tests may be retrieved from the cache
continue
resolved_nodes.append(node)
Logs.debug('deps: msvcdeps for %s returned %s', self, resolved_nodes)
bld.node_deps[self.uid()] = resolved_nodes
bld.raw_deps[self.uid()] = unresolved_names
try:
del self.cache_sig
except AttributeError:
pass
Task.Task.post_run(self)
def scan(self):
if self.env.CC_NAME not in supported_compilers:
return super(self.derived_msvcdeps, self).scan()
resolved_nodes = self.generator.bld.node_deps.get(self.uid(), [])
unresolved_names = []
return (resolved_nodes, unresolved_names)
def sig_implicit_deps(self):
if self.env.CC_NAME not in supported_compilers:
return super(self.derived_msvcdeps, self).sig_implicit_deps()
bld = self.generator.bld
try:
return self.compute_sig_implicit_deps()
except Errors.TaskNotReady:
raise ValueError("Please specify the build order precisely with msvcdeps (c/c++ tasks)")
except EnvironmentError:
# If a file is renamed, assume the dependencies are stale and must be recalculated
for x in bld.node_deps.get(self.uid(), []):
if not x.is_bld() and not x.exists():
try:
del x.parent.children[x.name]
except KeyError:
pass
key = self.uid()
bld.node_deps[key] = []
bld.raw_deps[key] = []
return Utils.SIG_NIL
def exec_command(self, cmd, **kw):
if self.env.CC_NAME not in supported_compilers:
return super(self.derived_msvcdeps, self).exec_command(cmd, **kw)
if not 'cwd' in kw:
kw['cwd'] = self.get_cwd()
if self.env.PATH:
env = kw['env'] = dict(kw.get('env') or self.env.env or os.environ)
env['PATH'] = self.env.PATH if isinstance(self.env.PATH, str) else os.pathsep.join(self.env.PATH)
# The Visual Studio IDE adds an environment variable that causes
# the MS compiler to send its textual output directly to the
# debugging window rather than normal stdout/stderr.
#
# This is unrecoverably bad for this tool because it will cause
# all the dependency scanning to see an empty stdout stream and
# assume that the file being compiled uses no headers.
#
# See http://blogs.msdn.com/b/freik/archive/2006/04/05/569025.aspx
#
# Attempting to repair the situation by deleting the offending
# envvar at this point in tool execution will not be good enough--
# its presence poisons the 'waf configure' step earlier. We just
# want to put a sanity check here in order to help developers
# quickly diagnose the issue if an otherwise-good Waf tree
# is then executed inside the MSVS IDE.
assert 'VS_UNICODE_OUTPUT' not in kw['env']
cmd, args = self.split_argfile(cmd)
try:
(fd, tmp) = tempfile.mkstemp()
os.write(fd, '\r\n'.join(args).encode())
os.close(fd)
self.msvcdeps_paths = []
kw['env'] = kw.get('env', os.environ.copy())
kw['cwd'] = kw.get('cwd', os.getcwd())
kw['quiet'] = Context.STDOUT
kw['output'] = Context.STDOUT
out = []
if Logs.verbose:
Logs.debug('argfile: @%r -> %r', tmp, args)
try:
raw_out = self.generator.bld.cmd_and_log(cmd + ['@' + tmp], **kw)
ret = 0
except Errors.WafError as e:
# Use e.msg if e.stdout is not set
raw_out = getattr(e, 'stdout', e.msg)
# Return non-zero error code even if we didn't
# get one from the exception object
ret = getattr(e, 'returncode', 1)
Logs.debug('msvcdeps: Running for: %s' % self.inputs[0])
for line in raw_out.splitlines():
if line.startswith(INCLUDE_PATTERN):
# Only strip whitespace after log to preserve
# dependency structure in debug output
inc_path = line[len(INCLUDE_PATTERN):]
Logs.debug('msvcdeps: Regex matched %s', inc_path)
self.msvcdeps_paths.append(inc_path.strip())
else:
out.append(line)
# Pipe through the remaining stdout content (not related to /showIncludes)
if self.generator.bld.logger:
self.generator.bld.logger.debug('out: %s' % os.linesep.join(out))
else:
sys.stdout.write(os.linesep.join(out) + os.linesep)
return ret
finally:
try:
os.remove(tmp)
except OSError:
# anti-virus and indexers can keep files open -_-
pass
def wrap_compiled_task(classname):
derived_class = type(classname, (Task.classes[classname],), {})
derived_class.derived_msvcdeps = derived_class
derived_class.post_run = post_run
derived_class.scan = scan
derived_class.sig_implicit_deps = sig_implicit_deps
derived_class.exec_command = exec_command
for k in ('c', 'cxx'):
if k in Task.classes:
wrap_compiled_task(k)
def options(opt):
raise ValueError('Do not load msvcdeps options')
| 8,835 | Python | .py | 235 | 34.638298 | 99 | 0.726964 | projecthamster/hamster | 1,069 | 250 | 128 | GPL-3.0 | 9/5/2024, 5:12:46 PM (Europe/Amsterdam) |
20,656 | clang_compilation_database.py | projecthamster_hamster/waflib/extras/clang_compilation_database.py | #!/usr/bin/env python
# encoding: utf-8
# Christoph Koke, 2013
# Alibek Omarov, 2019
"""
Writes the c and cpp compile commands into build/compile_commands.json
see http://clang.llvm.org/docs/JSONCompilationDatabase.html
Usage:
Load this tool in `options` to be able to generate database
by request in command-line and before build:
$ waf clangdb
def options(opt):
opt.load('clang_compilation_database')
Otherwise, load only in `configure` to generate it always before build.
def configure(conf):
conf.load('compiler_cxx')
...
conf.load('clang_compilation_database')
"""
from waflib import Logs, TaskGen, Task, Build, Scripting
Task.Task.keep_last_cmd = True
class ClangDbContext(Build.BuildContext):
'''generates compile_commands.json by request'''
cmd = 'clangdb'
def write_compilation_database(self):
"""
Write the clang compilation database as JSON
"""
database_file = self.bldnode.make_node('compile_commands.json')
Logs.info('Build commands will be stored in %s', database_file.path_from(self.path))
try:
root = database_file.read_json()
except IOError:
root = []
clang_db = dict((x['file'], x) for x in root)
for task in self.clang_compilation_database_tasks:
try:
cmd = task.last_cmd
except AttributeError:
continue
f_node = task.inputs[0]
filename = f_node.path_from(task.get_cwd())
entry = {
"directory": task.get_cwd().abspath(),
"arguments": cmd,
"file": filename,
}
clang_db[filename] = entry
root = list(clang_db.values())
database_file.write_json(root)
def execute(self):
"""
Build dry run
"""
self.restore()
self.cur_tasks = []
self.clang_compilation_database_tasks = []
if not self.all_envs:
self.load_envs()
self.recurse([self.run_dir])
self.pre_build()
# we need only to generate last_cmd, so override
# exec_command temporarily
def exec_command(self, *k, **kw):
return 0
for g in self.groups:
for tg in g:
try:
f = tg.post
except AttributeError:
pass
else:
f()
if isinstance(tg, Task.Task):
lst = [tg]
else: lst = tg.tasks
for tsk in lst:
if tsk.__class__.__name__ == "swig":
tsk.runnable_status()
if hasattr(tsk, 'more_tasks'):
lst.extend(tsk.more_tasks)
# Not all dynamic tasks can be processed, in some cases
# one may have to call the method "run()" like this:
#elif tsk.__class__.__name__ == 'src2c':
# tsk.run()
# if hasattr(tsk, 'more_tasks'):
# lst.extend(tsk.more_tasks)
tup = tuple(y for y in [Task.classes.get(x) for x in ('c', 'cxx')] if y)
if isinstance(tsk, tup):
self.clang_compilation_database_tasks.append(tsk)
tsk.nocache = True
old_exec = tsk.exec_command
tsk.exec_command = exec_command
tsk.run()
tsk.exec_command = old_exec
self.write_compilation_database()
EXECUTE_PATCHED = False
def patch_execute():
global EXECUTE_PATCHED
if EXECUTE_PATCHED:
return
def new_execute_build(self):
"""
Invoke clangdb command before build
"""
if self.cmd.startswith('build'):
Scripting.run_command(self.cmd.replace('build','clangdb'))
old_execute_build(self)
old_execute_build = getattr(Build.BuildContext, 'execute_build', None)
setattr(Build.BuildContext, 'execute_build', new_execute_build)
EXECUTE_PATCHED = True
patch_execute()
| 3,371 | Python | .py | 112 | 26.294643 | 86 | 0.691095 | projecthamster/hamster | 1,069 | 250 | 128 | GPL-3.0 | 9/5/2024, 5:12:46 PM (Europe/Amsterdam) |
20,657 | fc_open64.py | projecthamster_hamster/waflib/extras/fc_open64.py | #! /usr/bin/env python
# encoding: utf-8
# harald at klimachs.de
import re
from waflib import Utils
from waflib.Tools import fc,fc_config,fc_scan
from waflib.Configure import conf
from waflib.Tools.compiler_fc import fc_compiler
fc_compiler['linux'].insert(0, 'fc_open64')
@conf
def find_openf95(conf):
"""Find the Open64 Fortran Compiler (will look in the environment variable 'FC')"""
fc = conf.find_program(['openf95', 'openf90'], var='FC')
conf.get_open64_version(fc)
conf.env.FC_NAME = 'OPEN64'
conf.env.FC_MOD_CAPITALIZATION = 'UPPER.mod'
@conf
def openf95_flags(conf):
v = conf.env
v['FCFLAGS_DEBUG'] = ['-fullwarn']
@conf
def openf95_modifier_platform(conf):
dest_os = conf.env['DEST_OS'] or Utils.unversioned_sys_platform()
openf95_modifier_func = getattr(conf, 'openf95_modifier_' + dest_os, None)
if openf95_modifier_func:
openf95_modifier_func()
@conf
def get_open64_version(conf, fc):
"""Get the Open64 compiler version"""
version_re = re.compile(r"Open64 Compiler Suite: *Version *(?P<major>\d*)\.(?P<minor>\d*)", re.I).search
cmd = fc + ['-version']
out, err = fc_config.getoutput(conf,cmd,stdin=False)
if out:
match = version_re(out)
else:
match = version_re(err)
if not match:
conf.fatal('Could not determine the Open64 version.')
k = match.groupdict()
conf.env['FC_VERSION'] = (k['major'], k['minor'])
def configure(conf):
conf.find_openf95()
conf.find_ar()
conf.fc_flags()
conf.fc_add_flags()
conf.openf95_flags()
conf.openf95_modifier_platform()
| 1,510 | Python | .py | 47 | 30.191489 | 105 | 0.72865 | projecthamster/hamster | 1,069 | 250 | 128 | GPL-3.0 | 9/5/2024, 5:12:46 PM (Europe/Amsterdam) |
20,658 | fc_pgfortran.py | projecthamster_hamster/waflib/extras/fc_pgfortran.py | #! /usr/bin/env python
# encoding: utf-8
# harald at klimachs.de
import re
from waflib.Tools import fc, fc_config, fc_scan
from waflib.Configure import conf
from waflib.Tools.compiler_fc import fc_compiler
fc_compiler['linux'].append('fc_pgfortran')
@conf
def find_pgfortran(conf):
"""Find the PGI fortran compiler (will look in the environment variable 'FC')"""
fc = conf.find_program(['pgfortran', 'pgf95', 'pgf90'], var='FC')
conf.get_pgfortran_version(fc)
conf.env.FC_NAME = 'PGFC'
@conf
def pgfortran_flags(conf):
v = conf.env
v['FCFLAGS_fcshlib'] = ['-shared']
v['FCFLAGS_DEBUG'] = ['-Minform=inform', '-Mstandard'] # why not
v['FCSTLIB_MARKER'] = '-Bstatic'
v['FCSHLIB_MARKER'] = '-Bdynamic'
v['SONAME_ST'] = '-soname %s'
@conf
def get_pgfortran_version(conf,fc):
version_re = re.compile(r"The Portland Group", re.I).search
cmd = fc + ['-V']
out,err = fc_config.getoutput(conf, cmd, stdin=False)
if out:
match = version_re(out)
else:
match = version_re(err)
if not match:
conf.fatal('Could not verify PGI signature')
cmd = fc + ['-help=variable']
out,err = fc_config.getoutput(conf, cmd, stdin=False)
if out.find('COMPVER')<0:
conf.fatal('Could not determine the compiler type')
k = {}
prevk = ''
out = out.splitlines()
for line in out:
lst = line.partition('=')
if lst[1] == '=':
key = lst[0].rstrip()
if key == '':
key = prevk
val = lst[2].rstrip()
k[key] = val
else:
prevk = line.partition(' ')[0]
def isD(var):
return var in k
def isT(var):
return var in k and k[var]!='0'
conf.env['FC_VERSION'] = (k['COMPVER'].split('.'))
def configure(conf):
conf.find_pgfortran()
conf.find_ar()
conf.fc_flags()
conf.fc_add_flags()
conf.pgfortran_flags()
| 1,780 | Python | .py | 61 | 26.147541 | 81 | 0.658294 | projecthamster/hamster | 1,069 | 250 | 128 | GPL-3.0 | 9/5/2024, 5:12:46 PM (Europe/Amsterdam) |
20,659 | sas.py | projecthamster_hamster/waflib/extras/sas.py | #!/usr/bin/env python
# encoding: utf-8
# Mark Coggeshall, 2010
"SAS support"
import os
from waflib import Task, Errors, Logs
from waflib.TaskGen import feature, before_method
sas_fun, _ = Task.compile_fun('sas -sysin ${SRCFILE} -log ${LOGFILE} -print ${LSTFILE}', shell=False)
class sas(Task.Task):
vars = ['SAS', 'SASFLAGS']
def run(task):
command = 'SAS'
fun = sas_fun
node = task.inputs[0]
logfilenode = node.change_ext('.log')
lstfilenode = node.change_ext('.lst')
# set the cwd
task.cwd = task.inputs[0].parent.get_src().abspath()
Logs.debug('runner: %r on %r', command, node)
SASINPUTS = node.parent.get_bld().abspath() + os.pathsep + node.parent.get_src().abspath() + os.pathsep
task.env.env = {'SASINPUTS': SASINPUTS}
task.env.SRCFILE = node.abspath()
task.env.LOGFILE = logfilenode.abspath()
task.env.LSTFILE = lstfilenode.abspath()
ret = fun(task)
if ret:
Logs.error('Running %s on %r returned a non-zero exit', command, node)
Logs.error('SRCFILE = %r', node)
Logs.error('LOGFILE = %r', logfilenode)
Logs.error('LSTFILE = %r', lstfilenode)
return ret
@feature('sas')
@before_method('process_source')
def apply_sas(self):
if not getattr(self, 'type', None) in ('sas',):
self.type = 'sas'
self.env['logdir'] = getattr(self, 'logdir', 'log')
self.env['lstdir'] = getattr(self, 'lstdir', 'lst')
deps_lst = []
if getattr(self, 'deps', None):
deps = self.to_list(self.deps)
for filename in deps:
n = self.path.find_resource(filename)
if not n:
n = self.bld.root.find_resource(filename)
if not n:
raise Errors.WafError('cannot find input file %s for processing' % filename)
if not n in deps_lst:
deps_lst.append(n)
for node in self.to_nodes(self.source):
if self.type == 'sas':
task = self.create_task('sas', src=node)
task.dep_nodes = deps_lst
self.source = []
def configure(self):
self.find_program('sas', var='SAS', mandatory=False)
| 1,946 | Python | .py | 56 | 31.857143 | 105 | 0.686933 | projecthamster/hamster | 1,069 | 250 | 128 | GPL-3.0 | 9/5/2024, 5:12:46 PM (Europe/Amsterdam) |
20,660 | protoc.py | projecthamster_hamster/waflib/extras/protoc.py | #!/usr/bin/env python
# encoding: utf-8
# Philipp Bender, 2012
# Matt Clarkson, 2012
import re, os
from waflib.Task import Task
from waflib.TaskGen import extension
from waflib import Errors, Context, Logs
"""
A simple tool to integrate protocol buffers into your build system.
Example for C++:
def configure(conf):
conf.load('compiler_cxx cxx protoc')
def build(bld):
bld(
		features = 'cxx cxxprogram',
source = 'main.cpp file1.proto proto/file2.proto',
includes = '. proto',
target = 'executable')
Example for Python:
def configure(conf):
conf.load('python protoc')
def build(bld):
bld(
		features = 'py',
source = 'main.py file1.proto proto/file2.proto',
protoc_includes = 'proto')
Example for both Python and C++ at same time:
def configure(conf):
conf.load('cxx python protoc')
def build(bld):
bld(
		features = 'cxx py',
source = 'file1.proto proto/file2.proto',
protoc_includes = 'proto') # or includes
Example for Java:
def options(opt):
opt.load('java')
def configure(conf):
conf.load('python java protoc')
# Here you have to point to your protobuf-java JAR and have it in classpath
conf.env.CLASSPATH_PROTOBUF = ['protobuf-java-2.5.0.jar']
def build(bld):
bld(
features = 'javac protoc',
name = 'pbjava',
srcdir = 'inc/ src', # directories used by javac
source = ['inc/message_inc.proto', 'inc/message.proto'],
# source is used by protoc for .proto files
use = 'PROTOBUF',
protoc_includes = ['inc']) # for protoc to search dependencies
Protoc includes passed via protoc_includes are either relative to the taskgen
or to the project and are searched in this order.
Include directories external to the waf project can also be passed to
protoc by using protoc_extincludes
protoc_extincludes = ['/usr/include/pblib']
Notes when using this tool:
- protoc command line parsing is tricky.
The generated files can be put in subfolders which depend on
the order of the include paths.
Try to be simple when creating task generators
containing protoc stuff.
"""
class protoc(Task):
run_str = '${PROTOC} ${PROTOC_FL:PROTOC_FLAGS} ${PROTOC_ST:INCPATHS} ${PROTOC_ST:PROTOC_INCPATHS} ${PROTOC_ST:PROTOC_EXTINCPATHS} ${SRC[0].bldpath()}'
color = 'BLUE'
ext_out = ['.h', 'pb.cc', '.py', '.java']
def scan(self):
"""
Scan .proto dependencies
"""
node = self.inputs[0]
nodes = []
names = []
seen = []
search_nodes = []
if not node:
return (nodes, names)
if 'cxx' in self.generator.features:
search_nodes = self.generator.includes_nodes
if 'py' in self.generator.features or 'javac' in self.generator.features:
for incpath in getattr(self.generator, 'protoc_includes', []):
incpath_node = self.generator.path.find_node(incpath)
if incpath_node:
search_nodes.append(incpath_node)
else:
# Check if relative to top-level for extra tg dependencies
incpath_node = self.generator.bld.path.find_node(incpath)
if incpath_node:
search_nodes.append(incpath_node)
else:
raise Errors.WafError('protoc: include path %r does not exist' % incpath)
def parse_node(node):
if node in seen:
return
seen.append(node)
code = node.read().splitlines()
for line in code:
m = re.search(r'^import\s+"(.*)";.*(//)?.*', line)
if m:
dep = m.groups()[0]
for incnode in search_nodes:
found = incnode.find_resource(dep)
if found:
nodes.append(found)
parse_node(found)
else:
names.append(dep)
parse_node(node)
		# Also add the dependency paths to INCPATHS so protoc can find the included files
for deppath in nodes:
self.env.append_unique('INCPATHS', deppath.parent.bldpath())
return (nodes, names)
@extension('.proto')
def process_protoc(self, node):
incdirs = []
out_nodes = []
protoc_flags = []
# ensure PROTOC_FLAGS is a list; a copy is used below anyway
self.env.PROTOC_FLAGS = self.to_list(self.env.PROTOC_FLAGS)
if 'cxx' in self.features:
cpp_node = node.change_ext('.pb.cc')
hpp_node = node.change_ext('.pb.h')
self.source.append(cpp_node)
out_nodes.append(cpp_node)
out_nodes.append(hpp_node)
protoc_flags.append('--cpp_out=%s' % node.parent.get_bld().bldpath())
if 'py' in self.features:
py_node = node.change_ext('_pb2.py')
self.source.append(py_node)
out_nodes.append(py_node)
protoc_flags.append('--python_out=%s' % node.parent.get_bld().bldpath())
if 'javac' in self.features:
		# Make javac also pick up the java code generated in the build
if not node.parent.get_bld() in self.javac_task.srcdir:
self.javac_task.srcdir.append(node.parent.get_bld())
protoc_flags.append('--java_out=%s' % node.parent.get_bld().bldpath())
node.parent.get_bld().mkdir()
tsk = self.create_task('protoc', node, out_nodes)
tsk.env.append_value('PROTOC_FLAGS', protoc_flags)
if 'javac' in self.features:
self.javac_task.set_run_after(tsk)
# Instruct protoc where to search for .proto included files.
# For C++ standard include files dirs are used,
# but this doesn't apply to Python for example
for incpath in getattr(self, 'protoc_includes', []):
incpath_node = self.path.find_node(incpath)
if incpath_node:
incdirs.append(incpath_node.bldpath())
else:
# Check if relative to top-level for extra tg dependencies
incpath_node = self.bld.path.find_node(incpath)
if incpath_node:
incdirs.append(incpath_node.bldpath())
else:
raise Errors.WafError('protoc: include path %r does not exist' % incpath)
tsk.env.PROTOC_INCPATHS = incdirs
# Include paths external to the waf project (ie. shared pb repositories)
tsk.env.PROTOC_EXTINCPATHS = getattr(self, 'protoc_extincludes', [])
# PR2115: protoc generates output of .proto files in nested
# directories by canonicalizing paths. To avoid this we have to pass
# as first include the full directory file of the .proto file
tsk.env.prepend_value('INCPATHS', node.parent.bldpath())
use = getattr(self, 'use', '')
if not 'PROTOBUF' in use:
self.use = self.to_list(use) + ['PROTOBUF']
def configure(conf):
conf.check_cfg(package='protobuf', uselib_store='PROTOBUF', args=['--cflags', '--libs'])
conf.find_program('protoc', var='PROTOC')
conf.start_msg('Checking for protoc version')
protocver = conf.cmd_and_log(conf.env.PROTOC + ['--version'], output=Context.BOTH)
protocver = ''.join(protocver).strip()[protocver[0].rfind(' ')+1:]
conf.end_msg(protocver)
conf.env.PROTOC_MAJOR = protocver[:protocver.find('.')]
conf.env.PROTOC_ST = '-I%s'
conf.env.PROTOC_FL = '%s'
| 6,873 | Python | .py | 176 | 33.9375 | 151 | 0.676493 | projecthamster/hamster | 1,069 | 250 | 128 | GPL-3.0 | 9/5/2024, 5:12:46 PM (Europe/Amsterdam) |
20,661 | xcode6.py | projecthamster_hamster/waflib/extras/xcode6.py | #! /usr/bin/env python
# encoding: utf-8
# XCode 3/XCode 4/XCode 6/Xcode 7 generator for Waf
# Based on work by Nicolas Mercier 2011
# Extended by Simon Warg 2015, https://github.com/mimon
# XCode project file format based on http://www.monobjc.net/xcode-project-file-format.html
"""
See playground/xcode6/ for usage examples.
"""
from waflib import Context, TaskGen, Build, Utils, Errors, Logs
import os, sys
# FIXME too few extensions
XCODE_EXTS = ['.c', '.cpp', '.m', '.mm']
HEADERS_GLOB = '**/(*.h|*.hpp|*.H|*.inl)'
MAP_EXT = {
'': "folder",
'.h' : "sourcecode.c.h",
'.hh': "sourcecode.cpp.h",
'.inl': "sourcecode.cpp.h",
'.hpp': "sourcecode.cpp.h",
'.c': "sourcecode.c.c",
'.m': "sourcecode.c.objc",
'.mm': "sourcecode.cpp.objcpp",
'.cc': "sourcecode.cpp.cpp",
'.cpp': "sourcecode.cpp.cpp",
'.C': "sourcecode.cpp.cpp",
'.cxx': "sourcecode.cpp.cpp",
'.c++': "sourcecode.cpp.cpp",
'.l': "sourcecode.lex", # luthor
'.ll': "sourcecode.lex",
'.y': "sourcecode.yacc",
'.yy': "sourcecode.yacc",
'.plist': "text.plist.xml",
".nib": "wrapper.nib",
".xib": "text.xib",
}
# Used in PBXNativeTarget elements
PRODUCT_TYPE_APPLICATION = 'com.apple.product-type.application'
PRODUCT_TYPE_FRAMEWORK = 'com.apple.product-type.framework'
PRODUCT_TYPE_EXECUTABLE = 'com.apple.product-type.tool'
PRODUCT_TYPE_LIB_STATIC = 'com.apple.product-type.library.static'
PRODUCT_TYPE_LIB_DYNAMIC = 'com.apple.product-type.library.dynamic'
PRODUCT_TYPE_EXTENSION = 'com.apple.product-type.kernel-extension'
PRODUCT_TYPE_IOKIT = 'com.apple.product-type.kernel-extension.iokit'
# Used in PBXFileReference elements
FILE_TYPE_APPLICATION = 'wrapper.cfbundle'
FILE_TYPE_FRAMEWORK = 'wrapper.framework'
FILE_TYPE_LIB_DYNAMIC = 'compiled.mach-o.dylib'
FILE_TYPE_LIB_STATIC = 'archive.ar'
FILE_TYPE_EXECUTABLE = 'compiled.mach-o.executable'
# Tuple packs of the above
TARGET_TYPE_FRAMEWORK = (PRODUCT_TYPE_FRAMEWORK, FILE_TYPE_FRAMEWORK, '.framework')
TARGET_TYPE_APPLICATION = (PRODUCT_TYPE_APPLICATION, FILE_TYPE_APPLICATION, '.app')
TARGET_TYPE_DYNAMIC_LIB = (PRODUCT_TYPE_LIB_DYNAMIC, FILE_TYPE_LIB_DYNAMIC, '.dylib')
TARGET_TYPE_STATIC_LIB = (PRODUCT_TYPE_LIB_STATIC, FILE_TYPE_LIB_STATIC, '.a')
TARGET_TYPE_EXECUTABLE = (PRODUCT_TYPE_EXECUTABLE, FILE_TYPE_EXECUTABLE, '')
# Maps target type string to its data
TARGET_TYPES = {
'framework': TARGET_TYPE_FRAMEWORK,
'app': TARGET_TYPE_APPLICATION,
'dylib': TARGET_TYPE_DYNAMIC_LIB,
'stlib': TARGET_TYPE_STATIC_LIB,
'exe' :TARGET_TYPE_EXECUTABLE,
}
def delete_invalid_values(dct):
""" Deletes entries that are dictionaries or sets """
for k, v in list(dct.items()):
if isinstance(v, dict) or isinstance(v, set):
del dct[k]
return dct
"""
Configuration of the global project settings. Sets an environment variable 'PROJ_CONFIGURATION'
which is a dictionary of configuration name and buildsettings pair.
E.g.:
env.PROJ_CONFIGURATION = {
'Debug': {
'ARCHS': 'x86',
...
	},
	'Release': {
		'ARCHS': 'x86_64',
...
}
}
The user can define a completely customized dictionary in the configure() stage. Otherwise a default Debug/Release pair will be created
based on the current env.
"""
def configure(self):
if not self.env.PROJ_CONFIGURATION:
self.to_log("A default project configuration was created since no custom one was given in the configure(conf) stage. Define your custom project settings by adding PROJ_CONFIGURATION to env. The env.PROJ_CONFIGURATION must be a dictionary with at least one key, where each key is the configuration name, and the value is a dictionary of key/value settings.\n")
# Check for any added config files added by the tool 'c_config'.
if 'cfg_files' in self.env:
self.env.INCLUDES = Utils.to_list(self.env.INCLUDES) + [os.path.abspath(os.path.dirname(f)) for f in self.env.cfg_files]
# Create default project configuration?
if 'PROJ_CONFIGURATION' not in self.env:
defaults = delete_invalid_values(self.env.get_merged_dict())
self.env.PROJ_CONFIGURATION = {
"Debug": defaults,
"Release": defaults,
}
# Some build settings are required to be present by XCode. We will supply default values
# if user hasn't defined any.
defaults_required = [('PRODUCT_NAME', '$(TARGET_NAME)')]
for cfgname,settings in self.env.PROJ_CONFIGURATION.items():
for default_var, default_val in defaults_required:
if default_var not in settings:
settings[default_var] = default_val
# Error check customization
if not isinstance(self.env.PROJ_CONFIGURATION, dict):
raise Errors.ConfigurationError("The env.PROJ_CONFIGURATION must be a dictionary with at least one key, where each key is the configuration name, and the value is a dictionary of key/value settings.")
part1 = 0
part2 = 10000
part3 = 0
id = 562000999
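# newid() below generates Xcode-style 96-bit object identifiers rendered as
# 24 characters. Note that the last field uses %012d (decimal), so the result
# is merely unique and hex-looking rather than strictly hexadecimal; the
# part1/part2/part3 variables above appear to be unused leftovers.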
def newid():
global id
id += 1
return "%04X%04X%04X%012d" % (0, 10000, 0, id)
"""
Represents a tree node in the XCode project plist file format.
When written to a file, all attributes of XCodeNode are stringified together with
their values. However, attributes starting with an underscore _ are ignored
during that process, which allows you to store arbitrary values that are not supposed
to be written out.
"""
class XCodeNode(object):
def __init__(self):
self._id = newid()
self._been_written = False
def tostring(self, value):
if isinstance(value, dict):
result = "{\n"
for k,v in value.items():
result = result + "\t\t\t%s = %s;\n" % (k, self.tostring(v))
result = result + "\t\t}"
return result
elif isinstance(value, str):
return '"%s"' % value.replace('"', '\\\\\\"')
elif isinstance(value, list):
result = "(\n"
for i in value:
result = result + "\t\t\t\t%s,\n" % self.tostring(i)
result = result + "\t\t\t)"
return result
elif isinstance(value, XCodeNode):
return value._id
else:
return str(value)
def write_recursive(self, value, file):
if isinstance(value, dict):
for k,v in value.items():
self.write_recursive(v, file)
elif isinstance(value, list):
for i in value:
self.write_recursive(i, file)
elif isinstance(value, XCodeNode):
value.write(file)
def write(self, file):
if not self._been_written:
self._been_written = True
for attribute,value in self.__dict__.items():
if attribute[0] != '_':
self.write_recursive(value, file)
w = file.write
w("\t%s = {\n" % self._id)
w("\t\tisa = %s;\n" % self.__class__.__name__)
for attribute,value in self.__dict__.items():
if attribute[0] != '_':
w("\t\t%s = %s;\n" % (attribute, self.tostring(value)))
w("\t};\n\n")
# Configurations
class XCBuildConfiguration(XCodeNode):
def __init__(self, name, settings = {}, env=None):
XCodeNode.__init__(self)
self.baseConfigurationReference = ""
self.buildSettings = settings
self.name = name
if env and env.ARCH:
settings['ARCHS'] = " ".join(env.ARCH)
class XCConfigurationList(XCodeNode):
def __init__(self, configlst):
""" :param configlst: list of XCConfigurationList """
XCodeNode.__init__(self)
self.buildConfigurations = configlst
self.defaultConfigurationIsVisible = 0
self.defaultConfigurationName = configlst and configlst[0].name or ""
# Group/Files
class PBXFileReference(XCodeNode):
def __init__(self, name, path, filetype = '', sourcetree = "SOURCE_ROOT"):
XCodeNode.__init__(self)
self.fileEncoding = 4
if not filetype:
_, ext = os.path.splitext(name)
filetype = MAP_EXT.get(ext, 'text')
self.lastKnownFileType = filetype
self.explicitFileType = filetype
self.name = name
self.path = path
self.sourceTree = sourcetree
def __hash__(self):
return (self.path+self.name).__hash__()
def __eq__(self, other):
return (self.path, self.name) == (other.path, other.name)
class PBXBuildFile(XCodeNode):
""" This element indicate a file reference that is used in a PBXBuildPhase (either as an include or resource). """
def __init__(self, fileRef, settings={}):
XCodeNode.__init__(self)
# fileRef is a reference to a PBXFileReference object
self.fileRef = fileRef
# A map of key/value pairs for additional settings.
self.settings = settings
def __hash__(self):
return (self.fileRef).__hash__()
def __eq__(self, other):
return self.fileRef == other.fileRef
class PBXGroup(XCodeNode):
def __init__(self, name, sourcetree = 'SOURCE_TREE'):
XCodeNode.__init__(self)
self.children = []
self.name = name
self.sourceTree = sourcetree
# Maintain a lookup table for all PBXFileReferences
# that are contained in this group.
self._filerefs = {}
def add(self, sources):
"""
Add a list of PBXFileReferences to this group
		:param sources: list of PBXFileReference objects
"""
self._filerefs.update(dict(zip(sources, sources)))
self.children.extend(sources)
def get_sub_groups(self):
"""
Returns all child PBXGroup objects contained in this group
"""
return list(filter(lambda x: isinstance(x, PBXGroup), self.children))
def find_fileref(self, fileref):
"""
		Recursively search this group for an existing PBXFileReference. Returns None
		if none is found.
		The reason you'd want to reuse existing PBXFileReferences from a PBXGroup is that XCode doesn't like PBXFileReferences that aren't part of a PBXGroup hierarchy.
		If a reference isn't part of one, certain UI features like 'Reveal in Finder'
		stop working.
"""
if fileref in self._filerefs:
return self._filerefs[fileref]
elif self.children:
for childgroup in self.get_sub_groups():
f = childgroup.find_fileref(fileref)
if f:
return f
return None
class PBXContainerItemProxy(XCodeNode):
""" This is the element for to decorate a target item. """
def __init__(self, containerPortal, remoteGlobalIDString, remoteInfo='', proxyType=1):
XCodeNode.__init__(self)
self.containerPortal = containerPortal # PBXProject
self.remoteGlobalIDString = remoteGlobalIDString # PBXNativeTarget
self.remoteInfo = remoteInfo # Target name
self.proxyType = proxyType
class PBXTargetDependency(XCodeNode):
""" This is the element for referencing other target through content proxies. """
def __init__(self, native_target, proxy):
XCodeNode.__init__(self)
self.target = native_target
self.targetProxy = proxy
class PBXFrameworksBuildPhase(XCodeNode):
""" This is the element for the framework link build phase, i.e. linking to frameworks """
def __init__(self, pbxbuildfiles):
XCodeNode.__init__(self)
self.buildActionMask = 2147483647
self.runOnlyForDeploymentPostprocessing = 0
self.files = pbxbuildfiles #List of PBXBuildFile (.o, .framework, .dylib)
class PBXHeadersBuildPhase(XCodeNode):
""" This is the element for adding header files to be packaged into the .framework """
def __init__(self, pbxbuildfiles):
XCodeNode.__init__(self)
self.buildActionMask = 2147483647
self.runOnlyForDeploymentPostprocessing = 0
		self.files = pbxbuildfiles # List of PBXBuildFile (header files to package)
class PBXCopyFilesBuildPhase(XCodeNode):
"""
Represents the PBXCopyFilesBuildPhase section. PBXBuildFile
can be added to this node to copy files after build is done.
"""
def __init__(self, pbxbuildfiles, dstpath, dstSubpathSpec=0, *args, **kwargs):
XCodeNode.__init__(self)
self.files = pbxbuildfiles
self.dstPath = dstpath
self.dstSubfolderSpec = dstSubpathSpec
class PBXSourcesBuildPhase(XCodeNode):
""" Represents the 'Compile Sources' build phase in a Xcode target """
def __init__(self, buildfiles):
XCodeNode.__init__(self)
self.files = buildfiles # List of PBXBuildFile objects
class PBXLegacyTarget(XCodeNode):
def __init__(self, action, target=''):
XCodeNode.__init__(self)
self.buildConfigurationList = XCConfigurationList([XCBuildConfiguration('waf', {})])
if not target:
self.buildArgumentsString = "%s %s" % (sys.argv[0], action)
else:
self.buildArgumentsString = "%s %s --targets=%s" % (sys.argv[0], action, target)
self.buildPhases = []
self.buildToolPath = sys.executable
self.buildWorkingDirectory = ""
self.dependencies = []
self.name = target or action
self.productName = target or action
self.passBuildSettingsInEnvironment = 0
class PBXShellScriptBuildPhase(XCodeNode):
def __init__(self, action, target):
XCodeNode.__init__(self)
self.buildActionMask = 2147483647
self.files = []
self.inputPaths = []
self.outputPaths = []
self.runOnlyForDeploymentPostProcessing = 0
self.shellPath = "/bin/sh"
self.shellScript = "%s %s %s --targets=%s" % (sys.executable, sys.argv[0], action, target)
class PBXNativeTarget(XCodeNode):
""" Represents a target in XCode, e.g. App, DyLib, Framework etc. """
def __init__(self, target, node, target_type=TARGET_TYPE_APPLICATION, configlist=[], buildphases=[]):
XCodeNode.__init__(self)
product_type = target_type[0]
file_type = target_type[1]
self.buildConfigurationList = XCConfigurationList(configlist)
self.buildPhases = buildphases
self.buildRules = []
self.dependencies = []
self.name = target
self.productName = target
self.productType = product_type # See TARGET_TYPE_ tuples constants
self.productReference = PBXFileReference(node.name, node.abspath(), file_type, '')
def add_configuration(self, cf):
""" :type cf: XCBuildConfiguration """
self.buildConfigurationList.buildConfigurations.append(cf)
def add_build_phase(self, phase):
# Some build phase types may appear only once. If a phase type already exists, then merge them.
if ( (phase.__class__ == PBXFrameworksBuildPhase)
or (phase.__class__ == PBXSourcesBuildPhase) ):
for b in self.buildPhases:
if b.__class__ == phase.__class__:
b.files.extend(phase.files)
return
self.buildPhases.append(phase)
def add_dependency(self, depnd):
self.dependencies.append(depnd)
# Root project object
class PBXProject(XCodeNode):
def __init__(self, name, version, env):
XCodeNode.__init__(self)
if not isinstance(env.PROJ_CONFIGURATION, dict):
raise Errors.WafError("Error: env.PROJ_CONFIGURATION must be a dictionary. This is done for you if you do not define one yourself. However, did you load the xcode module at the end of your wscript configure() ?")
# Retrieve project configuration
configurations = []
for config_name, settings in env.PROJ_CONFIGURATION.items():
cf = XCBuildConfiguration(config_name, settings)
configurations.append(cf)
self.buildConfigurationList = XCConfigurationList(configurations)
self.compatibilityVersion = version[0]
self.hasScannedForEncodings = 1
self.mainGroup = PBXGroup(name)
self.projectRoot = ""
self.projectDirPath = ""
self.targets = []
self._objectVersion = version[1]
def create_target_dependency(self, target, name):
""" : param target : PXBNativeTarget """
proxy = PBXContainerItemProxy(self, target, name)
dependency = PBXTargetDependency(target, proxy)
return dependency
def write(self, file):
# Make sure this is written only once
if self._been_written:
return
w = file.write
w("// !$*UTF8*$!\n")
w("{\n")
w("\tarchiveVersion = 1;\n")
w("\tclasses = {\n")
w("\t};\n")
w("\tobjectVersion = %d;\n" % self._objectVersion)
w("\tobjects = {\n\n")
XCodeNode.write(self, file)
w("\t};\n")
w("\trootObject = %s;\n" % self._id)
w("}\n")
def add_target(self, target):
self.targets.append(target)
def get_target(self, name):
""" Get a reference to PBXNativeTarget if it exists """
for t in self.targets:
if t.name == name:
return t
return None
@TaskGen.feature('c', 'cxx')
@TaskGen.after('propagate_uselib_vars', 'apply_incpaths')
def process_xcode(self):
bld = self.bld
try:
p = bld.project
except AttributeError:
return
if not hasattr(self, 'target_type'):
return
products_group = bld.products_group
target_group = PBXGroup(self.name)
p.mainGroup.children.append(target_group)
# Determine what type to build - framework, app bundle etc.
target_type = getattr(self, 'target_type', 'app')
if target_type not in TARGET_TYPES:
raise Errors.WafError("Target type '%s' does not exists. Available options are '%s'. In target '%s'" % (target_type, "', '".join(TARGET_TYPES.keys()), self.name))
else:
target_type = TARGET_TYPES[target_type]
file_ext = target_type[2]
# Create the output node
target_node = self.path.find_or_declare(self.name+file_ext)
target = PBXNativeTarget(self.name, target_node, target_type, [], [])
products_group.children.append(target.productReference)
# Pull source files from the 'source' attribute and assign them to a UI group.
# Use a default UI group named 'Source' unless the user
# provides a 'group_files' dictionary to customize the UI grouping.
sources = getattr(self, 'source', [])
if hasattr(self, 'group_files'):
group_files = getattr(self, 'group_files', [])
for grpname,files in group_files.items():
group = bld.create_group(grpname, files)
target_group.children.append(group)
else:
group = bld.create_group('Source', sources)
target_group.children.append(group)
# Create a PBXFileReference for each source file.
# If the source file already exists as a PBXFileReference in any of the UI groups, then
# reuse that PBXFileReference object (XCode does not like it if we don't reuse)
for idx, path in enumerate(sources):
fileref = PBXFileReference(path.name, path.abspath())
existing_fileref = target_group.find_fileref(fileref)
if existing_fileref:
sources[idx] = existing_fileref
else:
sources[idx] = fileref
# If the 'source' attribute contains any file extension that XCode can't work with,
# then remove it. The allowed file extensions are defined in XCODE_EXTS.
is_valid_file_extension = lambda file: os.path.splitext(file.path)[1] in XCODE_EXTS
sources = list(filter(is_valid_file_extension, sources))
buildfiles = [bld.unique_buildfile(PBXBuildFile(x)) for x in sources]
target.add_build_phase(PBXSourcesBuildPhase(buildfiles))
# Check if any framework to link against is some other target we've made
libs = getattr(self, 'tmp_use_seen', [])
for lib in libs:
use_target = p.get_target(lib)
if use_target:
# Create an XCode dependency so that XCode knows to build the other target before this target
dependency = p.create_target_dependency(use_target, use_target.name)
target.add_dependency(dependency)
buildphase = PBXFrameworksBuildPhase([PBXBuildFile(use_target.productReference)])
target.add_build_phase(buildphase)
if lib in self.env.LIB:
self.env.LIB = list(filter(lambda x: x != lib, self.env.LIB))
# If 'export_headers' is present, add files to the Headers build phase in xcode.
# These are files that'll get packed into the Framework for instance.
exp_hdrs = getattr(self, 'export_headers', [])
hdrs = bld.as_nodes(Utils.to_list(exp_hdrs))
files = [p.mainGroup.find_fileref(PBXFileReference(n.name, n.abspath())) for n in hdrs]
files = [PBXBuildFile(f, {'ATTRIBUTES': ('Public',)}) for f in files]
buildphase = PBXHeadersBuildPhase(files)
target.add_build_phase(buildphase)
# Merge frameworks and libs into one list, and prefix the frameworks
frameworks = Utils.to_list(self.env.FRAMEWORK)
frameworks = ' '.join(['-framework %s' % (f.split('.framework')[0]) for f in frameworks])
libs = Utils.to_list(self.env.STLIB) + Utils.to_list(self.env.LIB)
libs = ' '.join(bld.env['STLIB_ST'] % t for t in libs)
# Override target specific build settings
bldsettings = {
'HEADER_SEARCH_PATHS': ['$(inherited)'] + self.env['INCPATHS'],
'LIBRARY_SEARCH_PATHS': ['$(inherited)'] + Utils.to_list(self.env.LIBPATH) + Utils.to_list(self.env.STLIBPATH) + Utils.to_list(self.env.LIBDIR),
'FRAMEWORK_SEARCH_PATHS': ['$(inherited)'] + Utils.to_list(self.env.FRAMEWORKPATH),
'OTHER_LDFLAGS': libs + ' ' + frameworks + ' ' + ' '.join(bld.env['LINKFLAGS']),
'OTHER_CPLUSPLUSFLAGS': Utils.to_list(self.env['CXXFLAGS']),
'OTHER_CFLAGS': Utils.to_list(self.env['CFLAGS']),
'INSTALL_PATH': [],
'GCC_PREPROCESSOR_DEFINITIONS': self.env['DEFINES']
}
# Install path
installpaths = Utils.to_list(getattr(self, 'install', []))
prodbuildfile = PBXBuildFile(target.productReference)
for instpath in installpaths:
bldsettings['INSTALL_PATH'].append(instpath)
target.add_build_phase(PBXCopyFilesBuildPhase([prodbuildfile], instpath))
if not bldsettings['INSTALL_PATH']:
del bldsettings['INSTALL_PATH']
# Create build settings which can override the project settings. Defaults to none if user
# did not pass argument. This will be filled up with target specific
# search paths, libs to link etc.
settings = getattr(self, 'settings', {})
# The keys represents different build configuration, e.g. Debug, Release and so on..
# Insert our generated build settings to all configuration names
keys = set(settings.keys()) | set(bld.env.PROJ_CONFIGURATION.keys())
for k in keys:
if k in settings:
settings[k].update(bldsettings)
else:
settings[k] = bldsettings
for k,v in settings.items():
target.add_configuration(XCBuildConfiguration(k, v))
p.add_target(target)
class xcode(Build.BuildContext):
cmd = 'xcode6'
fun = 'build'
def as_nodes(self, files):
""" Returns a list of waflib.Nodes from a list of string of file paths """
nodes = []
for x in files:
if not isinstance(x, str):
d = x
else:
d = self.srcnode.find_node(x)
if not d:
raise Errors.WafError('File \'%s\' was not found' % x)
nodes.append(d)
return nodes
def create_group(self, name, files):
"""
Returns a new PBXGroup containing the files (paths) passed in the files arg
		:param files: string or list of file paths
"""
group = PBXGroup(name)
"""
		Do not use unique file reference here, since XCode seems to allow only one file reference
to be referenced by a group.
"""
files_ = []
for d in self.as_nodes(Utils.to_list(files)):
fileref = PBXFileReference(d.name, d.abspath())
files_.append(fileref)
group.add(files_)
return group
def unique_buildfile(self, buildfile):
"""
Returns a unique buildfile, possibly an existing one.
Use this after you've constructed a PBXBuildFile to make sure there is
only one PBXBuildFile for the same file in the same project.
"""
try:
build_files = self.build_files
except AttributeError:
build_files = self.build_files = {}
if buildfile not in build_files:
build_files[buildfile] = buildfile
return build_files[buildfile]
def execute(self):
"""
Entry point
"""
self.restore()
if not self.all_envs:
self.load_envs()
self.recurse([self.run_dir])
appname = getattr(Context.g_module, Context.APPNAME, os.path.basename(self.srcnode.abspath()))
p = PBXProject(appname, ('Xcode 3.2', 46), self.env)
# If we don't create a Products group, then
# XCode will create one, which entails that
# we'll start to see duplicate files in the UI
# for some reason.
products_group = PBXGroup('Products')
p.mainGroup.children.append(products_group)
self.project = p
self.products_group = products_group
# post all task generators
# the process_xcode method above will be called for each target
if self.targets and self.targets != '*':
(self._min_grp, self._exact_tg) = self.get_targets()
self.current_group = 0
while self.current_group < len(self.groups):
self.post_group()
self.current_group += 1
node = self.bldnode.make_node('%s.xcodeproj' % appname)
node.mkdir()
node = node.make_node('project.pbxproj')
with open(node.abspath(), 'w') as f:
p.write(f)
Logs.pprint('GREEN', 'Wrote %r' % node.abspath())
def bind_fun(tgtype):
def fun(self, *k, **kw):
tgtype = fun.__name__
if tgtype == 'shlib' or tgtype == 'dylib':
features = 'cxx cxxshlib'
tgtype = 'dylib'
elif tgtype == 'framework':
features = 'cxx cxxshlib'
tgtype = 'framework'
elif tgtype == 'program':
features = 'cxx cxxprogram'
tgtype = 'exe'
elif tgtype == 'app':
features = 'cxx cxxprogram'
tgtype = 'app'
elif tgtype == 'stlib':
features = 'cxx cxxstlib'
tgtype = 'stlib'
lst = kw['features'] = Utils.to_list(kw.get('features', []))
for x in features.split():
if not x in kw['features']:
lst.append(x)
kw['target_type'] = tgtype
return self(*k, **kw)
fun.__name__ = tgtype
setattr(Build.BuildContext, tgtype, fun)
return fun
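# The loop below attaches one helper per target type to the build context, so a
# wscript can declare Xcode targets directly (names are illustrative):
#   def build(bld):
#       bld.app(source=bld.path.ant_glob('src/**/*.cpp'), target='MyApp')
#       bld.stlib(source='lib.cpp', target='mylib')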
for xx in 'app framework dylib shlib stlib program'.split():
bind_fun(xx)
| 24,215 | Python | .py | 612 | 36.72549 | 361 | 0.724327 | projecthamster/hamster | 1,069 | 250 | 128 | GPL-3.0 | 9/5/2024, 5:12:46 PM (Europe/Amsterdam) |
20,662 | boost.py | projecthamster_hamster/waflib/extras/boost.py | #!/usr/bin/env python
# encoding: utf-8
#
# partially based on boost.py written by Gernot Vormayr
# written by Ruediger Sonderfeld <ruediger@c-plusplus.de>, 2008
# modified by Bjoern Michaelsen, 2008
# modified by Luca Fossati, 2008
# rewritten for waf 1.5.1, Thomas Nagy, 2008
# rewritten for waf 1.6.2, Sylvain Rouquette, 2011
'''
This is an extra tool, not bundled with the default waf binary.
To add the boost tool to the waf file:
$ ./waf-light --tools=compat15,boost
or, if you have waf >= 1.6.2
$ ./waf update --files=boost
When using this tool, the wscript will look like:
def options(opt):
opt.load('compiler_cxx boost')
def configure(conf):
conf.load('compiler_cxx boost')
conf.check_boost(lib='system filesystem')
def build(bld):
bld(source='main.cpp', target='app', use='BOOST')
Options are generated, in order to specify the location of boost includes/libraries.
The `check_boost` configuration function allows you to specify which boost libraries are to be used.
It can also provide default values for the --boost-* command-line arguments.
Everything will be packaged together in a BOOST component that you can use.
When using MSVC, a lot of compilation flags need to match your BOOST build configuration:
- you may have to add /EHsc to your CXXFLAGS or define boost::throw_exception if BOOST_NO_EXCEPTIONS is defined.
Errors: C4530
- boost libraries will try to be smart and use the (pretty but often not useful) auto-linking feature of MSVC.
   So before calling `conf.check_boost` you might want to disable it by adding:
conf.env.DEFINES_BOOST += ['BOOST_ALL_NO_LIB']
Errors:
- boost might also be compiled with /MT, which links the runtime statically.
   If you have problems with redefined symbols, try:
self.env['DEFINES_%s' % var] += ['BOOST_ALL_NO_LIB']
self.env['CXXFLAGS_%s' % var] += ['/MD', '/EHsc']
Passing `--boost-linkage_autodetect` might help ensuring having a correct linkage in some basic cases.
'''
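# A slightly fuller configure() sketch (library names and keyword values are
# illustrative; the keywords mirror the --boost-* options handled below):
#   def configure(conf):
#       conf.load('compiler_cxx boost')
#       conf.check_boost(lib='system filesystem', stlib='thread', mt=True,
#                        includes='/opt/boost/include', libs='/opt/boost/lib')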
import sys
import re
from waflib import Utils, Logs, Errors
from waflib.Configure import conf
from waflib.TaskGen import feature, after_method
BOOST_LIBS = ['/usr/lib', '/usr/local/lib', '/opt/local/lib', '/sw/lib', '/lib']
BOOST_INCLUDES = ['/usr/include', '/usr/local/include', '/opt/local/include', '/sw/include']
BOOST_VERSION_FILE = 'boost/version.hpp'
BOOST_VERSION_CODE = '''
#include <iostream>
#include <boost/version.hpp>
int main() { std::cout << BOOST_LIB_VERSION << ":" << BOOST_VERSION << std::endl; }
'''
BOOST_ERROR_CODE = '''
#include <boost/system/error_code.hpp>
int main() { boost::system::error_code c; }
'''
PTHREAD_CODE = '''
#include <pthread.h>
static void* f(void*) { return 0; }
int main() {
pthread_t th;
pthread_attr_t attr;
pthread_attr_init(&attr);
pthread_create(&th, &attr, &f, 0);
pthread_join(th, 0);
pthread_cleanup_push(0, 0);
pthread_cleanup_pop(0);
pthread_attr_destroy(&attr);
}
'''
BOOST_THREAD_CODE = '''
#include <boost/thread.hpp>
int main() { boost::thread t; }
'''
BOOST_LOG_CODE = '''
#include <boost/log/trivial.hpp>
#include <boost/log/utility/setup/console.hpp>
#include <boost/log/utility/setup/common_attributes.hpp>
int main() {
using namespace boost::log;
add_common_attributes();
add_console_log(std::clog, keywords::format = "%Message%");
BOOST_LOG_TRIVIAL(debug) << "log is working" << std::endl;
}
'''
# toolsets from {boost_dir}/tools/build/v2/tools/common.jam
PLATFORM = Utils.unversioned_sys_platform()
detect_intel = lambda env: (PLATFORM == 'win32') and 'iw' or 'il'
detect_clang = lambda env: (PLATFORM == 'darwin') and 'clang-darwin' or 'clang'
detect_mingw = lambda env: (re.search('MinGW', env.CXX[0])) and 'mgw' or 'gcc'
BOOST_TOOLSETS = {
'borland': 'bcb',
'clang': detect_clang,
'como': 'como',
'cw': 'cw',
'darwin': 'xgcc',
'edg': 'edg',
'g++': detect_mingw,
'gcc': detect_mingw,
'icpc': detect_intel,
'intel': detect_intel,
'kcc': 'kcc',
'kylix': 'bck',
'mipspro': 'mp',
'mingw': 'mgw',
'msvc': 'vc',
'qcc': 'qcc',
'sun': 'sw',
'sunc++': 'sw',
'tru64cxx': 'tru',
'vacpp': 'xlc'
}
def options(opt):
opt = opt.add_option_group('Boost Options')
opt.add_option('--boost-includes', type='string',
default='', dest='boost_includes',
help='''path to the directory where the boost includes are,
e.g., /path/to/boost_1_55_0/stage/include''')
opt.add_option('--boost-libs', type='string',
default='', dest='boost_libs',
help='''path to the directory where the boost libs are,
e.g., path/to/boost_1_55_0/stage/lib''')
opt.add_option('--boost-mt', action='store_true',
default=False, dest='boost_mt',
help='select multi-threaded libraries')
opt.add_option('--boost-abi', type='string', default='', dest='boost_abi',
help='''select libraries with tags (gd for debug, static is automatically added),
see doc Boost, Getting Started, chapter 6.1''')
opt.add_option('--boost-linkage_autodetect', action="store_true", dest='boost_linkage_autodetect',
help="auto-detect boost linkage options (don't get used to it / might break other stuff)")
opt.add_option('--boost-toolset', type='string',
default='', dest='boost_toolset',
help='force a toolset e.g. msvc, vc90, \
gcc, mingw, mgw45 (default: auto)')
py_version = '%d%d' % (sys.version_info[0], sys.version_info[1])
opt.add_option('--boost-python', type='string',
default=py_version, dest='boost_python',
help='select the lib python with this version \
(default: %s)' % py_version)
@conf
def __boost_get_version_file(self, d):
if not d:
return None
dnode = self.root.find_dir(d)
if dnode:
return dnode.find_node(BOOST_VERSION_FILE)
return None
@conf
def boost_get_version(self, d):
"""silently retrieve the boost version number"""
node = self.__boost_get_version_file(d)
if node:
try:
txt = node.read()
except EnvironmentError:
Logs.error("Could not read the file %r", node.abspath())
else:
re_but1 = re.compile('^#define\\s+BOOST_LIB_VERSION\\s+"(.+)"', re.M)
m1 = re_but1.search(txt)
re_but2 = re.compile('^#define\\s+BOOST_VERSION\\s+(\\d+)', re.M)
m2 = re_but2.search(txt)
if m1 and m2:
return (m1.group(1), m2.group(1))
return self.check_cxx(fragment=BOOST_VERSION_CODE, includes=[d], execute=True, define_ret=True).split(":")
@conf
def boost_get_includes(self, *k, **kw):
includes = k and k[0] or kw.get('includes')
if includes and self.__boost_get_version_file(includes):
return includes
for d in self.environ.get('INCLUDE', '').split(';') + BOOST_INCLUDES:
if self.__boost_get_version_file(d):
return d
if includes:
self.end_msg('headers not found in %s' % includes)
self.fatal('The configuration failed')
else:
self.end_msg('headers not found, please provide a --boost-includes argument (see help)')
self.fatal('The configuration failed')
@conf
def boost_get_toolset(self, cc):
toolset = cc
if not cc:
build_platform = Utils.unversioned_sys_platform()
if build_platform in BOOST_TOOLSETS:
cc = build_platform
else:
cc = self.env.CXX_NAME
if cc in BOOST_TOOLSETS:
toolset = BOOST_TOOLSETS[cc]
return isinstance(toolset, str) and toolset or toolset(self.env)
@conf
def __boost_get_libs_path(self, *k, **kw):
''' return the lib path and all the files in it '''
if 'files' in kw:
return self.root.find_dir('.'), Utils.to_list(kw['files'])
libs = k and k[0] or kw.get('libs')
if libs:
path = self.root.find_dir(libs)
files = path.ant_glob('*boost_*')
if not libs or not files:
for d in self.environ.get('LIB', '').split(';') + BOOST_LIBS:
if not d:
continue
path = self.root.find_dir(d)
if path:
files = path.ant_glob('*boost_*')
if files:
break
path = self.root.find_dir(d + '64')
if path:
files = path.ant_glob('*boost_*')
if files:
break
if not path:
if libs:
self.end_msg('libs not found in %s' % libs)
self.fatal('The configuration failed')
else:
self.end_msg('libs not found, please provide a --boost-libs argument (see help)')
self.fatal('The configuration failed')
self.to_log('Found the boost path in %r with the libraries:' % path)
for x in files:
self.to_log(' %r' % x)
return path, files
@conf
def boost_get_libs(self, *k, **kw):
'''
return the lib path and the required libs
according to the parameters
'''
path, files = self.__boost_get_libs_path(**kw)
files = sorted(files, key=lambda f: (len(f.name), f.name), reverse=True)
toolset = self.boost_get_toolset(kw.get('toolset', ''))
toolset_pat = '(-%s[0-9]{0,3})' % toolset
version = '-%s' % self.env.BOOST_VERSION
def find_lib(re_lib, files):
for file in files:
if re_lib.search(file.name):
self.to_log('Found boost lib %s' % file)
return file
return None
# extensions from Tools.ccroot.lib_patterns
wo_ext = re.compile(r"\.(a|so|lib|dll|dylib)(\.[0-9\.]+)?$")
def format_lib_name(name):
if name.startswith('lib') and self.env.CC_NAME != 'msvc':
name = name[3:]
return wo_ext.sub("", name)
def match_libs(lib_names, is_static):
libs = []
lib_names = Utils.to_list(lib_names)
if not lib_names:
return libs
t = []
if kw.get('mt', False):
t.append('-mt')
if kw.get('abi'):
t.append('%s%s' % (is_static and '-s' or '-', kw['abi']))
elif is_static:
t.append('-s')
tags_pat = t and ''.join(t) or ''
ext = is_static and self.env.cxxstlib_PATTERN or self.env.cxxshlib_PATTERN
ext = ext.partition('%s')[2] # remove '%s' or 'lib%s' from PATTERN
for lib in lib_names:
if lib == 'python':
# for instance, with python='27',
# accepts '-py27', '-py2', '27', '-2.7' and '2'
# but will reject '-py3', '-py26', '26' and '3'
tags = '({0})?((-py{2})|(-py{1}(?=[^0-9]))|({2})|(-{1}.{3})|({1}(?=[^0-9]))|(?=[^0-9])(?!-py))'.format(tags_pat, kw['python'][0], kw['python'], kw['python'][1])
else:
tags = tags_pat
			# Try patterns from the strictest match to the loosest
for pattern in ['boost_%s%s%s%s%s$' % (lib, toolset_pat, tags, version, ext),
'boost_%s%s%s%s$' % (lib, tags, version, ext),
# Give up trying to find the right version
'boost_%s%s%s%s$' % (lib, toolset_pat, tags, ext),
'boost_%s%s%s$' % (lib, tags, ext),
'boost_%s%s$' % (lib, ext),
'boost_%s' % lib]:
self.to_log('Trying pattern %s' % pattern)
file = find_lib(re.compile(pattern), files)
if file:
libs.append(format_lib_name(file.name))
break
else:
self.end_msg('lib %s not found in %s' % (lib, path.abspath()))
self.fatal('The configuration failed')
return libs
return path.abspath(), match_libs(kw.get('lib'), False), match_libs(kw.get('stlib'), True)
@conf
def _check_pthread_flag(self, *k, **kw):
'''
Computes which flags should be added to CXXFLAGS and LINKFLAGS to compile in multi-threading mode
Yes, we *need* to put the -pthread thing in CPPFLAGS because with GCC3,
boost/thread.hpp will trigger a #error if -pthread isn't used:
boost/config/requires_threads.hpp:47:5: #error "Compiler threading support
is not turned on. Please set the correct command line options for
threading: -pthread (Linux), -pthreads (Solaris) or -mthreads (Mingw32)"
Based on _BOOST_PTHREAD_FLAG(): https://github.com/tsuna/boost.m4/blob/master/build-aux/boost.m4
'''
var = kw.get('uselib_store', 'BOOST')
self.start_msg('Checking the flags needed to use pthreads')
# The ordering *is* (sometimes) important. Some notes on the
# individual items follow:
# (none): in case threads are in libc; should be tried before -Kthread and
# other compiler flags to prevent continual compiler warnings
# -lpthreads: AIX (must check this before -lpthread)
# -Kthread: Sequent (threads in libc, but -Kthread needed for pthread.h)
# -kthread: FreeBSD kernel threads (preferred to -pthread since SMP-able)
# -llthread: LinuxThreads port on FreeBSD (also preferred to -pthread)
# -pthread: GNU Linux/GCC (kernel threads), BSD/GCC (userland threads)
# -pthreads: Solaris/GCC
# -mthreads: MinGW32/GCC, Lynx/GCC
# -mt: Sun Workshop C (may only link SunOS threads [-lthread], but it
# doesn't hurt to check since this sometimes defines pthreads too;
# also defines -D_REENTRANT)
# ... -mt is also the pthreads flag for HP/aCC
# -lpthread: GNU Linux, etc.
# --thread-safe: KAI C++
if Utils.unversioned_sys_platform() == "sunos":
# On Solaris (at least, for some versions), libc contains stubbed
# (non-functional) versions of the pthreads routines, so link-based
# tests will erroneously succeed. (We need to link with -pthreads/-mt/
# -lpthread.) (The stubs are missing pthread_cleanup_push, or rather
# a function called by this macro, so we could check for that, but
# who knows whether they'll stub that too in a future libc.) So,
# we'll just look for -pthreads and -lpthread first:
boost_pthread_flags = ["-pthreads", "-lpthread", "-mt", "-pthread"]
else:
boost_pthread_flags = ["", "-lpthreads", "-Kthread", "-kthread", "-llthread", "-pthread",
"-pthreads", "-mthreads", "-lpthread", "--thread-safe", "-mt"]
for boost_pthread_flag in boost_pthread_flags:
try:
self.env.stash()
self.env.append_value('CXXFLAGS_%s' % var, boost_pthread_flag)
self.env.append_value('LINKFLAGS_%s' % var, boost_pthread_flag)
self.check_cxx(code=PTHREAD_CODE, msg=None, use=var, execute=False)
self.end_msg(boost_pthread_flag)
return
except self.errors.ConfigurationError:
self.env.revert()
self.end_msg('None')
@conf
def check_boost(self, *k, **kw):
"""
Initialize boost libraries to be used.
Keywords: you can pass the same parameters as with the command line (without "--boost-").
Note that the command line has the priority, and should preferably be used.
"""
if not self.env['CXX']:
self.fatal('load a c++ compiler first, conf.load("compiler_cxx")')
params = {
'lib': k and k[0] or kw.get('lib'),
'stlib': kw.get('stlib')
}
for key, value in self.options.__dict__.items():
if not key.startswith('boost_'):
continue
key = key[len('boost_'):]
params[key] = value and value or kw.get(key, '')
var = kw.get('uselib_store', 'BOOST')
self.find_program('dpkg-architecture', var='DPKG_ARCHITECTURE', mandatory=False)
if self.env.DPKG_ARCHITECTURE:
deb_host_multiarch = self.cmd_and_log([self.env.DPKG_ARCHITECTURE[0], '-qDEB_HOST_MULTIARCH'])
BOOST_LIBS.insert(0, '/usr/lib/%s' % deb_host_multiarch.strip())
self.start_msg('Checking boost includes')
self.env['INCLUDES_%s' % var] = inc = self.boost_get_includes(**params)
versions = self.boost_get_version(inc)
self.env.BOOST_VERSION = versions[0]
self.env.BOOST_VERSION_NUMBER = int(versions[1])
self.end_msg("%d.%d.%d" % (int(versions[1]) / 100000,
int(versions[1]) / 100 % 1000,
int(versions[1]) % 100))
if Logs.verbose:
Logs.pprint('CYAN', ' path : %s' % self.env['INCLUDES_%s' % var])
if not params['lib'] and not params['stlib']:
return
if 'static' in kw or 'static' in params:
Logs.warn('boost: static parameter is deprecated, use stlib instead.')
self.start_msg('Checking boost libs')
path, libs, stlibs = self.boost_get_libs(**params)
self.env['LIBPATH_%s' % var] = [path]
self.env['STLIBPATH_%s' % var] = [path]
self.env['LIB_%s' % var] = libs
self.env['STLIB_%s' % var] = stlibs
self.end_msg('ok')
if Logs.verbose:
Logs.pprint('CYAN', ' path : %s' % path)
Logs.pprint('CYAN', ' shared libs : %s' % libs)
Logs.pprint('CYAN', ' static libs : %s' % stlibs)
def has_shlib(lib):
return params['lib'] and lib in params['lib']
def has_stlib(lib):
return params['stlib'] and lib in params['stlib']
def has_lib(lib):
return has_shlib(lib) or has_stlib(lib)
if has_lib('thread'):
# not inside try_link to make check visible in the output
		self._check_pthread_flag(*k, **kw)
def try_link():
if has_lib('system'):
self.check_cxx(fragment=BOOST_ERROR_CODE, use=var, execute=False)
if has_lib('thread'):
self.check_cxx(fragment=BOOST_THREAD_CODE, use=var, execute=False)
if has_lib('log'):
if not has_lib('thread'):
self.env['DEFINES_%s' % var] += ['BOOST_LOG_NO_THREADS']
if has_shlib('log'):
self.env['DEFINES_%s' % var] += ['BOOST_LOG_DYN_LINK']
self.check_cxx(fragment=BOOST_LOG_CODE, use=var, execute=False)
if params.get('linkage_autodetect', False):
self.start_msg("Attempting to detect boost linkage flags")
toolset = self.boost_get_toolset(kw.get('toolset', ''))
if toolset in ('vc',):
# disable auto-linking feature, causing error LNK1181
# because the code wants to be linked against
self.env['DEFINES_%s' % var] += ['BOOST_ALL_NO_LIB']
# if no dlls are present, we guess the .lib files are not stubs
has_dlls = False
for x in Utils.listdir(path):
if x.endswith(self.env.cxxshlib_PATTERN % ''):
has_dlls = True
break
if not has_dlls:
self.env['STLIBPATH_%s' % var] = [path]
self.env['STLIB_%s' % var] = libs
del self.env['LIB_%s' % var]
del self.env['LIBPATH_%s' % var]
# we attempt to play with some known-to-work CXXFLAGS combinations
for cxxflags in (['/MD', '/EHsc'], []):
self.env.stash()
self.env["CXXFLAGS_%s" % var] += cxxflags
try:
try_link()
except Errors.ConfigurationError as e:
self.env.revert()
exc = e
else:
self.end_msg("ok: winning cxxflags combination: %s" % (self.env["CXXFLAGS_%s" % var]))
exc = None
self.env.commit()
break
if exc is not None:
self.end_msg("Could not auto-detect boost linking flags combination, you may report it to boost.py author", ex=exc)
self.fatal('The configuration failed')
else:
self.end_msg("Boost linkage flags auto-detection not implemented (needed ?) for this toolchain")
self.fatal('The configuration failed')
else:
self.start_msg('Checking for boost linkage')
try:
try_link()
except Errors.ConfigurationError as e:
self.end_msg("Could not link against boost libraries using supplied options")
self.fatal('The configuration failed')
self.end_msg('ok')
@feature('cxx')
@after_method('apply_link')
def install_boost(self):
if install_boost.done or not Utils.is_win32 or not self.bld.cmd.startswith('install'):
return
install_boost.done = True
inst_to = getattr(self, 'install_path', '${BINDIR}')
for lib in self.env.LIB_BOOST:
try:
file = self.bld.find_file(self.env.cxxshlib_PATTERN % lib, self.env.LIBPATH_BOOST)
self.add_install_files(install_to=inst_to, install_from=self.bld.root.find_node(file))
		except Exception:
continue
install_boost.done = False
| 18,557 | Python | .py | 470 | 36.395745 | 164 | 0.680717 | projecthamster/hamster | 1,069 | 250 | 128 | GPL-3.0 | 9/5/2024, 5:12:46 PM (Europe/Amsterdam) |
20,663 | fc_solstudio.py | projecthamster_hamster/waflib/extras/fc_solstudio.py | #! /usr/bin/env python
# encoding: utf-8
# harald at klimachs.de
import re
from waflib import Utils
from waflib.Tools import fc,fc_config,fc_scan
from waflib.Configure import conf
from waflib.Tools.compiler_fc import fc_compiler
fc_compiler['linux'].append('fc_solstudio')
@conf
def find_solstudio(conf):
"""Find the Solaris Studio compiler (will look in the environment variable 'FC')"""
fc = conf.find_program(['sunf95', 'f95', 'sunf90', 'f90'], var='FC')
conf.get_solstudio_version(fc)
conf.env.FC_NAME = 'SOL'
@conf
def solstudio_flags(conf):
v = conf.env
v['FCFLAGS_fcshlib'] = ['-Kpic']
v['FCFLAGS_DEBUG'] = ['-w3']
v['LINKFLAGS_fcshlib'] = ['-G']
v['FCSTLIB_MARKER'] = '-Bstatic'
v['FCSHLIB_MARKER'] = '-Bdynamic'
v['SONAME_ST'] = '-h %s'
@conf
def solstudio_modifier_platform(conf):
dest_os = conf.env['DEST_OS'] or Utils.unversioned_sys_platform()
solstudio_modifier_func = getattr(conf, 'solstudio_modifier_' + dest_os, None)
if solstudio_modifier_func:
solstudio_modifier_func()
@conf
def get_solstudio_version(conf, fc):
"""Get the compiler version"""
version_re = re.compile(r"Sun Fortran 95 *(?P<major>\d*)\.(?P<minor>\d*)", re.I).search
cmd = fc + ['-V']
out, err = fc_config.getoutput(conf,cmd,stdin=False)
if out:
match = version_re(out)
else:
match = version_re(err)
if not match:
conf.fatal('Could not determine the Sun Studio Fortran version.')
k = match.groupdict()
conf.env['FC_VERSION'] = (k['major'], k['minor'])
def configure(conf):
conf.find_solstudio()
conf.find_ar()
conf.fc_flags()
conf.fc_add_flags()
conf.solstudio_flags()
conf.solstudio_modifier_platform()
| 1,642 | Python | .py | 51 | 30.254902 | 88 | 0.710127 | projecthamster/hamster | 1,069 | 250 | 128 | GPL-3.0 | 9/5/2024, 5:12:46 PM (Europe/Amsterdam) |
20,664 | proc.py | projecthamster_hamster/waflib/extras/proc.py | #! /usr/bin/env python
# per rosengren 2011
from os import environ, path
from waflib import TaskGen, Utils
def options(opt):
grp = opt.add_option_group('Oracle ProC Options')
grp.add_option('--oracle_home', action='store', default=environ.get('PROC_ORACLE'), help='Path to Oracle installation home (has bin/lib)')
grp.add_option('--tns_admin', action='store', default=environ.get('TNS_ADMIN'), help='Directory containing server list (TNS_NAMES.ORA)')
grp.add_option('--connection', action='store', default='dummy-user/dummy-password@dummy-server', help='Format: user/password@server')
def configure(cnf):
env = cnf.env
if not env.PROC_ORACLE:
env.PROC_ORACLE = cnf.options.oracle_home
if not env.PROC_TNS_ADMIN:
env.PROC_TNS_ADMIN = cnf.options.tns_admin
if not env.PROC_CONNECTION:
env.PROC_CONNECTION = cnf.options.connection
cnf.find_program('proc', var='PROC', path_list=env.PROC_ORACLE + path.sep + 'bin')
def proc(tsk):
env = tsk.env
gen = tsk.generator
inc_nodes = gen.to_incnodes(Utils.to_list(getattr(gen,'includes',[])) + env['INCLUDES'])
cmd = (
[env.PROC] +
['SQLCHECK=SEMANTICS'] +
(['SYS_INCLUDE=(' + ','.join(env.PROC_INCLUDES) + ')']
if env.PROC_INCLUDES else []) +
['INCLUDE=(' + ','.join(
[i.bldpath() for i in inc_nodes]
) + ')'] +
['userid=' + env.PROC_CONNECTION] +
['INAME=' + tsk.inputs[0].bldpath()] +
['ONAME=' + tsk.outputs[0].bldpath()]
)
exec_env = {
'ORACLE_HOME': env.PROC_ORACLE,
'LD_LIBRARY_PATH': env.PROC_ORACLE + path.sep + 'lib',
}
if env.PROC_TNS_ADMIN:
exec_env['TNS_ADMIN'] = env.PROC_TNS_ADMIN
return tsk.exec_command(cmd, env=exec_env)
TaskGen.declare_chain(
name = 'proc',
rule = proc,
ext_in = '.pc',
ext_out = '.c',
)
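# A minimal usage sketch (file names are illustrative): the chain above turns
# each .pc source into a generated .c file that is then compiled normally.
#   def build(bld):
#       bld.program(source='db_query.pc main.c', target='app')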
| 1,725 | Python | .py | 47 | 34.361702 | 139 | 0.685218 | projecthamster/hamster | 1,069 | 250 | 128 | GPL-3.0 | 9/5/2024, 5:12:46 PM (Europe/Amsterdam) |
20,665 | msvc_pdb.py | projecthamster_hamster/waflib/extras/msvc_pdb.py | #!/usr/bin/env python
# encoding: utf-8
# Rafaël Kooi 2019
from waflib import TaskGen
@TaskGen.feature('c', 'cxx', 'fc')
@TaskGen.after_method('propagate_uselib_vars')
def add_pdb_per_object(self):
"""For msvc/fortran, specify a unique compile pdb per object, to work
around LNK4099. Flags are updated with a unique /Fd flag based on the
task output name. This is separate from the link pdb.
"""
if not hasattr(self, 'compiled_tasks'):
return
link_task = getattr(self, 'link_task', None)
for task in self.compiled_tasks:
if task.inputs and task.inputs[0].name.lower().endswith('.rc'):
continue
add_pdb = False
for flagname in ('CFLAGS', 'CXXFLAGS', 'FCFLAGS'):
# several languages may be used at once
for flag in task.env[flagname]:
if flag[1:].lower() == 'zi':
add_pdb = True
break
if add_pdb:
node = task.outputs[0].change_ext('.pdb')
pdb_flag = '/Fd:' + node.abspath()
for flagname in ('CFLAGS', 'CXXFLAGS', 'FCFLAGS'):
buf = [pdb_flag]
for flag in task.env[flagname]:
if flag[1:3] == 'Fd' or flag[1:].lower() == 'fs' or flag[1:].lower() == 'mp':
continue
buf.append(flag)
task.env[flagname] = buf
if link_task and not node in link_task.dep_nodes:
link_task.dep_nodes.append(node)
if not node in task.outputs:
task.outputs.append(node)
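# A minimal usage sketch: load the tool after the compiler so the method above
# runs for every c/cxx/fc task generator (a /Zi flag must already be in use):
#   def configure(conf):
#       conf.load('compiler_cxx msvc_pdb')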
| 1,339 | Python | .py | 38 | 31.631579 | 82 | 0.676721 | projecthamster/hamster | 1,069 | 250 | 128 | GPL-3.0 | 9/5/2024, 5:12:46 PM (Europe/Amsterdam) |
20,666 | cython.py | projecthamster_hamster/waflib/extras/cython.py | #! /usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2010-2015
import re
from waflib import Task, Logs
from waflib.TaskGen import extension
cy_api_pat = re.compile(r'\s*?cdef\s*?(public|api)\w*')
re_cyt = re.compile(r"""
^\s* # must begin with some whitespace characters
(?:from\s+(\w+)(?:\.\w+)*\s+)? # optionally match "from foo(.baz)" and capture foo
c?import\s(\w+|[*]) # require "import bar" and capture bar
""", re.M | re.VERBOSE)
@extension('.pyx')
def add_cython_file(self, node):
"""
Process a *.pyx* file given in the list of source files. No additional
feature is required::
def build(bld):
bld(features='c cshlib pyext', source='main.c foo.pyx', target='app')
"""
ext = '.c'
if 'cxx' in self.features:
self.env.append_unique('CYTHONFLAGS', '--cplus')
ext = '.cc'
for x in getattr(self, 'cython_includes', []):
# TODO re-use these nodes in "scan" below
d = self.path.find_dir(x)
if d:
self.env.append_unique('CYTHONFLAGS', '-I%s' % d.abspath())
tsk = self.create_task('cython', node, node.change_ext(ext))
self.source += tsk.outputs
class cython(Task.Task):
run_str = '${CYTHON} ${CYTHONFLAGS} -o ${TGT[0].abspath()} ${SRC}'
color = 'GREEN'
vars = ['INCLUDES']
"""
Rebuild whenever the INCLUDES change. The variables such as CYTHONFLAGS will be appended
by the metaclass.
"""
ext_out = ['.h']
"""
The creation of a .h file is known only after the build has begun, so it is not
possible to compute a build order just by looking at the task inputs/outputs.
"""
def runnable_status(self):
"""
Perform a double-check to add the headers created by cython
to the output nodes. The scanner is executed only when the cython task
must be executed (optimization).
"""
ret = super(cython, self).runnable_status()
if ret == Task.ASK_LATER:
return ret
for x in self.generator.bld.raw_deps[self.uid()]:
if x.startswith('header:'):
self.outputs.append(self.inputs[0].parent.find_or_declare(x.replace('header:', '')))
return super(cython, self).runnable_status()
def post_run(self):
for x in self.outputs:
if x.name.endswith('.h'):
if not x.exists():
if Logs.verbose:
Logs.warn('Expected %r', x.abspath())
x.write('')
return Task.Task.post_run(self)
def scan(self):
"""
Return the dependent files (.pxd) by looking in the include folders.
Put the headers to generate in the custom list "bld.raw_deps".
		To inspect the scanner results use::
$ waf clean build --zones=deps
"""
node = self.inputs[0]
txt = node.read()
mods = set()
for m in re_cyt.finditer(txt):
if m.group(1): # matches "from foo import bar"
mods.add(m.group(1))
else:
mods.add(m.group(2))
Logs.debug('cython: mods %r', mods)
incs = getattr(self.generator, 'cython_includes', [])
incs = [self.generator.path.find_dir(x) for x in incs]
incs.append(node.parent)
found = []
missing = []
for x in sorted(mods):
for y in incs:
k = y.find_resource(x + '.pxd')
if k:
found.append(k)
break
else:
missing.append(x)
# the cython file implicitly depends on a pxd file that might be present
implicit = node.parent.find_resource(node.name[:-3] + 'pxd')
if implicit:
found.append(implicit)
Logs.debug('cython: found %r', found)
	# Now the .h files to be created - store them in bld.raw_deps for later use
has_api = False
has_public = False
for l in txt.splitlines():
if cy_api_pat.match(l):
if ' api ' in l:
has_api = True
if ' public ' in l:
has_public = True
name = node.name.replace('.pyx', '')
if has_api:
missing.append('header:%s_api.h' % name)
if has_public:
missing.append('header:%s.h' % name)
return (found, missing)
def options(ctx):
ctx.add_option('--cython-flags', action='store', default='', help='space separated list of flags to pass to cython')
def configure(ctx):
if not ctx.env.CC and not ctx.env.CXX:
ctx.fatal('Load a C/C++ compiler first')
if not ctx.env.PYTHON:
ctx.fatal('Load the python tool first!')
ctx.find_program('cython', var='CYTHON')
if hasattr(ctx.options, 'cython_flags'):
ctx.env.CYTHONFLAGS = ctx.options.cython_flags
| 4,197 | Python | .py | 124 | 30.701613 | 117 | 0.671111 | projecthamster/hamster | 1,069 | 250 | 128 | GPL-3.0 | 9/5/2024, 5:12:46 PM (Europe/Amsterdam) |
20,667 | pep8.py | projecthamster_hamster/waflib/extras/pep8.py | #! /usr/bin/env python
# encoding: utf-8
#
# written by Sylvain Rouquette, 2011
'''
Install pep8 module:
$ easy_install pep8
or
$ pip install pep8
To add the pep8 tool to the waf file:
$ ./waf-light --tools=compat15,pep8
or, if you have waf >= 1.6.2
$ ./waf update --files=pep8
Then add this to your wscript:
[at]extension('.py', 'wscript')
def run_pep8(self, node):
self.create_task('Pep8', node)
'''
import threading
from waflib import Task, Options
pep8 = __import__('pep8')
class Pep8(Task.Task):
color = 'PINK'
lock = threading.Lock()
def check_options(self):
if pep8.options:
return
pep8.options = Options.options
pep8.options.prog = 'pep8'
excl = pep8.options.exclude.split(',')
pep8.options.exclude = [s.rstrip('/') for s in excl]
if pep8.options.filename:
pep8.options.filename = pep8.options.filename.split(',')
if pep8.options.select:
pep8.options.select = pep8.options.select.split(',')
else:
pep8.options.select = []
if pep8.options.ignore:
pep8.options.ignore = pep8.options.ignore.split(',')
elif pep8.options.select:
# Ignore all checks which are not explicitly selected
pep8.options.ignore = ['']
elif pep8.options.testsuite or pep8.options.doctest:
# For doctest and testsuite, all checks are required
pep8.options.ignore = []
else:
# The default choice: ignore controversial checks
pep8.options.ignore = pep8.DEFAULT_IGNORE.split(',')
pep8.options.physical_checks = pep8.find_checks('physical_line')
pep8.options.logical_checks = pep8.find_checks('logical_line')
pep8.options.counters = dict.fromkeys(pep8.BENCHMARK_KEYS, 0)
pep8.options.messages = {}
def run(self):
with Pep8.lock:
self.check_options()
pep8.input_file(self.inputs[0].abspath())
return 0 if not pep8.get_count() else -1
def options(opt):
opt.add_option('-q', '--quiet', default=0, action='count',
help="report only file names, or nothing with -qq")
opt.add_option('-r', '--repeat', action='store_true',
help="show all occurrences of the same error")
opt.add_option('--exclude', metavar='patterns',
default=pep8.DEFAULT_EXCLUDE,
help="exclude files or directories which match these "
"comma separated patterns (default: %s)" %
pep8.DEFAULT_EXCLUDE,
dest='exclude')
opt.add_option('--filename', metavar='patterns', default='*.py',
help="when parsing directories, only check filenames "
"matching these comma separated patterns (default: "
"*.py)")
opt.add_option('--select', metavar='errors', default='',
help="select errors and warnings (e.g. E,W6)")
opt.add_option('--ignore', metavar='errors', default='',
help="skip errors and warnings (e.g. E4,W)")
opt.add_option('--show-source', action='store_true',
help="show source code for each error")
opt.add_option('--show-pep8', action='store_true',
help="show text of PEP 8 for each error")
opt.add_option('--statistics', action='store_true',
help="count errors and warnings")
opt.add_option('--count', action='store_true',
help="print total number of errors and warnings "
"to standard error and set exit code to 1 if "
"total is not null")
opt.add_option('--benchmark', action='store_true',
help="measure processing speed")
opt.add_option('--testsuite', metavar='dir',
help="run regression tests from dir")
opt.add_option('--doctest', action='store_true',
help="run doctest on myself")
| 3,476 | Python | .py | 92 | 34.01087 | 66 | 0.693472 | projecthamster/hamster | 1,069 | 250 | 128 | GPL-3.0 | 9/5/2024, 5:12:46 PM (Europe/Amsterdam) |
20,668 | stracedeps.py | projecthamster_hamster/waflib/extras/stracedeps.py | #!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2015 (ita)
"""
Execute tasks through strace to obtain dependencies after the process is run. This
scheme is similar to that of the Fabricate script.
To use::
def configure(conf):
conf.load('strace')
WARNING:
* This will not work when advanced scanners are needed (qt4/qt5)
* The overhead of running 'strace' is significant (56s -> 1m29s)
* It will not work on Windows :-)
"""
import os, re, threading
from waflib import Task, Logs, Utils
#TRACECALLS = 'trace=access,chdir,clone,creat,execve,exit_group,fork,lstat,lstat64,mkdir,open,rename,stat,stat64,symlink,vfork'
TRACECALLS = 'trace=process,file'
BANNED = ('/tmp', '/proc', '/sys', '/dev')
s_process = r'(?:clone|fork|vfork)\(.*?(?P<npid>\d+)'
s_file = r'(?P<call>\w+)\("(?P<path>([^"\\]|\\.)*)"(.*)'
re_lines = re.compile(r'^(?P<pid>\d+)\s+(?:(?:%s)|(?:%s))\r*$' % (s_file, s_process), re.IGNORECASE | re.MULTILINE)
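# Illustrative strace output lines the pattern above is built to match (pids
# and paths are examples):
#   1234  open("/usr/include/stdio.h", O_RDONLY) = 3
#   1234  clone(child_stack=NULL, flags=CLONE_VM|SIGCHLD) = 1235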
strace_lock = threading.Lock()
def configure(conf):
conf.find_program('strace')
def task_method(func):
# Decorator function to bind/replace methods on the base Task class
#
# The methods Task.exec_command and Task.sig_implicit_deps already exists and are rarely overridden
# we thus expect that we are the only ones doing this
try:
setattr(Task.Task, 'nostrace_%s' % func.__name__, getattr(Task.Task, func.__name__))
except AttributeError:
pass
setattr(Task.Task, func.__name__, func)
return func
@task_method
def get_strace_file(self):
try:
return self.strace_file
except AttributeError:
pass
if self.outputs:
ret = self.outputs[0].abspath() + '.strace'
else:
ret = '%s%s%d%s' % (self.generator.bld.bldnode.abspath(), os.sep, id(self), '.strace')
self.strace_file = ret
return ret
@task_method
def get_strace_args(self):
return (self.env.STRACE or ['strace']) + ['-e', TRACECALLS, '-f', '-o', self.get_strace_file()]
@task_method
def exec_command(self, cmd, **kw):
bld = self.generator.bld
if not 'cwd' in kw:
kw['cwd'] = self.get_cwd()
args = self.get_strace_args()
fname = self.get_strace_file()
if isinstance(cmd, list):
cmd = args + cmd
else:
cmd = '%s %s' % (' '.join(args), cmd)
	ret = -1
	try:
		ret = bld.exec_command(cmd, **kw)
	finally:
		# pre-setting ret above ensures an exception in exec_command cannot
		# cause a NameError here; deps are parsed only on success (ret == 0)
		if not ret:
			self.parse_strace_deps(fname, kw['cwd'])
	return ret
@task_method
def sig_implicit_deps(self):
# bypass the scanner functions
return
@task_method
def parse_strace_deps(self, path, cwd):
# uncomment the following line to disable the dependencies and force a file scan
# return
try:
cnt = Utils.readf(path)
finally:
try:
os.remove(path)
except OSError:
pass
if not isinstance(cwd, str):
cwd = cwd.abspath()
nodes = []
bld = self.generator.bld
try:
cache = bld.strace_cache
except AttributeError:
cache = bld.strace_cache = {}
# chdir and relative paths
pid_to_cwd = {}
global BANNED
done = set()
for m in re.finditer(re_lines, cnt):
# scraping the output of strace
pid = m.group('pid')
if m.group('npid'):
npid = m.group('npid')
pid_to_cwd[npid] = pid_to_cwd.get(pid, cwd)
continue
p = m.group('path').replace('\\"', '"')
if p == '.' or m.group().find('= -1 ENOENT') > -1:
# just to speed it up a bit
continue
if not os.path.isabs(p):
p = os.path.join(pid_to_cwd.get(pid, cwd), p)
call = m.group('call')
if call == 'chdir':
pid_to_cwd[pid] = p
continue
if p in done:
continue
done.add(p)
for x in BANNED:
if p.startswith(x):
break
else:
if p.endswith('/') or os.path.isdir(p):
continue
try:
node = cache[p]
except KeyError:
strace_lock.acquire()
try:
cache[p] = node = bld.root.find_node(p)
if not node:
continue
finally:
strace_lock.release()
nodes.append(node)
# record the dependencies then force the task signature recalculation for next time
if Logs.verbose:
Logs.debug('deps: real scanner for %r returned %r', self, nodes)
bld = self.generator.bld
bld.node_deps[self.uid()] = nodes
bld.raw_deps[self.uid()] = []
try:
del self.cache_sig
except AttributeError:
pass
self.signature()
| 4,102 | Python | .py | 143 | 25.958042 | 127 | 0.68279 | projecthamster/hamster | 1,069 | 250 | 128 | GPL-3.0 | 9/5/2024, 5:12:46 PM (Europe/Amsterdam) |
20,669 | cabal.py | projecthamster_hamster/waflib/extras/cabal.py | #!/usr/bin/env python
# encoding: utf-8
# Anton Feldmann, 2012
# "Base for cabal"
from waflib import Task, Utils
from waflib.TaskGen import extension
from waflib.Utils import threading
from shutil import rmtree
lock = threading.Lock()
registering = False
def configure(self):
self.find_program('cabal', var='CABAL')
self.find_program('ghc-pkg', var='GHCPKG')
pkgconfd = self.bldnode.abspath() + '/package.conf.d'
self.env.PREFIX = self.bldnode.abspath() + '/dist'
self.env.PKGCONFD = pkgconfd
if self.root.find_node(pkgconfd + '/package.cache'):
self.msg('Using existing package database', pkgconfd, color='CYAN')
else:
pkgdir = self.root.find_dir(pkgconfd)
if pkgdir:
self.msg('Deleting corrupt package database', pkgdir.abspath(), color ='RED')
rmtree(pkgdir.abspath())
pkgdir = None
self.cmd_and_log(self.env.GHCPKG + ['init', pkgconfd])
self.msg('Created package database', pkgconfd, color = 'YELLOW' if pkgdir else 'GREEN')
@extension('.cabal')
def process_cabal(self, node):
out_dir_node = self.bld.root.find_dir(self.bld.out_dir)
package_node = node.change_ext('.package')
package_node = out_dir_node.find_or_declare(package_node.name)
build_node = node.parent.get_bld()
build_path = build_node.abspath()
config_node = build_node.find_or_declare('setup-config')
inplace_node = build_node.find_or_declare('package.conf.inplace')
config_task = self.create_task('cabal_configure', node)
config_task.cwd = node.parent.abspath()
config_task.depends_on = getattr(self, 'depends_on', '')
config_task.build_path = build_path
config_task.set_outputs(config_node)
build_task = self.create_task('cabal_build', config_node)
build_task.cwd = node.parent.abspath()
build_task.build_path = build_path
build_task.set_outputs(inplace_node)
copy_task = self.create_task('cabal_copy', inplace_node)
copy_task.cwd = node.parent.abspath()
copy_task.depends_on = getattr(self, 'depends_on', '')
copy_task.build_path = build_path
last_task = copy_task
task_list = [config_task, build_task, copy_task]
if (getattr(self, 'register', False)):
register_task = self.create_task('cabal_register', inplace_node)
register_task.cwd = node.parent.abspath()
register_task.set_run_after(copy_task)
register_task.build_path = build_path
pkgreg_task = self.create_task('ghcpkg_register', inplace_node)
pkgreg_task.cwd = node.parent.abspath()
pkgreg_task.set_run_after(register_task)
pkgreg_task.build_path = build_path
last_task = pkgreg_task
task_list += [register_task, pkgreg_task]
touch_task = self.create_task('cabal_touch', inplace_node)
touch_task.set_run_after(last_task)
touch_task.set_outputs(package_node)
touch_task.build_path = build_path
task_list += [touch_task]
return task_list
def get_all_src_deps(node):
hs_deps = node.ant_glob('**/*.hs')
hsc_deps = node.ant_glob('**/*.hsc')
lhs_deps = node.ant_glob('**/*.lhs')
c_deps = node.ant_glob('**/*.c')
cpp_deps = node.ant_glob('**/*.cpp')
proto_deps = node.ant_glob('**/*.proto')
return sum([hs_deps, hsc_deps, lhs_deps, c_deps, cpp_deps, proto_deps], [])
class Cabal(Task.Task):
def scan(self):
return (get_all_src_deps(self.generator.path), ())
class cabal_configure(Cabal):
run_str = '${CABAL} configure -v0 --prefix=${PREFIX} --global --user --package-db=${PKGCONFD} --builddir=${tsk.build_path}'
shell = True
def scan(self):
out_node = self.generator.bld.root.find_dir(self.generator.bld.out_dir)
deps = [out_node.find_or_declare(dep).change_ext('.package') for dep in Utils.to_list(self.depends_on)]
return (deps, ())
class cabal_build(Cabal):
run_str = '${CABAL} build -v1 --builddir=${tsk.build_path}/'
shell = True
class cabal_copy(Cabal):
run_str = '${CABAL} copy -v0 --builddir=${tsk.build_path}'
shell = True
class cabal_register(Cabal):
run_str = '${CABAL} register -v0 --gen-pkg-config=${tsk.build_path}/pkg.config --builddir=${tsk.build_path}'
shell = True
class ghcpkg_register(Cabal):
run_str = '${GHCPKG} update -v0 --global --user --package-conf=${PKGCONFD} ${tsk.build_path}/pkg.config'
shell = True
def runnable_status(self):
global lock, registering
val = False
lock.acquire()
val = registering
lock.release()
if val:
return Task.ASK_LATER
ret = Task.Task.runnable_status(self)
if ret == Task.RUN_ME:
lock.acquire()
registering = True
lock.release()
return ret
def post_run(self):
global lock, registering
lock.acquire()
registering = False
lock.release()
return Task.Task.post_run(self)
class cabal_touch(Cabal):
run_str = 'touch ${TGT}'
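# Hedged usage sketch (package name hypothetical; tooldir is only needed when
# the tool is not packed into the waf file): a wscript fragment feeding a
# .cabal file through the task chain defined above.
#
# def configure(conf):
#     conf.load('cabal', tooldir='waflib/extras')
#
# def build(bld):
#     bld(source='my-package.cabal', register=True)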
| 5,165 | Python | .py | 119 | 35.966387 | 128 | 0.639759 | projecthamster/hamster | 1,069 | 250 | 128 | GPL-3.0 | 9/5/2024, 5:12:46 PM (Europe/Amsterdam) |
20,670 | cross_gnu.py | projecthamster_hamster/waflib/extras/cross_gnu.py | #!/usr/bin/python
# -*- coding: utf-8 vi:ts=4:noexpandtab
# Tool to provide dedicated variables for cross-compilation
__author__ = __maintainer__ = "Jérôme Carretero <cJ-waf@zougloub.eu>"
__copyright__ = "Jérôme Carretero, 2014"
"""
This tool allows to use environment variables to define cross-compilation
variables intended for build variants.
The variables are obtained from the environment in 3 ways:
1. By defining CHOST, they can be derived as ${CHOST}-${TOOL}
2. By defining HOST_x
3. By defining ${CHOST//-/_}_x
Otherwise one can set ``cfg.env.CHOST`` in ``wscript`` before loading ``cross_gnu``.
Usage:
- In your build script::
	def configure(conf):
		...
		for variant in x_variants:
			conf.setenv(variant)
			conf.load('cross_gnu')
			conf.xcheck_host_envar('POUET')
			...
- Then::
CHOST=arm-hardfloat-linux-gnueabi waf configure
env arm-hardfloat-linux-gnueabi-CC="clang -..." waf configure
CFLAGS=... CHOST=arm-hardfloat-linux-gnueabi HOST_CFLAGS=-g waf configure
HOST_CC="clang -..." waf configure
This example ``wscript`` compiles to Microchip PIC (xc16-gcc-xyz must be in PATH):
.. code:: python
from waflib import Configure
#from https://gist.github.com/rpuntaie/2bddfb5d7b77db26415ee14371289971
import waf_variants
variants='pc fw/variant1 fw/variant2'.split()
top = "."
out = "../build"
PIC = '33FJ128GP804' #dsPICxxx
@Configure.conf
def gcc_modifier_xc16(cfg):
v = cfg.env
v.cprogram_PATTERN = '%s.elf'
v.LINKFLAGS_cprogram = ','.join(['-Wl','','','--defsym=__MPLAB_BUILD=0','','--script=p'+PIC+'.gld',
'--stack=16','--check-sections','--data-init','--pack-data','--handles','--isr','--no-gc-sections',
'--fill-upper=0','--stackguard=16','--no-force-link','--smart-io']) #,'--report-mem'])
v.CFLAGS_cprogram=['-mcpu='+PIC,'-omf=elf','-mlarge-code','-msmart-io=1',
'-msfr-warn=off','-mno-override-inline','-finline','-Winline']
def configure(cfg):
if 'fw' in cfg.variant: #firmware
cfg.env.DEST_OS = 'xc16' #cfg.env.CHOST = 'xc16' #works too
cfg.load('c cross_gnu') #cfg.env.CHOST becomes ['xc16']
...
else: #configure for pc SW
...
def build(bld):
if 'fw' in bld.variant: #firmware
bld.program(source='maintst.c', target='maintst');
bld(source='maintst.elf', target='maintst.hex', rule="xc16-bin2hex ${SRC} -a -omf=elf")
else: #build for pc SW
...
"""
import os
from waflib import Utils, Configure
from waflib.Tools import ccroot, gcc
try:
from shlex import quote
except ImportError:
from pipes import quote
def get_chost_stuff(conf):
"""
Get the CHOST environment variable contents
"""
chost = None
chost_envar = None
if conf.env.CHOST:
chost = conf.env.CHOST[0]
chost_envar = chost.replace('-', '_')
return chost, chost_envar
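# Illustration (hypothetical triplet): with CHOST='arm-linux-gnueabi',
# chost_envar becomes 'arm_linux_gnueabi', so xcheck_host_prog('CC', 'gcc')
# below consults arm_linux_gnueabi_CC, then HOST_CC, and finally falls back
# to 'arm-linux-gnueabi-gcc'.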
@Configure.conf
def xcheck_var(conf, name, wafname=None, cross=False):
wafname = wafname or name
if wafname in conf.env:
value = conf.env[wafname]
if isinstance(value, str):
value = [value]
else:
envar = os.environ.get(name)
if not envar:
return
value = Utils.to_list(envar) if envar != '' else [envar]
conf.env[wafname] = value
if cross:
pretty = 'cross-compilation %s' % wafname
else:
pretty = wafname
conf.msg('Will use %s' % pretty, " ".join(quote(x) for x in value))
@Configure.conf
def xcheck_host_prog(conf, name, tool, wafname=None):
wafname = wafname or name
chost, chost_envar = get_chost_stuff(conf)
specific = None
if chost:
specific = os.environ.get('%s_%s' % (chost_envar, name))
if specific:
value = Utils.to_list(specific)
conf.env[wafname] += value
conf.msg('Will use cross-compilation %s from %s_%s' % (name, chost_envar, name),
" ".join(quote(x) for x in value))
return
else:
envar = os.environ.get('HOST_%s' % name)
if envar is not None:
value = Utils.to_list(envar)
conf.env[wafname] = value
conf.msg('Will use cross-compilation %s from HOST_%s' % (name, name),
" ".join(quote(x) for x in value))
return
if conf.env[wafname]:
return
value = None
if chost:
value = '%s-%s' % (chost, tool)
if value:
conf.env[wafname] = value
conf.msg('Will use cross-compilation %s from CHOST' % wafname, value)
@Configure.conf
def xcheck_host_envar(conf, name, wafname=None):
wafname = wafname or name
chost, chost_envar = get_chost_stuff(conf)
specific = None
if chost:
specific = os.environ.get('%s_%s' % (chost_envar, name))
if specific:
value = Utils.to_list(specific)
conf.env[wafname] += value
conf.msg('Will use cross-compilation %s from %s_%s' \
% (name, chost_envar, name),
" ".join(quote(x) for x in value))
return
envar = os.environ.get('HOST_%s' % name)
if envar is None:
return
value = Utils.to_list(envar) if envar != '' else [envar]
conf.env[wafname] = value
conf.msg('Will use cross-compilation %s from HOST_%s' % (name, name),
" ".join(quote(x) for x in value))
@Configure.conf
def xcheck_host(conf):
conf.xcheck_var('CHOST', cross=True)
conf.env.CHOST = conf.env.CHOST or [conf.env.DEST_OS]
conf.env.DEST_OS = conf.env.CHOST[0].replace('-','_')
conf.xcheck_host_prog('CC', 'gcc')
conf.xcheck_host_prog('CXX', 'g++')
conf.xcheck_host_prog('LINK_CC', 'gcc')
conf.xcheck_host_prog('LINK_CXX', 'g++')
conf.xcheck_host_prog('AR', 'ar')
conf.xcheck_host_prog('AS', 'as')
conf.xcheck_host_prog('LD', 'ld')
conf.xcheck_host_envar('CFLAGS')
conf.xcheck_host_envar('CXXFLAGS')
conf.xcheck_host_envar('LDFLAGS', 'LINKFLAGS')
conf.xcheck_host_envar('LIB')
conf.xcheck_host_envar('PKG_CONFIG_LIBDIR')
conf.xcheck_host_envar('PKG_CONFIG_PATH')
if not conf.env.env:
conf.env.env = {}
conf.env.env.update(os.environ)
if conf.env.PKG_CONFIG_LIBDIR:
conf.env.env['PKG_CONFIG_LIBDIR'] = conf.env.PKG_CONFIG_LIBDIR[0]
if conf.env.PKG_CONFIG_PATH:
conf.env.env['PKG_CONFIG_PATH'] = conf.env.PKG_CONFIG_PATH[0]
def configure(conf):
"""
Configuration example for gcc, it will not work for g++/clang/clang++
"""
conf.xcheck_host()
conf.gcc_common_flags()
conf.gcc_modifier_platform()
conf.cc_load_tools()
conf.cc_add_flags()
conf.link_add_flags()
| 6,109 | Python | .py | 179 | 31.206704 | 105 | 0.68901 | projecthamster/hamster | 1,069 | 250 | 128 | GPL-3.0 | 9/5/2024, 5:12:46 PM (Europe/Amsterdam) |
20,671 | fc_nag.py | projecthamster_hamster/waflib/extras/fc_nag.py | #! /usr/bin/env python
# encoding: utf-8
# harald at klimachs.de
import re
from waflib import Utils
from waflib.Tools import fc,fc_config,fc_scan
from waflib.Configure import conf
from waflib.Tools.compiler_fc import fc_compiler
fc_compiler['linux'].insert(0, 'fc_nag')
@conf
def find_nag(conf):
"""Find the NAG Fortran Compiler (will look in the environment variable 'FC')"""
fc = conf.find_program(['nagfor'], var='FC')
conf.get_nag_version(fc)
conf.env.FC_NAME = 'NAG'
conf.env.FC_MOD_CAPITALIZATION = 'lower'
@conf
def nag_flags(conf):
v = conf.env
v.FCFLAGS_DEBUG = ['-C=all']
v.FCLNK_TGT_F = ['-o', '']
v.FC_TGT_F = ['-c', '-o', '']
@conf
def nag_modifier_platform(conf):
dest_os = conf.env['DEST_OS'] or Utils.unversioned_sys_platform()
nag_modifier_func = getattr(conf, 'nag_modifier_' + dest_os, None)
if nag_modifier_func:
nag_modifier_func()
@conf
def get_nag_version(conf, fc):
"""Get the NAG compiler version"""
version_re = re.compile(r"^NAG Fortran Compiler *Release *(?P<major>\d*)\.(?P<minor>\d*)", re.M).search
cmd = fc + ['-V']
out, err = fc_config.getoutput(conf,cmd,stdin=False)
if out:
match = version_re(out)
if not match:
match = version_re(err)
else: match = version_re(err)
if not match:
conf.fatal('Could not determine the NAG version.')
k = match.groupdict()
conf.env['FC_VERSION'] = (k['major'], k['minor'])
def configure(conf):
conf.find_nag()
conf.find_ar()
conf.fc_flags()
conf.fc_add_flags()
conf.nag_flags()
conf.nag_modifier_platform()
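# Hedged usage sketch: fc_nag registers itself with compiler_fc above, so a
# plain conf.load('compiler_fc') will consider nagfor on Linux; to force this
# compiler explicitly instead:
#
# def configure(conf):
#     conf.load('fc_nag')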
| 1,520 | Python | .py | 50 | 28.42 | 104 | 0.70048 | projecthamster/hamster | 1,069 | 250 | 128 | GPL-3.0 | 9/5/2024, 5:12:46 PM (Europe/Amsterdam) |
20,672 | build_file_tracker.py | projecthamster_hamster/waflib/extras/build_file_tracker.py | #! /usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2015
"""
Force files to depend on the timestamps of those located in the build directory. You may
want to use this to force partial rebuilds, see playground/track_output_files/ for a working example.
Note that there are various ways to implement this: one may want to use timestamps on source files too, for example,
or hash the files in the source directory only under certain conditions (md5_tstamp tool),
or hash the file in the build directory together with its timestamp.
"""
import os
from waflib import Node, Utils
def get_bld_sig(self):
if not self.is_bld() or self.ctx.bldnode is self.ctx.srcnode:
return Utils.h_file(self.abspath())
try:
		# add the modification time to the signature
return self.sig + str(os.stat(self.abspath()).st_mtime)
except AttributeError:
return None
Node.Node.get_bld_sig = get_bld_sig
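# Hedged usage sketch: the monkey-patch above is applied at import time, so
# loading the tool once anywhere in the wscript is enough, e.g.
#
# def build(bld):
#     bld.load('build_file_tracker')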
| 898 | Python | .py | 21 | 40.904762 | 116 | 0.777011 | projecthamster/hamster | 1,069 | 250 | 128 | GPL-3.0 | 9/5/2024, 5:12:46 PM (Europe/Amsterdam) |
20,673 | midl.py | projecthamster_hamster/waflib/extras/midl.py | #!/usr/bin/env python
# Issue 1185 ultrix gmail com
"""
Microsoft Interface Definition Language support. Given ComObject.idl, this tool
will generate ComObject.tlb ComObject_i.h ComObject_i.c ComObject_p.c and dlldata.c
To declare targets using midl::
def configure(conf):
conf.load('msvc')
conf.load('midl')
def build(bld):
bld(
features='c cshlib',
# Note: ComObject_i.c is generated from ComObject.idl
source = 'main.c ComObject.idl ComObject_i.c',
target = 'ComObject.dll')
"""
from waflib import Task, Utils
from waflib.TaskGen import feature, before_method
import os
def configure(conf):
conf.find_program(['midl'], var='MIDL')
conf.env.MIDLFLAGS = [
'/nologo',
'/D',
'_DEBUG',
'/W1',
'/char',
'signed',
'/Oicf',
]
@feature('c', 'cxx')
@before_method('process_source')
def idl_file(self):
# Do this before process_source so that the generated header can be resolved
# when scanning source dependencies.
idl_nodes = []
src_nodes = []
for node in Utils.to_list(self.source):
if str(node).endswith('.idl'):
idl_nodes.append(node)
else:
src_nodes.append(node)
for node in self.to_nodes(idl_nodes):
t = node.change_ext('.tlb')
h = node.change_ext('_i.h')
c = node.change_ext('_i.c')
p = node.change_ext('_p.c')
d = node.parent.find_or_declare('dlldata.c')
self.create_task('midl', node, [t, h, c, p, d])
self.source = src_nodes
class midl(Task.Task):
"""
Compile idl files
"""
color = 'YELLOW'
run_str = '${MIDL} ${MIDLFLAGS} ${CPPPATH_ST:INCLUDES} /tlb ${TGT[0].bldpath()} /header ${TGT[1].bldpath()} /iid ${TGT[2].bldpath()} /proxy ${TGT[3].bldpath()} /dlldata ${TGT[4].bldpath()} ${SRC}'
before = ['winrc']
| 1,699 | Python | .py | 57 | 27.333333 | 197 | 0.682822 | projecthamster/hamster | 1,069 | 250 | 128 | GPL-3.0 | 9/5/2024, 5:12:46 PM (Europe/Amsterdam) |
20,674 | wafcache.py | projecthamster_hamster/waflib/extras/wafcache.py | #! /usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2019 (ita)
"""
Filesystem-based cache system to share and re-use build artifacts
Cache access operations (copy to and from) are delegated to
independent pre-forked worker subprocesses.
The following environment variables may be set:
* WAFCACHE: several possibilities:
- File cache:
absolute path of the waf cache (~/.cache/wafcache_user,
where `user` represents the currently logged-in user)
- URL to a cache server, for example:
export WAFCACHE=http://localhost:8080/files/
in that case, GET/POST requests are made to urls of the form
http://localhost:8080/files/000000000/0 (cache management is delegated to the server)
- GCS, S3 or MINIO bucket
gs://my-bucket/ (uses gsutil command line tool or WAFCACHE_CMD)
s3://my-bucket/ (uses aws command line tool or WAFCACHE_CMD)
minio://my-bucket/ (uses mc command line tool or WAFCACHE_CMD)
* WAFCACHE_CMD: bucket upload/download command, for example:
WAFCACHE_CMD="gsutil cp %{SRC} %{TGT}"
Note that the WAFCACHE bucket value is used for the source or destination
depending on the operation (upload or download). For example, with:
WAFCACHE="gs://mybucket/"
the following commands may be run:
gsutil cp build/myprogram gs://mybucket/aa/aaaaa/1
gsutil cp gs://mybucket/bb/bbbbb/2 build/somefile
* WAFCACHE_NO_PUSH: if set, disables pushing to the cache
* WAFCACHE_VERBOSITY: if set, displays more detailed cache operations
* WAFCACHE_STATS: if set, displays cache usage statistics on exit
File cache specific options:
Files are copied using hard links by default; if the cache is located
onto another partition, the system switches to file copies instead.
* WAFCACHE_TRIM_MAX_FOLDER: maximum amount of tasks to cache (1M)
* WAFCACHE_EVICT_MAX_BYTES: maximum amount of cache size in bytes (10GB)
* WAFCACHE_EVICT_INTERVAL_MINUTES: minimum time interval to try
and trim the cache (3 minutes)
Upload specific options:
* WAFCACHE_ASYNC_WORKERS: define a number of workers to upload results asynchronously
this may improve build performance with many/long file uploads
the default is unset (synchronous uploads)
* WAFCACHE_ASYNC_NOWAIT: do not wait for uploads to complete (default: False)
  this requires asynchronous uploads to have an effect
Usage::
def build(bld):
bld.load('wafcache')
...
To troubleshoot::
waf clean build --zone=wafcache
"""
import atexit, base64, errno, fcntl, getpass, os, re, shutil, sys, time, threading, traceback, urllib3, shlex
try:
import subprocess32 as subprocess
except ImportError:
import subprocess
base_cache = os.path.expanduser('~/.cache/')
if not os.path.isdir(base_cache):
base_cache = '/tmp/'
default_wafcache_dir = os.path.join(base_cache, 'wafcache_' + getpass.getuser())
CACHE_DIR = os.environ.get('WAFCACHE', default_wafcache_dir)
WAFCACHE_CMD = os.environ.get('WAFCACHE_CMD')
TRIM_MAX_FOLDERS = int(os.environ.get('WAFCACHE_TRIM_MAX_FOLDER', 1000000))
EVICT_INTERVAL_MINUTES = int(os.environ.get('WAFCACHE_EVICT_INTERVAL_MINUTES', 3))
EVICT_MAX_BYTES = int(os.environ.get('WAFCACHE_EVICT_MAX_BYTES', 10**10))
WAFCACHE_NO_PUSH = 1 if os.environ.get('WAFCACHE_NO_PUSH') else 0
WAFCACHE_VERBOSITY = 1 if os.environ.get('WAFCACHE_VERBOSITY') else 0
WAFCACHE_STATS = 1 if os.environ.get('WAFCACHE_STATS') else 0
WAFCACHE_ASYNC_WORKERS = os.environ.get('WAFCACHE_ASYNC_WORKERS')
WAFCACHE_ASYNC_NOWAIT = os.environ.get('WAFCACHE_ASYNC_NOWAIT')
OK = "ok"
re_waf_cmd = re.compile('(?P<src>%{SRC})|(?P<tgt>%{TGT})')
try:
import cPickle
except ImportError:
import pickle as cPickle
if __name__ != '__main__':
from waflib import Task, Logs, Utils, Build
def can_retrieve_cache(self):
"""
New method for waf Task classes
"""
if not self.outputs:
return False
self.cached = False
sig = self.signature()
ssig = Utils.to_hex(self.uid() + sig)
if WAFCACHE_STATS:
self.generator.bld.cache_reqs += 1
files_to = [node.abspath() for node in self.outputs]
proc = get_process()
err = cache_command(proc, ssig, [], files_to)
process_pool.append(proc)
if err.startswith(OK):
if WAFCACHE_VERBOSITY:
Logs.pprint('CYAN', ' Fetched %r from cache' % files_to)
else:
Logs.debug('wafcache: fetched %r from cache', files_to)
if WAFCACHE_STATS:
self.generator.bld.cache_hits += 1
else:
if WAFCACHE_VERBOSITY:
Logs.pprint('YELLOW', ' No cache entry %s' % files_to)
else:
Logs.debug('wafcache: No cache entry %s: %s', files_to, err)
return False
self.cached = True
return True
def put_files_cache(self):
"""
New method for waf Task classes
"""
if WAFCACHE_NO_PUSH or getattr(self, 'cached', None) or not self.outputs:
return
files_from = []
for node in self.outputs:
path = node.abspath()
if not os.path.isfile(path):
return
files_from.append(path)
bld = self.generator.bld
old_sig = self.signature()
for node in self.inputs:
try:
del node.ctx.cache_sig[node]
except KeyError:
pass
delattr(self, 'cache_sig')
sig = self.signature()
def _async_put_files_cache(bld, ssig, files_from):
proc = get_process()
if WAFCACHE_ASYNC_WORKERS:
with bld.wafcache_lock:
if bld.wafcache_stop:
process_pool.append(proc)
return
bld.wafcache_procs.add(proc)
err = cache_command(proc, ssig, files_from, [])
process_pool.append(proc)
if err.startswith(OK):
if WAFCACHE_VERBOSITY:
Logs.pprint('CYAN', ' Successfully uploaded %s to cache' % files_from)
else:
Logs.debug('wafcache: Successfully uploaded %r to cache', files_from)
if WAFCACHE_STATS:
bld.cache_puts += 1
else:
if WAFCACHE_VERBOSITY:
Logs.pprint('RED', ' Error caching step results %s: %s' % (files_from, err))
else:
Logs.debug('wafcache: Error caching results %s: %s', files_from, err)
if old_sig == sig:
ssig = Utils.to_hex(self.uid() + sig)
if WAFCACHE_ASYNC_WORKERS:
fut = bld.wafcache_executor.submit(_async_put_files_cache, bld, ssig, files_from)
bld.wafcache_uploads.append(fut)
else:
_async_put_files_cache(bld, ssig, files_from)
else:
Logs.debug('wafcache: skipped %r upload due to late input modifications %r', self.outputs, self.inputs)
bld.task_sigs[self.uid()] = self.cache_sig
def hash_env_vars(self, env, vars_lst):
"""
Reimplement BuildContext.hash_env_vars so that the resulting hash does not depend on local paths
"""
if not env.table:
env = env.parent
if not env:
return Utils.SIG_NIL
idx = str(id(env)) + str(vars_lst)
try:
cache = self.cache_env
except AttributeError:
cache = self.cache_env = {}
else:
try:
return self.cache_env[idx]
except KeyError:
pass
v = str([env[a] for a in vars_lst])
v = v.replace(self.srcnode.abspath().__repr__()[:-1], '')
m = Utils.md5()
m.update(v.encode())
ret = m.digest()
Logs.debug('envhash: %r %r', ret, v)
cache[idx] = ret
return ret
def uid(self):
"""
Reimplement Task.uid() so that the signature does not depend on local paths
"""
try:
return self.uid_
except AttributeError:
m = Utils.md5()
src = self.generator.bld.srcnode
up = m.update
up(self.__class__.__name__.encode())
for x in self.inputs + self.outputs:
up(x.path_from(src).encode())
self.uid_ = m.digest()
return self.uid_
def make_cached(cls):
"""
Enable the waf cache for a given task class
"""
if getattr(cls, 'nocache', None) or getattr(cls, 'has_cache', False):
return
full_name = "%s.%s" % (cls.__module__, cls.__name__)
if full_name in ('waflib.Tools.ccroot.vnum', 'waflib.Build.inst'):
return
m1 = getattr(cls, 'run', None)
def run(self):
if getattr(self, 'nocache', False):
return m1(self)
if self.can_retrieve_cache():
return 0
return m1(self)
cls.run = run
m2 = getattr(cls, 'post_run', None)
def post_run(self):
if getattr(self, 'nocache', False):
return m2(self)
ret = m2(self)
self.put_files_cache()
return ret
cls.post_run = post_run
cls.has_cache = True
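# Hedged example (class name hypothetical): a task class can be excluded from
# caching through the 'nocache' attribute checked in make_cached above:
#
# from waflib import Task
# class license_stamp(Task.Task):
#     nocache = True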
process_pool = []
def get_process():
"""
Returns a worker process that can process waf cache commands
The worker process is assumed to be returned to the process pool when unused
"""
try:
return process_pool.pop()
except IndexError:
filepath = os.path.dirname(os.path.abspath(__file__)) + os.sep + 'wafcache.py'
cmd = [sys.executable, '-c', Utils.readf(filepath)]
return subprocess.Popen(cmd, stdout=subprocess.PIPE, stdin=subprocess.PIPE, bufsize=0)
def atexit_pool():
for proc in process_pool:
proc.kill()
atexit.register(atexit_pool)
def build(bld):
"""
Called during the build process to enable file caching
"""
if WAFCACHE_ASYNC_WORKERS:
try:
num_workers = int(WAFCACHE_ASYNC_WORKERS)
except ValueError:
Logs.warn('Invalid WAFCACHE_ASYNC_WORKERS specified: %r' % WAFCACHE_ASYNC_WORKERS)
else:
from concurrent.futures import ThreadPoolExecutor
bld.wafcache_executor = ThreadPoolExecutor(max_workers=num_workers)
bld.wafcache_uploads = []
bld.wafcache_procs = set([])
bld.wafcache_stop = False
bld.wafcache_lock = threading.Lock()
def finalize_upload_async(bld):
if WAFCACHE_ASYNC_NOWAIT:
with bld.wafcache_lock:
bld.wafcache_stop = True
for fut in reversed(bld.wafcache_uploads):
fut.cancel()
for proc in bld.wafcache_procs:
proc.kill()
bld.wafcache_procs.clear()
else:
Logs.pprint('CYAN', '... waiting for wafcache uploads to complete (%s uploads)' % len(bld.wafcache_uploads))
bld.wafcache_executor.shutdown(wait=True)
bld.add_post_fun(finalize_upload_async)
if WAFCACHE_STATS:
# Init counter for statistics and hook to print results at the end
bld.cache_reqs = bld.cache_hits = bld.cache_puts = 0
def printstats(bld):
hit_ratio = 0
if bld.cache_reqs > 0:
hit_ratio = (bld.cache_hits / bld.cache_reqs) * 100
Logs.pprint('CYAN', ' wafcache stats: %s requests, %s hits (ratio: %.2f%%), %s writes' %
(bld.cache_reqs, bld.cache_hits, hit_ratio, bld.cache_puts) )
bld.add_post_fun(printstats)
if process_pool:
# already called once
return
# pre-allocation
processes = [get_process() for x in range(bld.jobs)]
process_pool.extend(processes)
Task.Task.can_retrieve_cache = can_retrieve_cache
Task.Task.put_files_cache = put_files_cache
Task.Task.uid = uid
Build.BuildContext.hash_env_vars = hash_env_vars
for x in reversed(list(Task.classes.values())):
make_cached(x)
def cache_command(proc, sig, files_from, files_to):
"""
Create a command for cache worker processes, returns a pickled
base64-encoded tuple containing the task signature, a list of files to
cache and a list of files files to get from cache (one of the lists
is assumed to be empty)
"""
obj = base64.b64encode(cPickle.dumps([sig, files_from, files_to]))
proc.stdin.write(obj)
proc.stdin.write('\n'.encode())
proc.stdin.flush()
obj = proc.stdout.readline()
if not obj:
raise OSError('Preforked sub-process %r died' % proc.pid)
return cPickle.loads(base64.b64decode(obj))
try:
	copyfun = os.link
except AttributeError:
	# os.link is missing on some platforms, fall back to plain copies
	copyfun = shutil.copy2
def atomic_copy(orig, dest):
"""
Copy files to the cache, the operation is atomic for a given file
"""
global copyfun
tmp = dest + '.tmp'
up = os.path.dirname(dest)
try:
os.makedirs(up)
except OSError:
pass
try:
copyfun(orig, tmp)
except OSError as e:
if e.errno == errno.EXDEV:
copyfun = shutil.copy2
copyfun(orig, tmp)
else:
raise
os.rename(tmp, dest)
def lru_trim():
"""
the cache folders take the form:
`CACHE_DIR/0b/0b180f82246d726ece37c8ccd0fb1cde2650d7bfcf122ec1f169079a3bfc0ab9`
they are listed in order of last access, and then removed
until the amount of folders is within TRIM_MAX_FOLDERS and the total space
taken by files is less than EVICT_MAX_BYTES
"""
lst = []
for up in os.listdir(CACHE_DIR):
if len(up) == 2:
sub = os.path.join(CACHE_DIR, up)
for hval in os.listdir(sub):
path = os.path.join(sub, hval)
size = 0
for fname in os.listdir(path):
try:
size += os.lstat(os.path.join(path, fname)).st_size
except OSError:
pass
lst.append((os.stat(path).st_mtime, size, path))
lst.sort(key=lambda x: x[0])
lst.reverse()
tot = sum(x[1] for x in lst)
while tot > EVICT_MAX_BYTES or len(lst) > TRIM_MAX_FOLDERS:
_, tmp_size, path = lst.pop()
tot -= tmp_size
tmp = path + '.remove'
try:
shutil.rmtree(tmp)
except OSError:
pass
try:
os.rename(path, tmp)
except OSError:
sys.stderr.write('Could not rename %r to %r\n' % (path, tmp))
else:
try:
shutil.rmtree(tmp)
except OSError:
sys.stderr.write('Could not remove %r\n' % tmp)
sys.stderr.write("Cache trimmed: %r bytes in %r folders left\n" % (tot, len(lst)))
def lru_evict():
"""
Reduce the cache size
"""
lockfile = os.path.join(CACHE_DIR, 'all.lock')
try:
st = os.stat(lockfile)
except EnvironmentError as e:
if e.errno == errno.ENOENT:
with open(lockfile, 'w') as f:
f.write('')
return
else:
raise
if st.st_mtime < time.time() - EVICT_INTERVAL_MINUTES * 60:
# check every EVICT_INTERVAL_MINUTES minutes if the cache is too big
		# O_CLOEXEC is unnecessary because no processes are spawned
fd = os.open(lockfile, os.O_RDWR | os.O_CREAT, 0o755)
try:
try:
fcntl.flock(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
except EnvironmentError:
if WAFCACHE_VERBOSITY:
sys.stderr.write('wafcache: another cleaning process is running\n')
else:
				# now do the actual cleanup
lru_trim()
os.utime(lockfile, None)
finally:
os.close(fd)
class netcache(object):
def __init__(self):
self.http = urllib3.PoolManager()
def url_of(self, sig, i):
return "%s/%s/%s" % (CACHE_DIR, sig, i)
def upload(self, file_path, sig, i):
url = self.url_of(sig, i)
with open(file_path, 'rb') as f:
file_data = f.read()
r = self.http.request('POST', url, timeout=60,
fields={ 'file': ('%s/%s' % (sig, i), file_data), })
if r.status >= 400:
raise OSError("Invalid status %r %r" % (url, r.status))
def download(self, file_path, sig, i):
url = self.url_of(sig, i)
with self.http.request('GET', url, preload_content=False, timeout=60) as inf:
if inf.status >= 400:
raise OSError("Invalid status %r %r" % (url, inf.status))
with open(file_path, 'wb') as out:
shutil.copyfileobj(inf, out)
def copy_to_cache(self, sig, files_from, files_to):
try:
for i, x in enumerate(files_from):
if not os.path.islink(x):
self.upload(x, sig, i)
except Exception:
return traceback.format_exc()
return OK
def copy_from_cache(self, sig, files_from, files_to):
try:
for i, x in enumerate(files_to):
self.download(x, sig, i)
except Exception:
return traceback.format_exc()
return OK
class fcache(object):
def __init__(self):
if not os.path.exists(CACHE_DIR):
try:
os.makedirs(CACHE_DIR)
except OSError:
pass
if not os.path.exists(CACHE_DIR):
raise ValueError('Could not initialize the cache directory')
def copy_to_cache(self, sig, files_from, files_to):
"""
Copy files to the cache, existing files are overwritten,
and the copy is atomic only for a given file, not for all files
that belong to a given task object
"""
try:
for i, x in enumerate(files_from):
dest = os.path.join(CACHE_DIR, sig[:2], sig, str(i))
atomic_copy(x, dest)
except Exception:
return traceback.format_exc()
else:
# attempt trimming if caching was successful:
# we may have things to trim!
try:
lru_evict()
except Exception:
return traceback.format_exc()
return OK
def copy_from_cache(self, sig, files_from, files_to):
"""
Copy files from the cache
"""
try:
for i, x in enumerate(files_to):
orig = os.path.join(CACHE_DIR, sig[:2], sig, str(i))
atomic_copy(orig, x)
# success! update the cache time
os.utime(os.path.join(CACHE_DIR, sig[:2], sig), None)
except Exception:
return traceback.format_exc()
return OK
class bucket_cache(object):
def bucket_copy(self, source, target):
if WAFCACHE_CMD:
def replacer(match):
if match.group('src'):
return source
elif match.group('tgt'):
return target
cmd = [re_waf_cmd.sub(replacer, x) for x in shlex.split(WAFCACHE_CMD)]
elif CACHE_DIR.startswith('s3://'):
cmd = ['aws', 's3', 'cp', source, target]
elif CACHE_DIR.startswith('gs://'):
cmd = ['gsutil', 'cp', source, target]
else:
cmd = ['mc', 'cp', source, target]
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = proc.communicate()
if proc.returncode:
raise OSError('Error copy %r to %r using: %r (exit %r):\n out:%s\n err:%s' % (
source, target, cmd, proc.returncode, out.decode(errors='replace'), err.decode(errors='replace')))
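	# Hedged example (bucket name hypothetical): the transfer command can be
	# overridden entirely with WAFCACHE_CMD; %{SRC} and %{TGT} are substituted
	# by the code above:
	#
	#   export WAFCACHE=s3://my-bucket/
	#   export WAFCACHE_CMD="aws s3 cp %{SRC} %{TGT}"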
def copy_to_cache(self, sig, files_from, files_to):
try:
for i, x in enumerate(files_from):
dest = os.path.join(CACHE_DIR, sig[:2], sig, str(i))
self.bucket_copy(x, dest)
except Exception:
return traceback.format_exc()
return OK
def copy_from_cache(self, sig, files_from, files_to):
try:
for i, x in enumerate(files_to):
orig = os.path.join(CACHE_DIR, sig[:2], sig, str(i))
self.bucket_copy(orig, x)
except EnvironmentError:
return traceback.format_exc()
return OK
def loop(service):
"""
This function is run when this file is run as a standalone python script,
it assumes a parent process that will communicate the commands to it
	as pickled, base64-encoded tuples (one line per command)
The commands are to copy files to the cache or copy files from the
cache to a target destination
"""
# one operation is performed at a single time by a single process
# therefore stdin never has more than one line
txt = sys.stdin.readline().strip()
if not txt:
# parent process probably ended
sys.exit(1)
ret = OK
[sig, files_from, files_to] = cPickle.loads(base64.b64decode(txt))
if files_from:
# TODO return early when pushing files upstream
ret = service.copy_to_cache(sig, files_from, files_to)
elif files_to:
# the build process waits for workers to (possibly) obtain files from the cache
ret = service.copy_from_cache(sig, files_from, files_to)
else:
ret = "Invalid command"
obj = base64.b64encode(cPickle.dumps(ret))
sys.stdout.write(obj.decode())
sys.stdout.write('\n')
sys.stdout.flush()
if __name__ == '__main__':
if CACHE_DIR.startswith('s3://') or CACHE_DIR.startswith('gs://') or CACHE_DIR.startswith('minio://'):
if CACHE_DIR.startswith('minio://'):
CACHE_DIR = CACHE_DIR[8:] # minio doesn't need the protocol part, uses config aliases
service = bucket_cache()
elif CACHE_DIR.startswith('http'):
service = netcache()
else:
service = fcache()
while 1:
try:
loop(service)
except KeyboardInterrupt:
break
| 18,813 | Python | .py | 561 | 30.299465 | 112 | 0.706028 | projecthamster/hamster | 1,069 | 250 | 128 | GPL-3.0 | 9/5/2024, 5:12:46 PM (Europe/Amsterdam) |
20,675 | review.py | projecthamster_hamster/waflib/extras/review.py | #!/usr/bin/env python
# encoding: utf-8
# Laurent Birtz, 2011
# moved the code into a separate tool (ita)
"""
There are several things here:
- a different command-line option management making options persistent
- the review command to display the options set
Assumptions:
- configuration options are not always added to the right group (and do not count on the users to do it...)
- the options are persistent between the executions (waf options are NOT persistent by design), even for the configuration
- when the options change, the build is invalidated (forcing a reconfiguration)
"""
import os, textwrap, shutil
from waflib import Logs, Context, ConfigSet, Options, Build, Configure
class Odict(dict):
"""Ordered dictionary"""
def __init__(self, data=None):
self._keys = []
dict.__init__(self)
if data:
# we were provided a regular dict
if isinstance(data, dict):
self.append_from_dict(data)
# we were provided a tuple list
elif type(data) == list:
self.append_from_plist(data)
# we were provided invalid input
else:
raise Exception("expected a dict or a tuple list")
	def append_from_dict(self, dict):
		# iterate explicitly: under Python 3, map() is lazy and would be a no-op here
		for key in dict.keys():
			self.__setitem__(key, dict[key])
def append_from_plist(self, plist):
for pair in plist:
if len(pair) != 2:
raise Exception("invalid pairs list")
for (k, v) in plist:
self.__setitem__(k, v)
def __delitem__(self, key):
if not key in self._keys:
raise KeyError(key)
dict.__delitem__(self, key)
self._keys.remove(key)
def __setitem__(self, key, item):
dict.__setitem__(self, key, item)
if key not in self._keys:
self._keys.append(key)
def clear(self):
dict.clear(self)
self._keys = []
def copy(self):
return Odict(self.plist())
def items(self):
return zip(self._keys, self.values())
def keys(self):
return list(self._keys) # return a copy of the list
def values(self):
return map(self.get, self._keys)
def plist(self):
p = []
for k, v in self.items():
p.append( (k, v) )
return p
def __str__(self):
buf = []
buf.append("{ ")
for k, v in self.items():
buf.append('%r : %r, ' % (k, v))
buf.append("}")
return ''.join(buf)
review_options = Odict()
"""
Ordered dictionary mapping configuration option names to their optparse option.
"""
review_defaults = {}
"""
Dictionary mapping configuration option names to their default value.
"""
old_review_set = None
"""
Review set containing the configuration values before parsing the command line.
"""
new_review_set = None
"""
Review set containing the configuration values after parsing the command line.
"""
class OptionsReview(Options.OptionsContext):
def __init__(self, **kw):
super(self.__class__, self).__init__(**kw)
def prepare_config_review(self):
"""
Find the configuration options that are reviewable, detach
their default value from their optparse object and store them
into the review dictionaries.
"""
gr = self.get_option_group('configure options')
for opt in gr.option_list:
if opt.action != 'store' or opt.dest in ("out", "top"):
continue
review_options[opt.dest] = opt
review_defaults[opt.dest] = opt.default
			if opt.dest in gr.defaults:
del gr.defaults[opt.dest]
opt.default = None
def parse_args(self):
self.prepare_config_review()
self.parser.get_option('--prefix').help = 'installation prefix'
super(OptionsReview, self).parse_args()
Context.create_context('review').refresh_review_set()
class ReviewContext(Context.Context):
'''reviews the configuration values'''
cmd = 'review'
def __init__(self, **kw):
super(self.__class__, self).__init__(**kw)
out = Options.options.out
if not out:
out = getattr(Context.g_module, Context.OUT, None)
if not out:
out = Options.lockfile.replace('.lock-waf', '')
self.build_path = (os.path.isabs(out) and self.root or self.path).make_node(out).abspath()
"""Path to the build directory"""
self.cache_path = os.path.join(self.build_path, Build.CACHE_DIR)
"""Path to the cache directory"""
self.review_path = os.path.join(self.cache_path, 'review.cache')
"""Path to the review cache file"""
def execute(self):
"""
Display and store the review set. Invalidate the cache as required.
"""
if not self.compare_review_set(old_review_set, new_review_set):
self.invalidate_cache()
self.store_review_set(new_review_set)
print(self.display_review_set(new_review_set))
def invalidate_cache(self):
"""Invalidate the cache to prevent bad builds."""
try:
Logs.warn("Removing the cached configuration since the options have changed")
shutil.rmtree(self.cache_path)
		except OSError:
pass
def refresh_review_set(self):
"""
Obtain the old review set and the new review set, and import the new set.
"""
global old_review_set, new_review_set
old_review_set = self.load_review_set()
new_review_set = self.update_review_set(old_review_set)
self.import_review_set(new_review_set)
def load_review_set(self):
"""
Load and return the review set from the cache if it exists.
Otherwise, return an empty set.
"""
if os.path.isfile(self.review_path):
return ConfigSet.ConfigSet(self.review_path)
return ConfigSet.ConfigSet()
def store_review_set(self, review_set):
"""
Store the review set specified in the cache.
"""
if not os.path.isdir(self.cache_path):
os.makedirs(self.cache_path)
review_set.store(self.review_path)
def update_review_set(self, old_set):
"""
Merge the options passed on the command line with those imported
from the previous review set and return the corresponding
preview set.
"""
# Convert value to string. It's important that 'None' maps to
# the empty string.
def val_to_str(val):
if val == None or val == '':
return ''
return str(val)
new_set = ConfigSet.ConfigSet()
opt_dict = Options.options.__dict__
for name in review_options.keys():
# the option is specified explicitly on the command line
if name in opt_dict:
# if the option is the default, pretend it was never specified
if val_to_str(opt_dict[name]) != val_to_str(review_defaults[name]):
new_set[name] = opt_dict[name]
# the option was explicitly specified in a previous command
elif name in old_set:
new_set[name] = old_set[name]
return new_set
def import_review_set(self, review_set):
"""
Import the actual value of the reviewable options in the option
dictionary, given the current review set.
"""
for name in review_options.keys():
if name in review_set:
value = review_set[name]
else:
value = review_defaults[name]
setattr(Options.options, name, value)
def compare_review_set(self, set1, set2):
"""
Return true if the review sets specified are equal.
"""
if len(set1.keys()) != len(set2.keys()):
return False
for key in set1.keys():
if not key in set2 or set1[key] != set2[key]:
return False
return True
def display_review_set(self, review_set):
"""
Return the string representing the review set specified.
"""
term_width = Logs.get_term_cols()
lines = []
for dest in review_options.keys():
opt = review_options[dest]
name = ", ".join(opt._short_opts + opt._long_opts)
help = opt.help
actual = None
if dest in review_set:
actual = review_set[dest]
default = review_defaults[dest]
lines.append(self.format_option(name, help, actual, default, term_width))
return "Configuration:\n\n" + "\n\n".join(lines) + "\n"
def format_option(self, name, help, actual, default, term_width):
"""
Return the string representing the option specified.
"""
def val_to_str(val):
if val == None or val == '':
return "(void)"
return str(val)
max_name_len = 20
sep_len = 2
w = textwrap.TextWrapper()
w.width = term_width - 1
if w.width < 60:
w.width = 60
out = ""
# format the help
out += w.fill(help) + "\n"
# format the name
name_len = len(name)
out += Logs.colors.CYAN + name + Logs.colors.NORMAL
# set the indentation used when the value wraps to the next line
w.subsequent_indent = " ".rjust(max_name_len + sep_len)
w.width -= (max_name_len + sep_len)
# the name string is too long, switch to the next line
if name_len > max_name_len:
out += "\n" + w.subsequent_indent
# fill the remaining of the line with spaces
else:
out += " ".rjust(max_name_len + sep_len - name_len)
# format the actual value, if there is one
if actual != None:
out += Logs.colors.BOLD + w.fill(val_to_str(actual)) + Logs.colors.NORMAL + "\n" + w.subsequent_indent
# format the default value
default_fmt = val_to_str(default)
if actual != None:
default_fmt = "default: " + default_fmt
out += Logs.colors.NORMAL + w.fill(default_fmt) + Logs.colors.NORMAL
return out
# Monkey-patch ConfigurationContext.execute() to have it store the review set.
old_configure_execute = Configure.ConfigurationContext.execute
def new_configure_execute(self):
old_configure_execute(self)
Context.create_context('review').store_review_set(new_review_set)
Configure.ConfigurationContext.execute = new_configure_execute
| 9,090 | Python | .py | 268 | 30.794776 | 122 | 0.706218 | projecthamster/hamster | 1,069 | 250 | 128 | GPL-3.0 | 9/5/2024, 5:12:46 PM (Europe/Amsterdam) |
20,676 | fc_cray.py | projecthamster_hamster/waflib/extras/fc_cray.py | #! /usr/bin/env python
# encoding: utf-8
# harald at klimachs.de
import re
from waflib.Tools import fc, fc_config, fc_scan
from waflib.Configure import conf
from waflib.Tools.compiler_fc import fc_compiler
fc_compiler['linux'].append('fc_cray')
@conf
def find_crayftn(conf):
"""Find the Cray fortran compiler (will look in the environment variable 'FC')"""
fc = conf.find_program(['crayftn'], var='FC')
conf.get_crayftn_version(fc)
conf.env.FC_NAME = 'CRAY'
conf.env.FC_MOD_CAPITALIZATION = 'UPPER.mod'
@conf
def crayftn_flags(conf):
v = conf.env
v['_FCMODOUTFLAGS'] = ['-em', '-J.'] # enable module files and put them in the current directory
v['FCFLAGS_DEBUG'] = ['-m1'] # more verbose compiler warnings
v['FCFLAGS_fcshlib'] = ['-h pic']
v['LINKFLAGS_fcshlib'] = ['-h shared']
v['FCSTLIB_MARKER'] = '-h static'
v['FCSHLIB_MARKER'] = '-h dynamic'
@conf
def get_crayftn_version(conf, fc):
version_re = re.compile(r"Cray Fortran\s*:\s*Version\s*(?P<major>\d*)\.(?P<minor>\d*)", re.I).search
cmd = fc + ['-V']
out,err = fc_config.getoutput(conf, cmd, stdin=False)
if out:
match = version_re(out)
else:
match = version_re(err)
if not match:
conf.fatal('Could not determine the Cray Fortran compiler version.')
k = match.groupdict()
conf.env['FC_VERSION'] = (k['major'], k['minor'])
def configure(conf):
conf.find_crayftn()
conf.find_ar()
conf.fc_flags()
conf.fc_add_flags()
conf.crayftn_flags()
| 1,446 | Python | .py | 43 | 31.44186 | 102 | 0.693907 | projecthamster/hamster | 1,069 | 250 | 128 | GPL-3.0 | 9/5/2024, 5:12:46 PM (Europe/Amsterdam) |
20,677 | blender.py | projecthamster_hamster/waflib/extras/blender.py | #!/usr/bin/env python
# encoding: utf-8
# Michal Proszek, 2014 (poxip)
"""
Detect the version of Blender, path
and install the extension:
def options(opt):
opt.load('blender')
def configure(cnf):
cnf.load('blender')
def build(bld):
bld(name='io_mesh_raw',
		features='blender_list',
files=['file1.py', 'file2.py']
)
If name variable is empty, files are installed in scripts/addons, otherwise scripts/addons/name
Use ./waf configure --system to set the installation directory to system path
"""
import os
import re
from getpass import getuser
from waflib import Utils
from waflib.TaskGen import feature
from waflib.Configure import conf
def options(opt):
opt.add_option(
'-s', '--system',
dest='directory_system',
default=False,
action='store_true',
help='determines installation directory (default: user)'
)
@conf
def find_blender(ctx):
'''Return version number of blender, if not exist return None'''
blender = ctx.find_program('blender')
output = ctx.cmd_and_log(blender + ['--version'])
m = re.search(r'Blender\s*((\d+(\.|))*)', output)
if not m:
ctx.fatal('Could not retrieve blender version')
try:
blender_version = m.group(1)
except IndexError:
ctx.fatal('Could not retrieve blender version')
ctx.env['BLENDER_VERSION'] = blender_version
return blender
@conf
def configure_paths(ctx):
"""Setup blender paths"""
# Get the username
user = getuser()
_platform = Utils.unversioned_sys_platform()
config_path = {'user': '', 'system': ''}
if _platform.startswith('linux'):
config_path['user'] = '/home/%s/.config/blender/' % user
config_path['system'] = '/usr/share/blender/'
elif _platform == 'darwin':
# MAC OS X
config_path['user'] = \
'/Users/%s/Library/Application Support/Blender/' % user
config_path['system'] = '/Library/Application Support/Blender/'
elif Utils.is_win32:
# Windows
appdata_path = ctx.getenv('APPDATA').replace('\\', '/')
homedrive = ctx.getenv('HOMEDRIVE').replace('\\', '/')
config_path['user'] = '%s/Blender Foundation/Blender/' % appdata_path
config_path['system'] = \
'%sAll Users/AppData/Roaming/Blender Foundation/Blender/' % homedrive
else:
ctx.fatal(
'Unsupported platform. '
'Available platforms: Linux, OSX, MS-Windows.'
)
blender_version = ctx.env['BLENDER_VERSION']
config_path['user'] += blender_version + '/'
config_path['system'] += blender_version + '/'
ctx.env['BLENDER_CONFIG_DIR'] = os.path.abspath(config_path['user'])
if ctx.options.directory_system:
ctx.env['BLENDER_CONFIG_DIR'] = config_path['system']
ctx.env['BLENDER_ADDONS_DIR'] = os.path.join(
ctx.env['BLENDER_CONFIG_DIR'], 'scripts/addons'
)
Utils.check_dir(ctx.env['BLENDER_ADDONS_DIR'])
def configure(ctx):
ctx.find_blender()
ctx.configure_paths()
@feature('blender_list')
def blender(self):
# Two ways to install a blender extension: as a module or just .py files
dest_dir = os.path.join(self.env.BLENDER_ADDONS_DIR, self.get_name())
Utils.check_dir(dest_dir)
self.add_install_files(install_to=dest_dir, install_from=getattr(self, 'files', '.'))
| 3,069 | Python | .py | 92 | 31.021739 | 95 | 0.715299 | projecthamster/hamster | 1,069 | 250 | 128 | GPL-3.0 | 9/5/2024, 5:12:46 PM (Europe/Amsterdam) |
20,678 | bjam.py | projecthamster_hamster/waflib/extras/bjam.py | #! /usr/bin/env python
# per rosengren 2011
from os import sep, readlink
from waflib import Logs
from waflib.TaskGen import feature, after_method
from waflib.Task import Task, always_run
def options(opt):
grp = opt.add_option_group('Bjam Options')
grp.add_option('--bjam_src', default=None, help='You can find it in <boost root>/tools/jam/src')
grp.add_option('--bjam_uname', default='linuxx86_64', help='bjam is built in <src>/bin.<uname>/bjam')
grp.add_option('--bjam_config', default=None)
grp.add_option('--bjam_toolset', default=None)
def configure(cnf):
if not cnf.env.BJAM_SRC:
cnf.env.BJAM_SRC = cnf.options.bjam_src
if not cnf.env.BJAM_UNAME:
cnf.env.BJAM_UNAME = cnf.options.bjam_uname
try:
cnf.find_program('bjam', path_list=[
cnf.env.BJAM_SRC + sep + 'bin.' + cnf.env.BJAM_UNAME
])
except Exception:
cnf.env.BJAM = None
if not cnf.env.BJAM_CONFIG:
cnf.env.BJAM_CONFIG = cnf.options.bjam_config
if not cnf.env.BJAM_TOOLSET:
cnf.env.BJAM_TOOLSET = cnf.options.bjam_toolset
@feature('bjam')
@after_method('process_rule')
def process_bjam(self):
if not self.bld.env.BJAM:
self.create_task('bjam_creator')
self.create_task('bjam_build')
self.create_task('bjam_installer')
if getattr(self, 'always', False):
always_run(bjam_creator)
always_run(bjam_build)
always_run(bjam_installer)
class bjam_creator(Task):
ext_out = 'bjam_exe'
vars=['BJAM_SRC', 'BJAM_UNAME']
def run(self):
env = self.env
gen = self.generator
bjam = gen.bld.root.find_dir(env.BJAM_SRC)
if not bjam:
Logs.error('Can not find bjam source')
return -1
bjam_exe_relpath = 'bin.' + env.BJAM_UNAME + '/bjam'
bjam_exe = bjam.find_resource(bjam_exe_relpath)
if bjam_exe:
env.BJAM = bjam_exe.srcpath()
return 0
bjam_cmd = ['./build.sh']
Logs.debug('runner: ' + bjam.srcpath() + '> ' + str(bjam_cmd))
result = self.exec_command(bjam_cmd, cwd=bjam.srcpath())
if not result == 0:
Logs.error('bjam failed')
return -1
bjam_exe = bjam.find_resource(bjam_exe_relpath)
if bjam_exe:
env.BJAM = bjam_exe.srcpath()
return 0
Logs.error('bjam failed')
return -1
class bjam_build(Task):
ext_in = 'bjam_exe'
ext_out = 'install'
vars = ['BJAM_TOOLSET']
def run(self):
env = self.env
gen = self.generator
path = gen.path
bld = gen.bld
if hasattr(gen, 'root'):
build_root = path.find_node(gen.root)
else:
build_root = path
jam = bld.srcnode.find_resource(env.BJAM_CONFIG)
if jam:
Logs.debug('bjam: Using jam configuration from ' + jam.srcpath())
jam_rel = jam.relpath_gen(build_root)
else:
Logs.warn('No build configuration in build_config/user-config.jam. Using default')
jam_rel = None
bjam_exe = bld.srcnode.find_node(env.BJAM)
if not bjam_exe:
Logs.error('env.BJAM is not set')
return -1
bjam_exe_rel = bjam_exe.relpath_gen(build_root)
cmd = ([bjam_exe_rel] +
(['--user-config=' + jam_rel] if jam_rel else []) +
['--stagedir=' + path.get_bld().path_from(build_root)] +
['--debug-configuration'] +
['--with-' + lib for lib in self.generator.target] +
(['toolset=' + env.BJAM_TOOLSET] if env.BJAM_TOOLSET else []) +
['link=' + 'shared'] +
['variant=' + 'release']
)
Logs.debug('runner: ' + build_root.srcpath() + '> ' + str(cmd))
ret = self.exec_command(cmd, cwd=build_root.srcpath())
if ret != 0:
return ret
self.set_outputs(path.get_bld().ant_glob('lib/*') + path.get_bld().ant_glob('bin/*'))
return 0
class bjam_installer(Task):
ext_in = 'install'
def run(self):
gen = self.generator
path = gen.path
for idir, pat in (('${LIBDIR}', 'lib/*'), ('${BINDIR}', 'bin/*')):
files = []
for n in path.get_bld().ant_glob(pat):
try:
t = readlink(n.srcpath())
gen.bld.symlink_as(sep.join([idir, n.name]), t, postpone=False)
except OSError:
files.append(n)
gen.bld.install_files(idir, files, postpone=False)
return 0
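# Hedged usage sketch (boost layout and library names hypothetical; pass
# --bjam_src and the other options above as required):
#
# def options(opt):
#     opt.load('bjam')
#
# def configure(cnf):
#     cnf.load('bjam')
#
# def build(bld):
#     bld(features='bjam', target=['system', 'filesystem'], root='boost_src')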
| 3,893 | Python | .py | 120 | 29.475 | 102 | 0.678353 | projecthamster/hamster | 1,069 | 250 | 128 | GPL-3.0 | 9/5/2024, 5:12:46 PM (Europe/Amsterdam) |
20,679 | remote.py | projecthamster_hamster/waflib/extras/remote.py | #!/usr/bin/env python
# encoding: utf-8
# Remote Builds tool using rsync+ssh
__author__ = "Jérôme Carretero <cJ-waf@zougloub.eu>"
__copyright__ = "Jérôme Carretero, 2013"
"""
Simple Remote Builds
********************
This tool is an *experimental* tool (meaning, do not even try to pollute
the waf bug tracker with bugs in here, contact me directly) providing simple
remote builds.
It uses rsync and ssh to perform the remote builds.
It is intended for performing cross-compilation on platforms where
a cross-compiler is either unavailable (eg. MacOS, QNX), a specific product
does not exist (eg. Windows builds using Visual Studio), or is simply not installed.
This tool sends the sources and the waf script to the remote host,
and commands the usual waf execution.
There are alternatives to using this tool, such as setting up shared folders,
logging on to remote machines, and building on the shared folders.
Electing one method or another depends on the size of the program.
Usage
=====
1. Set your wscript file so it includes a list of variants,
e.g.::
from waflib import Utils
top = '.'
out = 'build'
variants = [
'linux_64_debug',
'linux_64_release',
'linux_32_debug',
'linux_32_release',
]
from waflib.extras import remote
def options(opt):
# normal stuff from here on
opt.load('compiler_c')
def configure(conf):
if not conf.variant:
return
# normal stuff from here on
conf.load('compiler_c')
def build(bld):
if not bld.variant:
return
# normal stuff from here on
bld(features='c cprogram', target='app', source='main.c')
2. Build the waf file, so it includes this tool, and put it in the current
directory
.. code:: bash
./waf-light --tools=remote
3. Set the host names to access the hosts:
.. code:: bash
export REMOTE_QNX=user@kiunix
4. Setup the ssh server and ssh keys
The ssh key should not be protected by a password, or it will prompt for it every time.
Create the key on the client:
.. code:: bash
ssh-keygen -t rsa -f foo.rsa
Then copy foo.rsa.pub to the remote machine (user@kiunix:/home/user/.ssh/authorized_keys),
and make sure the permissions are correct (chmod go-w ~ ~/.ssh ~/.ssh/authorized_keys)
A separate key for the build processes can be set in the environment variable WAF_SSH_KEY.
The tool will then use 'ssh-keyscan' to avoid prompting for remote hosts, so
be warned: this feature should be used on trusted internal networks only (MITM risk).
.. code:: bash
export WAF_SSH_KEY=~/foo.rsa
5. Perform the build:
.. code:: bash
waf configure_all build_all --remote
"""
import getpass, os, re, sys
from collections import OrderedDict
from waflib import Context, Options, Utils, ConfigSet
from waflib.Build import BuildContext, CleanContext, InstallContext, UninstallContext
from waflib.Configure import ConfigurationContext
is_remote = False
if '--remote' in sys.argv:
is_remote = True
sys.argv.remove('--remote')
class init(Context.Context):
"""
Generates the *_all commands
"""
cmd = 'init'
fun = 'init'
def execute(self):
for x in list(Context.g_module.variants):
self.make_variant(x)
lst = ['remote']
for k in Options.commands:
if k.endswith('_all'):
name = k.replace('_all', '')
for x in Context.g_module.variants:
lst.append('%s_%s' % (name, x))
else:
lst.append(k)
del Options.commands[:]
Options.commands += lst
def make_variant(self, x):
for y in (BuildContext, CleanContext, InstallContext, UninstallContext):
name = y.__name__.replace('Context','').lower()
class tmp(y):
cmd = name + '_' + x
fun = 'build'
variant = x
class tmp(ConfigurationContext):
cmd = 'configure_' + x
fun = 'configure'
variant = x
def __init__(self, **kw):
ConfigurationContext.__init__(self, **kw)
self.setenv(x)
class remote(BuildContext):
cmd = 'remote'
fun = 'build'
def get_ssh_hosts(self):
lst = []
for v in Context.g_module.variants:
self.env.HOST = self.login_to_host(self.variant_to_login(v))
cmd = Utils.subst_vars('${SSH_KEYSCAN} -t rsa,ecdsa ${HOST}', self.env)
out, err = self.cmd_and_log(cmd, output=Context.BOTH, quiet=Context.BOTH)
lst.append(out.strip())
return lst
def setup_private_ssh_key(self):
"""
When WAF_SSH_KEY points to a private key, a .ssh directory will be created in the build directory
Make sure that the ssh key does not prompt for a password
"""
key = os.environ.get('WAF_SSH_KEY', '')
if not key:
return
if not os.path.isfile(key):
self.fatal('Key in WAF_SSH_KEY must point to a valid file')
self.ssh_dir = os.path.join(self.path.abspath(), 'build', '.ssh')
self.ssh_hosts = os.path.join(self.ssh_dir, 'known_hosts')
self.ssh_key = os.path.join(self.ssh_dir, os.path.basename(key))
self.ssh_config = os.path.join(self.ssh_dir, 'config')
for x in self.ssh_hosts, self.ssh_key, self.ssh_config:
if not os.path.isfile(x):
if not os.path.isdir(self.ssh_dir):
os.makedirs(self.ssh_dir)
Utils.writef(self.ssh_key, Utils.readf(key), 'wb')
os.chmod(self.ssh_key, 448)
Utils.writef(self.ssh_hosts, '\n'.join(self.get_ssh_hosts()))
os.chmod(self.ssh_key, 448)
Utils.writef(self.ssh_config, 'UserKnownHostsFile %s' % self.ssh_hosts, 'wb')
os.chmod(self.ssh_config, 448)
self.env.SSH_OPTS = ['-F', self.ssh_config, '-i', self.ssh_key]
self.env.append_value('RSYNC_SEND_OPTS', '--exclude=build/.ssh')
def skip_unbuildable_variant(self):
# skip variants that cannot be built on this OS
for k in Options.commands:
a, _, b = k.partition('_')
if b in Context.g_module.variants:
c, _, _ = b.partition('_')
if c != Utils.unversioned_sys_platform():
Options.commands.remove(k)
def login_to_host(self, login):
return re.sub(r'(\w+@)', '', login)
def variant_to_login(self, variant):
"""linux_32_debug -> search env.LINUX_32 and then env.LINUX"""
x = variant[:variant.rfind('_')]
ret = os.environ.get('REMOTE_' + x.upper(), '')
if not ret:
x = x[:x.find('_')]
ret = os.environ.get('REMOTE_' + x.upper(), '')
if not ret:
ret = '%s@localhost' % getpass.getuser()
return ret
def execute(self):
global is_remote
if not is_remote:
self.skip_unbuildable_variant()
else:
BuildContext.execute(self)
def restore(self):
self.top_dir = os.path.abspath(Context.g_module.top)
self.srcnode = self.root.find_node(self.top_dir)
self.path = self.srcnode
self.out_dir = os.path.join(self.top_dir, Context.g_module.out)
self.bldnode = self.root.make_node(self.out_dir)
self.bldnode.mkdir()
self.env = ConfigSet.ConfigSet()
def extract_groups_of_builds(self):
"""Return a dict mapping each variants to the commands to build"""
self.vgroups = {}
for x in reversed(Options.commands):
_, _, variant = x.partition('_')
if variant in Context.g_module.variants:
try:
dct = self.vgroups[variant]
except KeyError:
dct = self.vgroups[variant] = OrderedDict()
try:
dct[variant].append(x)
except KeyError:
dct[variant] = [x]
Options.commands.remove(x)
def custom_options(self, login):
try:
return Context.g_module.host_options[login]
except (AttributeError, KeyError):
return {}
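	# Hedged example (names hypothetical): per-build overrides may be supplied
	# from the wscript module; recurse() below passes the variant group key to
	# custom_options, and each entry updates the task environment:
	#
	# host_options = {
	#     'linux_64_debug': {'RSYNC_SAVE_OPTS': '--exclude=*.o'},
	# }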
def recurse(self, *k, **kw):
self.env.RSYNC = getattr(Context.g_module, 'rsync', 'rsync -a --chmod=u+rwx')
self.env.SSH = getattr(Context.g_module, 'ssh', 'ssh')
self.env.SSH_KEYSCAN = getattr(Context.g_module, 'ssh_keyscan', 'ssh-keyscan')
try:
self.env.WAF = getattr(Context.g_module, 'waf')
except AttributeError:
try:
os.stat('waf')
			except OSError:
self.fatal('Put a waf file in the directory (./waf-light --tools=remote)')
else:
self.env.WAF = './waf'
self.extract_groups_of_builds()
self.setup_private_ssh_key()
for k, v in self.vgroups.items():
task = self(rule=rsync_and_ssh, always=True)
task.env.login = self.variant_to_login(k)
task.env.commands = []
for opt, value in v.items():
task.env.commands += value
task.env.variant = task.env.commands[0].partition('_')[2]
			for opt, value in self.custom_options(k).items():
task.env[opt] = value
self.jobs = len(self.vgroups)
def make_mkdir_command(self, task):
return Utils.subst_vars('${SSH} ${SSH_OPTS} ${login} "rm -fr ${remote_dir} && mkdir -p ${remote_dir}"', task.env)
def make_send_command(self, task):
return Utils.subst_vars('${RSYNC} ${RSYNC_SEND_OPTS} -e "${SSH} ${SSH_OPTS}" ${local_dir} ${login}:${remote_dir}', task.env)
def make_exec_command(self, task):
txt = '''${SSH} ${SSH_OPTS} ${login} "cd ${remote_dir} && ${WAF} ${commands}"'''
return Utils.subst_vars(txt, task.env)
def make_save_command(self, task):
return Utils.subst_vars('${RSYNC} ${RSYNC_SAVE_OPTS} -e "${SSH} ${SSH_OPTS}" ${login}:${remote_dir_variant} ${build_dir}', task.env)
def rsync_and_ssh(task):
# remove a warning
task.uid_ = id(task)
bld = task.generator.bld
task.env.user, _, _ = task.env.login.partition('@')
task.env.hdir = Utils.to_hex(Utils.h_list((task.generator.path.abspath(), task.env.variant)))
task.env.remote_dir = '~%s/wafremote/%s' % (task.env.user, task.env.hdir)
task.env.local_dir = bld.srcnode.abspath() + '/'
task.env.remote_dir_variant = '%s/%s/%s' % (task.env.remote_dir, Context.g_module.out, task.env.variant)
task.env.build_dir = bld.bldnode.abspath()
ret = task.exec_command(bld.make_mkdir_command(task))
if ret:
return ret
ret = task.exec_command(bld.make_send_command(task))
if ret:
return ret
ret = task.exec_command(bld.make_exec_command(task))
if ret:
return ret
ret = task.exec_command(bld.make_save_command(task))
if ret:
return ret
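# For reference, against a hypothetical host 'user@host' the four steps above
# expand to commands of roughly this shape (paths abbreviated):
#   ssh -F <.ssh/config> -i <.ssh/key> user@host "rm -fr ~user/wafremote/<hash> && mkdir -p ~user/wafremote/<hash>"
#   rsync -a --chmod=u+rwx -e "ssh ..." <top_dir>/ user@host:~user/wafremote/<hash>
#   ssh ... user@host "cd ~user/wafremote/<hash> && ./waf <commands>"
#   rsync ... user@host:~user/wafremote/<hash>/<out>/<variant> <local build dir>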
| 9,786 | Python | .py | 259 | 33.926641 | 134 | 0.684611 | projecthamster/hamster | 1,069 | 250 | 128 | GPL-3.0 | 9/5/2024, 5:12:46 PM (Europe/Amsterdam) |
20,680 | qt4.py | projecthamster_hamster/waflib/extras/qt4.py | #!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006-2010 (ita)
"""
Tool Description
================
This tool helps with finding Qt4 tools and libraries,
and also provides syntactic sugar for using Qt4 tools.
The following snippet illustrates the tool usage::
def options(opt):
opt.load('compiler_cxx qt4')
def configure(conf):
conf.load('compiler_cxx qt4')
def build(bld):
bld(
features = 'qt4 cxx cxxprogram',
uselib = 'QTCORE QTGUI QTOPENGL QTSVG',
source = 'main.cpp textures.qrc aboutDialog.ui',
target = 'window',
)
Here, the UI description and resource files will be processed
to generate code.
Usage
=====
Load the "qt4" tool.
You also need to edit your sources accordingly:
- the normal way of doing things is to have your C++ files
include the .moc file.
This is regarded as the best practice (and provides much faster
compilations).
It also implies that the include paths have been set properly.
- to have the include paths added automatically, use the following::
from waflib.TaskGen import feature, before_method, after_method
@feature('cxx')
@after_method('process_source')
@before_method('apply_incpaths')
def add_includes_paths(self):
incs = set(self.to_list(getattr(self, 'includes', '')))
for x in self.compiled_tasks:
incs.add(x.inputs[0].parent.path_from(self.path))
self.includes = sorted(incs)
Note: another tool provides Qt processing that does not require
.moc includes, see 'playground/slow_qt/'.
A few options (--qt{dir,bin,...}) and environment variables
(QT4_{ROOT,DIR,MOC,UIC,XCOMPILE}) allow finer tuning of the tool,
tool path selection, etc; please read the source for more info.
"""
try:
from xml.sax import make_parser
from xml.sax.handler import ContentHandler
except ImportError:
has_xml = False
ContentHandler = object
else:
has_xml = True
import os, sys
from waflib.Tools import cxx
from waflib import Task, Utils, Options, Errors, Context
from waflib.TaskGen import feature, after_method, extension
from waflib.Configure import conf
from waflib import Logs
MOC_H = ['.h', '.hpp', '.hxx', '.hh']
"""
File extensions associated to the .moc files
"""
EXT_RCC = ['.qrc']
"""
File extension for the resource (.qrc) files
"""
EXT_UI = ['.ui']
"""
File extension for the user interface (.ui) files
"""
EXT_QT4 = ['.cpp', '.cc', '.cxx', '.C']
"""
File extensions of C++ files that may require a .moc processing
"""
QT4_LIBS = "QtCore QtGui QtUiTools QtNetwork QtOpenGL QtSql QtSvg QtTest QtXml QtXmlPatterns QtWebKit Qt3Support QtHelp QtScript QtDeclarative QtDesigner"
class qxx(Task.classes['cxx']):
"""
Each C++ file can have zero or several .moc files to create.
They are known only when the files are scanned (preprocessor)
To avoid scanning the c++ files each time (parsing C/C++), the results
are retrieved from the task cache (bld.node_deps/bld.raw_deps).
The moc tasks are also created *dynamically* during the build.
"""
def __init__(self, *k, **kw):
Task.Task.__init__(self, *k, **kw)
self.moc_done = 0
def runnable_status(self):
"""
Compute the task signature to make sure the scanner was executed. Create the
moc tasks by using :py:meth:`waflib.Tools.qt4.qxx.add_moc_tasks` (if necessary),
then postpone the task execution (there is no need to recompute the task signature).
"""
if self.moc_done:
return Task.Task.runnable_status(self)
else:
for t in self.run_after:
if not t.hasrun:
return Task.ASK_LATER
self.add_moc_tasks()
return Task.Task.runnable_status(self)
def create_moc_task(self, h_node, m_node):
"""
If several libraries use the same classes, it is possible that moc will run several times (Issue 1318)
It is not possible to change the file names, but we can assume that the moc transformation will be identical,
and the moc tasks can be shared in a global cache.
The defines passed to moc will then depend on task generator order. If this is not acceptable, then
use the tool slow_qt4 instead (and enjoy the slow builds... :-( )
"""
try:
moc_cache = self.generator.bld.moc_cache
except AttributeError:
moc_cache = self.generator.bld.moc_cache = {}
try:
return moc_cache[h_node]
except KeyError:
tsk = moc_cache[h_node] = Task.classes['moc'](env=self.env, generator=self.generator)
tsk.set_inputs(h_node)
tsk.set_outputs(m_node)
if self.generator:
self.generator.tasks.append(tsk)
# direct injection in the build phase (safe because called from the main thread)
gen = self.generator.bld.producer
gen.outstanding.append(tsk)
gen.total += 1
return tsk
def moc_h_ext(self):
ext = []
try:
ext = Options.options.qt_header_ext.split()
except AttributeError:
pass
if not ext:
ext = MOC_H
return ext
def add_moc_tasks(self):
"""
Create the moc tasks by looking in ``bld.raw_deps[self.uid()]``
"""
node = self.inputs[0]
bld = self.generator.bld
try:
# compute the signature once to know if there is a moc file to create
self.signature()
except KeyError:
# the moc file may be referenced somewhere else
pass
else:
# remove the signature, it must be recomputed with the moc task
delattr(self, 'cache_sig')
include_nodes = [node.parent] + self.generator.includes_nodes
moctasks = []
mocfiles = set()
for d in bld.raw_deps.get(self.uid(), []):
if not d.endswith('.moc'):
continue
# process that base.moc only once
if d in mocfiles:
continue
mocfiles.add(d)
# find the source associated with the moc file
h_node = None
base2 = d[:-4]
for x in include_nodes:
for e in self.moc_h_ext():
h_node = x.find_node(base2 + e)
if h_node:
break
if h_node:
m_node = h_node.change_ext('.moc')
break
else:
# foo.cpp -> foo.cpp.moc
for k in EXT_QT4:
if base2.endswith(k):
for x in include_nodes:
h_node = x.find_node(base2)
if h_node:
break
if h_node:
m_node = h_node.change_ext(k + '.moc')
break
if not h_node:
raise Errors.WafError('No source found for %r which is a moc file' % d)
# create the moc task
task = self.create_moc_task(h_node, m_node)
moctasks.append(task)
# simple scheduler dependency: run the moc task before others
self.run_after.update(set(moctasks))
self.moc_done = 1
class trans_update(Task.Task):
"""Update a .ts files from a list of C++ files"""
run_str = '${QT_LUPDATE} ${SRC} -ts ${TGT}'
color = 'BLUE'
class XMLHandler(ContentHandler):
"""
Parser for *.qrc* files
"""
def __init__(self):
self.buf = []
self.files = []
def startElement(self, name, attrs):
if name == 'file':
self.buf = []
def endElement(self, name):
if name == 'file':
self.files.append(str(''.join(self.buf)))
def characters(self, cars):
self.buf.append(cars)
@extension(*EXT_RCC)
def create_rcc_task(self, node):
"Create rcc and cxx tasks for *.qrc* files"
rcnode = node.change_ext('_rc.cpp')
self.create_task('rcc', node, rcnode)
cpptask = self.create_task('cxx', rcnode, rcnode.change_ext('.o'))
try:
self.compiled_tasks.append(cpptask)
except AttributeError:
self.compiled_tasks = [cpptask]
return cpptask
@extension(*EXT_UI)
def create_uic_task(self, node):
"hook for uic tasks"
uictask = self.create_task('ui4', node)
uictask.outputs = [self.path.find_or_declare(self.env['ui_PATTERN'] % node.name[:-3])]
@extension('.ts')
def add_lang(self, node):
"""add all the .ts file into self.lang"""
self.lang = self.to_list(getattr(self, 'lang', [])) + [node]
@feature('qt4')
@after_method('apply_link')
def apply_qt4(self):
"""
Add MOC_FLAGS which may be necessary for moc::
def build(bld):
bld.program(features='qt4', source='main.cpp', target='app', use='QTCORE')
The additional parameters are:
:param lang: list of translation files (\\*.ts) to process
:type lang: list of :py:class:`waflib.Node.Node` or string without the .ts extension
:param update: whether to process the C++ files to update the \\*.ts files (use **waf --translate**)
:type update: bool
	:param langname: if given, transform the \*.ts files into a .qrc file to include in the binary file
:type langname: :py:class:`waflib.Node.Node` or string without the .qrc extension
"""
if getattr(self, 'lang', None):
qmtasks = []
for x in self.to_list(self.lang):
if isinstance(x, str):
x = self.path.find_resource(x + '.ts')
qmtasks.append(self.create_task('ts2qm', x, x.change_ext('.qm')))
if getattr(self, 'update', None) and Options.options.trans_qt4:
cxxnodes = [a.inputs[0] for a in self.compiled_tasks] + [
a.inputs[0] for a in self.tasks if getattr(a, 'inputs', None) and a.inputs[0].name.endswith('.ui')]
for x in qmtasks:
self.create_task('trans_update', cxxnodes, x.inputs)
if getattr(self, 'langname', None):
qmnodes = [x.outputs[0] for x in qmtasks]
rcnode = self.langname
if isinstance(rcnode, str):
rcnode = self.path.find_or_declare(rcnode + '.qrc')
t = self.create_task('qm2rcc', qmnodes, rcnode)
k = create_rcc_task(self, t.outputs[0])
self.link_task.inputs.append(k.outputs[0])
lst = []
for flag in self.to_list(self.env['CXXFLAGS']):
if len(flag) < 2:
continue
f = flag[0:2]
if f in ('-D', '-I', '/D', '/I'):
if (f[0] == '/'):
lst.append('-' + flag[1:])
else:
lst.append(flag)
self.env.append_value('MOC_FLAGS', lst)
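	# For instance (hypothetical flags), CXXFLAGS of ['-O2', '-DFOO', '/Iinc']
	# contributes ['-DFOO', '-Iinc'] to MOC_FLAGS: only -D/-I (and the MSVC
	# spellings /D, /I) are forwarded, and a leading '/' is normalized to '-'.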
@extension(*EXT_QT4)
def cxx_hook(self, node):
"""
Re-map C++ file extensions to the :py:class:`waflib.Tools.qt4.qxx` task.
"""
return self.create_compiled_task('qxx', node)
class rcc(Task.Task):
"""
Process *.qrc* files
"""
color = 'BLUE'
run_str = '${QT_RCC} -name ${tsk.rcname()} ${SRC[0].abspath()} ${RCC_ST} -o ${TGT}'
ext_out = ['.h']
def rcname(self):
return os.path.splitext(self.inputs[0].name)[0]
def scan(self):
"""Parse the *.qrc* files"""
if not has_xml:
Logs.error('no xml support was found, the rcc dependencies will be incomplete!')
return ([], [])
parser = make_parser()
curHandler = XMLHandler()
parser.setContentHandler(curHandler)
fi = open(self.inputs[0].abspath(), 'r')
try:
parser.parse(fi)
finally:
fi.close()
nodes = []
names = []
root = self.inputs[0].parent
for x in curHandler.files:
nd = root.find_resource(x)
if nd:
nodes.append(nd)
else:
names.append(x)
return (nodes, names)
class moc(Task.Task):
"""
Create *.moc* files
"""
color = 'BLUE'
run_str = '${QT_MOC} ${MOC_FLAGS} ${MOCCPPPATH_ST:INCPATHS} ${MOCDEFINES_ST:DEFINES} ${SRC} ${MOC_ST} ${TGT}'
def keyword(self):
return "Creating"
def __str__(self):
return self.outputs[0].path_from(self.generator.bld.launch_node())
class ui4(Task.Task):
"""
Process *.ui* files
"""
color = 'BLUE'
run_str = '${QT_UIC} ${SRC} -o ${TGT}'
ext_out = ['.h']
class ts2qm(Task.Task):
"""
Create *.qm* files from *.ts* files
"""
color = 'BLUE'
run_str = '${QT_LRELEASE} ${QT_LRELEASE_FLAGS} ${SRC} -qm ${TGT}'
class qm2rcc(Task.Task):
"""
	Transform *.qm* files into *.qrc* files
"""
color = 'BLUE'
after = 'ts2qm'
def run(self):
"""Create a qrc file including the inputs"""
txt = '\n'.join(['<file>%s</file>' % k.path_from(self.outputs[0].parent) for k in self.inputs])
code = '<!DOCTYPE RCC><RCC version="1.0">\n<qresource>\n%s\n</qresource>\n</RCC>' % txt
self.outputs[0].write(code)
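		# With hypothetical inputs de.qm and fr.qm next to the output, the file reads:
		#   <!DOCTYPE RCC><RCC version="1.0">
		#   <qresource>
		#   <file>de.qm</file>
		#   <file>fr.qm</file>
		#   </qresource>
		#   </RCC>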
def configure(self):
"""
Besides the configuration options, the environment variable QT4_ROOT may be used
to give the location of the qt4 libraries (absolute path).
The detection will use the program *pkg-config* through :py:func:`waflib.Tools.config_c.check_cfg`
"""
self.find_qt4_binaries()
self.set_qt4_libs_to_check()
self.set_qt4_defines()
self.find_qt4_libraries()
self.add_qt4_rpath()
self.simplify_qt4_libs()
@conf
def find_qt4_binaries(self):
env = self.env
opt = Options.options
qtdir = getattr(opt, 'qtdir', '')
qtbin = getattr(opt, 'qtbin', '')
paths = []
if qtdir:
qtbin = os.path.join(qtdir, 'bin')
# the qt directory has been given from QT4_ROOT - deduce the qt binary path
if not qtdir:
qtdir = os.environ.get('QT4_ROOT', '')
qtbin = os.environ.get('QT4_BIN') or os.path.join(qtdir, 'bin')
if qtbin:
paths = [qtbin]
# no qtdir, look in the path and in /usr/local/Trolltech
if not qtdir:
paths = os.environ.get('PATH', '').split(os.pathsep)
paths.append('/usr/share/qt4/bin/')
try:
lst = Utils.listdir('/usr/local/Trolltech/')
except OSError:
pass
else:
if lst:
lst.sort()
lst.reverse()
# keep the highest version
qtdir = '/usr/local/Trolltech/%s/' % lst[0]
qtbin = os.path.join(qtdir, 'bin')
paths.append(qtbin)
# at the end, try to find qmake in the paths given
# keep the one with the highest version
cand = None
prev_ver = ['4', '0', '0']
for qmk in ('qmake-qt4', 'qmake4', 'qmake'):
try:
qmake = self.find_program(qmk, path_list=paths)
except self.errors.ConfigurationError:
pass
else:
try:
version = self.cmd_and_log(qmake + ['-query', 'QT_VERSION']).strip()
except self.errors.WafError:
pass
else:
if version:
new_ver = version.split('.')
if new_ver > prev_ver:
cand = qmake
prev_ver = new_ver
if cand:
self.env.QMAKE = cand
else:
self.fatal('Could not find qmake for qt4')
qtbin = self.cmd_and_log(self.env.QMAKE + ['-query', 'QT_INSTALL_BINS']).strip() + os.sep
def find_bin(lst, var):
if var in env:
return
for f in lst:
try:
ret = self.find_program(f, path_list=paths)
except self.errors.ConfigurationError:
pass
else:
env[var]=ret
break
find_bin(['uic-qt3', 'uic3'], 'QT_UIC3')
find_bin(['uic-qt4', 'uic'], 'QT_UIC')
if not env.QT_UIC:
self.fatal('cannot find the uic compiler for qt4')
self.start_msg('Checking for uic version')
uicver = self.cmd_and_log(env.QT_UIC + ["-version"], output=Context.BOTH)
uicver = ''.join(uicver).strip()
uicver = uicver.replace('Qt User Interface Compiler ','').replace('User Interface Compiler for Qt', '')
self.end_msg(uicver)
if uicver.find(' 3.') != -1:
self.fatal('this uic compiler is for qt3, add uic for qt4 to your path')
find_bin(['moc-qt4', 'moc'], 'QT_MOC')
find_bin(['rcc-qt4', 'rcc'], 'QT_RCC')
find_bin(['lrelease-qt4', 'lrelease'], 'QT_LRELEASE')
find_bin(['lupdate-qt4', 'lupdate'], 'QT_LUPDATE')
env['UIC3_ST']= '%s -o %s'
env['UIC_ST'] = '%s -o %s'
env['MOC_ST'] = '-o'
env['ui_PATTERN'] = 'ui_%s.h'
env['QT_LRELEASE_FLAGS'] = ['-silent']
env.MOCCPPPATH_ST = '-I%s'
env.MOCDEFINES_ST = '-D%s'
@conf
def find_qt4_libraries(self):
qtlibs = getattr(Options.options, 'qtlibs', None) or os.environ.get("QT4_LIBDIR")
if not qtlibs:
try:
qtlibs = self.cmd_and_log(self.env.QMAKE + ['-query', 'QT_INSTALL_LIBS']).strip()
except Errors.WafError:
qtdir = self.cmd_and_log(self.env.QMAKE + ['-query', 'QT_INSTALL_PREFIX']).strip() + os.sep
qtlibs = os.path.join(qtdir, 'lib')
self.msg('Found the Qt4 libraries in', qtlibs)
qtincludes = os.environ.get("QT4_INCLUDES") or self.cmd_and_log(self.env.QMAKE + ['-query', 'QT_INSTALL_HEADERS']).strip()
env = self.env
if not 'PKG_CONFIG_PATH' in os.environ:
os.environ['PKG_CONFIG_PATH'] = '%s:%s/pkgconfig:/usr/lib/qt4/lib/pkgconfig:/opt/qt4/lib/pkgconfig:/usr/lib/qt4/lib:/opt/qt4/lib' % (qtlibs, qtlibs)
try:
if os.environ.get("QT4_XCOMPILE"):
raise self.errors.ConfigurationError()
self.check_cfg(atleast_pkgconfig_version='0.1')
except self.errors.ConfigurationError:
for i in self.qt4_vars:
uselib = i.upper()
if Utils.unversioned_sys_platform() == "darwin":
				# Since at least qt 4.7.3, each library is located in a separate directory
frameworkName = i + ".framework"
qtDynamicLib = os.path.join(qtlibs, frameworkName, i)
if os.path.exists(qtDynamicLib):
env.append_unique('FRAMEWORK_' + uselib, i)
self.msg('Checking for %s' % i, qtDynamicLib, 'GREEN')
else:
self.msg('Checking for %s' % i, False, 'YELLOW')
env.append_unique('INCLUDES_' + uselib, os.path.join(qtlibs, frameworkName, 'Headers'))
elif env.DEST_OS != "win32":
qtDynamicLib = os.path.join(qtlibs, "lib" + i + ".so")
qtStaticLib = os.path.join(qtlibs, "lib" + i + ".a")
if os.path.exists(qtDynamicLib):
env.append_unique('LIB_' + uselib, i)
self.msg('Checking for %s' % i, qtDynamicLib, 'GREEN')
elif os.path.exists(qtStaticLib):
env.append_unique('LIB_' + uselib, i)
self.msg('Checking for %s' % i, qtStaticLib, 'GREEN')
else:
self.msg('Checking for %s' % i, False, 'YELLOW')
env.append_unique('LIBPATH_' + uselib, qtlibs)
env.append_unique('INCLUDES_' + uselib, qtincludes)
env.append_unique('INCLUDES_' + uselib, os.path.join(qtincludes, i))
else:
# Release library names are like QtCore4
for k in ("lib%s.a", "lib%s4.a", "%s.lib", "%s4.lib"):
lib = os.path.join(qtlibs, k % i)
if os.path.exists(lib):
env.append_unique('LIB_' + uselib, i + k[k.find("%s") + 2 : k.find('.')])
self.msg('Checking for %s' % i, lib, 'GREEN')
break
else:
self.msg('Checking for %s' % i, False, 'YELLOW')
env.append_unique('LIBPATH_' + uselib, qtlibs)
env.append_unique('INCLUDES_' + uselib, qtincludes)
env.append_unique('INCLUDES_' + uselib, os.path.join(qtincludes, i))
# Debug library names are like QtCore4d
uselib = i.upper() + "_debug"
for k in ("lib%sd.a", "lib%sd4.a", "%sd.lib", "%sd4.lib"):
lib = os.path.join(qtlibs, k % i)
if os.path.exists(lib):
env.append_unique('LIB_' + uselib, i + k[k.find("%s") + 2 : k.find('.')])
self.msg('Checking for %s' % i, lib, 'GREEN')
break
else:
self.msg('Checking for %s' % i, False, 'YELLOW')
env.append_unique('LIBPATH_' + uselib, qtlibs)
env.append_unique('INCLUDES_' + uselib, qtincludes)
env.append_unique('INCLUDES_' + uselib, os.path.join(qtincludes, i))
else:
for i in self.qt4_vars_debug + self.qt4_vars:
self.check_cfg(package=i, args='--cflags --libs', mandatory=False)
@conf
def simplify_qt4_libs(self):
# the libpaths make really long command-lines
# remove the qtcore ones from qtgui, etc
env = self.env
def process_lib(vars_, coreval):
for d in vars_:
var = d.upper()
if var == 'QTCORE':
continue
value = env['LIBPATH_'+var]
if value:
core = env[coreval]
accu = []
for lib in value:
if lib in core:
continue
accu.append(lib)
env['LIBPATH_'+var] = accu
process_lib(self.qt4_vars, 'LIBPATH_QTCORE')
process_lib(self.qt4_vars_debug, 'LIBPATH_QTCORE_DEBUG')
@conf
def add_qt4_rpath(self):
# rpath if wanted
env = self.env
if getattr(Options.options, 'want_rpath', False):
def process_rpath(vars_, coreval):
for d in vars_:
var = d.upper()
value = env['LIBPATH_'+var]
if value:
core = env[coreval]
accu = []
for lib in value:
if var != 'QTCORE':
if lib in core:
continue
accu.append('-Wl,--rpath='+lib)
env['RPATH_'+var] = accu
process_rpath(self.qt4_vars, 'LIBPATH_QTCORE')
process_rpath(self.qt4_vars_debug, 'LIBPATH_QTCORE_DEBUG')
@conf
def set_qt4_libs_to_check(self):
if not hasattr(self, 'qt4_vars'):
self.qt4_vars = QT4_LIBS
self.qt4_vars = Utils.to_list(self.qt4_vars)
if not hasattr(self, 'qt4_vars_debug'):
self.qt4_vars_debug = [a + '_debug' for a in self.qt4_vars]
self.qt4_vars_debug = Utils.to_list(self.qt4_vars_debug)
@conf
def set_qt4_defines(self):
if sys.platform != 'win32':
return
for x in self.qt4_vars:
y = x[2:].upper()
self.env.append_unique('DEFINES_%s' % x.upper(), 'QT_%s_LIB' % y)
self.env.append_unique('DEFINES_%s_DEBUG' % x.upper(), 'QT_%s_LIB' % y)
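		# e.g. x == 'QtCore' yields DEFINES_QTCORE (and DEFINES_QTCORE_DEBUG) += ['QT_CORE_LIB']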
def options(opt):
"""
Command-line options
"""
opt.add_option('--want-rpath', action='store_true', default=False, dest='want_rpath', help='enable the rpath for qt libraries')
opt.add_option('--header-ext',
type='string',
default='',
help='header extension for moc files',
dest='qt_header_ext')
for i in 'qtdir qtbin qtlibs'.split():
opt.add_option('--'+i, type='string', default='', dest=i)
opt.add_option('--translate', action="store_true", help="collect translation strings", dest="trans_qt4", default=False)
| 20,306 | Python | .py | 593 | 30.897133 | 154 | 0.674009 | projecthamster/hamster | 1,069 | 250 | 128 | GPL-3.0 | 9/5/2024, 5:12:46 PM (Europe/Amsterdam) |
20,681 | pyqt5.py | projecthamster_hamster/waflib/extras/pyqt5.py | #!/usr/bin/env python
# encoding: utf-8
# Federico Pellegrin, 2016-2022 (fedepell) adapted for Python
"""
This tool helps with finding Python Qt5 tools and libraries,
and provides translation from QT5 files to Python code.
The following snippet illustrates the tool usage::
def options(opt):
opt.load('py pyqt5')
def configure(conf):
conf.load('py pyqt5')
def build(bld):
bld(
features = 'py pyqt5',
source = 'main.py textures.qrc aboutDialog.ui',
)
Here, the UI description and resource files will be processed
to generate code.
Usage
=====
Load the "pyqt5" tool.
Also add the .qrc resource files and .ui definition files to the
source list; they will be translated into Python code with the
system tools (PyQt5, PySide2 and PyQt4 are searched in this
order) and then compiled
"""
try:
from xml.sax import make_parser
from xml.sax.handler import ContentHandler
except ImportError:
has_xml = False
ContentHandler = object
else:
has_xml = True
import os
from waflib.Tools import python
from waflib import Task, Options
from waflib.TaskGen import feature, extension
from waflib.Configure import conf
from waflib import Logs
EXT_RCC = ['.qrc']
"""
File extension for the resource (.qrc) files
"""
EXT_UI = ['.ui']
"""
File extension for the user interface (.ui) files
"""
class XMLHandler(ContentHandler):
"""
Parses ``.qrc`` files
"""
def __init__(self):
self.buf = []
self.files = []
def startElement(self, name, attrs):
if name == 'file':
self.buf = []
def endElement(self, name):
if name == 'file':
self.files.append(str(''.join(self.buf)))
def characters(self, cars):
self.buf.append(cars)
@extension(*EXT_RCC)
def create_pyrcc_task(self, node):
"Creates rcc and py task for ``.qrc`` files"
rcnode = node.change_ext('.py')
self.create_task('pyrcc', node, rcnode)
if getattr(self, 'install_from', None):
self.install_from = self.install_from.get_bld()
else:
self.install_from = self.path.get_bld()
self.install_path = getattr(self, 'install_path', '${PYTHONDIR}')
self.process_py(rcnode)
@extension(*EXT_UI)
def create_pyuic_task(self, node):
"Create uic tasks and py for user interface ``.ui`` definition files"
uinode = node.change_ext('.py')
self.create_task('ui5py', node, uinode)
if getattr(self, 'install_from', None):
self.install_from = self.install_from.get_bld()
else:
self.install_from = self.path.get_bld()
self.install_path = getattr(self, 'install_path', '${PYTHONDIR}')
self.process_py(uinode)
@extension('.ts')
def add_pylang(self, node):
"""Adds all the .ts file into ``self.lang``"""
self.lang = self.to_list(getattr(self, 'lang', [])) + [node]
@feature('pyqt5')
def apply_pyqt5(self):
"""
The additional parameters are:
:param lang: list of translation files (\\*.ts) to process
:type lang: list of :py:class:`waflib.Node.Node` or string without the .ts extension
	:param langname: if given, transform the \*.ts files into a .qrc file to include in the binary file
:type langname: :py:class:`waflib.Node.Node` or string without the .qrc extension
"""
if getattr(self, 'lang', None):
qmtasks = []
for x in self.to_list(self.lang):
if isinstance(x, str):
x = self.path.find_resource(x + '.ts')
qmtasks.append(self.create_task('ts2qm', x, x.change_ext('.qm')))
if getattr(self, 'langname', None):
qmnodes = [k.outputs[0] for k in qmtasks]
rcnode = self.langname
if isinstance(rcnode, str):
rcnode = self.path.find_or_declare(rcnode + '.qrc')
t = self.create_task('qm2rcc', qmnodes, rcnode)
create_pyrcc_task(self, t.outputs[0])
class pyrcc(Task.Task):
"""
Processes ``.qrc`` files
"""
color = 'BLUE'
run_str = '${QT_PYRCC} ${QT_PYRCC_FLAGS} ${SRC} -o ${TGT}'
ext_out = ['.py']
def rcname(self):
return os.path.splitext(self.inputs[0].name)[0]
def scan(self):
"""Parse the *.qrc* files"""
if not has_xml:
Logs.error('No xml.sax support was found, rcc dependencies will be incomplete!')
return ([], [])
parser = make_parser()
curHandler = XMLHandler()
parser.setContentHandler(curHandler)
fi = open(self.inputs[0].abspath(), 'r')
try:
parser.parse(fi)
finally:
fi.close()
nodes = []
names = []
root = self.inputs[0].parent
for x in curHandler.files:
nd = root.find_resource(x)
if nd:
nodes.append(nd)
else:
names.append(x)
return (nodes, names)
class ui5py(Task.Task):
"""
Processes ``.ui`` files for python
"""
color = 'BLUE'
run_str = '${QT_PYUIC} ${QT_PYUIC_FLAGS} ${SRC} -o ${TGT}'
ext_out = ['.py']
class ts2qm(Task.Task):
"""
Generates ``.qm`` files from ``.ts`` files
"""
color = 'BLUE'
run_str = '${QT_LRELEASE} ${QT_LRELEASE_FLAGS} ${SRC} -qm ${TGT}'
class qm2rcc(Task.Task):
"""
Generates ``.qrc`` files from ``.qm`` files
"""
color = 'BLUE'
after = 'ts2qm'
def run(self):
"""Create a qrc file including the inputs"""
txt = '\n'.join(['<file>%s</file>' % k.path_from(self.outputs[0].parent) for k in self.inputs])
code = '<!DOCTYPE RCC><RCC version="1.0">\n<qresource>\n%s\n</qresource>\n</RCC>' % txt
self.outputs[0].write(code)
def configure(self):
self.find_pyqt5_binaries()
# warn about this during the configuration too
if not has_xml:
Logs.error('No xml.sax support was found, rcc dependencies will be incomplete!')
@conf
def find_pyqt5_binaries(self):
"""
Detects PyQt5 or PySide2 programs such as pyuic5/pyside2-uic, pyrcc5/pyside2-rcc
"""
env = self.env
if getattr(Options.options, 'want_pyqt5', True):
self.find_program(['pyuic5'], var='QT_PYUIC')
self.find_program(['pyrcc5'], var='QT_PYRCC')
self.find_program(['pylupdate5'], var='QT_PYLUPDATE')
elif getattr(Options.options, 'want_pyside2', True):
self.find_program(['pyside2-uic','uic-qt5'], var='QT_PYUIC')
self.find_program(['pyside2-rcc','rcc-qt5'], var='QT_PYRCC')
self.find_program(['pyside2-lupdate','lupdate-qt5'], var='QT_PYLUPDATE')
elif getattr(Options.options, 'want_pyqt4', True):
self.find_program(['pyuic4'], var='QT_PYUIC')
self.find_program(['pyrcc4'], var='QT_PYRCC')
self.find_program(['pylupdate4'], var='QT_PYLUPDATE')
else:
self.find_program(['pyuic5','pyside2-uic','pyuic4','uic-qt5'], var='QT_PYUIC')
self.find_program(['pyrcc5','pyside2-rcc','pyrcc4','rcc-qt5'], var='QT_PYRCC')
self.find_program(['pylupdate5', 'pyside2-lupdate','pylupdate4','lupdate-qt5'], var='QT_PYLUPDATE')
if not env.QT_PYUIC:
self.fatal('cannot find the uic compiler for python for qt5')
if not env.QT_PYRCC:
self.fatal('cannot find the rcc compiler for python for qt5')
self.find_program(['lrelease-qt5', 'lrelease'], var='QT_LRELEASE')
def options(opt):
"""
Command-line options
"""
pyqt5opt=opt.add_option_group("Python QT5 Options")
pyqt5opt.add_option('--pyqt5-pyqt5', action='store_true', default=False, dest='want_pyqt5', help='use PyQt5 bindings as python QT5 bindings (default PyQt5 is searched first, PySide2 after, PyQt4 last)')
pyqt5opt.add_option('--pyqt5-pyside2', action='store_true', default=False, dest='want_pyside2', help='use PySide2 bindings as python QT5 bindings (default PyQt5 is searched first, PySide2 after, PyQt4 last)')
pyqt5opt.add_option('--pyqt5-pyqt4', action='store_true', default=False, dest='want_pyqt4', help='use PyQt4 bindings as python QT5 bindings (default PyQt5 is searched first, PySide2 after, PyQt4 last)')
| 7,343 | Python | .py | 207 | 33.062802 | 209 | 0.704946 | projecthamster/hamster | 1,069 | 250 | 128 | GPL-3.0 | 9/5/2024, 5:12:46 PM (Europe/Amsterdam) |
20,682 | parallel_debug.py | projecthamster_hamster/waflib/extras/parallel_debug.py | #! /usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2007-2010 (ita)
"""
Debugging helper for parallel compilation.
Copy it to your project and load it with::
def options(opt):
opt.load('parallel_debug', tooldir='.')
def build(bld):
...
The build will then output a file named pdebug.svg in the source directory.
"""
import re, sys, threading, time, traceback
try:
from Queue import Queue
except:
from queue import Queue
from waflib import Runner, Options, Task, Logs, Errors
SVG_TEMPLATE = """<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.0//EN" "http://www.w3.org/TR/2001/REC-SVG-20010904/DTD/svg10.dtd">
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" version="1.0"
x="${project.x}" y="${project.y}" width="${project.width}" height="${project.height}" id="svg602" xml:space="preserve">
<style type='text/css' media='screen'>
g.over rect { stroke:#FF0000; fill-opacity:0.4 }
</style>
<script type='text/javascript'><![CDATA[
var svg = document.getElementsByTagName('svg')[0];
svg.addEventListener('mouseover', function(e) {
var g = e.target.parentNode;
var x = document.getElementById('r_' + g.id);
if (x) {
g.setAttribute('class', g.getAttribute('class') + ' over');
x.setAttribute('class', x.getAttribute('class') + ' over');
showInfo(e, g.id, e.target.attributes.tooltip.value);
}
}, false);
svg.addEventListener('mouseout', function(e) {
var g = e.target.parentNode;
var x = document.getElementById('r_' + g.id);
if (x) {
g.setAttribute('class', g.getAttribute('class').replace(' over', ''));
x.setAttribute('class', x.getAttribute('class').replace(' over', ''));
hideInfo(e);
}
}, false);
function showInfo(evt, txt, details) {
${if project.tooltip}
tooltip = document.getElementById('tooltip');
var t = document.getElementById('tooltiptext');
t.firstChild.data = txt + " " + details;
var x = evt.clientX + 9;
if (x > 250) { x -= t.getComputedTextLength() + 16; }
var y = evt.clientY + 20;
tooltip.setAttribute("transform", "translate(" + x + "," + y + ")");
tooltip.setAttributeNS(null, "visibility", "visible");
var r = document.getElementById('tooltiprect');
r.setAttribute('width', t.getComputedTextLength() + 6);
${endif}
}
function hideInfo(evt) {
var tooltip = document.getElementById('tooltip');
tooltip.setAttributeNS(null,"visibility","hidden");
}
]]></script>
<!-- inkscape requires a big rectangle or it will not export the pictures properly -->
<rect
x='${project.x}' y='${project.y}' width='${project.width}' height='${project.height}'
style="font-size:10;fill:#ffffff;fill-opacity:0.01;fill-rule:evenodd;stroke:#ffffff;"></rect>
${if project.title}
<text x="${project.title_x}" y="${project.title_y}"
style="font-size:15px; text-anchor:middle; font-style:normal;font-weight:normal;fill:#000000;fill-opacity:1;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;font-family:Bitstream Vera Sans">${project.title}</text>
${endif}
${for cls in project.groups}
<g id='${cls.classname}'>
${for rect in cls.rects}
<rect x='${rect.x}' y='${rect.y}' width='${rect.width}' height='${rect.height}' tooltip='${rect.name}' style="font-size:10;fill:${rect.color};fill-rule:evenodd;stroke:#000000;stroke-width:0.4;" />
${endfor}
</g>
${endfor}
${for info in project.infos}
<g id='r_${info.classname}'>
<rect x='${info.x}' y='${info.y}' width='${info.width}' height='${info.height}' style="font-size:10;fill:${info.color};fill-rule:evenodd;stroke:#000000;stroke-width:0.4;" />
<text x="${info.text_x}" y="${info.text_y}"
style="font-size:12px;font-style:normal;font-weight:normal;fill:#000000;fill-opacity:1;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;font-family:Bitstream Vera Sans"
>${info.text}</text>
</g>
${endfor}
${if project.tooltip}
<g transform="translate(0,0)" visibility="hidden" id="tooltip">
<rect id="tooltiprect" y="-15" x="-3" width="1" height="20" style="stroke:black;fill:#edefc2;stroke-width:1"/>
<text id="tooltiptext" style="font-family:Arial; font-size:12;fill:black;"> </text>
</g>
${endif}
</svg>
"""
COMPILE_TEMPLATE = '''def f(project):
lst = []
def xml_escape(value):
return value.replace("&", "&").replace('"', """).replace("'", "'").replace("<", "<").replace(">", ">")
%s
return ''.join(lst)
'''
reg_act = re.compile(r"(?P<backslash>\\)|(?P<dollar>\$\$)|(?P<subst>\$\{(?P<code>[^}]*?)\})", re.M)
def compile_template(line):
extr = []
def repl(match):
g = match.group
if g('dollar'):
return "$"
elif g('backslash'):
return "\\"
elif g('subst'):
extr.append(g('code'))
return "<<|@|>>"
return None
line2 = reg_act.sub(repl, line)
params = line2.split('<<|@|>>')
assert(extr)
indent = 0
buf = []
def app(txt):
buf.append(indent * '\t' + txt)
for x in range(len(extr)):
if params[x]:
app("lst.append(%r)" % params[x])
f = extr[x]
if f.startswith(('if', 'for')):
app(f + ':')
indent += 1
elif f.startswith('py:'):
app(f[3:])
elif f.startswith(('endif', 'endfor')):
indent -= 1
elif f.startswith(('else', 'elif')):
indent -= 1
app(f + ':')
indent += 1
elif f.startswith('xml:'):
app('lst.append(xml_escape(%s))' % f[4:])
else:
#app('lst.append((%s) or "cannot find %s")' % (f, f))
app('lst.append(str(%s))' % f)
if extr:
if params[-1]:
app("lst.append(%r)" % params[-1])
fun = COMPILE_TEMPLATE % "\n\t".join(buf)
# uncomment the following to debug the template
#for i, x in enumerate(fun.splitlines()):
# print i, x
return Task.funex(fun)
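# A minimal illustration of the ${...} mini-language handled above, using
# hypothetical values (not part of the pdebug template itself):
#
#   tpl = compile_template('<g>${for x in project.items}<i v="${xml:x}"/>${endfor}</g>')
#   class P(object):
#       items = ['a', '<b>']
#   tpl(P())  # -> '<g><i v="a"/><i v="&lt;b&gt;"/></g>'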
# red #ff4d4d
# green #4da74d
# lila #a751ff
color2code = {
'GREEN' : '#4da74d',
'YELLOW' : '#fefe44',
'PINK' : '#a751ff',
'RED' : '#cc1d1d',
'BLUE' : '#6687bb',
'CYAN' : '#34e2e2',
}
mp = {}
info = [] # list of (text,color)
def map_to_color(name):
if name in mp:
return mp[name]
try:
cls = Task.classes[name]
except KeyError:
return color2code['RED']
if cls.color in mp:
return mp[cls.color]
if cls.color in color2code:
return color2code[cls.color]
return color2code['RED']
def process(self):
m = self.generator.bld.producer
try:
# TODO another place for this?
del self.generator.bld.task_sigs[self.uid()]
except KeyError:
pass
self.generator.bld.producer.set_running(1, self)
try:
ret = self.run()
except Exception:
self.err_msg = traceback.format_exc()
self.hasrun = Task.EXCEPTION
# TODO cleanup
m.error_handler(self)
return
if ret:
self.err_code = ret
self.hasrun = Task.CRASHED
else:
try:
self.post_run()
except Errors.WafError:
pass
except Exception:
self.err_msg = traceback.format_exc()
self.hasrun = Task.EXCEPTION
else:
self.hasrun = Task.SUCCESS
if self.hasrun != Task.SUCCESS:
m.error_handler(self)
self.generator.bld.producer.set_running(-1, self)
Task.Task.process_back = Task.Task.process
Task.Task.process = process
old_start = Runner.Parallel.start
def do_start(self):
try:
Options.options.dband
except AttributeError:
self.bld.fatal('use def options(opt): opt.load("parallel_debug")!')
self.taskinfo = Queue()
old_start(self)
if self.dirty:
make_picture(self)
Runner.Parallel.start = do_start
lock_running = threading.Lock()
def set_running(self, by, tsk):
with lock_running:
try:
cache = self.lock_cache
except AttributeError:
cache = self.lock_cache = {}
i = 0
if by > 0:
vals = cache.values()
for i in range(self.numjobs):
if i not in vals:
cache[tsk] = i
break
else:
i = cache[tsk]
del cache[tsk]
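	# queue one record per transition: (slot, task id, timestamp, class name,
	# processed count, total count, +1/-1, output nodes) - consumed by make_picture()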
self.taskinfo.put( (i, id(tsk), time.time(), tsk.__class__.__name__, self.processed, self.count, by, ",".join(map(str, tsk.outputs))) )
Runner.Parallel.set_running = set_running
def name2class(name):
return name.replace(' ', '_').replace('.', '_')
def make_picture(producer):
# first, cast the parameters
if not hasattr(producer.bld, 'path'):
return
tmp = []
try:
while True:
tup = producer.taskinfo.get(False)
tmp.append(list(tup))
except:
pass
try:
ini = float(tmp[0][2])
except:
return
if not info:
seen = []
for x in tmp:
name = x[3]
if not name in seen:
seen.append(name)
else:
continue
info.append((name, map_to_color(name)))
info.sort(key=lambda x: x[0])
thread_count = 0
acc = []
for x in tmp:
thread_count += x[6]
acc.append("%d %d %f %r %d %d %d %s" % (x[0], x[1], x[2] - ini, x[3], x[4], x[5], thread_count, x[7]))
data_node = producer.bld.path.make_node('pdebug.dat')
data_node.write('\n'.join(acc))
tmp = [lst[:2] + [float(lst[2]) - ini] + lst[3:] for lst in tmp]
st = {}
for l in tmp:
if not l[0] in st:
st[l[0]] = len(st.keys())
tmp = [ [st[lst[0]]] + lst[1:] for lst in tmp ]
THREAD_AMOUNT = len(st.keys())
st = {}
for l in tmp:
if not l[1] in st:
st[l[1]] = len(st.keys())
tmp = [ [lst[0]] + [st[lst[1]]] + lst[2:] for lst in tmp ]
BAND = Options.options.dband
seen = {}
acc = []
for x in range(len(tmp)):
line = tmp[x]
id = line[1]
if id in seen:
continue
seen[id] = True
begin = line[2]
thread_id = line[0]
for y in range(x + 1, len(tmp)):
line = tmp[y]
if line[1] == id:
end = line[2]
#print id, thread_id, begin, end
#acc.append( ( 10*thread_id, 10*(thread_id+1), 10*begin, 10*end ) )
acc.append( (BAND * begin, BAND*thread_id, BAND*end - BAND*begin, BAND, line[3], line[7]) )
break
if Options.options.dmaxtime < 0.1:
gwidth = 1
for x in tmp:
m = BAND * x[2]
if m > gwidth:
gwidth = m
else:
gwidth = BAND * Options.options.dmaxtime
ratio = float(Options.options.dwidth) / gwidth
gwidth = Options.options.dwidth
gheight = BAND * (THREAD_AMOUNT + len(info) + 1.5)
# simple data model for our template
class tobject(object):
pass
model = tobject()
model.x = 0
model.y = 0
model.width = gwidth + 4
model.height = gheight + 4
model.tooltip = not Options.options.dnotooltip
model.title = Options.options.dtitle
model.title_x = gwidth / 2
	model.title_y = gheight - 5
groups = {}
for (x, y, w, h, clsname, name) in acc:
try:
groups[clsname].append((x, y, w, h, name))
except:
groups[clsname] = [(x, y, w, h, name)]
# groups of rectangles (else js highlighting is slow)
model.groups = []
for cls in groups:
g = tobject()
model.groups.append(g)
g.classname = name2class(cls)
g.rects = []
for (x, y, w, h, name) in groups[cls]:
r = tobject()
g.rects.append(r)
r.x = 2 + x * ratio
r.y = 2 + y
r.width = w * ratio
r.height = h
r.name = name
r.color = map_to_color(cls)
cnt = THREAD_AMOUNT
# caption
model.infos = []
for (text, color) in info:
inf = tobject()
model.infos.append(inf)
inf.classname = name2class(text)
inf.x = 2 + BAND
inf.y = 5 + (cnt + 0.5) * BAND
inf.width = BAND/2
inf.height = BAND/2
inf.color = color
inf.text = text
inf.text_x = 2 + 2 * BAND
inf.text_y = 5 + (cnt + 0.5) * BAND + 10
cnt += 1
# write the file...
template1 = compile_template(SVG_TEMPLATE)
txt = template1(model)
node = producer.bld.path.make_node('pdebug.svg')
node.write(txt)
Logs.warn('Created the diagram %r', node)
def options(opt):
opt.add_option('--dtitle', action='store', default='Parallel build representation for %r' % ' '.join(sys.argv),
help='title for the svg diagram', dest='dtitle')
opt.add_option('--dwidth', action='store', type='int', help='diagram width', default=800, dest='dwidth')
opt.add_option('--dtime', action='store', type='float', help='recording interval in seconds', default=0.009, dest='dtime')
opt.add_option('--dband', action='store', type='int', help='band width', default=22, dest='dband')
opt.add_option('--dmaxtime', action='store', type='float', help='maximum time, for drawing fair comparisons', default=0, dest='dmaxtime')
opt.add_option('--dnotooltip', action='store_true', help='disable tooltips', default=False, dest='dnotooltip')
| 12,067 | Python | .py | 380 | 28.955263 | 256 | 0.659802 | projecthamster/hamster | 1,069 | 250 | 128 | GPL-3.0 | 9/5/2024, 5:12:46 PM (Europe/Amsterdam) |
20,683 | win32_opts.py | projecthamster_hamster/waflib/extras/win32_opts.py | #! /usr/bin/env python
# encoding: utf-8
"""
Windows-specific optimizations
This module can help reduce the overhead of listing files on Windows
(more than 10000 files). Python 3.5 already provides the listdir
optimization though.
"""
import os
from waflib import Utils, Build, Node, Logs
# on Python 2, build a unicode pattern (FindFirstFileW expects wide strings);
# on Python 3, str.decode does not exist and str is already unicode
try:
	TP = '%s\\*'.decode('ascii')
except AttributeError:
	TP = '%s\\*'
if Utils.is_win32:
from waflib.Tools import md5_tstamp
import ctypes, ctypes.wintypes
FindFirstFile = ctypes.windll.kernel32.FindFirstFileW
FindNextFile = ctypes.windll.kernel32.FindNextFileW
FindClose = ctypes.windll.kernel32.FindClose
FILE_ATTRIBUTE_DIRECTORY = 0x10
INVALID_HANDLE_VALUE = -1
UPPER_FOLDERS = ('.', '..')
try:
UPPER_FOLDERS = [unicode(x) for x in UPPER_FOLDERS]
except NameError:
pass
def cached_hash_file(self):
try:
cache = self.ctx.cache_listdir_cache_hash_file
except AttributeError:
cache = self.ctx.cache_listdir_cache_hash_file = {}
if id(self.parent) in cache:
try:
t = cache[id(self.parent)][self.name]
except KeyError:
raise IOError('Not a file')
else:
# an opportunity to list the files and the timestamps at once
findData = ctypes.wintypes.WIN32_FIND_DATAW()
find = FindFirstFile(TP % self.parent.abspath(), ctypes.byref(findData))
if find == INVALID_HANDLE_VALUE:
cache[id(self.parent)] = {}
raise IOError('Not a file')
cache[id(self.parent)] = lst_files = {}
try:
while True:
if findData.cFileName not in UPPER_FOLDERS:
thatsadir = findData.dwFileAttributes & FILE_ATTRIBUTE_DIRECTORY
if not thatsadir:
ts = findData.ftLastWriteTime
d = (ts.dwLowDateTime << 32) | ts.dwHighDateTime
lst_files[str(findData.cFileName)] = d
if not FindNextFile(find, ctypes.byref(findData)):
break
except Exception:
cache[id(self.parent)] = {}
raise IOError('Not a file')
finally:
FindClose(find)
t = lst_files[self.name]
fname = self.abspath()
if fname in Build.hashes_md5_tstamp:
if Build.hashes_md5_tstamp[fname][0] == t:
return Build.hashes_md5_tstamp[fname][1]
try:
fd = os.open(fname, os.O_BINARY | os.O_RDONLY | os.O_NOINHERIT)
except OSError:
raise IOError('Cannot read from %r' % fname)
f = os.fdopen(fd, 'rb')
m = Utils.md5()
rb = 1
try:
while rb:
rb = f.read(200000)
m.update(rb)
finally:
f.close()
# ensure that the cache is overwritten
Build.hashes_md5_tstamp[fname] = (t, m.digest())
return m.digest()
Node.Node.cached_hash_file = cached_hash_file
def get_bld_sig_win32(self):
try:
return self.ctx.hash_cache[id(self)]
except KeyError:
pass
except AttributeError:
self.ctx.hash_cache = {}
self.ctx.hash_cache[id(self)] = ret = Utils.h_file(self.abspath())
return ret
Node.Node.get_bld_sig = get_bld_sig_win32
def isfile_cached(self):
# optimize for nt.stat calls, assuming there are many files for few folders
try:
cache = self.__class__.cache_isfile_cache
except AttributeError:
cache = self.__class__.cache_isfile_cache = {}
try:
c1 = cache[id(self.parent)]
except KeyError:
c1 = cache[id(self.parent)] = []
curpath = self.parent.abspath()
findData = ctypes.wintypes.WIN32_FIND_DATAW()
find = FindFirstFile(TP % curpath, ctypes.byref(findData))
if find == INVALID_HANDLE_VALUE:
Logs.error("invalid win32 handle isfile_cached %r", self.abspath())
return os.path.isfile(self.abspath())
try:
while True:
if findData.cFileName not in UPPER_FOLDERS:
thatsadir = findData.dwFileAttributes & FILE_ATTRIBUTE_DIRECTORY
if not thatsadir:
c1.append(str(findData.cFileName))
if not FindNextFile(find, ctypes.byref(findData)):
break
except Exception as e:
Logs.error('exception while listing a folder %r %r', self.abspath(), e)
return os.path.isfile(self.abspath())
finally:
FindClose(find)
return self.name in c1
Node.Node.isfile_cached = isfile_cached
def find_or_declare_win32(self, lst):
# assuming that "find_or_declare" is called before the build starts, remove the calls to os.path.isfile
if isinstance(lst, str):
lst = [x for x in Utils.split_path(lst) if x and x != '.']
node = self.get_bld().search_node(lst)
if node:
if not node.isfile_cached():
try:
node.parent.mkdir()
except OSError:
pass
return node
self = self.get_src()
node = self.find_node(lst)
if node:
if not node.isfile_cached():
try:
node.parent.mkdir()
except OSError:
pass
return node
node = self.get_bld().make_node(lst)
node.parent.mkdir()
return node
Node.Node.find_or_declare = find_or_declare_win32
| 4,708 | Python | .py | 148 | 27.97973 | 105 | 0.697003 | projecthamster/hamster | 1,069 | 250 | 128 | GPL-3.0 | 9/5/2024, 5:12:46 PM (Europe/Amsterdam) |
20,684 | wix.py | projecthamster_hamster/waflib/extras/wix.py | #!/usr/bin/python
# encoding: utf-8
# vim: tabstop=4 noexpandtab
"""
Windows Installer XML Tool (WiX)
.wxs --- candle ---> .wxobj --- light ---> .msi
bld(features='wix', source='some.wxs', gen='some.msi', candleflags=[..], lightflags=[..])
bld(features='wix', source=['bundle.wxs','WixBalExtension'], gen='setup.exe', candleflags=[..])
"""
import os, copy
from waflib import TaskGen
from waflib import Task
from waflib.Utils import winreg
class candle(Task.Task):
	run_str = '${CANDLE} -nologo ${CANDLEFLAGS} -out ${TGT} ${SRC[0].abspath()}'
class light(Task.Task):
run_str = "${LIGHT} -nologo -b ${SRC[0].parent.abspath()} ${LIGHTFLAGS} -out ${TGT} ${SRC[0].abspath()}"
@TaskGen.feature('wix')
@TaskGen.before_method('process_source')
def wix(self):
#X.wxs -> ${SRC} for CANDLE
#X.wxobj -> ${SRC} for LIGHT
#X.dll -> -ext X in ${LIGHTFLAGS}
#X.wxl -> wixui.wixlib -loc X.wxl in ${LIGHTFLAGS}
wxobj = []
wxs = []
exts = []
wxl = []
rest = []
for x in self.source:
if x.endswith('.wxobj'):
wxobj.append(x)
elif x.endswith('.wxs'):
wxobj.append(self.path.find_or_declare(x[:-4]+'.wxobj'))
wxs.append(x)
elif x.endswith('.dll'):
exts.append(x[:-4])
elif '.' not in x:
exts.append(x)
elif x.endswith('.wxl'):
wxl.append(x)
else:
rest.append(x)
self.source = self.to_nodes(rest) #.wxs
cndl = self.create_task('candle', self.to_nodes(wxs), self.to_nodes(wxobj))
lght = self.create_task('light', self.to_nodes(wxobj), self.path.find_or_declare(self.gen))
cndl.env.CANDLEFLAGS = copy.copy(getattr(self,'candleflags',[]))
lght.env.LIGHTFLAGS = copy.copy(getattr(self,'lightflags',[]))
for x in wxl:
lght.env.append_value('LIGHTFLAGS','wixui.wixlib')
lght.env.append_value('LIGHTFLAGS','-loc')
lght.env.append_value('LIGHTFLAGS',x)
for x in exts:
cndl.env.append_value('CANDLEFLAGS','-ext')
cndl.env.append_value('CANDLEFLAGS',x)
lght.env.append_value('LIGHTFLAGS','-ext')
lght.env.append_value('LIGHTFLAGS',x)
#wix_bin_path()
def wix_bin_path():
basekey = r"SOFTWARE\Microsoft\.NETFramework\AssemblyFolders"
query = winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, basekey)
cnt=winreg.QueryInfoKey(query)[0]
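	# hardcoded default; only used if the AssemblyFolders key has no subkeys at all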
thiskey = r'C:\Program Files (x86)\WiX Toolset v3.10\SDK'
for i in range(cnt-1,-1,-1):
thiskey = winreg.EnumKey(query,i)
if 'WiX' in thiskey:
break
winreg.CloseKey(query)
return os.path.normpath(winreg.QueryValue(winreg.HKEY_LOCAL_MACHINE, basekey+r'\\'+thiskey)+'..\\bin')
def configure(ctx):
path_list=[wix_bin_path()]
ctx.find_program('candle', var='CANDLE', mandatory=True, path_list = path_list)
ctx.find_program('light', var='LIGHT', mandatory=True, path_list = path_list)
| 2,659 | Python | .py | 73 | 34.09589 | 105 | 0.692068 | projecthamster/hamster | 1,069 | 250 | 128 | GPL-3.0 | 9/5/2024, 5:12:46 PM (Europe/Amsterdam) |
20,685 | c_bgxlc.py | projecthamster_hamster/waflib/extras/c_bgxlc.py | #! /usr/bin/env python
# encoding: utf-8
# harald at klimachs.de
"""
IBM XL Compiler for Blue Gene
"""
from waflib.Tools import ccroot,ar
from waflib.Configure import conf
from waflib.Tools import xlc # method xlc_common_flags
from waflib.Tools.compiler_c import c_compiler
c_compiler['linux'].append('c_bgxlc')
@conf
def find_bgxlc(conf):
cc = conf.find_program(['bgxlc_r','bgxlc'], var='CC')
conf.get_xlc_version(cc)
conf.env.CC = cc
conf.env.CC_NAME = 'bgxlc'
def configure(conf):
conf.find_bgxlc()
conf.find_ar()
conf.xlc_common_flags()
conf.env.LINKFLAGS_cshlib = ['-G','-Wl,-bexpfull']
conf.env.LINKFLAGS_cprogram = []
conf.cc_load_tools()
conf.cc_add_flags()
conf.link_add_flags()
| 706 | Python | .py | 26 | 25.461538 | 54 | 0.735905 | projecthamster/hamster | 1,069 | 250 | 128 | GPL-3.0 | 9/5/2024, 5:12:46 PM (Europe/Amsterdam) |
20,686 | valadoc.py | projecthamster_hamster/waflib/extras/valadoc.py | #! /usr/bin/env python
# encoding: UTF-8
# Nicolas Joseph 2009
"""
ported from waf 1.5:
TODO: tabs vs spaces
"""
from waflib import Task, Utils, Errors, Logs
from waflib.TaskGen import feature
VALADOC_STR = '${VALADOC}'
class valadoc(Task.Task):
vars = ['VALADOC', 'VALADOCFLAGS']
color = 'BLUE'
after = ['cprogram', 'cstlib', 'cshlib', 'cxxprogram', 'cxxstlib', 'cxxshlib']
quiet = True # no outputs .. this is weird
def __init__(self, *k, **kw):
Task.Task.__init__(self, *k, **kw)
self.output_dir = ''
self.doclet = ''
self.package_name = ''
self.package_version = ''
self.files = []
self.vapi_dirs = []
self.protected = True
self.private = False
self.inherit = False
self.deps = False
self.vala_defines = []
self.vala_target_glib = None
self.enable_non_null_experimental = False
self.force = False
def run(self):
if not self.env['VALADOCFLAGS']:
self.env['VALADOCFLAGS'] = ''
cmd = [Utils.subst_vars(VALADOC_STR, self.env)]
cmd.append ('-o %s' % self.output_dir)
if getattr(self, 'doclet', None):
cmd.append ('--doclet %s' % self.doclet)
cmd.append ('--package-name %s' % self.package_name)
if getattr(self, 'package_version', None):
cmd.append ('--package-version %s' % self.package_version)
if getattr(self, 'packages', None):
for package in self.packages:
cmd.append ('--pkg %s' % package)
if getattr(self, 'vapi_dirs', None):
for vapi_dir in self.vapi_dirs:
cmd.append ('--vapidir %s' % vapi_dir)
if not getattr(self, 'protected', None):
cmd.append ('--no-protected')
if getattr(self, 'private', None):
cmd.append ('--private')
if getattr(self, 'inherit', None):
cmd.append ('--inherit')
if getattr(self, 'deps', None):
cmd.append ('--deps')
if getattr(self, 'vala_defines', None):
for define in self.vala_defines:
cmd.append ('--define %s' % define)
if getattr(self, 'vala_target_glib', None):
cmd.append ('--target-glib=%s' % self.vala_target_glib)
if getattr(self, 'enable_non_null_experimental', None):
cmd.append ('--enable-non-null-experimental')
if getattr(self, 'force', None):
cmd.append ('--force')
cmd.append (' '.join ([x.abspath() for x in self.files]))
return self.generator.bld.exec_command(' '.join(cmd))
@feature('valadoc')
def process_valadoc(self):
"""
Generate API documentation from Vala source code with valadoc
doc = bld(
features = 'valadoc',
output_dir = '../doc/html',
package_name = 'vala-gtk-example',
package_version = '1.0.0',
packages = 'gtk+-2.0',
vapi_dirs = '../vapi',
force = True
)
path = bld.path.find_dir ('../src')
doc.files = path.ant_glob (incl='**/*.vala')
"""
task = self.create_task('valadoc')
if getattr(self, 'output_dir', None):
task.output_dir = self.path.find_or_declare(self.output_dir).abspath()
else:
		raise Errors.WafError('no output directory')
if getattr(self, 'doclet', None):
task.doclet = self.doclet
else:
		raise Errors.WafError('no doclet directory')
if getattr(self, 'package_name', None):
task.package_name = self.package_name
else:
		raise Errors.WafError('no package name')
if getattr(self, 'package_version', None):
task.package_version = self.package_version
if getattr(self, 'packages', None):
task.packages = Utils.to_list(self.packages)
if getattr(self, 'vapi_dirs', None):
vapi_dirs = Utils.to_list(self.vapi_dirs)
for vapi_dir in vapi_dirs:
try:
task.vapi_dirs.append(self.path.find_dir(vapi_dir).abspath())
except AttributeError:
Logs.warn('Unable to locate Vala API directory: %r', vapi_dir)
if getattr(self, 'files', None):
task.files = self.files
else:
		raise Errors.WafError('no input file')
if getattr(self, 'protected', None):
task.protected = self.protected
if getattr(self, 'private', None):
task.private = self.private
if getattr(self, 'inherit', None):
task.inherit = self.inherit
if getattr(self, 'deps', None):
task.deps = self.deps
if getattr(self, 'vala_defines', None):
task.vala_defines = Utils.to_list(self.vala_defines)
if getattr(self, 'vala_target_glib', None):
task.vala_target_glib = self.vala_target_glib
if getattr(self, 'enable_non_null_experimental', None):
task.enable_non_null_experimental = self.enable_non_null_experimental
if getattr(self, 'force', None):
task.force = self.force
def configure(conf):
	conf.find_program('valadoc', errmsg='You must install valadoc <http://live.gnome.org/Valadoc> to generate the API documentation')
| 4,445 | Python | .py | 128 | 31.921875 | 131 | 0.69036 | projecthamster/hamster | 1,069 | 250 | 128 | GPL-3.0 | 9/5/2024, 5:12:46 PM (Europe/Amsterdam) |
20,687 | codelite.py | projecthamster_hamster/waflib/extras/codelite.py | #! /usr/bin/env python
# encoding: utf-8
# CodeLite Project
# Christian Klein (chrikle@berlios.de)
# Created: Jan 2012
# As a template for this file I used msvs.py
# I hope this template will work properly
"""
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. The name of the author may not be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR
IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT,
INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
"""
"""
To add this tool to your project:
def options(opt):
opt.load('codelite')
It can be a good idea to add the sync_exec tool too.
To generate solution files:
$ waf configure codelite
To customize the outputs, provide subclasses in your wscript files:
from waflib.extras import codelite
class vsnode_target(codelite.vsnode_target):
def get_build_command(self, props):
# likely to be required
return "waf.bat build"
def collect_source(self):
# likely to be required
...
class codelite_bar(codelite.codelite_generator):
def init(self):
codelite.codelite_generator.init(self)
self.vsnode_target = vsnode_target
The codelite class re-uses the same build() function for reading the targets (task generators),
you may therefore specify codelite settings on the context object:
def build(bld):
bld.codelite_solution_name = 'foo.workspace'
bld.waf_command = 'waf.bat'
bld.projects_dir = bld.srcnode.make_node('')
bld.projects_dir.mkdir()
ASSUMPTIONS:
* a project can be either a directory or a target, project files are written only for targets that have source files
* each project is a codelite .project file, therefore the project uuid needs only to be a hash of the absolute path
"""
import os, re, sys
import uuid # requires python 2.5
from waflib.Build import BuildContext
from waflib import Utils, TaskGen, Logs, Task, Context, Node, Options
HEADERS_GLOB = '**/(*.h|*.hpp|*.H|*.inl)'
PROJECT_TEMPLATE = r'''<?xml version="1.0" encoding="utf-8"?>
<CodeLite_Project Name="${project.name}" InternalType="Library">
<Plugins>
<Plugin Name="qmake">
<![CDATA[00010001N0005Release000000000000]]>
</Plugin>
</Plugins>
<Description/>
<Dependencies/>
<VirtualDirectory Name="src">
${for x in project.source}
${if (project.get_key(x)=="sourcefile")}
<File Name="${x.abspath()}"/>
${endif}
${endfor}
</VirtualDirectory>
<VirtualDirectory Name="include">
${for x in project.source}
${if (project.get_key(x)=="headerfile")}
<File Name="${x.abspath()}"/>
${endif}
${endfor}
</VirtualDirectory>
<Settings Type="Dynamic Library">
<GlobalSettings>
<Compiler Options="" C_Options="">
<IncludePath Value="."/>
</Compiler>
<Linker Options="">
<LibraryPath Value="."/>
</Linker>
<ResourceCompiler Options=""/>
</GlobalSettings>
<Configuration Name="Release" CompilerType="gnu gcc" ReleasegerType="GNU gdb Releaseger" Type="Dynamic Library" BuildCmpWithGlobalSettings="append" BuildLnkWithGlobalSettings="append" BuildResWithGlobalSettings="append">
<Compiler Options="" C_Options="" Required="yes" PreCompiledHeader="" PCHInCommandLine="no" UseDifferentPCHFlags="no" PCHFlags="">
<IncludePath Value="."/>
<IncludePath Value="."/>
</Compiler>
<Linker Options="" Required="yes">
<LibraryPath Value=""/>
</Linker>
<ResourceCompiler Options="" Required="no"/>
<General OutputFile="${xml:project.build_properties[0].output_file}" IntermediateDirectory="" Command="" CommandArguments="" PauseExecWhenProcTerminates="yes"/>
<Environment EnvVarSetName="<Use Defaults>" DbgSetName="<Use Defaults>">
<![CDATA[]]>
</Environment>
      <Debugger IsRemote="no" RemoteHostName="" RemoteHostPort="" DebuggerPath="">
<PostConnectCommands/>
<StartupCommands/>
      </Debugger>
<PreBuild/>
<PostBuild/>
<CustomBuild Enabled="yes">
<RebuildCommand>${xml:project.get_rebuild_command(project.build_properties[0])}</RebuildCommand>
<CleanCommand>${xml:project.get_clean_command(project.build_properties[0])}</CleanCommand>
<BuildCommand>${xml:project.get_build_command(project.build_properties[0])}</BuildCommand>
<Target Name="Install">${xml:project.get_install_command(project.build_properties[0])}</Target>
<Target Name="Build and Install">${xml:project.get_build_and_install_command(project.build_properties[0])}</Target>
<Target Name="Build All">${xml:project.get_build_all_command(project.build_properties[0])}</Target>
<Target Name="Rebuild All">${xml:project.get_rebuild_all_command(project.build_properties[0])}</Target>
<Target Name="Clean All">${xml:project.get_clean_all_command(project.build_properties[0])}</Target>
<Target Name="Build and Install All">${xml:project.get_build_and_install_all_command(project.build_properties[0])}</Target>
<PreprocessFileCommand/>
<SingleFileCommand/>
<MakefileGenerationCommand/>
<ThirdPartyToolName>None</ThirdPartyToolName>
<WorkingDirectory/>
</CustomBuild>
<AdditionalRules>
<CustomPostBuild/>
<CustomPreBuild/>
</AdditionalRules>
<Completion>
<ClangCmpFlags/>
<ClangPP/>
<SearchPaths/>
</Completion>
</Configuration>
<Configuration Name="Release" CompilerType="gnu gcc" ReleasegerType="GNU gdb Releaseger" Type="" BuildCmpWithGlobalSettings="append" BuildLnkWithGlobalSettings="append" BuildResWithGlobalSettings="append">
<Compiler Options="" C_Options="" Required="yes" PreCompiledHeader="" PCHInCommandLine="no" UseDifferentPCHFlags="no" PCHFlags="">
<IncludePath Value="."/>
</Compiler>
<Linker Options="" Required="yes"/>
<ResourceCompiler Options="" Required="no"/>
<General OutputFile="" IntermediateDirectory="./Release" Command="" CommandArguments="" UseSeparateReleaseArgs="no" ReleaseArguments="" WorkingDirectory="$(IntermediateDirectory)" PauseExecWhenProcTerminates="yes"/>
<Environment EnvVarSetName="&lt;Use Defaults&gt;" DbgSetName="&lt;Use Defaults&gt;">
<![CDATA[
]]>
</Environment>
<Releaseger IsRemote="no" RemoteHostName="" RemoteHostPort="" ReleasegerPath="">
<PostConnectCommands/>
<StartupCommands/>
</Releaseger>
<PreBuild/>
<PostBuild/>
<CustomBuild Enabled="no">
<RebuildCommand/>
<CleanCommand/>
<BuildCommand/>
<PreprocessFileCommand/>
<SingleFileCommand/>
<MakefileGenerationCommand/>
<ThirdPartyToolName/>
<WorkingDirectory/>
</CustomBuild>
<AdditionalRules>
<CustomPostBuild/>
<CustomPreBuild/>
</AdditionalRules>
<Completion>
<ClangCmpFlags/>
<ClangPP/>
<SearchPaths/>
</Completion>
</Configuration>
</Settings>
</CodeLite_Project>'''
SOLUTION_TEMPLATE = '''<?xml version="1.0" encoding="utf-8"?>
<CodeLite_Workspace Name="${getattr(project, 'codelite_solution_name', None)[:-10]}" Database="./${getattr(project, 'codelite_solution_name', None)[:-10]}.tags">
${for p in project.all_projects}
<Project Name = "${p.name}" Path = "${p.title}" Active="No"/>
${endfor}
<BuildMatrix>
<WorkspaceConfiguration Name="Release" Selected="yes">
${for p in project.all_projects}
<Project Name="${p.name}" ConfigName="Release"/>
${endfor}
</WorkspaceConfiguration>
</BuildMatrix>
</CodeLite_Workspace>'''
COMPILE_TEMPLATE = '''def f(project):
lst = []
def xml_escape(value):
return value.replace("&", "&amp;").replace('"', "&quot;").replace("'", "&apos;").replace("<", "&lt;").replace(">", "&gt;")
%s
#f = open('cmd.txt', 'w')
#f.write(str(lst))
#f.close()
return ''.join(lst)
'''
reg_act = re.compile(r"(?P<backslash>\\)|(?P<dollar>\$\$)|(?P<subst>\$\{(?P<code>[^}]*?)\})", re.M)
def compile_template(line):
"""
Compile a template expression into a python function (like JSP, but way shorter)
"""
extr = []
def repl(match):
g = match.group
if g('dollar'):
return "$"
elif g('backslash'):
return "\\"
elif g('subst'):
extr.append(g('code'))
return "<<|@|>>"
return None
line2 = reg_act.sub(repl, line)
params = line2.split('<<|@|>>')
assert(extr)
indent = 0
buf = []
def app(txt):
buf.append(indent * '\t' + txt)
for x in range(len(extr)):
if params[x]:
app("lst.append(%r)" % params[x])
f = extr[x]
if f.startswith(('if', 'for')):
app(f + ':')
indent += 1
elif f.startswith('py:'):
app(f[3:])
elif f.startswith(('endif', 'endfor')):
indent -= 1
elif f.startswith(('else', 'elif')):
indent -= 1
app(f + ':')
indent += 1
elif f.startswith('xml:'):
app('lst.append(xml_escape(%s))' % f[4:])
else:
#app('lst.append((%s) or "cannot find %s")' % (f, f))
app('lst.append(%s)' % f)
if extr:
if params[-1]:
app("lst.append(%r)" % params[-1])
fun = COMPILE_TEMPLATE % "\n\t".join(buf)
#print(fun)
return Task.funex(fun)
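# Illustration (not from the original file): a fragment such as
#   ${for x in project.all_projects}<Project Name="${xml:x.name}"/>${endfor}
# compiles to roughly the following generated body:
#   for x in project.all_projects:
#       lst.append('<Project Name="')
#       lst.append(xml_escape(x.name))
#       lst.append('"/>')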
re_blank = re.compile('(\n|\r|\\s)*\n', re.M)
def rm_blank_lines(txt):
txt = re_blank.sub('\r\n', txt)
return txt
BOM = '\xef\xbb\xbf'
try:
BOM = bytes(BOM, 'latin-1') # python 3
except (TypeError, NameError):
pass
def stealth_write(self, data, flags='wb'):
try:
unicode
except NameError:
data = data.encode('utf-8') # python 3
else:
data = data.decode(sys.getfilesystemencoding(), 'replace')
data = data.encode('utf-8')
if self.name.endswith('.project'):
data = BOM + data
try:
txt = self.read(flags='rb')
if txt != data:
raise ValueError('must write')
except (IOError, ValueError):
self.write(data, flags=flags)
else:
Logs.debug('codelite: skipping %r', self)
Node.Node.stealth_write = stealth_write
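# Note: node.stealth_write(data) rewrites the file only when the content
# actually changed, so unchanged generated files keep their timestamps
# across runs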
re_quote = re.compile("[^a-zA-Z0-9-]")
def quote(s):
return re_quote.sub("_", s)
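# e.g. quote('my lib++') == 'my_lib__' (every character outside [a-zA-Z0-9-] becomes '_')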
def xml_escape(value):
return value.replace("&", "&amp;").replace('"', "&quot;").replace("'", "&apos;").replace("<", "&lt;").replace(">", "&gt;")
def make_uuid(v, prefix = None):
"""
simple utility function
"""
if isinstance(v, dict):
keys = list(v.keys())
keys.sort()
tmp = str([(k, v[k]) for k in keys])
else:
tmp = str(v)
d = Utils.md5(tmp.encode()).hexdigest().upper()
if prefix:
d = '%s%s' % (prefix, d[8:])
gid = uuid.UUID(d, version = 4)
return str(gid).upper()
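# e.g. make_uuid('/path/to/foo.project') always returns the same
# RFC 4122-formatted string for the same input, keeping project uuids
# stable across runs ('/path/to/foo.project' is an illustrative value)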
def diff(node, fromnode):
# difference between two nodes, but with "(..)" instead of ".."
c1 = node
c2 = fromnode
c1h = c1.height()
c2h = c2.height()
lst = []
up = 0
while c1h > c2h:
lst.append(c1.name)
c1 = c1.parent
c1h -= 1
while c2h > c1h:
up += 1
c2 = c2.parent
c2h -= 1
while id(c1) != id(c2):
lst.append(c1.name)
up += 1
c1 = c1.parent
c2 = c2.parent
for i in range(up):
lst.append('(..)')
lst.reverse()
return tuple(lst)
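# e.g. for nodes a/b/c and a/d, diff(a/b/c, a/d) == ('(..)', 'b', 'c'):
# go up once from a/d, then down into b/c (illustrative node names)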
class build_property(object):
pass
class vsnode(object):
"""
Abstract class representing visual studio elements
We assume that all visual studio nodes have a uuid and a parent
"""
def __init__(self, ctx):
self.ctx = ctx # codelite context
self.name = '' # string, mandatory
self.vspath = '' # path in visual studio (name for dirs, absolute path for projects)
self.uuid = '' # string, mandatory
self.parent = None # parent node for visual studio nesting
def get_waf(self):
"""
Override in subclasses...
"""
return '%s/%s' % (self.ctx.srcnode.abspath(), getattr(self.ctx, 'waf_command', 'waf'))
def ptype(self):
"""
Return a special uuid for projects written in the solution file
"""
pass
def write(self):
"""
Write the project file, by default, do nothing
"""
pass
def make_uuid(self, val):
"""
Alias for creating uuid values easily (the templates cannot access global variables)
"""
return make_uuid(val)
class vsnode_vsdir(vsnode):
"""
Nodes representing visual studio folders (which do not match the filesystem tree!)
"""
VS_GUID_SOLUTIONFOLDER = "2150E333-8FDC-42A3-9474-1A3956D46DE8"
def __init__(self, ctx, uuid, name, vspath=''):
vsnode.__init__(self, ctx)
self.title = self.name = name
self.uuid = uuid
self.vspath = vspath or name
def ptype(self):
return self.VS_GUID_SOLUTIONFOLDER
class vsnode_project(vsnode):
"""
Abstract class representing visual studio project elements
A project is assumed to be writable, and has a node representing the file to write to
"""
VS_GUID_VCPROJ = "8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942"
def ptype(self):
return self.VS_GUID_VCPROJ
def __init__(self, ctx, node):
vsnode.__init__(self, ctx)
self.path = node
self.uuid = make_uuid(node.abspath())
self.name = node.name
self.title = self.path.abspath()
self.source = [] # list of node objects
self.build_properties = [] # list of properties (nmake commands, output dir, etc)
def dirs(self):
"""
Get the list of parent folders of the source files (header files included)
for writing the filters
"""
lst = []
def add(x):
if x.height() > self.tg.path.height() and x not in lst:
lst.append(x)
add(x.parent)
for x in self.source:
add(x.parent)
return lst
def write(self):
Logs.debug('codelite: creating %r', self.path)
#print "self.name:",self.name
# first write the project file
template1 = compile_template(PROJECT_TEMPLATE)
proj_str = template1(self)
proj_str = rm_blank_lines(proj_str)
self.path.stealth_write(proj_str)
# then write the filter
#template2 = compile_template(FILTER_TEMPLATE)
#filter_str = template2(self)
#filter_str = rm_blank_lines(filter_str)
#tmp = self.path.parent.make_node(self.path.name + '.filters')
#tmp.stealth_write(filter_str)
def get_key(self, node):
"""
required for writing the source files
"""
name = node.name
if name.endswith(('.cpp', '.c')):
return 'sourcefile'
return 'headerfile'
def collect_properties(self):
"""
Returns a list of triplet (configuration, platform, output_directory)
"""
ret = []
for c in self.ctx.configurations:
for p in self.ctx.platforms:
x = build_property()
x.outdir = ''
x.configuration = c
x.platform = p
x.preprocessor_definitions = ''
x.includes_search_path = ''
# can specify "deploy_dir" too
ret.append(x)
self.build_properties = ret
def get_build_params(self, props):
opt = ''
return (self.get_waf(), opt)
def get_build_command(self, props):
return "%s build %s" % self.get_build_params(props)
def get_clean_command(self, props):
return "%s clean %s" % self.get_build_params(props)
def get_rebuild_command(self, props):
return "%s clean build %s" % self.get_build_params(props)
def get_install_command(self, props):
return "%s install %s" % self.get_build_params(props)
def get_build_and_install_command(self, props):
return "%s build install %s" % self.get_build_params(props)
def get_build_and_install_all_command(self, props):
return "%s build install" % self.get_build_params(props)[0]
def get_clean_all_command(self, props):
return "%s clean" % self.get_build_params(props)[0]
def get_build_all_command(self, props):
return "%s build" % self.get_build_params(props)[0]
def get_rebuild_all_command(self, props):
return "%s clean build" % self.get_build_params(props)[0]
def get_filter_name(self, node):
lst = diff(node, self.tg.path)
return '\\'.join(lst) or '.'
class vsnode_alias(vsnode_project):
def __init__(self, ctx, node, name):
vsnode_project.__init__(self, ctx, node)
self.name = name
self.output_file = ''
class vsnode_build_all(vsnode_alias):
"""
Fake target used to emulate the behaviour of "make all" (starting one process per target is slow)
This is the only alias enabled by default
"""
def __init__(self, ctx, node, name='build_all_projects'):
vsnode_alias.__init__(self, ctx, node, name)
self.is_active = True
class vsnode_install_all(vsnode_alias):
"""
Fake target used to emulate the behaviour of "make install"
"""
def __init__(self, ctx, node, name='install_all_projects'):
vsnode_alias.__init__(self, ctx, node, name)
def get_build_command(self, props):
return "%s build install %s" % self.get_build_params(props)
def get_clean_command(self, props):
return "%s clean %s" % self.get_build_params(props)
def get_rebuild_command(self, props):
return "%s clean build install %s" % self.get_build_params(props)
class vsnode_project_view(vsnode_alias):
"""
Fake target used to emulate a file system view
"""
def __init__(self, ctx, node, name='project_view'):
vsnode_alias.__init__(self, ctx, node, name)
self.tg = self.ctx() # fake one, cannot remove
self.exclude_files = Node.exclude_regs + '''
waf-2*
waf3-2*/**
.waf-2*
.waf3-2*/**
**/*.sdf
**/*.suo
**/*.ncb
**/%s
''' % Options.lockfile
def collect_source(self):
# this is likely to be slow
self.source = self.ctx.srcnode.ant_glob('**', excl=self.exclude_files)
def get_build_command(self, props):
params = self.get_build_params(props) + (self.ctx.cmd,)
return "%s %s %s" % params
def get_clean_command(self, props):
return ""
def get_rebuild_command(self, props):
return self.get_build_command(props)
class vsnode_target(vsnode_project):
"""
CodeLite project representing a target (programs, libraries, etc) and bound
to a task generator
"""
def __init__(self, ctx, tg):
"""
A project is more or less equivalent to a file/folder
"""
base = getattr(ctx, 'projects_dir', None) or tg.path
node = base.make_node(quote(tg.name) + ctx.project_extension) # the project file as a Node
vsnode_project.__init__(self, ctx, node)
self.name = quote(tg.name)
self.tg = tg # task generator
def get_build_params(self, props):
"""
Override the default to add the target name
"""
opt = ''
if getattr(self, 'tg', None):
opt += " --targets=%s" % self.tg.name
return (self.get_waf(), opt)
def collect_source(self):
tg = self.tg
source_files = tg.to_nodes(getattr(tg, 'source', []))
include_dirs = Utils.to_list(getattr(tg, 'codelite_includes', []))
include_files = []
for x in include_dirs:
if isinstance(x, str):
x = tg.path.find_node(x)
if x:
lst = [y for y in x.ant_glob(HEADERS_GLOB, flat=False)]
include_files.extend(lst)
# remove duplicates
self.source.extend(list(set(source_files + include_files)))
self.source.sort(key=lambda x: x.abspath())
def collect_properties(self):
"""
CodeLite projects are associated with platforms and configurations (for building especially)
"""
super(vsnode_target, self).collect_properties()
for x in self.build_properties:
x.outdir = self.path.parent.abspath()
x.preprocessor_definitions = ''
x.includes_search_path = ''
try:
tsk = self.tg.link_task
except AttributeError:
pass
else:
x.output_file = tsk.outputs[0].abspath()
x.preprocessor_definitions = ';'.join(tsk.env.DEFINES)
x.includes_search_path = ';'.join(self.tg.env.INCPATHS)
class codelite_generator(BuildContext):
'''generates a CodeLite workspace'''
cmd = 'codelite'
fun = 'build'
def init(self):
"""
Some data that needs to be present
"""
if not getattr(self, 'configurations', None):
self.configurations = ['Release'] # LocalRelease, RemoteDebug, etc
if not getattr(self, 'platforms', None):
self.platforms = ['Win32']
if not getattr(self, 'all_projects', None):
self.all_projects = []
if not getattr(self, 'project_extension', None):
self.project_extension = '.project'
if not getattr(self, 'projects_dir', None):
self.projects_dir = self.srcnode.make_node('')
self.projects_dir.mkdir()
# bind the classes to the object, so that subclass can provide custom generators
if not getattr(self, 'vsnode_vsdir', None):
self.vsnode_vsdir = vsnode_vsdir
if not getattr(self, 'vsnode_target', None):
self.vsnode_target = vsnode_target
if not getattr(self, 'vsnode_build_all', None):
self.vsnode_build_all = vsnode_build_all
if not getattr(self, 'vsnode_install_all', None):
self.vsnode_install_all = vsnode_install_all
if not getattr(self, 'vsnode_project_view', None):
self.vsnode_project_view = vsnode_project_view
self.numver = '11.00'
self.vsver = '2010'
def execute(self):
"""
Entry point
"""
self.restore()
if not self.all_envs:
self.load_envs()
self.recurse([self.run_dir])
# user initialization
self.init()
# two phases for creating the solution
self.collect_projects() # add project objects into "self.all_projects"
self.write_files() # write the corresponding project and solution files
def collect_projects(self):
"""
Fill the list self.all_projects with project objects
Fill the list of build targets
"""
self.collect_targets()
#self.add_aliases()
#self.collect_dirs()
default_project = getattr(self, 'default_project', None)
def sortfun(x):
if x.name == default_project:
return ''
return getattr(x, 'path', None) and x.path.abspath() or x.name
self.all_projects.sort(key=sortfun)
def write_files(self):
"""
Write the project and solution files from the data collected
so far. It is unlikely that you will want to change this
"""
for p in self.all_projects:
p.write()
# and finally write the solution file
node = self.get_solution_node()
node.parent.mkdir()
Logs.warn('Creating %r', node)
#a = dir(self.root)
#for b in a:
# print b
#print self.group_names
#print "Hallo2: ",self.root.listdir()
#print getattr(self, 'codelite_solution_name', None)
template1 = compile_template(SOLUTION_TEMPLATE)
sln_str = template1(self)
sln_str = rm_blank_lines(sln_str)
node.stealth_write(sln_str)
def get_solution_node(self):
"""
The solution filename is required when writing the project files;
return self.solution_node and if it does not exist, create one
"""
try:
return self.solution_node
except AttributeError:
pass
codelite_solution_name = getattr(self, 'codelite_solution_name', None)
if not codelite_solution_name:
codelite_solution_name = getattr(Context.g_module, Context.APPNAME, 'project') + '.workspace'
setattr(self, 'codelite_solution_name', codelite_solution_name)
if os.path.isabs(codelite_solution_name):
self.solution_node = self.root.make_node(codelite_solution_name)
else:
self.solution_node = self.srcnode.make_node(codelite_solution_name)
return self.solution_node
def project_configurations(self):
"""
Helper that returns all the pairs (config,platform)
"""
ret = []
for c in self.configurations:
for p in self.platforms:
ret.append((c, p))
return ret
def collect_targets(self):
"""
Process the list of task generators
"""
for g in self.groups:
for tg in g:
if not isinstance(tg, TaskGen.task_gen):
continue
if not hasattr(tg, 'codelite_includes'):
tg.codelite_includes = tg.to_list(getattr(tg, 'includes', [])) + tg.to_list(getattr(tg, 'export_includes', []))
tg.post()
if not getattr(tg, 'link_task', None):
continue
p = self.vsnode_target(self, tg)
p.collect_source() # delegate this processing
p.collect_properties()
self.all_projects.append(p)
def add_aliases(self):
"""
Add a specific target that emulates the "make all" behaviour of Visual Studio when pressing F7
We also add an alias for "make install" (disabled by default)
"""
base = getattr(self, 'projects_dir', None) or self.tg.path
node_project = base.make_node('build_all_projects' + self.project_extension) # Node
p_build = self.vsnode_build_all(self, node_project)
p_build.collect_properties()
self.all_projects.append(p_build)
node_project = base.make_node('install_all_projects' + self.project_extension) # Node
p_install = self.vsnode_install_all(self, node_project)
p_install.collect_properties()
self.all_projects.append(p_install)
node_project = base.make_node('project_view' + self.project_extension) # Node
p_view = self.vsnode_project_view(self, node_project)
p_view.collect_source()
p_view.collect_properties()
self.all_projects.append(p_view)
n = self.vsnode_vsdir(self, make_uuid(self.srcnode.abspath() + 'build_aliases'), "build_aliases")
p_build.parent = p_install.parent = p_view.parent = n
self.all_projects.append(n)
def collect_dirs(self):
"""
Create the folder structure in the CodeLite project view
"""
seen = {}
def make_parents(proj):
# look at a project, try to make a parent
if getattr(proj, 'parent', None):
# aliases already have parents
return
x = proj.iter_path
if x in seen:
proj.parent = seen[x]
return
# There is no vsnode_vsdir for x.
# So create a project representing the folder "x"
n = proj.parent = seen[x] = self.vsnode_vsdir(self, make_uuid(x.abspath()), x.name)
n.iter_path = x.parent
self.all_projects.append(n)
# recurse up to the project directory
if x.height() > self.srcnode.height() + 1:
make_parents(n)
for p in self.all_projects[:]: # iterate over a copy of all projects
if not getattr(p, 'tg', None):
# but only projects that have a task generator
continue
# make a folder for each task generator
p.iter_path = p.tg.path
make_parents(p)
| 33,888 | Python | .py | 746 | 31.686327 | 224 | 0.536419 | projecthamster/hamster | 1,069 | 250 | 128 | GPL-3.0 | 9/5/2024, 5:12:46 PM (Europe/Amsterdam) |
20,688 | unity.py | projecthamster_hamster/waflib/extras/unity.py | #! /usr/bin/env python
# encoding: utf-8
"""
Compile whole groups of C/C++ files at once
(C and C++ files are processed independently though).
To enable globally::
def options(opt):
opt.load('compiler_cxx')
def build(bld):
bld.load('compiler_cxx unity')
To enable for specific task generators only::
def build(bld):
bld(features='c cprogram unity', source='main.c', ...)
The file order is often significant in such builds, so it can be
necessary to adjust the order of source files and the batch sizes.
To control the amount of files processed in a batch per target
(the default is 50)::
def build(bld):
bld(features='c cprogram', unity_size=20)
"""
from waflib import Task, Options
from waflib.Tools import c_preproc
from waflib import TaskGen
MAX_BATCH = 50
EXTS_C = ('.c',)
EXTS_CXX = ('.cpp','.cc','.cxx','.C','.c++')
def options(opt):
global MAX_BATCH
opt.add_option('--batchsize', action='store', dest='batchsize', type='int', default=MAX_BATCH,
help='default unity batch size (0 disables unity builds)')
@TaskGen.taskgen_method
def batch_size(self):
default = getattr(Options.options, 'batchsize', MAX_BATCH)
if default < 1:
return 0
return getattr(self, 'unity_size', default)
class unity(Task.Task):
color = 'BLUE'
scan = c_preproc.scan
def to_include(self, node):
ret = node.path_from(self.outputs[0].parent)
ret = ret.replace('\\', '\\\\').replace('"', '\\"')
return ret
def run(self):
lst = ['#include "%s"\n' % self.to_include(node) for node in self.inputs]
txt = ''.join(lst)
self.outputs[0].write(txt)
def __str__(self):
node = self.outputs[0]
return node.path_from(node.ctx.launch_node())
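# A generated unity file simply includes its batch, e.g. (sketch):
#   #include "a.c"
#   #include "b.c"
# so each batch is compiled as a single translation unit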
def bind_unity(obj, cls_name, exts):
if 'mappings' not in obj.__dict__:
obj.mappings = dict(obj.mappings)
for j in exts:
fun = obj.mappings[j]
if fun.__name__ == 'unity_fun':
raise ValueError('Attempt to bind unity mappings multiple times %r' % j)
def unity_fun(self, node):
cnt = self.batch_size()
if cnt <= 1:
return fun(self, node)
x = getattr(self, 'master_%s' % cls_name, None)
if not x or len(x.inputs) >= cnt:
x = self.create_task('unity')
setattr(self, 'master_%s' % cls_name, x)
cnt_cur = getattr(self, 'cnt_%s' % cls_name, 0)
c_node = node.parent.find_or_declare('unity_%s_%d_%d.%s' % (self.idx, cnt_cur, cnt, cls_name))
x.outputs = [c_node]
setattr(self, 'cnt_%s' % cls_name, cnt_cur + 1)
fun(self, c_node)
x.inputs.append(node)
obj.mappings[j] = unity_fun
@TaskGen.feature('unity')
@TaskGen.before('process_source')
def single_unity(self):
lst = self.to_list(self.features)
if 'c' in lst:
bind_unity(self, 'c', EXTS_C)
if 'cxx' in lst:
bind_unity(self, 'cxx', EXTS_CXX)
def build(bld):
if bld.env.CC_NAME:
bind_unity(TaskGen.task_gen, 'c', EXTS_C)
if bld.env.CXX_NAME:
bind_unity(TaskGen.task_gen, 'cxx', EXTS_CXX)
| 2,872 | Python | .py | 85 | 31.141176 | 98 | 0.684153 | projecthamster/hamster | 1,069 | 250 | 128 | GPL-3.0 | 9/5/2024, 5:12:46 PM (Europe/Amsterdam) |
20,689 | c_emscripten.py | projecthamster_hamster/waflib/extras/c_emscripten.py | #!/usr/bin/env python
# -*- coding: utf-8 vi:ts=4:noexpandtab
import subprocess, shlex, sys
from waflib.Tools import ccroot, gcc, gxx
from waflib.Configure import conf
from waflib.TaskGen import after_method, feature
from waflib.Tools.compiler_c import c_compiler
from waflib.Tools.compiler_cxx import cxx_compiler
for supported_os in ('linux', 'darwin', 'gnu', 'aix'):
c_compiler[supported_os].append('c_emscripten')
cxx_compiler[supported_os].append('c_emscripten')
@conf
def get_emscripten_version(conf, cc):
"""
Emscripten doesn't support processing '-' like clang/gcc
"""
dummy = conf.cachedir.parent.make_node("waf-emscripten.c")
dummy.write("")
cmd = cc + ['-dM', '-E', '-x', 'c', dummy.abspath()]
env = conf.env.env or None
try:
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env)
out = p.communicate()[0]
except Exception as e:
conf.fatal('Could not determine emscripten version %r: %s' % (cmd, e))
if not isinstance(out, str):
out = out.decode(sys.stdout.encoding or 'latin-1')
k = {}
out = out.splitlines()
for line in out:
lst = shlex.split(line)
if len(lst)>2:
key = lst[1]
val = lst[2]
k[key] = val
if not ('__clang__' in k and 'EMSCRIPTEN' in k):
conf.fatal('Could not determine the emscripten compiler version.')
conf.env.DEST_OS = 'generic'
conf.env.DEST_BINFMT = 'elf'
conf.env.DEST_CPU = 'asm-js'
conf.env.CC_VERSION = (k['__clang_major__'], k['__clang_minor__'], k['__clang_patchlevel__'])
return k
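# Sketch of the parsing above: a line such as '#define __clang_major__ 19'
# is split by shlex into ['#define', '__clang_major__', '19'], so
# k['__clang_major__'] == '19' (string, not int; value is illustrative)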
@conf
def find_emscripten(conf):
cc = conf.find_program(['emcc'], var='CC')
conf.get_emscripten_version(cc)
conf.env.CC = cc
conf.env.CC_NAME = 'emscripten'
cxx = conf.find_program(['em++'], var='CXX')
conf.env.CXX = cxx
conf.env.CXX_NAME = 'emscripten'
conf.find_program(['emar'], var='AR')
def configure(conf):
conf.find_emscripten()
conf.find_ar()
conf.gcc_common_flags()
conf.gxx_common_flags()
conf.cc_load_tools()
conf.cc_add_flags()
conf.cxx_load_tools()
conf.cxx_add_flags()
conf.link_add_flags()
conf.env.ARFLAGS = ['rcs']
conf.env.cshlib_PATTERN = '%s.js'
conf.env.cxxshlib_PATTERN = '%s.js'
conf.env.cstlib_PATTERN = '%s.a'
conf.env.cxxstlib_PATTERN = '%s.a'
conf.env.cprogram_PATTERN = '%s.html'
conf.env.cxxprogram_PATTERN = '%s.html'
conf.env.CXX_TGT_F = ['-c', '-o', '']
conf.env.CC_TGT_F = ['-c', '-o', '']
conf.env.CXXLNK_TGT_F = ['-o', '']
conf.env.CCLNK_TGT_F = ['-o', '']
conf.env.append_value('LINKFLAGS',['-Wl,--enable-auto-import'])
| 2,528 | Python | .py | 74 | 32 | 94 | 0.677181 | projecthamster/hamster | 1,069 | 250 | 128 | GPL-3.0 | 9/5/2024, 5:12:46 PM (Europe/Amsterdam) |
20,690 | waf_xattr.py | projecthamster_hamster/waflib/extras/waf_xattr.py | #! /usr/bin/env python
# encoding: utf-8
"""
Use extended attributes instead of database files
1. Input files will be made writable
2. This is only for systems providing extended filesystem attributes
3. By default, hashes are calculated only if timestamp/size change (HASH_CACHE below)
4. The module enables "deep_inputs" on all tasks by propagating task signatures
5. This module also skips task signature comparisons for task code changes due to point 4.
6. This module is for Python3/Linux only, but it could be extended to Python2/other systems
using the xattr library
7. For projects in which tasks always declare output files, it should be possible to
store the rest of build context attributes on output files (imp_sigs, raw_deps and node_deps)
but this is not done here
On a simple C++ project benchmark, the following variations were observed before and after adding waf_xattr.py:
total build time: 20s -> 22s
no-op build time: 2.4s -> 1.8s
pickle file size: 2.9MB -> 2.6MB
"""
import os
from waflib import Logs, Node, Task, Utils, Errors
from waflib.Task import SKIP_ME, RUN_ME, CANCEL_ME, ASK_LATER, SKIPPED, MISSING
HASH_CACHE = True
SIG_VAR = 'user.waf.sig'
SEP = ','.encode()
TEMPLATE = '%b%d,%d'.encode()
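# Layout written by TEMPLATE (sketch, with illustrative numbers): the 16 raw
# digest bytes followed by ASCII "<mtime_ms>,<size>", e.g.
#   TEMPLATE % (digest, 1712000000123, 4096) == digest + b'1712000000123,4096'
# h_file() below splits on SEP to compare timestamp/size without re-hashing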
try:
PermissionError
except NameError:
PermissionError = IOError
def getxattr(self):
return os.getxattr(self.abspath(), SIG_VAR)
def setxattr(self, val):
os.setxattr(self.abspath(), SIG_VAR, val)
def h_file(self):
try:
ret = getxattr(self)
except OSError:
if HASH_CACHE:
st = os.stat(self.abspath())
mtime = st.st_mtime
size = st.st_size
else:
if len(ret) == 16:
# for build directory files
return ret
if HASH_CACHE:
# check if timestamp and mtime match to avoid re-hashing
st = os.stat(self.abspath())
mtime, size = ret[16:].split(SEP)
if int(1000 * st.st_mtime) == int(mtime) and st.st_size == int(size):
return ret[:16]
ret = Utils.h_file(self.abspath())
if HASH_CACHE:
val = TEMPLATE % (ret, int(1000 * st.st_mtime), int(st.st_size))
try:
setxattr(self, val)
except PermissionError:
os.chmod(self.abspath(), st.st_mode | 128)
setxattr(self, val)
return ret
def runnable_status(self):
bld = self.generator.bld
if bld.is_install < 0:
return SKIP_ME
for t in self.run_after:
if not t.hasrun:
return ASK_LATER
elif t.hasrun < SKIPPED:
# a dependency has an error
return CANCEL_ME
# first compute the signature
try:
new_sig = self.signature()
except Errors.TaskNotReady:
return ASK_LATER
if not self.outputs:
# compare the signature to a signature computed previously
# this part is only for tasks with no output files
key = self.uid()
try:
prev_sig = bld.task_sigs[key]
except KeyError:
Logs.debug('task: task %r must run: it was never run before or the task code changed', self)
return RUN_ME
if new_sig != prev_sig:
Logs.debug('task: task %r must run: the task signature changed', self)
return RUN_ME
# compare the signatures of the outputs to make a decision
for node in self.outputs:
try:
sig = node.h_file()
except EnvironmentError:
Logs.debug('task: task %r must run: an output node does not exist', self)
return RUN_ME
if sig != new_sig:
Logs.debug('task: task %r must run: an output node is stale', self)
return RUN_ME
return (self.always_run and RUN_ME) or SKIP_ME
def post_run(self):
bld = self.generator.bld
sig = self.signature()
for node in self.outputs:
if not node.exists():
self.hasrun = MISSING
self.err_msg = '-> missing file: %r' % node.abspath()
raise Errors.WafError(self.err_msg)
os.setxattr(node.abspath(), 'user.waf.sig', sig)
if not self.outputs:
# only for task with no outputs
bld.task_sigs[self.uid()] = sig
if not self.keep_last_cmd:
try:
del self.last_cmd
except AttributeError:
pass
try:
os.getxattr
except AttributeError:
pass
else:
h_file.__doc__ = Node.Node.h_file.__doc__
# keep file hashes as file attributes
Node.Node.h_file = h_file
# enable "deep_inputs" on all tasks
Task.Task.runnable_status = runnable_status
Task.Task.post_run = post_run
Task.Task.sig_deep_inputs = Utils.nada
| 4,144 | Python | .py | 128 | 29.710938 | 101 | 0.729344 | projecthamster/hamster | 1,069 | 250 | 128 | GPL-3.0 | 9/5/2024, 5:12:46 PM (Europe/Amsterdam) |
20,691 | ticgt.py | projecthamster_hamster/waflib/extras/ticgt.py | #!/usr/bin/env python
# encoding: utf-8
# Texas Instruments code generator support (experimental)
# When reporting issues, please directly assign the bug to the maintainer.
__author__ = __maintainer__ = "Jérôme Carretero <cJ-waf@zougloub.eu>"
__copyright__ = "Jérôme Carretero, 2012"
"""
TI cgt6x is a compiler suite for TI DSPs.
The toolchain does pretty weird things, and I'm sure I'm missing some of them.
But still, the tool saves time.
What this tool does is:
- create a TI compiler environment
- create TI compiler features, to handle some specifics about this compiler
It has a few idiosyncrasies, such as not allowing free choice of the .o file names
- automatically activate them when using the TI compiler
- handle the tconf tool
TODO:
- the set_platform_flags() function is not nice
- more tests
- broaden tool scope, if needed
"""
import os, re
from waflib import Options, Utils, Task, TaskGen
from waflib.Tools import c, ccroot, c_preproc
from waflib.Configure import conf
from waflib.TaskGen import feature, before_method
from waflib.Tools.c import cprogram
opj = os.path.join
@conf
def find_ticc(conf):
conf.find_program(['cl6x'], var='CC', path_list=opj(getattr(Options.options, 'ti-cgt-dir', ""), 'bin'))
conf.env.CC_NAME = 'ticc'
@conf
def find_tild(conf):
conf.find_program(['lnk6x'], var='LINK_CC', path_list=opj(getattr(Options.options, 'ti-cgt-dir', ""), 'bin'))
conf.env.LINK_CC_NAME = 'tild'
@conf
def find_tiar(conf):
conf.find_program(['ar6x'], var='AR', path_list=opj(getattr(Options.options, 'ti-cgt-dir', ""), 'bin'))
conf.env.AR_NAME = 'tiar'
conf.env.ARFLAGS = 'qru'
@conf
def ticc_common_flags(conf):
v = conf.env
if not v['LINK_CC']:
v['LINK_CC'] = v['CC']
v['CCLNK_SRC_F'] = []
v['CCLNK_TGT_F'] = ['-o']
v['CPPPATH_ST'] = '-I%s'
v['DEFINES_ST'] = '-d%s'
v['LIB_ST'] = '-l%s' # template for adding libs
v['LIBPATH_ST'] = '-i%s' # template for adding libpaths
v['STLIB_ST'] = '-l=%s.lib'
v['STLIBPATH_ST'] = '-i%s'
# program
v['cprogram_PATTERN'] = '%s.out'
# static lib
#v['LINKFLAGS_cstlib'] = ['-Wl,-Bstatic']
v['cstlib_PATTERN'] = '%s.lib'
def configure(conf):
v = conf.env
v.TI_CGT_DIR = getattr(Options.options, 'ti-cgt-dir', "")
v.TI_DSPLINK_DIR = getattr(Options.options, 'ti-dsplink-dir', "")
v.TI_BIOSUTILS_DIR = getattr(Options.options, 'ti-biosutils-dir', "")
v.TI_DSPBIOS_DIR = getattr(Options.options, 'ti-dspbios-dir', "")
v.TI_XDCTOOLS_DIR = getattr(Options.options, 'ti-xdctools-dir', "")
conf.find_ticc()
conf.find_tiar()
conf.find_tild()
conf.ticc_common_flags()
conf.cc_load_tools()
conf.cc_add_flags()
conf.link_add_flags()
conf.find_program(['tconf'], var='TCONF', path_list=v.TI_XDCTOOLS_DIR)
conf.env.TCONF_INCLUDES += [
opj(conf.env.TI_DSPBIOS_DIR, 'packages'),
]
conf.env.INCLUDES += [
opj(conf.env.TI_CGT_DIR, 'include'),
]
conf.env.LIBPATH += [
opj(conf.env.TI_CGT_DIR, "lib"),
]
conf.env.INCLUDES_DSPBIOS += [
opj(conf.env.TI_DSPBIOS_DIR, 'packages', 'ti', 'bios', 'include'),
]
conf.env.LIBPATH_DSPBIOS += [
opj(conf.env.TI_DSPBIOS_DIR, 'packages', 'ti', 'bios', 'lib'),
]
conf.env.INCLUDES_DSPLINK += [
opj(conf.env.TI_DSPLINK_DIR, 'dsplink', 'dsp', 'inc'),
]
@conf
def ti_set_debug(cfg, debug=1):
"""
Sets debug flags for the compiler.
TODO:
- for each TI CFLAG/INCLUDES/LINKFLAGS/LIBPATH replace RELEASE by DEBUG
- -g --no_compress
"""
if debug:
cfg.env.CFLAGS += "-d_DEBUG -dDEBUG -dDDSP_DEBUG".split()
@conf
def ti_dsplink_set_platform_flags(cfg, splat, dsp, dspbios_ver, board):
"""
Sets the INCLUDES, LINKFLAGS for DSPLINK and TCONF_INCLUDES
For the specific hardware.
Assumes that DSPLINK was built in its own folder.
:param splat: short platform name (eg. OMAPL138)
:param dsp: DSP name (eg. 674X)
:param dspbios_ver: string identifying DspBios version (eg. 5.XX)
:param board: board name (eg. OMAPL138GEM)
"""
d1 = opj(cfg.env.TI_DSPLINK_DIR, 'dsplink', 'dsp', 'inc', 'DspBios', dspbios_ver)
d = opj(cfg.env.TI_DSPLINK_DIR, 'dsplink', 'dsp', 'inc', 'DspBios', dspbios_ver, board)
cfg.env.TCONF_INCLUDES += [d1, d]
cfg.env.INCLUDES_DSPLINK += [
opj(cfg.env.TI_DSPLINK_DIR, 'dsplink', 'dsp', 'inc', dsp),
d,
]
cfg.env.LINKFLAGS_DSPLINK += [
opj(cfg.env.TI_DSPLINK_DIR, 'dsplink', 'dsp', 'export', 'BIN', 'DspBios', splat, board+'_0', 'RELEASE', 'dsplink%s.lib' % x)
for x in ('', 'pool', 'mpcs', 'mplist', 'msg', 'data', 'notify', 'ringio')
]
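# Example call (parameter samples taken from the docstring above):
#   cfg.ti_dsplink_set_platform_flags('OMAPL138', '674X', '5.XX', 'OMAPL138GEM')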
def options(opt):
opt.add_option('--with-ti-cgt', type='string', dest='ti-cgt-dir', help = 'Specify alternate cgt root folder', default="")
opt.add_option('--with-ti-biosutils', type='string', dest='ti-biosutils-dir', help = 'Specify alternate biosutils folder', default="")
opt.add_option('--with-ti-dspbios', type='string', dest='ti-dspbios-dir', help = 'Specify alternate dspbios folder', default="")
opt.add_option('--with-ti-dsplink', type='string', dest='ti-dsplink-dir', help = 'Specify alternate dsplink folder', default="")
opt.add_option('--with-ti-xdctools', type='string', dest='ti-xdctools-dir', help = 'Specify alternate xdctools folder', default="")
class ti_cprogram(cprogram):
"""
Link object files into a c program
Changes:
- the linked executable to have a relative path (because we can)
- put the LIBPATH first
"""
run_str = '${LINK_CC} ${LIBPATH_ST:LIBPATH} ${LIB_ST:LIB} ${LINKFLAGS} ${CCLNK_SRC_F}${SRC} ${CCLNK_TGT_F}${TGT[0].bldpath()} ${RPATH_ST:RPATH} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${FRAMEWORK_ST:FRAMEWORK} ${ARCH_ST:ARCH} ${STLIB_MARKER} ${STLIBPATH_ST:STLIBPATH} ${STLIB_ST:STLIB} ${SHLIB_MARKER} '
@feature("c")
@before_method('apply_link')
def use_ti_cprogram(self):
"""
Automatically uses ti_cprogram link process
"""
if 'cprogram' in self.features and self.env.CC_NAME == 'ticc':
self.features.insert(0, "ti_cprogram")
class ti_c(Task.Task):
"""
Compile task for the TI codegen compiler
This compiler does not allow specifying the output file name, only the output path.
"""
"Compile C files into object files"
run_str = '${CC} ${ARCH_ST:ARCH} ${CFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${SRC} -c ${OUT} ${CPPFLAGS}'
vars = ['CCDEPS'] # unused variable to depend on, just in case
ext_in = ['.h'] # set the build order easily by using ext_out=['.h']
scan = c_preproc.scan
def create_compiled_task(self, name, node):
"""
Overrides ccroot.create_compiled_task to support ti_c
"""
out = '%s' % (node.change_ext('.obj').name)
if self.env.CC_NAME == 'ticc':
name = 'ti_c'
task = self.create_task(name, node, node.parent.find_or_declare(out))
self.env.OUT = '-fr%s' % (node.parent.get_bld().abspath())
try:
self.compiled_tasks.append(task)
except AttributeError:
self.compiled_tasks = [task]
return task
@TaskGen.extension('.c')
def c_hook(self, node):
"Bind the c file extension to the creation of a :py:class:`waflib.Tools.c.c` instance"
if self.env.CC_NAME == 'ticc':
return create_compiled_task(self, 'ti_c', node)
else:
return self.create_compiled_task('c', node)
@feature("ti-tconf")
@before_method('process_source')
def apply_tconf(self):
sources = [x.get_src() for x in self.to_nodes(self.source, path=self.path.get_src())]
node = sources[0]
assert(sources[0].name.endswith(".tcf"))
if len(sources) > 1:
assert(sources[1].name.endswith(".cmd"))
target = getattr(self, 'target', self.source)
target_node = node.get_bld().parent.find_or_declare(node.name)
procid = "%d" % int(getattr(self, 'procid', 0))
importpaths = []
includes = Utils.to_list(getattr(self, 'includes', []))
for x in includes + self.env.TCONF_INCLUDES:
if x == os.path.abspath(x):
importpaths.append(x)
else:
relpath = self.path.find_node(x).path_from(target_node.parent)
importpaths.append(relpath)
task = self.create_task('ti_tconf', sources, target_node.change_ext('.cdb'))
task.path = self.path
task.includes = includes
task.cwd = target_node.parent.abspath()
task.env = self.env.derive()
task.env["TCONFSRC"] = node.path_from(target_node.parent)
task.env["TCONFINC"] = '-Dconfig.importPath=%s' % ";".join(importpaths)
task.env['TCONFPROGNAME'] = '-Dconfig.programName=%s' % target
task.env['PROCID'] = procid
task.outputs = [
target_node.change_ext("cfg_c.c"),
target_node.change_ext("cfg.s62"),
target_node.change_ext("cfg.cmd"),
]
create_compiled_task(self, 'ti_c', task.outputs[1])
ctask = create_compiled_task(self, 'ti_c', task.outputs[0])
ctask.env = self.env.derive()
self.add_those_o_files(target_node.change_ext("cfg.cmd"))
if len(sources) > 1:
self.add_those_o_files(sources[1])
self.source = []
re_tconf_include = re.compile(r'(?P<type>utils\.importFile)\("(?P<file>.*)"\)',re.M)
class ti_tconf(Task.Task):
run_str = '${TCONF} ${TCONFINC} ${TCONFPROGNAME} ${TCONFSRC} ${PROCID}'
color = 'PINK'
def scan(self):
includes = Utils.to_list(getattr(self, 'includes', []))
def deps(node):
nodes, names = [], []
if node:
code = Utils.readf(node.abspath())
for match in re_tconf_include.finditer(code):
path = match.group('file')
if path:
for x in includes:
filename = opj(x, path)
fi = self.path.find_resource(filename)
if fi:
subnodes, subnames = deps(fi)
nodes += subnodes
names += subnames
nodes.append(fi)
names.append(path)
break
return nodes, names
return deps(self.inputs[0])
| 9,474 | Python | .py | 243 | 36.452675 | 298 | 0.687609 | projecthamster/hamster | 1,069 | 250 | 128 | GPL-3.0 | 9/5/2024, 5:12:46 PM (Europe/Amsterdam) |
20,692 | halide.py | projecthamster_hamster/waflib/extras/halide.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Halide code generation tool
__author__ = __maintainer__ = "Jérôme Carretero <cJ-waf@zougloub.eu>"
__copyright__ = "Jérôme Carretero, 2014"
"""
Tool to run `Halide <http://halide-lang.org>`_ code generators.
Usage::
bld(
name='pipeline',
# ^ Reference this in use="..." for things using the generated code
#target=['pipeline.o', 'pipeline.h']
# ^ by default, name.{o,h} is added, but you can set the outputs here
features='halide',
halide_env="HL_TRACE=1 HL_TARGET=host-opencl-gpu_debug",
# ^ Environment passed to the generator,
# can be a dict, k/v list, or string.
args=[],
# ^ Command-line arguments to the generator (optional),
# eg. to give parameters to the scheduling
source='pipeline_gen',
# ^ Name of the source executable
)
Known issues:
- Currently only supports Linux (no ".exe")
- Doesn't rerun on input modification when input is part of a build
chain, and has been modified externally.
"""
import os
from waflib import Task, Utils, Options, TaskGen, Errors
class run_halide_gen(Task.Task):
color = 'CYAN'
vars = ['HALIDE_ENV', 'HALIDE_ARGS']
run_str = "${SRC[0].abspath()} ${HALIDE_ARGS}"
def __str__(self):
stuff = "halide"
stuff += ("[%s]" % (",".join(
('%s=%s' % (k,v)) for k, v in sorted(self.env.env.items()))))
return Task.Task.__str__(self).replace(self.__class__.__name__,
stuff)
@TaskGen.feature('halide')
@TaskGen.before_method('process_source')
def halide(self):
Utils.def_attrs(self,
args=[],
halide_env={},
)
bld = self.bld
env = self.halide_env
try:
if isinstance(env, str):
env = dict(x.split('=') for x in env.split())
elif isinstance(env, list):
env = dict(x.split('=') for x in env)
assert isinstance(env, dict)
except Exception as e:
if not isinstance(e, ValueError) \
and not isinstance(e, AssertionError):
raise
raise Errors.WafError(
"halide_env must be under the form" \
" {'HL_x':'a', 'HL_y':'b'}" \
" or ['HL_x=y', 'HL_y=b']" \
" or 'HL_x=y HL_y=b'")
src = self.to_nodes(self.source)
assert len(src) == 1, "Only one source expected"
src = src[0]
args = Utils.to_list(self.args)
def change_ext(src, ext):
# Return a node with a new extension, in an appropriate folder
name = src.name
xpos = src.name.rfind('.')
if xpos == -1:
xpos = len(src.name)
newname = name[:xpos] + ext
if src.is_child_of(bld.bldnode):
node = src.get_src().parent.find_or_declare(newname)
else:
node = bld.bldnode.find_or_declare(newname)
return node
def to_nodes(self, lst, path=None):
tmp = []
path = path or self.path
find = path.find_or_declare
if isinstance(lst, self.path.__class__):
lst = [lst]
for x in Utils.to_list(lst):
if isinstance(x, str):
node = find(x)
else:
node = x
tmp.append(node)
return tmp
tgt = to_nodes(self, self.target)
if not tgt:
tgt = [change_ext(src, '.o'), change_ext(src, '.h')]
cwd = tgt[0].parent.abspath()
task = self.create_task('run_halide_gen', src, tgt, cwd=cwd)
task.env.append_unique('HALIDE_ARGS', args)
if task.env.env == []:
task.env.env = {}
task.env.env.update(env)
task.env.HALIDE_ENV = " ".join(("%s=%s" % (k,v)) for (k,v) in sorted(env.items()))
task.env.HALIDE_ARGS = args
try:
self.compiled_tasks.append(task)
except AttributeError:
self.compiled_tasks = [task]
self.source = []
def configure(conf):
if Options.options.halide_root is None:
conf.check_cfg(package='Halide', args='--cflags --libs')
else:
halide_root = Options.options.halide_root
conf.env.INCLUDES_HALIDE = [ os.path.join(halide_root, "include") ]
conf.env.LIBPATH_HALIDE = [ os.path.join(halide_root, "lib") ]
conf.env.LIB_HALIDE = ["Halide"]
# You might want to add this, while upstream doesn't fix it
#conf.env.LIB_HALIDE += ['ncurses', 'dl', 'pthread']
def options(opt):
opt.add_option('--halide-root',
help="path to Halide include and lib files",
)
| 3,987 | Python | .py | 123 | 29.284553 | 83 | 0.662578 | projecthamster/hamster | 1,069 | 250 | 128 | GPL-3.0 | 9/5/2024, 5:12:46 PM (Europe/Amsterdam) |
20,693 | qt5.py | projecthamster_hamster/waflib/Tools/qt5.py | #!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006-2018 (ita)
# Rafaël Kooi, 2023 (RA-Kooi)
"""
This tool helps with finding Qt5 and Qt6 tools and libraries,
and also provides syntactic sugar for using Qt5 and Qt6 tools.
The following snippet illustrates the tool usage::
def options(opt):
opt.load('compiler_cxx qt5')
def configure(conf):
conf.load('compiler_cxx qt5')
def build(bld):
bld(
features = 'qt5 cxx cxxprogram',
uselib = 'QT5CORE QT5GUI QT5OPENGL QT5SVG',
source = 'main.cpp textures.qrc aboutDialog.ui',
target = 'window',
)
Alternatively the following snippet illustrates Qt6 tool usage::
def options(opt):
opt.load('compiler_cxx qt5')
def configure(conf):
conf.want_qt6 = True
conf.load('compiler_cxx qt5')
def build(bld):
bld(
features = 'qt6 cxx cxxprogram',
uselib = 'QT6CORE QT6GUI QT6OPENGL QT6SVG',
source = 'main.cpp textures.qrc aboutDialog.ui',
target = 'window',
)
Here, the UI description and resource files will be processed
to generate code.
Usage
=====
Load the "qt5" tool.
You also need to edit your sources accordingly:
- the normal way of doing things is to have your C++ files
include the .moc file.
This is regarded as the best practice (and provides much faster
compilations).
It also implies that the include paths have been set properly.
- to have the include paths added automatically, use the following::
from waflib.TaskGen import feature, before_method, after_method
@feature('cxx')
@after_method('process_source')
@before_method('apply_incpaths')
def add_includes_paths(self):
incs = set(self.to_list(getattr(self, 'includes', '')))
for x in self.compiled_tasks:
incs.add(x.inputs[0].parent.path_from(self.path))
self.includes = sorted(incs)
Note: another tool provides Qt processing that does not require
.moc includes, see 'playground/slow_qt/'.
A few options (--qt{dir,bin,...}) and environment variables
(QT5_{ROOT,DIR,MOC,UIC,XCOMPILE}) allow finer tuning of the tool,
tool path selection, etc; please read the source for more info.
For Qt6 replace the QT5_ prefix with QT6_.
The detection uses pkg-config on Linux by default. The list of
libraries to request from pkg-config is built by scanning the QTLIBS
directory (which can be passed via --qtlibs or by setting the
environment variable QT5_LIBDIR or QT6_LIBDIR; otherwise it is derived
by querying qmake for the QT_INSTALL_LIBS directory) for the
shared/static libraries present.
Alternatively the list of libraries to be requested via pkg-config
can be set using the qt5_vars attribute, ie:
conf.qt5_vars = ['Qt5Core', 'Qt5Gui', 'Qt5Widgets', 'Qt5Test'];
For Qt6 use the qt6_vars attribute.
This can speed up the configuration phase if the needed libraries are
known beforehand, can improve detection on systems with a
sparse Qt5/Qt6 library installation (ie. NIX) and can improve
detection of some header-only Qt modules (ie. Qt5UiPlugin).
To force static library detection use:
QT5_XCOMPILE=1 QT5_FORCE_STATIC=1 waf configure
To use Qt6 set the want_qt6 attribute, ie:
conf.want_qt6 = True;
"""
from __future__ import with_statement
try:
from xml.sax import make_parser
from xml.sax.handler import ContentHandler
except ImportError:
has_xml = False
ContentHandler = object
else:
has_xml = True
import os, sys, re
from waflib.Tools import cxx
from waflib import Build, Task, Utils, Options, Errors, Context
from waflib.TaskGen import feature, after_method, extension, before_method
from waflib.Configure import conf
from waflib import Logs
MOC_H = ['.h', '.hpp', '.hxx', '.hh']
"""
File extensions associated to .moc files
"""
EXT_RCC = ['.qrc']
"""
File extension for the resource (.qrc) files
"""
EXT_UI = ['.ui']
"""
File extension for the user interface (.ui) files
"""
EXT_QT5 = ['.cpp', '.cc', '.cxx', '.C']
"""
File extensions of C++ files that may require a .moc processing
"""
class qxx(Task.classes['cxx']):
"""
Each C++ file can have zero or several .moc files to create.
They are known only when the files are scanned (preprocessor)
To avoid scanning the c++ files each time (parsing C/C++), the results
are retrieved from the task cache (bld.node_deps/bld.raw_deps).
The moc tasks are also created *dynamically* during the build.
"""
def __init__(self, *k, **kw):
Task.Task.__init__(self, *k, **kw)
self.moc_done = 0
def runnable_status(self):
"""
Compute the task signature to make sure the scanner was executed. Create the
moc tasks by using :py:meth:`waflib.Tools.qt5.qxx.add_moc_tasks` (if necessary),
then postpone the task execution (there is no need to recompute the task signature).
"""
if self.moc_done:
return Task.Task.runnable_status(self)
else:
for t in self.run_after:
if not t.hasrun:
return Task.ASK_LATER
self.add_moc_tasks()
return Task.Task.runnable_status(self)
def create_moc_task(self, h_node, m_node):
"""
If several libraries use the same classes, it is possible that moc will run several times (Issue 1318)
It is not possible to change the file names, but we can assume that the moc transformation will be identical,
and the moc tasks can be shared in a global cache.
"""
try:
moc_cache = self.generator.bld.moc_cache
except AttributeError:
moc_cache = self.generator.bld.moc_cache = {}
try:
return moc_cache[h_node]
except KeyError:
tsk = moc_cache[h_node] = Task.classes['moc'](env=self.env, generator=self.generator)
tsk.set_inputs(h_node)
tsk.set_outputs(m_node)
tsk.env.append_unique('MOC_FLAGS', '-i')
if self.generator:
self.generator.tasks.append(tsk)
# direct injection in the build phase (safe because called from the main thread)
gen = self.generator.bld.producer
gen.outstanding.append(tsk)
gen.total += 1
return tsk
def add_moc_tasks(self):
"""
Creates moc tasks by looking in the list of file dependencies ``bld.raw_deps[self.uid()]``
"""
node = self.inputs[0]
bld = self.generator.bld
# skip on uninstall due to generated files
if bld.is_install == Build.UNINSTALL:
return
try:
# compute the signature once to know if there is a moc file to create
self.signature()
except KeyError:
# the moc file may be referenced somewhere else
pass
else:
# remove the signature, it must be recomputed with the moc task
delattr(self, 'cache_sig')
include_nodes = [node.parent] + self.generator.includes_nodes
moctasks = []
mocfiles = set()
for d in bld.raw_deps.get(self.uid(), []):
if not d.endswith('.moc'):
continue
# process that base.moc only once
if d in mocfiles:
continue
mocfiles.add(d)
# find the source associated with the moc file
h_node = None
base2 = d[:-4]
# foo.moc from foo.cpp
prefix = node.name[:node.name.rfind('.')]
if base2 == prefix:
h_node = node
else:
# this deviates from the standard
# if bar.cpp includes foo.moc, then assume it is from foo.h
for x in include_nodes:
for e in MOC_H:
h_node = x.find_node(base2 + e)
if h_node:
break
else:
continue
break
if h_node:
m_node = h_node.change_ext('.moc')
else:
raise Errors.WafError('No source found for %r which is a moc file' % d)
# create the moc task
task = self.create_moc_task(h_node, m_node)
moctasks.append(task)
# simple scheduler dependency: run the moc task before others
self.run_after.update(set(moctasks))
self.moc_done = 1
class trans_update(Task.Task):
"""Updates a .ts files from a list of C++ files"""
run_str = '${QT_LUPDATE} ${SRC} -ts ${TGT}'
color = 'BLUE'
class XMLHandler(ContentHandler):
"""
Parses ``.qrc`` files
"""
def __init__(self):
ContentHandler.__init__(self)
self.buf = []
self.files = []
def startElement(self, name, attrs):
if name == 'file':
self.buf = []
def endElement(self, name):
if name == 'file':
self.files.append(str(''.join(self.buf)))
def characters(self, cars):
self.buf.append(cars)
@extension(*EXT_RCC)
def create_rcc_task(self, node):
"Creates rcc and cxx tasks for ``.qrc`` files"
rcnode = node.change_ext('_rc.%d.cpp' % self.idx)
self.create_task('rcc', node, rcnode)
cpptask = self.create_task('cxx', rcnode, rcnode.change_ext('.o'))
try:
self.compiled_tasks.append(cpptask)
except AttributeError:
self.compiled_tasks = [cpptask]
return cpptask
@extension(*EXT_UI)
def create_uic_task(self, node):
"Create uic tasks for user interface ``.ui`` definition files"
"""
If UIC file is used in more than one bld, we would have a conflict in parallel execution
It is not possible to change the file names (like .self.idx. as for objects) as they have
to be referenced by the source file, but we can assume that the transformation will be identical
and the tasks can be shared in a global cache.
"""
try:
uic_cache = self.bld.uic_cache
except AttributeError:
uic_cache = self.bld.uic_cache = {}
if node not in uic_cache:
uictask = uic_cache[node] = self.create_task('ui5', node)
uictask.outputs = [node.parent.find_or_declare(self.env.ui_PATTERN % node.name[:-3])]
@extension('.ts')
def add_lang(self, node):
"""Adds all the .ts file into ``self.lang``"""
self.lang = self.to_list(getattr(self, 'lang', [])) + [node]
@feature('qt5', 'qt6')
@before_method('process_source')
def process_mocs(self):
"""
Processes MOC files included in headers::
def build(bld):
bld.program(features='qt5', source='main.cpp', target='app', use='QT5CORE', moc='foo.h')
The build will run moc on foo.h to create moc_foo.n.cpp. The number in the file name
is provided to avoid name clashes when the same headers are used by several targets.
"""
lst = self.to_nodes(getattr(self, 'moc', []))
self.source = self.to_list(getattr(self, 'source', []))
for x in lst:
prefix = x.name[:x.name.rfind('.')] # foo.h -> foo
moc_target = 'moc_%s.%d.cpp' % (prefix, self.idx)
moc_node = x.parent.find_or_declare(moc_target)
self.source.append(moc_node)
self.create_task('moc', x, moc_node)
@feature('qt5', 'qt6')
@after_method('apply_link')
def apply_qt5(self):
"""
Adds MOC_FLAGS which may be necessary for moc::
def build(bld):
bld.program(features='qt5', source='main.cpp', target='app', use='QT5CORE')
The additional parameters are:
:param lang: list of translation files (\\*.ts) to process
:type lang: list of :py:class:`waflib.Node.Node` or string without the .ts extension
:param update: whether to process the C++ files to update the \\*.ts files (use **waf --translate**)
:type update: bool
:param langname: if given, transform the \\*.ts files into a .qrc files to include in the binary file
:type langname: :py:class:`waflib.Node.Node` or string without the .qrc extension
"""
if getattr(self, 'lang', None):
qmtasks = []
for x in self.to_list(self.lang):
if isinstance(x, str):
x = self.path.find_resource(x + '.ts')
qmtasks.append(self.create_task('ts2qm', x, x.change_ext('.%d.qm' % self.idx)))
if getattr(self, 'update', None) and Options.options.trans_qt5:
cxxnodes = [a.inputs[0] for a in self.compiled_tasks] + [
a.inputs[0] for a in self.tasks if a.inputs and a.inputs[0].name.endswith('.ui')]
for x in qmtasks:
self.create_task('trans_update', cxxnodes, x.inputs)
if getattr(self, 'langname', None):
qmnodes = [x.outputs[0] for x in qmtasks]
rcnode = self.langname
if isinstance(rcnode, str):
rcnode = self.path.find_or_declare(rcnode + ('.%d.qrc' % self.idx))
t = self.create_task('qm2rcc', qmnodes, rcnode)
k = create_rcc_task(self, t.outputs[0])
self.link_task.inputs.append(k.outputs[0])
lst = []
for flag in self.to_list(self.env.CXXFLAGS):
if len(flag) < 2:
continue
f = flag[0:2]
if f in ('-D', '-I', '/D', '/I'):
if (f[0] == '/'):
lst.append('-' + flag[1:])
else:
lst.append(flag)
self.env.append_value('MOC_FLAGS', lst)
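# Sketch of the filtering above (illustrative flags): CXXFLAGS of
# ['/DFOO', '-Iinc', '-O2'] yields MOC_FLAGS ['-DFOO', '-Iinc'];
# MSVC-style /D and /I are normalized to -D and -I, other flags are dropped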
@extension(*EXT_QT5)
def cxx_hook(self, node):
"""
Re-maps C++ file extensions to the :py:class:`waflib.Tools.qt5.qxx` task.
"""
return self.create_compiled_task('qxx', node)
class rcc(Task.Task):
"""
Processes ``.qrc`` files
"""
color = 'BLUE'
run_str = '${QT_RCC} -name ${tsk.rcname()} ${SRC[0].abspath()} ${RCC_ST} -o ${TGT}'
ext_out = ['.h']
def rcname(self):
return os.path.splitext(self.inputs[0].name)[0]
def scan(self):
"""Parse the *.qrc* files"""
if not has_xml:
Logs.error('No xml.sax support was found, rcc dependencies will be incomplete!')
return ([], [])
parser = make_parser()
curHandler = XMLHandler()
parser.setContentHandler(curHandler)
with open(self.inputs[0].abspath(), 'r') as f:
parser.parse(f)
nodes = []
names = []
root = self.inputs[0].parent
for x in curHandler.files:
nd = root.find_resource(x)
if nd:
nodes.append(nd)
else:
names.append(x)
return (nodes, names)
def quote_flag(self, x):
"""
Override Task.quote_flag. QT parses the argument files
differently than cl.exe and link.exe
:param x: flag
:type x: string
:return: quoted flag
:rtype: string
"""
return x
class moc(Task.Task):
"""
Creates ``.moc`` files
"""
color = 'BLUE'
run_str = '${QT_MOC} ${MOC_FLAGS} ${MOCCPPPATH_ST:INCPATHS} ${MOCDEFINES_ST:DEFINES} ${SRC} ${MOC_ST} ${TGT}'
def quote_flag(self, x):
"""
Override Task.quote_flag. QT parses the argument files
differently than cl.exe and link.exe
:param x: flag
:type x: string
:return: quoted flag
:rtype: string
"""
return x
class ui5(Task.Task):
"""
Processes ``.ui`` files
"""
color = 'BLUE'
run_str = '${QT_UIC} ${SRC} -o ${TGT}'
ext_out = ['.h']
class ts2qm(Task.Task):
"""
Generates ``.qm`` files from ``.ts`` files
"""
color = 'BLUE'
run_str = '${QT_LRELEASE} ${QT_LRELEASE_FLAGS} ${SRC} -qm ${TGT}'
class qm2rcc(Task.Task):
"""
Generates ``.qrc`` files from ``.qm`` files
"""
color = 'BLUE'
after = 'ts2qm'
def run(self):
"""Create a qrc file including the inputs"""
txt = '\n'.join(['<file>%s</file>' % k.path_from(self.outputs[0].parent) for k in self.inputs])
code = '<!DOCTYPE RCC><RCC version="1.0">\n<qresource>\n%s\n</qresource>\n</RCC>' % txt
self.outputs[0].write(code)
def configure(self):
"""
Besides the configuration options, the environment variable QT5_ROOT may be used
to give the location of the qt5 libraries (absolute path).
The detection uses the program ``pkg-config`` through :py:func:`waflib.Tools.config_c.check_cfg`
"""
if 'COMPILER_CXX' not in self.env:
self.fatal('No CXX compiler defined: did you forget to configure compiler_cxx first?')
self.want_qt6 = getattr(self, 'want_qt6', False)
if self.want_qt6:
self.qt_vars = Utils.to_list(getattr(self, 'qt6_vars', []))
else:
self.qt_vars = Utils.to_list(getattr(self, 'qt5_vars', []))
self.find_qt5_binaries()
self.set_qt5_libs_dir()
self.set_qt5_libs_to_check()
self.set_qt5_defines()
self.find_qt5_libraries()
self.add_qt5_rpath()
self.simplify_qt5_libs()
# warn about this during the configuration too
if not has_xml:
Logs.error('No xml.sax support was found, rcc dependencies will be incomplete!')
feature = 'qt6' if self.want_qt6 else 'qt5'
# Qt6 requires C++17 (https://www.qt.io/blog/qt-6.0-released)
stdflag = '-std=c++17' if self.want_qt6 else '-std=c++11'
# Qt5 may be compiled with '-reduce-relocations', which seems to require dependent programs to be built with -fPIE or -fPIC
frag = '#include <QMap>\nint main(int argc, char **argv) {QMap<int,int> m;return m.keys().size();}\n'
uses = 'QT6CORE' if self.want_qt6 else 'QT5CORE'
for flag in [[], '-fPIE', '-fPIC', stdflag, [stdflag, '-fPIE'], [stdflag, '-fPIC']]:
msg = 'See if Qt files compile '
if flag:
msg += 'with %s' % flag
try:
self.check(features=feature + ' cxx', use=uses, uselib_store=feature, cxxflags=flag, fragment=frag, msg=msg)
except self.errors.ConfigurationError:
pass
else:
break
else:
self.fatal('Could not build a simple Qt application')
# FreeBSD does not add /usr/local/lib and the pkg-config files do not provide it either :-/
if Utils.unversioned_sys_platform() == 'freebsd':
frag = '#include <QMap>\nint main(int argc, char **argv) {QMap<int,int> m;return m.keys().size();}\n'
try:
self.check(features=feature + ' cxx cxxprogram', use=uses, fragment=frag, msg='Can we link Qt programs on FreeBSD directly?')
except self.errors.ConfigurationError:
self.check(features=feature + ' cxx cxxprogram', use=uses, uselib_store=feature, libpath='/usr/local/lib', fragment=frag, msg='Is /usr/local/lib required?')
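# Configuration may also be pointed at a specific Qt installation through the
# environment, for example (the paths are hypothetical):
#
# QT5_ROOT=/opt/qt/5.15.2 QT5_LIBDIR=/opt/qt/5.15.2/lib waf configure
#
# QT5_BIN, QT5_INCLUDES, QT5_FORCE_STATIC and QT5_XCOMPILE (or their QT6_*
# counterparts) are honoured in the same way by the functions below.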
@conf
def find_qt5_binaries(self):
"""
Detects Qt programs such as qmake, moc, uic, lrelease
"""
env = self.env
opt = Options.options
qtdir = getattr(opt, 'qtdir', '')
qtbin = getattr(opt, 'qtbin', '')
qt_ver = '6' if self.want_qt6 else '5'
paths = []
if qtdir:
qtbin = os.path.join(qtdir, 'bin')
# no qtdir given on the command line - try QT5_ROOT/QT6_ROOT from the environment to deduce the qt binary path
if not qtdir:
qtdir = self.environ.get('QT' + qt_ver + '_ROOT', '')
qtbin = self.environ.get('QT' + qt_ver + '_BIN') or os.path.join(qtdir, 'bin')
if qtbin:
paths = [qtbin]
# no qtdir, look in the path and in /usr/local/Trolltech
if not qtdir:
paths = self.environ.get('PATH', '').split(os.pathsep)
paths.extend([
'/usr/share/qt' + qt_ver + '/bin',
'/usr/local/lib/qt' + qt_ver + '/bin'])
try:
lst = Utils.listdir('/usr/local/Trolltech/')
except OSError:
pass
else:
if lst:
lst.sort()
lst.reverse()
# keep the highest version
qtdir = '/usr/local/Trolltech/%s/' % lst[0]
qtbin = os.path.join(qtdir, 'bin')
paths.append(qtbin)
# at the end, try to find qmake in the paths given
# keep the one with the highest version
cand = None
prev_ver = ['0', '0', '0']
qmake_vars = ['qmake-qt' + qt_ver, 'qmake' + qt_ver, 'qmake']
for qmk in qmake_vars:
try:
qmake = self.find_program(qmk, path_list=paths)
except self.errors.ConfigurationError:
pass
else:
try:
version = self.cmd_and_log(qmake + ['-query', 'QT_VERSION']).strip()
except self.errors.WafError:
pass
else:
if version:
new_ver = version.split('.')
if new_ver[0] == qt_ver and new_ver > prev_ver:
cand = qmake
prev_ver = new_ver
# qmake could not be found easily, rely on qtchooser
if not cand:
try:
self.find_program('qtchooser')
except self.errors.ConfigurationError:
pass
else:
cmd = self.env.QTCHOOSER + ['-qt=' + qt_ver, '-run-tool=qmake']
try:
version = self.cmd_and_log(cmd + ['-query', 'QT_VERSION'])
except self.errors.WafError:
pass
else:
cand = cmd
if cand:
self.env.QMAKE = cand
else:
self.fatal('Could not find qmake for qt' + qt_ver)
# Once qmake is found, query it for the directories in which to look for the other tools
paths = []
self.env.QT_HOST_BINS = qtbin = self.cmd_and_log(self.env.QMAKE + ['-query', 'QT_HOST_BINS']).strip()
paths.append(qtbin)
if self.want_qt6:
self.env.QT_HOST_LIBEXECS = self.cmd_and_log(self.env.QMAKE + ['-query', 'QT_HOST_LIBEXECS']).strip()
paths.append(self.env.QT_HOST_LIBEXECS)
def find_bin(lst, var):
if var in env:
return
for f in lst:
try:
ret = self.find_program(f, path_list=paths)
except self.errors.ConfigurationError:
pass
else:
env[var]=ret
break
find_bin(['uic-qt' + qt_ver, 'uic'], 'QT_UIC')
if not env.QT_UIC:
self.fatal('cannot find the uic compiler for qt' + qt_ver)
self.start_msg('Checking for uic version')
uicver = self.cmd_and_log(env.QT_UIC + ['-version'], output=Context.BOTH)
uicver = ''.join(uicver).strip()
uicver = uicver.replace('Qt User Interface Compiler ','').replace('User Interface Compiler for Qt', '')
self.end_msg(uicver)
if uicver.find(' 3.') != -1 or uicver.find(' 4.') != -1 or (self.want_qt6 and uicver.find(' 5.') != -1):
if self.want_qt6:
self.fatal('this uic compiler is for qt3 or qt4 or qt5, add uic for qt6 to your path')
else:
self.fatal('this uic compiler is for qt3 or qt4, add uic for qt5 to your path')
find_bin(['moc-qt' + qt_ver, 'moc'], 'QT_MOC')
find_bin(['rcc-qt' + qt_ver, 'rcc'], 'QT_RCC')
find_bin(['lrelease-qt' + qt_ver, 'lrelease'], 'QT_LRELEASE')
find_bin(['lupdate-qt' + qt_ver, 'lupdate'], 'QT_LUPDATE')
env.UIC_ST = '%s -o %s'
env.MOC_ST = '-o'
env.ui_PATTERN = 'ui_%s.h'
env.QT_LRELEASE_FLAGS = ['-silent']
env.MOCCPPPATH_ST = '-I%s'
env.MOCDEFINES_ST = '-D%s'
@conf
def set_qt5_libs_dir(self):
env = self.env
qt_ver = '6' if self.want_qt6 else '5'
qtlibs = getattr(Options.options, 'qtlibs', None) or self.environ.get('QT' + qt_ver + '_LIBDIR')
if not qtlibs:
try:
qtlibs = self.cmd_and_log(env.QMAKE + ['-query', 'QT_INSTALL_LIBS']).strip()
except Errors.WafError:
qtdir = self.cmd_and_log(env.QMAKE + ['-query', 'QT_INSTALL_PREFIX']).strip()
qtlibs = os.path.join(qtdir, 'lib')
self.msg('Found the Qt' + qt_ver + ' library path', qtlibs)
env.QTLIBS = qtlibs
@conf
def find_single_qt5_lib(self, name, uselib, qtlibs, qtincludes, force_static):
env = self.env
qt_ver = '6' if self.want_qt6 else '5'
if force_static:
exts = ('.a', '.lib')
prefix = 'STLIB'
else:
exts = ('.so', '.lib')
prefix = 'LIB'
def lib_names():
for x in exts:
for k in ('', qt_ver) if Utils.is_win32 else ['']:
for p in ('lib', ''):
yield (p, name, k, x)
for tup in lib_names():
k = ''.join(tup)
path = os.path.join(qtlibs, k)
if os.path.exists(path):
if env.DEST_OS == 'win32':
libval = ''.join(tup[:-1])
else:
libval = name
env.append_unique(prefix + '_' + uselib, libval)
env.append_unique('%sPATH_%s' % (prefix, uselib), qtlibs)
env.append_unique('INCLUDES_' + uselib, qtincludes)
env.append_unique('INCLUDES_' + uselib, os.path.join(qtincludes, name.replace('Qt' + qt_ver, 'Qt')))
return k
return False
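# For example, a successful find_single_qt5_lib('Qt5Core', 'QT5CORE', ...) on a
# Unix system with a shared libQt5Core.so would roughly yield (a sketch):
#
# env.LIB_QT5CORE      = ['Qt5Core']
# env.LIBPATH_QT5CORE  = [qtlibs]
# env.INCLUDES_QT5CORE = [qtincludes, qtincludes + '/QtCore']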
@conf
def find_qt5_libraries(self):
env = self.env
qt_ver = '6' if self.want_qt6 else '5'
qtincludes = self.environ.get('QT' + qt_ver + '_INCLUDES') or self.cmd_and_log(env.QMAKE + ['-query', 'QT_INSTALL_HEADERS']).strip()
force_static = self.environ.get('QT' + qt_ver + '_FORCE_STATIC')
try:
if self.environ.get('QT' + qt_ver + '_XCOMPILE'):
self.fatal('QT' + qt_ver + '_XCOMPILE disables pkg-config detection')
self.check_cfg(atleast_pkgconfig_version='0.1')
except self.errors.ConfigurationError:
for i in self.qt_vars:
uselib = i.upper()
if Utils.unversioned_sys_platform() == 'darwin':
# Since at least Qt 4.7.3, each library is located in a separate framework directory
fwk = i.replace('Qt' + qt_ver, 'Qt')
frameworkName = fwk + '.framework'
qtDynamicLib = os.path.join(env.QTLIBS, frameworkName, fwk)
if os.path.exists(qtDynamicLib):
env.append_unique('FRAMEWORK_' + uselib, fwk)
env.append_unique('FRAMEWORKPATH_' + uselib, env.QTLIBS)
self.msg('Checking for %s' % i, qtDynamicLib, 'GREEN')
else:
self.msg('Checking for %s' % i, False, 'YELLOW')
env.append_unique('INCLUDES_' + uselib, os.path.join(env.QTLIBS, frameworkName, 'Headers'))
else:
ret = self.find_single_qt5_lib(i, uselib, env.QTLIBS, qtincludes, force_static)
if not force_static and not ret:
ret = self.find_single_qt5_lib(i, uselib, env.QTLIBS, qtincludes, True)
self.msg('Checking for %s' % i, ret, 'GREEN' if ret else 'YELLOW')
else:
path = '%s:%s:%s/pkgconfig:/usr/lib/qt%s/lib/pkgconfig:/opt/qt%s/lib/pkgconfig:/usr/lib/qt%s/lib:/opt/qt%s/lib' % (
self.environ.get('PKG_CONFIG_PATH', ''), env.QTLIBS, env.QTLIBS, qt_ver, qt_ver, qt_ver, qt_ver)
for i in self.qt_vars:
self.check_cfg(package=i, args='--cflags --libs', mandatory=False, force_static=force_static, pkg_config_path=path)
@conf
def simplify_qt5_libs(self):
"""
Since library paths make for very long command-lines, and since everything
depends on qtcore, remove the qtcore paths from qtgui, qtwidgets, etc.
"""
env = self.env
def process_lib(vars_, coreval):
for d in vars_:
var = d.upper()
if var == 'QTCORE':
continue
value = env['LIBPATH_'+var]
if value:
core = env[coreval]
accu = []
for lib in value:
if lib in core:
continue
accu.append(lib)
env['LIBPATH_'+var] = accu
process_lib(self.qt_vars, 'LIBPATH_QTCORE')
@conf
def add_qt5_rpath(self):
"""
Defines rpath entries for Qt libraries
"""
env = self.env
if getattr(Options.options, 'want_rpath', False):
def process_rpath(vars_, coreval):
for d in vars_:
var = d.upper()
value = env['LIBPATH_' + var]
if value:
core = env[coreval]
accu = []
for lib in value:
if var != 'QTCORE':
if lib in core:
continue
accu.append('-Wl,--rpath='+lib)
env['RPATH_' + var] = accu
process_rpath(self.qt_vars, 'LIBPATH_QTCORE')
@conf
def set_qt5_libs_to_check(self):
qt_ver = '6' if self.want_qt6 else '5'
if not self.qt_vars:
dirlst = Utils.listdir(self.env.QTLIBS)
pat = self.env.cxxshlib_PATTERN
if Utils.is_win32:
pat = pat.replace('.dll', '.lib')
if self.environ.get('QT' + qt_ver + '_FORCE_STATIC'):
pat = self.env.cxxstlib_PATTERN
if Utils.unversioned_sys_platform() == 'darwin':
pat = r"%s\.framework"
# Match only Qt5/Qt libraries when configuring for Qt5, and only Qt6/Qt
# libraries when configuring for Qt6. This speeds up configuration, reduces
# its chattiness, and should also prevent possible misconfiguration.
if self.want_qt6:
re_qt = re.compile(pat % 'Qt6?(?!\\d)(?P<name>\\w+)' + '$')
else:
re_qt = re.compile(pat % 'Qt5?(?!\\d)(?P<name>\\w+)' + '$')
for x in sorted(dirlst):
m = re_qt.match(x)
if m:
self.qt_vars.append("Qt%s%s" % (qt_ver, m.group('name')))
if not self.qt_vars:
self.fatal('cannot find any Qt%s library (%r)' % (qt_ver, self.env.QTLIBS))
qtextralibs = getattr(Options.options, 'qtextralibs', None)
if qtextralibs:
self.qt_vars.extend(qtextralibs.split(','))
@conf
def set_qt5_defines(self):
qt_ver = '6' if self.want_qt6 else '5'
if sys.platform != 'win32':
return
for x in self.qt_vars:
y=x.replace('Qt' + qt_ver, 'Qt')[2:].upper()
self.env.append_unique('DEFINES_%s' % x.upper(), 'QT_%s_LIB' % y)
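# For example, on win32 with 'Qt5Widgets' among the libraries to check, the
# loop above adds (a sketch): env.DEFINES_QT5WIDGETS = ['QT_WIDGETS_LIB']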
def options(opt):
"""
Command-line options
"""
opt.add_option('--want-rpath', action='store_true', default=False, dest='want_rpath', help='enable the rpath for qt libraries')
for i in 'qtdir qtbin qtlibs'.split():
opt.add_option('--'+i, type='string', default='', dest=i)
opt.add_option('--translate', action='store_true', help='collect translation strings', dest='trans_qt5', default=False)
opt.add_option('--qtextralibs', type='string', default='', dest='qtextralibs', help='additional qt libraries on the system to add to default ones, comma separated')
| 26,948 | Python | .py | 747 | 32.850067 | 165 | 0.682209 | projecthamster/hamster | 1,069 | 250 | 128 | GPL-3.0 | 9/5/2024, 5:12:46 PM (Europe/Amsterdam) |
20,694 | clang.py | projecthamster_hamster/waflib/Tools/clang.py | #!/usr/bin/env python
# encoding: utf-8
# Krzysztof Kosiński 2014
"""
Detect the Clang C compiler
"""
from waflib.Tools import ccroot, ar, gcc
from waflib.Configure import conf
@conf
def find_clang(conf):
"""
Finds the program clang and executes it to ensure it really is clang
"""
cc = conf.find_program('clang', var='CC')
conf.get_cc_version(cc, clang=True)
conf.env.CC_NAME = 'clang'
def configure(conf):
conf.find_clang()
conf.find_program(['llvm-ar', 'ar'], var='AR')
conf.find_ar()
conf.gcc_common_flags()
conf.gcc_modifier_platform()
conf.cc_load_tools()
conf.cc_add_flags()
conf.link_add_flags()
| 623 | Python | .py | 25 | 23.2 | 69 | 0.728956 | projecthamster/hamster | 1,069 | 250 | 128 | GPL-3.0 | 9/5/2024, 5:12:46 PM (Europe/Amsterdam) |
20,695 | dmd.py | projecthamster_hamster/waflib/Tools/dmd.py | #!/usr/bin/env python
# encoding: utf-8
# Carlos Rafael Giani, 2007 (dv)
# Thomas Nagy, 2008-2018 (ita)
import sys
from waflib.Tools import ar, d
from waflib.Configure import conf
@conf
def find_dmd(conf):
"""
Finds the program *dmd*, *dmd2*, or *ldc* and sets the variable *D*
"""
conf.find_program(['dmd', 'dmd2', 'ldc'], var='D')
# make sure that we're dealing with dmd1, dmd2, or ldc(1)
out = conf.cmd_and_log(conf.env.D + ['--help'])
if out.find("D Compiler v") == -1:
out = conf.cmd_and_log(conf.env.D + ['-version'])
if out.find("based on DMD v1.") == -1:
conf.fatal("detected compiler is not dmd/ldc")
@conf
def common_flags_ldc(conf):
"""
Sets the D flags required by *ldc*
"""
v = conf.env
v.DFLAGS = ['-d-version=Posix']
v.LINKFLAGS = []
v.DFLAGS_dshlib = ['-relocation-model=pic']
@conf
def common_flags_dmd(conf):
"""
Sets the flags required by *dmd* or *dmd2*
"""
v = conf.env
v.D_SRC_F = ['-c']
v.D_TGT_F = '-of%s'
v.D_LINKER = v.D
v.DLNK_SRC_F = ''
v.DLNK_TGT_F = '-of%s'
v.DINC_ST = '-I%s'
v.DSHLIB_MARKER = v.DSTLIB_MARKER = ''
v.DSTLIB_ST = v.DSHLIB_ST = '-L-l%s'
v.DSTLIBPATH_ST = v.DLIBPATH_ST = '-L-L%s'
v.LINKFLAGS_dprogram= ['-quiet']
v.DFLAGS_dshlib = ['-fPIC']
v.LINKFLAGS_dshlib = ['-L-shared']
v.DHEADER_ext = '.di'
v.DFLAGS_d_with_header = ['-H', '-Hf']
v.D_HDR_F = '%s'
def configure(conf):
"""
Configuration for *dmd*, *dmd2*, and *ldc*
"""
conf.find_dmd()
if sys.platform == 'win32':
out = conf.cmd_and_log(conf.env.D + ['--help'])
if out.find('D Compiler v2.') > -1:
conf.fatal('dmd2 on Windows is not supported, use gdc or ldc2 instead')
conf.load('ar')
conf.load('d')
conf.common_flags_dmd()
conf.d_platform_flags()
if str(conf.env.D).find('ldc') > -1:
conf.common_flags_ldc()
| 1,880 | Python | .py | 64 | 27.203125 | 74 | 0.606667 | projecthamster/hamster | 1,069 | 250 | 128 | GPL-3.0 | 9/5/2024, 5:12:46 PM (Europe/Amsterdam) |
20,696 | python.py | projecthamster_hamster/waflib/Tools/python.py | #!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2007-2015 (ita)
# Gustavo Carneiro (gjc), 2007
"""
Support for Python, detect the headers and libraries and provide
*use* variables to link C/C++ programs against them::
def options(opt):
opt.load('compiler_c python')
def configure(conf):
conf.load('compiler_c python')
conf.check_python_version((2,4,2))
conf.check_python_headers()
def build(bld):
bld.program(features='pyembed', source='a.c', target='myprog')
bld.shlib(features='pyext', source='b.c', target='mylib')
"""
import os, sys
from waflib import Errors, Logs, Node, Options, Task, Utils
from waflib.TaskGen import extension, before_method, after_method, feature
from waflib.Configure import conf
FRAG = '''
#include <Python.h>
#ifdef __cplusplus
extern "C" {
#endif
void Py_Initialize(void);
void Py_Finalize(void);
#ifdef __cplusplus
}
#endif
int main(int argc, char **argv)
{
(void)argc; (void)argv;
Py_Initialize();
Py_Finalize();
return 0;
}
'''
"""
Piece of C/C++ code used in :py:func:`waflib.Tools.python.check_python_headers`
"""
INST = '''
import sys, py_compile
py_compile.compile(sys.argv[1], sys.argv[2], sys.argv[3], True)
'''
"""
Piece of Python code used in :py:class:`waflib.Tools.python.pyo` and :py:class:`waflib.Tools.python.pyc` for byte-compiling python files
"""
DISTUTILS_IMP = """
try:
from distutils.sysconfig import get_config_var, get_python_lib
except ImportError:
from sysconfig import get_config_var, get_path
def get_python_lib(*k, **kw):
keyword='platlib' if kw.get('plat_specific') else 'purelib'
if 'prefix' in kw:
return get_path(keyword, vars={'installed_base': kw['prefix'], 'platbase': kw['prefix']})
return get_path(keyword)
""".splitlines()
@before_method('process_source')
@feature('py')
def feature_py(self):
"""
Create tasks to byte-compile .py files and install them, if requested
"""
self.install_path = getattr(self, 'install_path', '${PYTHONDIR}')
install_from = getattr(self, 'install_from', None)
if install_from and not isinstance(install_from, Node.Node):
install_from = self.path.find_dir(install_from)
self.install_from = install_from
ver = self.env.PYTHON_VERSION
if not ver:
self.bld.fatal('Installing python files requires PYTHON_VERSION, try conf.check_python_version')
if int(ver.replace('.', '')) > 31:
self.install_32 = True
@extension('.py')
def process_py(self, node):
"""
Add signature of .py file, so it will be byte-compiled when necessary
"""
assert(hasattr(self, 'install_path')), 'add features="py" for target "%s" in "%s/wscript".' % (self.target, self.path.nice_path())
self.install_from = getattr(self, 'install_from', None)
relative_trick = getattr(self, 'relative_trick', True)
if self.install_from:
assert isinstance(self.install_from, Node.Node), \
'add features="py" for target "%s" in "%s/wscript" (%s).' % (self.target, self.path.nice_path(), type(self.install_from))
# where to install the python file
if self.install_path:
if self.install_from:
self.add_install_files(install_to=self.install_path, install_from=node, cwd=self.install_from, relative_trick=relative_trick)
else:
self.add_install_files(install_to=self.install_path, install_from=node, relative_trick=relative_trick)
lst = []
if self.env.PYC:
lst.append('pyc')
if self.env.PYO:
lst.append('pyo')
if self.install_path:
if self.install_from:
target_dir = node.path_from(self.install_from) if relative_trick else node.name
pyd = Utils.subst_vars("%s/%s" % (self.install_path, target_dir), self.env)
else:
target_dir = node.path_from(self.path) if relative_trick else node.name
pyd = Utils.subst_vars("%s/%s" % (self.install_path, target_dir), self.env)
else:
pyd = node.abspath()
for ext in lst:
if self.env.PYTAG and not self.env.NOPYCACHE:
# __pycache__ installation for python 3.2 - PEP 3147
name = node.name[:-3]
pyobj = node.parent.get_bld().make_node('__pycache__').make_node("%s.%s.%s" % (name, self.env.PYTAG, ext))
pyobj.parent.mkdir()
else:
pyobj = node.change_ext(".%s" % ext)
tsk = self.create_task(ext, node, pyobj)
tsk.pyd = pyd
if self.install_path:
self.add_install_files(install_to=os.path.dirname(pyd), install_from=pyobj, cwd=node.parent.get_bld(), relative_trick=relative_trick)
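# A usage sketch for the 'py' feature above (the paths are hypothetical):
#
# def build(bld):
# 	bld(features='py', source=bld.path.ant_glob('src/**/*.py'), install_from='src')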
class pyc(Task.Task):
"""
Byte-compiling python files
"""
color = 'PINK'
def __str__(self):
node = self.outputs[0]
return node.path_from(node.ctx.launch_node())
def run(self):
cmd = [Utils.subst_vars('${PYTHON}', self.env), '-c', INST, self.inputs[0].abspath(), self.outputs[0].abspath(), self.pyd]
ret = self.generator.bld.exec_command(cmd)
return ret
class pyo(Task.Task):
"""
Byte-compiling python files
"""
color = 'PINK'
def __str__(self):
node = self.outputs[0]
return node.path_from(node.ctx.launch_node())
def run(self):
cmd = [Utils.subst_vars('${PYTHON}', self.env), Utils.subst_vars('${PYFLAGS_OPT}', self.env), '-c', INST, self.inputs[0].abspath(), self.outputs[0].abspath(), self.pyd]
ret = self.generator.bld.exec_command(cmd)
return ret
@feature('pyext')
@before_method('propagate_uselib_vars', 'apply_link')
@after_method('apply_bundle')
def init_pyext(self):
"""
Change the values of *cshlib_PATTERN* and *cxxshlib_PATTERN* to remove the
*lib* prefix from library names.
"""
self.uselib = self.to_list(getattr(self, 'uselib', []))
if not 'PYEXT' in self.uselib:
self.uselib.append('PYEXT')
# override shlib_PATTERN set by the osx module
self.env.cshlib_PATTERN = self.env.cxxshlib_PATTERN = self.env.macbundle_PATTERN = self.env.pyext_PATTERN
self.env.fcshlib_PATTERN = self.env.dshlib_PATTERN = self.env.pyext_PATTERN
try:
if not self.install_path:
return
except AttributeError:
self.install_path = '${PYTHONARCHDIR}'
@feature('pyext')
@before_method('apply_link', 'apply_bundle')
def set_bundle(self):
"""Mac-specific pyext extension that enables bundles from c_osx.py"""
if Utils.unversioned_sys_platform() == 'darwin':
self.mac_bundle = True
@before_method('propagate_uselib_vars')
@feature('pyembed')
def init_pyembed(self):
"""
Add the PYEMBED variable.
"""
self.uselib = self.to_list(getattr(self, 'uselib', []))
if not 'PYEMBED' in self.uselib:
self.uselib.append('PYEMBED')
@conf
def get_python_variables(self, variables, imports=None):
"""
Spawn a new python process to dump configuration variables
:param variables: variables to print
:type variables: list of string
:param imports: one import by element
:type imports: list of string
:return: the variable values
:rtype: list of string
"""
if not imports:
try:
imports = self.python_imports
except AttributeError:
imports = DISTUTILS_IMP
program = list(imports) # copy
program.append('')
for v in variables:
program.append("print(repr(%s))" % v)
os_env = dict(os.environ)
try:
del os_env['MACOSX_DEPLOYMENT_TARGET'] # see comments in the OSX tool
except KeyError:
pass
try:
out = self.cmd_and_log(self.env.PYTHON + ['-c', '\n'.join(program)], env=os_env)
except Errors.WafError:
self.fatal('Could not run %r' % self.env.PYTHON)
self.to_log(out)
return_values = []
for s in out.splitlines():
s = s.strip()
if not s:
continue
if s == 'None':
return_values.append(None)
elif (s[0] == "'" and s[-1] == "'") or (s[0] == '"' and s[-1] == '"'):
return_values.append(eval(s))
elif s[0].isdigit():
return_values.append(int(s))
else: break
return return_values
@conf
def test_pyembed(self, mode, msg='Testing pyembed configuration'):
self.check(header_name='Python.h', define_name='HAVE_PYEMBED', msg=msg,
fragment=FRAG, errmsg='Could not build a python embedded interpreter',
features='%s %sprogram pyembed' % (mode, mode))
@conf
def test_pyext(self, mode, msg='Testing pyext configuration'):
self.check(header_name='Python.h', define_name='HAVE_PYEXT', msg=msg,
fragment=FRAG, errmsg='Could not build python extensions',
features='%s %sshlib pyext' % (mode, mode))
@conf
def python_cross_compile(self, features='pyembed pyext'):
"""
For cross-compilation purposes, it is possible to bypass the normal detection and set the flags that you want:
PYTHON_VERSION='3.4' PYTAG='cpython34' pyext_PATTERN="%s.so" PYTHON_LDFLAGS='-lpthread -ldl' waf configure
The following variables are used:
PYTHON_VERSION required
PYTAG required
PYTHON_LDFLAGS required
pyext_PATTERN required
PYTHON_PYEXT_LDFLAGS
PYTHON_PYEMBED_LDFLAGS
"""
features = Utils.to_list(features)
if not ('PYTHON_LDFLAGS' in self.environ or 'PYTHON_PYEXT_LDFLAGS' in self.environ or 'PYTHON_PYEMBED_LDFLAGS' in self.environ):
return False
for x in 'PYTHON_VERSION PYTAG pyext_PATTERN'.split():
if not x in self.environ:
self.fatal('Please set %s in the os environment' % x)
else:
self.env[x] = self.environ[x]
xx = self.env.CXX_NAME and 'cxx' or 'c'
if 'pyext' in features:
flags = self.environ.get('PYTHON_PYEXT_LDFLAGS', self.environ.get('PYTHON_LDFLAGS'))
if flags is None:
self.fatal('No flags provided through PYTHON_PYEXT_LDFLAGS as required')
else:
self.parse_flags(flags, 'PYEXT')
self.test_pyext(xx)
if 'pyembed' in features:
flags = self.environ.get('PYTHON_PYEMBED_LDFLAGS', self.environ.get('PYTHON_LDFLAGS'))
if flags is None:
self.fatal('No flags provided through PYTHON_PYEMBED_LDFLAGS as required')
else:
self.parse_flags(flags, 'PYEMBED')
self.test_pyembed(xx)
return True
@conf
def check_python_headers(conf, features='pyembed pyext'):
"""
Check for headers and libraries necessary to extend or embed python.
It may use the module *distutils*, or *sysconfig* on newer Python versions.
On success the environment variables xxx_PYEXT and xxx_PYEMBED are added:
* PYEXT: for compiling python extensions
* PYEMBED: for embedding a python interpreter
"""
features = Utils.to_list(features)
assert ('pyembed' in features) or ('pyext' in features), "check_python_headers features must include 'pyembed' and/or 'pyext'"
env = conf.env
if not env.CC_NAME and not env.CXX_NAME:
conf.fatal('load a compiler first (gcc, g++, ..)')
# bypass all the code below for cross-compilation
if conf.python_cross_compile(features):
return
if not env.PYTHON_VERSION:
conf.check_python_version()
pybin = env.PYTHON
if not pybin:
conf.fatal('Could not find the python executable')
# so we actually do all this for compatibility reasons and for obtaining pyext_PATTERN below
v = 'prefix SO EXT_SUFFIX LDFLAGS LIBDIR LIBPL INCLUDEPY Py_ENABLE_SHARED MACOSX_DEPLOYMENT_TARGET LDSHARED CFLAGS LDVERSION'.split()
try:
lst = conf.get_python_variables(["get_config_var('%s') or ''" % x for x in v])
except RuntimeError:
conf.fatal("Python development headers not found (-v for details).")
vals = ['%s = %r' % (x, y) for (x, y) in zip(v, lst)]
conf.to_log("Configuration returned from %r:\n%s\n" % (pybin, '\n'.join(vals)))
dct = dict(zip(v, lst))
x = 'MACOSX_DEPLOYMENT_TARGET'
if dct[x]:
env[x] = conf.environ[x] = str(dct[x])
env.pyext_PATTERN = '%s' + (dct['EXT_SUFFIX'] or dct['SO']) # SO is deprecated in 3.5 and removed in 3.11
# Try to get pythonX.Y-config
num = '.'.join(env.PYTHON_VERSION.split('.')[:2])
conf.find_program([''.join(pybin) + '-config', 'python%s-config' % num, 'python-config-%s' % num, 'python%sm-config' % num], var='PYTHON_CONFIG', msg="python-config", mandatory=False)
if env.PYTHON_CONFIG:
# check python-config output only once
if conf.env.HAVE_PYTHON_H:
return
# python2.6-config requires 3 runs
all_flags = [['--cflags', '--libs', '--ldflags']]
if sys.hexversion < 0x2070000:
all_flags = [[k] for k in all_flags[0]]
xx = env.CXX_NAME and 'cxx' or 'c'
if 'pyembed' in features:
for flags in all_flags:
# Python 3.8 has different flags for pyembed, needs --embed
embedflags = flags + ['--embed']
try:
conf.check_cfg(msg='Asking python-config for pyembed %r flags' % ' '.join(embedflags), path=env.PYTHON_CONFIG, package='', uselib_store='PYEMBED', args=embedflags)
except conf.errors.ConfigurationError:
# However Python < 3.8 doesn't accept --embed, so we need a fallback
conf.check_cfg(msg='Asking python-config for pyembed %r flags' % ' '.join(flags), path=env.PYTHON_CONFIG, package='', uselib_store='PYEMBED', args=flags)
try:
conf.test_pyembed(xx)
except conf.errors.ConfigurationError:
# python bug 7352
if dct['Py_ENABLE_SHARED'] and dct['LIBDIR']:
env.append_unique('LIBPATH_PYEMBED', [dct['LIBDIR']])
conf.test_pyembed(xx)
else:
raise
if 'pyext' in features:
for flags in all_flags:
conf.check_cfg(msg='Asking python-config for pyext %r flags' % ' '.join(flags), path=env.PYTHON_CONFIG, package='', uselib_store='PYEXT', args=flags)
try:
conf.test_pyext(xx)
except conf.errors.ConfigurationError:
# python bug 7352
if dct['Py_ENABLE_SHARED'] and dct['LIBDIR']:
env.append_unique('LIBPATH_PYEXT', [dct['LIBDIR']])
conf.test_pyext(xx)
else:
raise
conf.define('HAVE_PYTHON_H', 1)
return
# No python-config, do something else on windows systems
all_flags = dct['LDFLAGS'] + ' ' + dct['CFLAGS']
conf.parse_flags(all_flags, 'PYEMBED')
all_flags = dct['LDFLAGS'] + ' ' + dct['LDSHARED'] + ' ' + dct['CFLAGS']
conf.parse_flags(all_flags, 'PYEXT')
result = None
if not dct["LDVERSION"]:
dct["LDVERSION"] = env.PYTHON_VERSION
# further simplification will be complicated
for name in ('python' + dct['LDVERSION'], 'python' + env.PYTHON_VERSION + 'm', 'python' + env.PYTHON_VERSION.replace('.', '')):
# LIBPATH_PYEMBED is already set; see if it works.
if not result and env.LIBPATH_PYEMBED:
path = env.LIBPATH_PYEMBED
conf.to_log("\n\n# Trying default LIBPATH_PYEMBED: %r\n" % path)
result = conf.check(lib=name, uselib='PYEMBED', libpath=path, mandatory=False, msg='Checking for library %s in LIBPATH_PYEMBED' % name)
if not result and dct['LIBDIR']:
path = [dct['LIBDIR']]
conf.to_log("\n\n# try again with -L$python_LIBDIR: %r\n" % path)
result = conf.check(lib=name, uselib='PYEMBED', libpath=path, mandatory=False, msg='Checking for library %s in LIBDIR' % name)
if not result and dct['LIBPL']:
path = [dct['LIBPL']]
conf.to_log("\n\n# try again with -L$python_LIBPL (some systems don't install the python library in $prefix/lib)\n")
result = conf.check(lib=name, uselib='PYEMBED', libpath=path, mandatory=False, msg='Checking for library %s in python_LIBPL' % name)
if not result:
path = [os.path.join(dct['prefix'], "libs")]
conf.to_log("\n\n# try again with -L$prefix/libs, and pythonXY rather than pythonX.Y (win32)\n")
result = conf.check(lib=name, uselib='PYEMBED', libpath=path, mandatory=False, msg='Checking for library %s in $prefix/libs' % name)
if not result:
path = [os.path.normpath(os.path.join(dct['INCLUDEPY'], '..', 'libs'))]
conf.to_log("\n\n# try again with -L$INCLUDEPY/../libs, and pythonXY rather than pythonX.Y (win32)\n")
result = conf.check(lib=name, uselib='PYEMBED', libpath=path, mandatory=False, msg='Checking for library %s in $INCLUDEPY/../libs' % name)
if result:
break # do not forget to set LIBPATH_PYEMBED
if result:
env.LIBPATH_PYEMBED = path
env.append_value('LIB_PYEMBED', [name])
else:
conf.to_log("\n\n### LIB NOT FOUND\n")
# under certain conditions, python extensions must link to
# python libraries, not just python embedding programs.
if Utils.is_win32 or dct['Py_ENABLE_SHARED']:
env.LIBPATH_PYEXT = env.LIBPATH_PYEMBED
env.LIB_PYEXT = env.LIB_PYEMBED
conf.to_log("Found an include path for Python extensions: %r\n" % (dct['INCLUDEPY'],))
env.INCLUDES_PYEXT = [dct['INCLUDEPY']]
env.INCLUDES_PYEMBED = [dct['INCLUDEPY']]
# Code using the Python API needs to be compiled with -fno-strict-aliasing
if env.CC_NAME == 'gcc':
env.append_unique('CFLAGS_PYEMBED', ['-fno-strict-aliasing'])
env.append_unique('CFLAGS_PYEXT', ['-fno-strict-aliasing'])
if env.CXX_NAME == 'gcc':
env.append_unique('CXXFLAGS_PYEMBED', ['-fno-strict-aliasing'])
env.append_unique('CXXFLAGS_PYEXT', ['-fno-strict-aliasing'])
if env.CC_NAME == "msvc":
try:
from distutils.msvccompiler import MSVCCompiler
except ImportError:
# From https://github.com/python/cpython/blob/main/Lib/distutils/msvccompiler.py
env.append_value('CFLAGS_PYEXT', [ '/nologo', '/Ox', '/MD', '/W3', '/GX', '/DNDEBUG'])
env.append_value('CXXFLAGS_PYEXT', [ '/nologo', '/Ox', '/MD', '/W3', '/GX', '/DNDEBUG'])
env.append_value('LINKFLAGS_PYEXT', ['/DLL', '/nologo', '/INCREMENTAL:NO'])
else:
dist_compiler = MSVCCompiler()
dist_compiler.initialize()
env.append_value('CFLAGS_PYEXT', dist_compiler.compile_options)
env.append_value('CXXFLAGS_PYEXT', dist_compiler.compile_options)
env.append_value('LINKFLAGS_PYEXT', dist_compiler.ldflags_shared)
conf.check(header_name='Python.h', define_name='HAVE_PYTHON_H', uselib='PYEMBED', fragment=FRAG, errmsg='Could not build a Python embedded interpreter')
@conf
def check_python_version(conf, minver=None):
"""
Check if the python interpreter is found matching a given minimum version.
minver should be a tuple, eg. to check for python >= 2.4.2 pass (2,4,2) as minver.
If successful, PYTHON_VERSION is defined as 'MAJOR.MINOR' (eg. '2.4')
of the actual python version found, and PYTHONDIR and PYTHONARCHDIR
are defined, pointing to the site-packages directories appropriate for
this python version, where modules/packages/extensions should be
installed.
:param minver: minimum version
:type minver: tuple of int
"""
assert minver is None or isinstance(minver, tuple)
pybin = conf.env.PYTHON
if not pybin:
conf.fatal('could not find the python executable')
# Get python version string
cmd = pybin + ['-c', 'import sys\nfor x in sys.version_info: print(str(x))']
Logs.debug('python: Running python command %r', cmd)
lines = conf.cmd_and_log(cmd).split()
assert len(lines) == 5, "found %r lines, expected 5: %r" % (len(lines), lines)
pyver_tuple = (int(lines[0]), int(lines[1]), int(lines[2]), lines[3], int(lines[4]))
# Compare python version with the minimum required
result = (minver is None) or (pyver_tuple >= minver)
if result:
# define useful environment variables
pyver = '.'.join([str(x) for x in pyver_tuple[:2]])
conf.env.PYTHON_VERSION = pyver
if 'PYTHONDIR' in conf.env:
# Check if --pythondir was specified
pydir = conf.env.PYTHONDIR
elif 'PYTHONDIR' in conf.environ:
# Check environment for PYTHONDIR
pydir = conf.environ['PYTHONDIR']
else:
# Finally, try to guess
if Utils.is_win32:
(pydir,) = conf.get_python_variables(["get_python_lib(standard_lib=0) or ''"])
else:
(pydir,) = conf.get_python_variables(["get_python_lib(standard_lib=0, prefix=%r) or ''" % conf.env.PREFIX])
if 'PYTHONARCHDIR' in conf.env:
# Check if --pythonarchdir was specified
pyarchdir = conf.env.PYTHONARCHDIR
elif 'PYTHONARCHDIR' in conf.environ:
# Check environment for PYTHONARCHDIR
pyarchdir = conf.environ['PYTHONARCHDIR']
else:
# Finally, try to guess
(pyarchdir, ) = conf.get_python_variables(["get_python_lib(plat_specific=1, standard_lib=0, prefix=%r) or ''" % conf.env.PREFIX])
if not pyarchdir:
pyarchdir = pydir
if hasattr(conf, 'define'): # conf.define is added by the C tool, so may not exist
conf.define('PYTHONDIR', pydir)
conf.define('PYTHONARCHDIR', pyarchdir)
conf.env.PYTHONDIR = pydir
conf.env.PYTHONARCHDIR = pyarchdir
# Feedback
pyver_full = '.'.join(map(str, pyver_tuple[:3]))
if minver is None:
conf.msg('Checking for python version', pyver_full)
else:
minver_str = '.'.join(map(str, minver))
conf.msg('Checking for python version >= %s' % (minver_str,), pyver_full, color=result and 'GREEN' or 'YELLOW')
if not result:
conf.fatal('The python version is too old, expecting %r' % (minver,))
PYTHON_MODULE_TEMPLATE = '''
import %s as current_module
version = getattr(current_module, '__version__', None)
if version is not None:
print(str(version))
else:
print('unknown version')
'''
@conf
def check_python_module(conf, module_name, condition=''):
"""
Check if the selected python interpreter can import the given python module::
def configure(conf):
conf.check_python_module('pygccxml')
conf.check_python_module('re', condition="ver > num(2, 0, 4) and ver <= num(3, 0, 0)")
:param module_name: module
:type module_name: string
"""
msg = "Checking for python module %r" % module_name
if condition:
msg = '%s (%s)' % (msg, condition)
conf.start_msg(msg)
try:
ret = conf.cmd_and_log(conf.env.PYTHON + ['-c', PYTHON_MODULE_TEMPLATE % module_name])
except Errors.WafError:
conf.end_msg(False)
conf.fatal('Could not find the python module %r' % module_name)
ret = ret.strip()
if condition:
conf.end_msg(ret)
if ret == 'unknown version':
conf.fatal('Could not check the %s version' % module_name)
def num(*k):
if isinstance(k[0], int):
return Utils.loose_version('.'.join([str(x) for x in k]))
else:
return Utils.loose_version(k[0])
d = {'num': num, 'ver': Utils.loose_version(ret)}
ev = eval(condition, {}, d)
if not ev:
conf.fatal('The %s version does not satisfy the requirements' % module_name)
else:
if ret == 'unknown version':
conf.end_msg(True)
else:
conf.end_msg(ret)
def configure(conf):
"""
Detect the python interpreter
"""
v = conf.env
if getattr(Options.options, 'pythondir', None):
v.PYTHONDIR = Options.options.pythondir
if getattr(Options.options, 'pythonarchdir', None):
v.PYTHONARCHDIR = Options.options.pythonarchdir
if getattr(Options.options, 'nopycache', None):
v.NOPYCACHE=Options.options.nopycache
if not v.PYTHON:
v.PYTHON = [getattr(Options.options, 'python', None) or sys.executable]
v.PYTHON = Utils.to_list(v.PYTHON)
conf.find_program('python', var='PYTHON')
v.PYFLAGS = ''
v.PYFLAGS_OPT = '-O'
v.PYC = getattr(Options.options, 'pyc', 1)
v.PYO = getattr(Options.options, 'pyo', 1)
try:
v.PYTAG = conf.cmd_and_log(conf.env.PYTHON + ['-c', "import sys\ntry:\n print(sys.implementation.cache_tag)\nexcept AttributeError:\n import imp\n print(imp.get_tag())\n"]).strip()
except Errors.WafError:
pass
def options(opt):
"""
Add python-specific options
"""
pyopt=opt.add_option_group("Python Options")
pyopt.add_option('--nopyc', dest = 'pyc', action='store_false', default=1,
help = 'Do not install bytecode compiled .pyc files (configuration) [Default:install]')
pyopt.add_option('--nopyo', dest='pyo', action='store_false', default=1,
help='Do not install optimised compiled .pyo files (configuration) [Default:install]')
pyopt.add_option('--nopycache',dest='nopycache', action='store_true',
help='Do not use __pycache__ directory to install objects [Default:auto]')
pyopt.add_option('--python', dest="python",
help='python binary to be used [Default: %s]' % sys.executable)
pyopt.add_option('--pythondir', dest='pythondir',
help='Installation path for python modules (py, platform-independent .py and .pyc files)')
pyopt.add_option('--pythonarchdir', dest='pythonarchdir',
help='Installation path for python extension (pyext, platform-dependent .so or .dylib files)')
| 23,264 | Python | .py | 566 | 38.303887 | 184 | 0.70978 | projecthamster/hamster | 1,069 | 250 | 128 | GPL-3.0 | 9/5/2024, 5:12:46 PM (Europe/Amsterdam) |
20,697 | c.py | projecthamster_hamster/waflib/Tools/c.py | #!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006-2018 (ita)
"Base for c programs/libraries"
from waflib import TaskGen, Task
from waflib.Tools import c_preproc
from waflib.Tools.ccroot import link_task, stlink_task
@TaskGen.extension('.c')
def c_hook(self, node):
"Binds the c file extensions create :py:class:`waflib.Tools.c.c` instances"
if not self.env.CC and self.env.CXX:
return self.create_compiled_task('cxx', node)
return self.create_compiled_task('c', node)
class c(Task.Task):
"Compiles C files into object files"
run_str = '${CC} ${ARCH_ST:ARCH} ${CFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${CC_SRC_F}${SRC} ${CC_TGT_F}${TGT[0].abspath()} ${CPPFLAGS}'
vars = ['CCDEPS'] # unused variable to depend on, just in case
ext_in = ['.h'] # set the build order easily by using ext_out=['.h']
scan = c_preproc.scan
class cprogram(link_task):
"Links object files into c programs"
run_str = '${LINK_CC} ${LINKFLAGS} ${CCLNK_SRC_F}${SRC} ${CCLNK_TGT_F}${TGT[0].abspath()} ${RPATH_ST:RPATH} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${FRAMEWORK_ST:FRAMEWORK} ${ARCH_ST:ARCH} ${STLIB_MARKER} ${STLIBPATH_ST:STLIBPATH} ${STLIB_ST:STLIB} ${SHLIB_MARKER} ${LIBPATH_ST:LIBPATH} ${LIB_ST:LIB} ${LDFLAGS}'
ext_out = ['.bin']
vars = ['LINKDEPS']
inst_to = '${BINDIR}'
class cshlib(cprogram):
"Links object files into c shared libraries"
inst_to = '${LIBDIR}'
class cstlib(stlink_task):
"Links object files into a c static libraries"
pass # do not remove
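# A usage sketch (the names are hypothetical); bld.program, bld.shlib and
# bld.stlib map to the cprogram, cshlib and cstlib link tasks above:
#
# def build(bld):
# 	bld.stlib(source='util.c', target='myutil')
# 	bld.shlib(source='lib.c', target='mylib')
# 	bld.program(source='main.c', target='app', use='myutil mylib')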
| 1,529 | Python | .py | 31 | 47.451613 | 308 | 0.708054 | projecthamster/hamster | 1,069 | 250 | 128 | GPL-3.0 | 9/5/2024, 5:12:46 PM (Europe/Amsterdam) |
20,698 | glib2.py | projecthamster_hamster/waflib/Tools/glib2.py | #! /usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006-2018 (ita)
"""
Support for GLib2 tools:
* marshal
* enums
* gsettings
* gresource
"""
import os
import functools
from waflib import Context, Task, Utils, Options, Errors, Logs
from waflib.TaskGen import taskgen_method, before_method, feature, extension
from waflib.Configure import conf
################## marshal files
@taskgen_method
def add_marshal_file(self, filename, prefix):
"""
Adds a file to the list of marshal files to process. Stores them in the attribute *marshal_list*.
:param filename: xml file to compile
:type filename: string
:param prefix: marshal prefix (--prefix=prefix)
:type prefix: string
"""
if not hasattr(self, 'marshal_list'):
self.marshal_list = []
self.meths.append('process_marshal')
self.marshal_list.append((filename, prefix))
@before_method('process_source')
def process_marshal(self):
"""
Processes the marshal files stored in the attribute *marshal_list* to create :py:class:`waflib.Tools.glib2.glib_genmarshal` instances.
Adds the c file created to the list of source to process.
"""
for f, prefix in getattr(self, 'marshal_list', []):
node = self.path.find_resource(f)
if not node:
raise Errors.WafError('file not found %r' % f)
h_node = node.change_ext('.h')
c_node = node.change_ext('.c')
task = self.create_task('glib_genmarshal', node, [h_node, c_node])
task.env.GLIB_GENMARSHAL_PREFIX = prefix
self.source = self.to_nodes(getattr(self, 'source', []))
self.source.append(c_node)
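# A usage sketch (the file, prefix and uselib names are hypothetical):
#
# def build(bld):
# 	tg = bld.program(source='main.c', target='app', use='GLIB GOBJECT')
# 	tg.add_marshal_file('marshal.list', 'my_marshal')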
class glib_genmarshal(Task.Task):
vars = ['GLIB_GENMARSHAL_PREFIX', 'GLIB_GENMARSHAL']
color = 'BLUE'
ext_out = ['.h']
def run(self):
bld = self.generator.bld
get = self.env.get_flat
cmd1 = "%s %s --prefix=%s --header > %s" % (
get('GLIB_GENMARSHAL'),
self.inputs[0].srcpath(),
get('GLIB_GENMARSHAL_PREFIX'),
self.outputs[0].abspath()
)
ret = bld.exec_command(cmd1)
if ret:
return ret
#print self.outputs[1].abspath()
c = '''#include "%s"\n''' % self.outputs[0].name
self.outputs[1].write(c)
cmd2 = "%s %s --prefix=%s --body >> %s" % (
get('GLIB_GENMARSHAL'),
self.inputs[0].srcpath(),
get('GLIB_GENMARSHAL_PREFIX'),
self.outputs[1].abspath()
)
return bld.exec_command(cmd2)
########################## glib-mkenums
@taskgen_method
def add_enums_from_template(self, source='', target='', template='', comments=''):
"""
Adds a file to the list of enum files to process. Stores them in the attribute *enums_list*.
:param source: enum file to process
:type source: string
:param target: target file
:type target: string
:param template: template file
:type template: string
:param comments: comments
:type comments: string
"""
if not hasattr(self, 'enums_list'):
self.enums_list = []
self.meths.append('process_enums')
self.enums_list.append({'source': source,
'target': target,
'template': template,
'file-head': '',
'file-prod': '',
'file-tail': '',
'enum-prod': '',
'value-head': '',
'value-prod': '',
'value-tail': '',
'comments': comments})
@taskgen_method
def add_enums(self, source='', target='',
file_head='', file_prod='', file_tail='', enum_prod='',
value_head='', value_prod='', value_tail='', comments=''):
"""
Adds a file to the list of enum files to process. Stores them in the attribute *enums_list*.
:param source: enum file to process
:type source: string
:param target: target file
:type target: string
:param file_head: unused
:param file_prod: unused
:param file_tail: unused
:param enum_prod: unused
:param value_head: unused
:param value_prod: unused
:param value_tail: unused
:param comments: comments
:type comments: string
"""
if not hasattr(self, 'enums_list'):
self.enums_list = []
self.meths.append('process_enums')
self.enums_list.append({'source': source,
'template': '',
'target': target,
'file-head': file_head,
'file-prod': file_prod,
'file-tail': file_tail,
'enum-prod': enum_prod,
'value-head': value_head,
'value-prod': value_prod,
'value-tail': value_tail,
'comments': comments})
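# A usage sketch (the file names are hypothetical); either the template form or
# the explicit production strings may be used:
#
# def build(bld):
# 	tg = bld.shlib(source='lib.c', target='mylib', use='GLIB GOBJECT')
# 	tg.add_enums_from_template(source='enums.h', target='enums.c', template='enums.c.in')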
@before_method('process_source')
def process_enums(self):
"""
Processes the enum files stored in the attribute *enum_list* to create :py:class:`waflib.Tools.glib2.glib_mkenums` instances.
"""
for enum in getattr(self, 'enums_list', []):
task = self.create_task('glib_mkenums')
env = task.env
inputs = []
# process the source
source_list = self.to_list(enum['source'])
if not source_list:
raise Errors.WafError('missing source ' + str(enum))
source_list = [self.path.find_resource(k) for k in source_list]
inputs += source_list
env.GLIB_MKENUMS_SOURCE = [k.abspath() for k in source_list]
# find the target
if not enum['target']:
raise Errors.WafError('missing target ' + str(enum))
tgt_node = self.path.find_or_declare(enum['target'])
if tgt_node.name.endswith('.c'):
self.source.append(tgt_node)
env.GLIB_MKENUMS_TARGET = tgt_node.abspath()
options = []
if enum['template']: # template, if provided
template_node = self.path.find_resource(enum['template'])
options.append('--template %s' % (template_node.abspath()))
inputs.append(template_node)
params = {'file-head' : '--fhead',
'file-prod' : '--fprod',
'file-tail' : '--ftail',
'enum-prod' : '--eprod',
'value-head' : '--vhead',
'value-prod' : '--vprod',
'value-tail' : '--vtail',
'comments': '--comments'}
for param, option in params.items():
if enum[param]:
options.append('%s %r' % (option, enum[param]))
env.GLIB_MKENUMS_OPTIONS = ' '.join(options)
# update the task instance
task.set_inputs(inputs)
task.set_outputs(tgt_node)
class glib_mkenums(Task.Task):
"""
Processes enum files
"""
run_str = '${GLIB_MKENUMS} ${GLIB_MKENUMS_OPTIONS} ${GLIB_MKENUMS_SOURCE} > ${GLIB_MKENUMS_TARGET}'
color = 'PINK'
ext_out = ['.h']
######################################### gsettings
@taskgen_method
def add_settings_schemas(self, filename_list):
"""
Adds settings files to process to *settings_schema_files*
:param filename_list: files
:type filename_list: list of string
"""
if not hasattr(self, 'settings_schema_files'):
self.settings_schema_files = []
if not isinstance(filename_list, list):
filename_list = [filename_list]
self.settings_schema_files.extend(filename_list)
@taskgen_method
def add_settings_enums(self, namespace, filename_list):
"""
Called only once by the task generator to set the enums namespace.
:param namespace: namespace
:type namespace: string
:param filename_list: enum files to process
:type filename_list: file list
"""
if hasattr(self, 'settings_enum_namespace'):
raise Errors.WafError("Tried to add gsettings enums to %r more than once" % self.name)
self.settings_enum_namespace = namespace
if not isinstance(filename_list, list):
filename_list = [filename_list]
self.settings_enum_files = filename_list
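# A usage sketch (the schema, namespace and enum file names are hypothetical):
#
# def build(bld):
# 	tg = bld(features='glib2')
# 	tg.add_settings_schemas(['org.example.app.gschema.xml'])
# 	tg.add_settings_enums('org.example.app', ['src/app-enums.h'])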
@feature('glib2')
def process_settings(self):
"""
Processes the schema files in *settings_schema_files* to create :py:class:`waflib.Tools.glib2.glib_mkenums` instances. The
same files are validated through :py:class:`waflib.Tools.glib2.glib_validate_schema` tasks.
"""
enums_tgt_node = []
install_files = []
settings_schema_files = getattr(self, 'settings_schema_files', [])
if settings_schema_files and not self.env.GLIB_COMPILE_SCHEMAS:
raise Errors.WafError ("Unable to process GSettings schemas - glib-compile-schemas was not found during configure")
# 1. process gsettings_enum_files (generate .enums.xml)
#
if hasattr(self, 'settings_enum_files'):
enums_task = self.create_task('glib_mkenums')
source_list = self.settings_enum_files
source_list = [self.path.find_resource(k) for k in source_list]
enums_task.set_inputs(source_list)
enums_task.env.GLIB_MKENUMS_SOURCE = [k.abspath() for k in source_list]
target = self.settings_enum_namespace + '.enums.xml'
tgt_node = self.path.find_or_declare(target)
enums_task.set_outputs(tgt_node)
enums_task.env.GLIB_MKENUMS_TARGET = tgt_node.abspath()
enums_tgt_node = [tgt_node]
install_files.append(tgt_node)
options = '--comments "<!-- @comment@ -->" --fhead "<schemalist>" --vhead " <@type@ id=\\"%s.@EnumName@\\">" --vprod " <value nick=\\"@valuenick@\\" value=\\"@valuenum@\\"/>" --vtail " </@type@>" --ftail "</schemalist>" ' % (self.settings_enum_namespace)
enums_task.env.GLIB_MKENUMS_OPTIONS = options
# 2. process gsettings_schema_files (validate .gschema.xml files)
#
for schema in settings_schema_files:
schema_task = self.create_task ('glib_validate_schema')
schema_node = self.path.find_resource(schema)
if not schema_node:
raise Errors.WafError("Cannot find the schema file %r" % schema)
install_files.append(schema_node)
source_list = enums_tgt_node + [schema_node]
schema_task.set_inputs (source_list)
schema_task.env.GLIB_COMPILE_SCHEMAS_OPTIONS = [("--schema-file=" + k.abspath()) for k in source_list]
target_node = schema_node.change_ext('.xml.valid')
schema_task.set_outputs (target_node)
schema_task.env.GLIB_VALIDATE_SCHEMA_OUTPUT = target_node.abspath()
# 3. schemas install task
def compile_schemas_callback(bld):
if not bld.is_install:
return
compile_schemas = Utils.to_list(bld.env.GLIB_COMPILE_SCHEMAS)
destdir = Options.options.destdir
paths = bld._compile_schemas_registered
if destdir:
paths = (os.path.join(destdir, path.lstrip(os.sep)) for path in paths)
for path in paths:
Logs.pprint('YELLOW', 'Updating GSettings schema cache %r' % path)
if self.bld.exec_command(compile_schemas + [path]):
Logs.warn('Could not update GSettings schema cache %r' % path)
if self.bld.is_install:
schemadir = self.env.GSETTINGSSCHEMADIR
if not schemadir:
raise Errors.WafError ('GSETTINGSSCHEMADIR not defined (should have been set up automatically during configure)')
if install_files:
self.add_install_files(install_to=schemadir, install_from=install_files)
registered_schemas = getattr(self.bld, '_compile_schemas_registered', None)
if not registered_schemas:
registered_schemas = self.bld._compile_schemas_registered = set()
self.bld.add_post_fun(compile_schemas_callback)
registered_schemas.add(schemadir)
class glib_validate_schema(Task.Task):
"""
Validates schema files
"""
run_str = 'rm -f ${GLIB_VALIDATE_SCHEMA_OUTPUT} && ${GLIB_COMPILE_SCHEMAS} --dry-run ${GLIB_COMPILE_SCHEMAS_OPTIONS} && touch ${GLIB_VALIDATE_SCHEMA_OUTPUT}'
color = 'PINK'
################## gresource
@extension('.gresource.xml')
def process_gresource_source(self, node):
"""
Creates tasks that turn ``.gresource.xml`` files into C code
"""
if not self.env.GLIB_COMPILE_RESOURCES:
raise Errors.WafError ("Unable to process GResource file - glib-compile-resources was not found during configure")
if 'gresource' in self.features:
return
h_node = node.change_ext('_xml.h')
c_node = node.change_ext('_xml.c')
self.create_task('glib_gresource_source', node, [h_node, c_node])
self.source.append(c_node)
@feature('gresource')
def process_gresource_bundle(self):
"""
Creates tasks to build ``.gresource`` files from ``.gresource.xml`` files::
def build(bld):
bld(
features='gresource',
source=['resources1.gresource.xml', 'resources2.gresource.xml'],
install_path='${LIBDIR}/${PACKAGE}'
)
:param source: XML files to process
:type source: list of string
:param install_path: installation path
:type install_path: string
"""
for i in self.to_list(self.source):
node = self.path.find_resource(i)
task = self.create_task('glib_gresource_bundle', node, node.change_ext(''))
inst_to = getattr(self, 'install_path', None)
if inst_to:
self.add_install_files(install_to=inst_to, install_from=task.outputs)
class glib_gresource_base(Task.Task):
"""
Base class for gresource based tasks
"""
color = 'BLUE'
base_cmd = '${GLIB_COMPILE_RESOURCES} --sourcedir=${SRC[0].parent.srcpath()} --sourcedir=${SRC[0].bld_dir()}'
def scan(self):
"""
Scans gresource dependencies through the ``glib-compile-resources --generate-dependencies`` command
"""
bld = self.generator.bld
kw = {}
kw['cwd'] = self.get_cwd()
kw['quiet'] = Context.BOTH
cmd = Utils.subst_vars('${GLIB_COMPILE_RESOURCES} --sourcedir=%s --sourcedir=%s --generate-dependencies %s' % (
self.inputs[0].parent.srcpath(),
self.inputs[0].bld_dir(),
self.inputs[0].bldpath()
), self.env)
output = bld.cmd_and_log(cmd, **kw)
nodes = []
names = []
for dep in output.splitlines():
if dep:
node = bld.bldnode.find_node(dep)
if node:
nodes.append(node)
else:
names.append(dep)
return (nodes, names)
class glib_gresource_source(glib_gresource_base):
"""
Task to generate C source code (.h and .c files) from a gresource.xml file
"""
vars = ['GLIB_COMPILE_RESOURCES']
fun_h = Task.compile_fun_shell(glib_gresource_base.base_cmd + ' --target=${TGT[0].abspath()} --generate-header ${SRC}')
fun_c = Task.compile_fun_shell(glib_gresource_base.base_cmd + ' --target=${TGT[1].abspath()} --generate-source ${SRC}')
ext_out = ['.h']
def run(self):
return self.fun_h[0](self) or self.fun_c[0](self)
class glib_gresource_bundle(glib_gresource_base):
"""
Task to generate a .gresource binary file from a gresource.xml file
"""
run_str = glib_gresource_base.base_cmd + ' --target=${TGT} ${SRC}'
shell = True # temporary workaround for #795
@conf
def find_glib_genmarshal(conf):
conf.find_program('glib-genmarshal', var='GLIB_GENMARSHAL')
@conf
def find_glib_mkenums(conf):
if not conf.env.PERL:
conf.find_program('perl', var='PERL')
conf.find_program('glib-mkenums', interpreter='PERL', var='GLIB_MKENUMS')
@conf
def find_glib_compile_schemas(conf):
# when cross-compiling, gsettings.m4 locates the program with the following:
# pkg-config --variable glib_compile_schemas gio-2.0
conf.find_program('glib-compile-schemas', var='GLIB_COMPILE_SCHEMAS')
def getstr(varname):
return getattr(Options.options, varname, getattr(conf.env,varname, ''))
gsettingsschemadir = getstr('GSETTINGSSCHEMADIR')
if not gsettingsschemadir:
datadir = getstr('DATADIR')
if not datadir:
prefix = conf.env.PREFIX
datadir = os.path.join(prefix, 'share')
gsettingsschemadir = os.path.join(datadir, 'glib-2.0', 'schemas')
conf.env.GSETTINGSSCHEMADIR = gsettingsschemadir
@conf
def find_glib_compile_resources(conf):
conf.find_program('glib-compile-resources', var='GLIB_COMPILE_RESOURCES')
def configure(conf):
"""
Finds the following programs:
* *glib-genmarshal* and set *GLIB_GENMARSHAL*
* *glib-mkenums* and set *GLIB_MKENUMS*
* *glib-compile-schemas* and set *GLIB_COMPILE_SCHEMAS* (not mandatory)
* *glib-compile-resources* and set *GLIB_COMPILE_RESOURCES* (not mandatory)
"""
conf.find_glib_genmarshal()
conf.find_glib_mkenums()
conf.find_glib_compile_schemas(mandatory=False)
conf.find_glib_compile_resources(mandatory=False)
def options(opt):
"""
Adds the ``--gsettingsschemadir`` command-line option
"""
gr = opt.add_option_group('Installation directories')
gr.add_option('--gsettingsschemadir', help='GSettings schema location [DATADIR/glib-2.0/schemas]', default='', dest='GSETTINGSSCHEMADIR')
| 15,771 | Python | .py | 407 | 34.700246 | 261 | 0.684662 | projecthamster/hamster | 1,069 | 250 | 128 | GPL-3.0 | 9/5/2024, 5:12:46 PM (Europe/Amsterdam) |
20,699 | nasm.py | projecthamster_hamster/waflib/Tools/nasm.py | #!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2008-2018 (ita)
"""
Nasm tool (asm processing)
"""
import os
import waflib.Tools.asm # leave this
from waflib.TaskGen import feature
@feature('asm')
def apply_nasm_vars(self):
"""provided for compatibility"""
self.env.append_value('ASFLAGS', self.to_list(getattr(self, 'nasm_flags', [])))
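# A usage sketch (the flag is illustrative); the nasm_flags attribute read
# above ends up in ASFLAGS:
#
# def build(bld):
# 	bld.program(source='main.c math.asm', target='app', features='asm', nasm_flags='-f elf64')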
def configure(conf):
"""
Detect nasm/yasm and set the variable *AS*
"""
conf.find_program(['nasm', 'yasm'], var='AS')
conf.env.AS_TGT_F = ['-o']
conf.env.ASLNK_TGT_F = ['-o']
conf.load('asm')
conf.env.ASMPATH_ST = '-I%s' + os.sep
txt = conf.cmd_and_log(conf.env.AS + ['--version'])
if 'yasm' in txt.lower():
conf.env.ASM_NAME = 'yasm'
else:
conf.env.ASM_NAME = 'nasm'
| 733 | Python | .py | 27 | 25.37037 | 80 | 0.676638 | projecthamster/hamster | 1,069 | 250 | 128 | GPL-3.0 | 9/5/2024, 5:12:46 PM (Europe/Amsterdam) |