An advanced programming language branch, master, updated. 49bd05cf9e9ffe8d0b41451d93bc045465cc8e2d

felixgit

Oct 7, 2010, 1:25:52 AM
to felix-...@googlegroups.com
This is an automated email from the git hooks/post-receive script. It was
generated because a ref change was pushed to the repository containing
the project "An advanced programming language".

The branch, master has been updated
via 49bd05cf9e9ffe8d0b41451d93bc045465cc8e2d (commit)
from 8b7a4f79a2af14c2a4e0a27f157889416bc6e779 (commit)

Those revisions listed above that are new to this repository have
not appeared on any other notification email; so we list those
revisions in full, below.

- Log -----------------------------------------------------------------
commit 49bd05cf9e9ffe8d0b41451d93bc045465cc8e2d
Author: John Skaller <ska...@users.sourceforge.net>
Date: Thu Oct 7 16:25:22 2010 +1100

Get rid of the old fbuild.. not maintained anymore so useless.

diff --git a/fbuild_old/bin/fbuild b/fbuild_old/bin/fbuild
deleted file mode 100755
index 57676ab..0000000
--- a/fbuild_old/bin/fbuild
+++ /dev/null
@@ -1,489 +0,0 @@
-#!/usr/bin/env python
-###############################################################
-# FELIX MAKE SCRIPT
-###############################################################
-import os
-import sys
-import glob
-import shutil
-import imp
-
-# forward import of set
-try:
- set
-except NameError:
- from sets import Set as set
-
-sys.path.append(
- os.path.join(os.path.dirname(sys.argv[0]), '..', 'lib')
-)
-
-os.environ['PYTHONPATH'] = os.pathsep.join([
- os.path.join(os.path.dirname(sys.argv[0]), '..', 'lib'),
- os.environ.get('PYTHONPATH', ''),
-])
-
-if '' not in sys.path: sys.path.insert(0, '')
-
-import fbuild.flxbuild.flxutil
-import fbuild.flxbuild.package
-import fbuild.flxbuild.process
-
-# ------------------------------------------------
-# Load the initial config
-# ------------------------------------------------
-
-try:
- import config
-except Exception:
- import traceback
- xt,xv,tb = sys.exc_info()
- print "ERROR IN config/__init__.py"
- traceback.print_exception(xt,xv,tb)
- print "You must either"
- print "(a) edit config/__init__.py and fix the error, or,"
- print "(b) i) delete it, and,"
- print " ii) run 'python script/make_config.py'"
- print " again to reset it:"
- print " this is done automatically by 'make boot'"
- sys.exit(1)
-
-# ------------------------------------------------
-# PROCESS COMMAND LINE OPTIONS
-# ------------------------------------------------
-
-import fbuildroot_old as fbuildroot
-
-def load_options():
- from optparse import OptionParser, make_option
-
- parser = OptionParser()
- parser.add_options([
- make_option('-v', '--verbose',
- action='count',
- default=1,
- help='print out extra debugging info'),
- make_option('-q', '--quiet',
- action='count',
- default=0,
- help='do not print out extra debugging info'),
- make_option('--force',
- action='store_true',
- default=False,
- help='force running a process'),
- make_option('--phase', dest='selected_phases', metavar='PHASE',
- action='append',
- default=[],
- help='run only this phase of the build'),
- make_option('--pkg', dest='selected_packages', metavar='PKG',
- action='append',
- default=[],
- help='build only these packages'),
- make_option('--model', dest='selected_models', metavar='MODEL',
- action='append',
- default=[],
- help='build only the phases which can run on this platform'),
- make_option('--lparchive',
- metavar='PATH',
- help='use an alternative lpsrc directory'),
- ])
-
- try:
- extra_options = fbuildroot.options
- except AttributeError:
- pass
- else:
- option_group = parser.add_option_group('project options')
- option_group.add_options(extra_options)
-
- options, args = parser.parse_args()
-
- ####
-
- # temporary hack to set up the shell
- config.HOST_OCAML.verbose = options.verbose
- config.HOST_CC.verbose = options.verbose
- config.HOST_CXX.verbose = options.verbose
- config.TARGET_CC.verbose = options.verbose
- config.TARGET_CXX.verbose = options.verbose
-
- config.HOST_OCAML.quiet = options.quiet
- config.HOST_CC.quiet = options.quiet
- config.HOST_CXX.quiet = options.quiet
- config.TARGET_CC.quiet = options.quiet
- config.TARGET_CXX.quiet = options.quiet
-
- ####
-
- ####
-
- # if the user didn't say which phases to run
- # then run all the phases that use the host model
- # On building on Linux for MinGW, the build model
- # is linux, and the host model is mingw, the target
- # is win32. We'd be running all MinGW phases.
- # note a phase is named for the targetted product
- # NOT the machine that generates it. Thus host
- # phase mingw means 'compile on Linux for MingW'
-
- if not options.selected_phases and not options.selected_models:
- options.selected_models = [config.host_model]
-
- for model in options.selected_models:
- if model == config.build_model and 'build' not in options.selected_phases:
- options.selected_phases.append('build')
-
- if model == config.host_model and 'host' not in options.selected_phases:
- options.selected_phases.append('host')
-
- if model == config.target_model and 'target' not in options.selected_phases:
- options.selected_phases.append('target')
-
- if model == config.run_model and 'run' not in options.selected_phases:
- options.selected_phases.append('run')
-
- ####
-
- if options.lparchive:
- config.FLX_LPARCHIVE = options.lparchive
-
- return options, args
-
-
-def initial_extraction(options, args):
- if "extract" not in args and "force_extract" not in args:
- return
-
- paks = glob.glob(os.path.join(config.FLX_LPARCHIVE, "lpsrc", "*.pak"))
-
- for p in paks:
- print "EXTRACTING", p, "from", config.FLX_LPARCHIVE
- if "force_extract" in args:
- cmd = (config.ISCR, '--force', '--break-on-error', p)
- else:
- cmd = (config.ISCR, '--break-on-error', p)
-
- fbuild.flxbuild.flxutil.execute(cmd,
- verbose=options.verbose,
- quiet=options.quiet,
- )
-
- # this is a hack
- fbuild.flxbuild.flxutil.mkdirs('speed')
- shutil.copy(os.path.join('misc', 'interscript.css'), 'speed')
-
- print "EXTRACTION COMPLETE"
- sys.exit(0)
-
-# ----------------------------------------------------------------------------
-
-def load_packages():
- pkgs = []
- unsorted_pkgs = []
-
- for i in glob.glob(os.path.join(config.src_dir, "cpkgs", "target", "*.py")):
- shutil.copy(i, os.path.join('cpkgs', 'target'))
-
- for i in \
- glob.glob(os.path.join("spkgs", "*.py")) + \
- glob.glob(os.path.join(config.src_dir, "spkgs", "*.py")):
- pkg = os.path.basename(os.path.splitext(i)[0])
- if pkg == '__init__':
- continue
-
- f, filename, description = imp.find_module(pkg, [os.path.dirname(i)])
- try:
- module = imp.load_module(pkg, f, filename, description)
- finally:
- if f:
- f.close()
-
- unsorted_pkgs.append(pkg)
- d = {}
- for k, v in module.__dict__.iteritems():
- if k[0] != '_':
- d[k] = v
- fbuild.flxbuild.package.pkgd[pkg] = d
-
- def addpkg_r(pkg):
- if pkg not in pkgs:
- if pkg not in fbuild.flxbuild.package.pkgd.keys():
- print "Unknown package", pkg
- print "Please extract!"
- sys.exit(1)
- else:
- reqs = fbuild.flxbuild.package.pkgd[pkg].get('pkg_requires',[])
- fbuild.flxbuild.package.pkgreqs[pkg]=reqs
- for i in reqs:
- addpkg_r(i)
- pkgs.append(pkg)
-
- for pkg in unsorted_pkgs:
- addpkg_r(pkg)
-
- # invert the requirements in order to determine the dependencies
- fbuild.flxbuild.package.pkgdeps.update(
- fbuild.flxbuild.flxutil.invert(fbuild.flxbuild.package.pkgreqs))
-
- return pkgs
-
-# -------------------------------------------------
-# LOAD PROCESSES
-
-
-def load_process(process_path, options, args):
- p = process_path.split('.')
- module, name = '.'.join(p[:-1]), p[-1]
- process = getattr(__import__(module, {}, {}, ['']), name)
-
- if not callable(process):
- raise ImportError
-
- if type(process) is type:
- process = process(
- verbose=options.verbose,
- quiet=options.quiet,
- optimise=options.optimise,
- debug=options.debug,
- force=options.force or 'force' in args,
- options=args,
- )
-
- return process
-
-
-def load_processes(options, args):
- for phase, processes in fbuildroot.fbuild_processes.iteritems():
- for process_path in processes:
- process = load_process(process_path, options, args)
- fbuild.flxbuild.process.processes[process_path] = process
-
-# -----------------------------------------------------------------------------
-
-def calculate_packages(options, phase, packages):
- if options.selected_packages:
- selected_pkgs = options.selected_packages
- else:
- selected_pkgs = []
- max_len = max([len(s) for s in fbuild.flxbuild.package.pkgd])
-
- for pkg, pkgdict in fbuild.flxbuild.package.pkgd.iteritems():
- latest_src_time = fbuild.flxbuild.process.get_latest_src_time(pkgdict)
-
- stamp = os.path.join('pkg-stamps', '%s.%s' % (pkg, phase))
- latest_build_time = fbuild.flxbuild.flxutil.filetime(stamp)
-
- if latest_build_time == 0:
- print "Pak %s (virtual)" % pkg.ljust(max_len), 'UNBUILT'
- selected_pkgs.append(pkg)
- elif latest_build_time < latest_src_time:
- print "Pak %s changed: %s" % (
- pkg.ljust(max_len),
- fbuild.flxbuild.flxutil.fmtime(latest_src_time),
- )
- selected_pkgs.append(pkg)
-
- return [pkg for pkg in packages if pkg in selected_pkgs]
-
-####
-
-def run_phase(options, phase, packages):
- print '-' * 78
- print '***** MAKING PHASE', phase
- pkgsummary = {}
- for pkg in calculate_packages(options, phase, packages):
- pkgdict = fbuild.flxbuild.package.pkgd[pkg]
- print "*****", phase, "MAKING PACKAGE", pkg, "************"
- stamp = os.path.join("pkg-stamps", "%s.%s" % (pkg, phase))
- if os.path.exists(stamp):
- os.remove(stamp)
-
- for process in fbuildroot.fbuild_processes.get(phase, []):
- #print "*****", phase, "RUNNING PROCESS", process, "************"
- result = fbuild.flxbuild.process.processes[process](pkg, pkgdict, pkgsummary)
- if not result and result is not None:
- break
- else:
- fbuild.flxbuild.process.enstamp(stamp, options.quiet)
-
- if pkgsummary:
- pkgsummary = pkgsummary.items()
- pkgsummary.sort()
- print phase, "PHASE SUMMARY"
- for (package, process), summary in pkgsummary:
- if summary != "no tests":
- print ' ', process.ljust(25), ':', package.ljust(20), ':', summary
-
-# -----------------------------------------------------------------------------
-
-def print_failures():
- # print out all the failed processes
- failure_log = fbuild.flxbuild.flxutil.Tee()
-
- failure_log.write("----- PROCESS STATUS -----\n")
- failed = 0
- fatal = 0
- for name, process in fbuild.flxbuild.process.processes.iteritems():
- try:
- failures = process.failures
- successes = process.successes
- dummy = process.dummy
- used = process.used
- except AttributeError:
- continue
-
- nfail = len(failures)
- npass = len(successes)
- failed = failed or nfail > 0
- fatal = fatal or (nfail > 0 and not dummy)
-
- if nfail:
- print >> failure_log, 'PROCESS **FAILED**:', name, nfail, '/', npass + nfail, 'failures'
- if dummy:
- print >> failure_log, " [Expected failure, doesn't break build]"
- else:
- if used:
- print >> failure_log, 'PROCESS PASSED :', name, npass, 'passed'
- else:
- pass
- #print >> failure_log, 'PROCESS UNUSED :', name
-
- if not failed:
- return False
-
- print '^^^^^^^^^^ FAILURES by group ^^^^^^^^^^^^^'
-
- kats = {}
- for name, process in fbuild.flxbuild.process.processes.iteritems():
- try:
- failures = process.failures
- except AttributeError:
- continue
-
- if not failures:
- continue
-
- print '- %s: %s' % (len(failures), name)
-
- lookup = {}
-
- for pkg, file, failure in failures:
- files = lookup.get(pkg, [])
- files.append(file)
- lookup[pkg] = files
-
- lookup = lookup.items()
- lookup.sort()
-
- for pkg, files in lookup:
- files.sort()
-
- print ' - %s: %s' % (len(files), pkg)
- lastfile = ""
- for file in files:
- if file == lastfile: continue
- lastfile = file
- # grab the first line of the file to see if it
- # has a #line number to print out as well, so we can
- # localize the error to the interscript file location
-
- emitted = 0
- f = open(file)
- eat = 1
- while eat == 1:
- try:
- line = f.readline()
- except:
- eat = 0
- if emitted == 0: fstring = file
- else: fstring = ""
- if emitted == 0 and line[0:5] == '#line':
- comment = line
- elif line [0:8] == "//Check ":
- kat = line[8:].strip()
- l = kats.get(kat,[])
- l.append(file)
- kats[kat]=l
- comment = kat
- else:
- if emitted == 0:
- comment = line
- else:
- comment = ""
- eat = 0
- if line != "":
- print ' %-35s %s' % (fstring, comment.strip())
- emitted = 1
- if emitted == 0:
- print ' %s' % file
- f.close()
-
- print '^^^^^^^^^^ FAILURES by category ^^^^^^^^^^'
- keys = kats.keys()
- keys.sort()
- for kat in keys:
- print kat+":"
- files = kats[kat]
- for file in files:
- print " ",file
-
- f = open('errors.log', 'w')
- try:
- f.write(failure_log.getvalue())
- finally:
- f.close()
-
- return fatal
-
-# -----------------------------------------------------------------------------
-
-def main():
- # add the felix directory to the search paths
- sys.path.append(config.src_dir)
- os.environ['PYTHONPATH'] = \
- config.src_dir + os.pathsep + os.environ.get('PYTHONPATH', '')
-
- options, args = load_options()
- initial_extraction(options, args)
- packages = load_packages()
- load_processes(options, args)
-
- ####
-
- # run all the pre-processes
- for process in fbuildroot.fbuild_preprocesses:
- load_process(process, options, args).preprocess()
-
- # execute user selected processes in command line order
- for arg in args:
- # check in the build system stock process set
- # these are run independently of the selected phase
- if arg in fbuild.flxbuild.process.processes:
- fbuild.flxbuild.process.processes[arg].runme()
- else:
- # try seeing if it's a process
- try:
- process = load_process(arg, options, args)
- except ImportError:
- pass
- else:
- process.runme()
-
- else:
- # otherwise run all the processes specified by the application
- # for the selected phases, in the application specified phase order
- for phase in options.selected_phases:
- run_phase(options, phase, packages)
-
- ####
-
- if print_failures():
- print "********* BUILD FAILED ********************"
- return 1
- else:
- print "*********** RUN COMPLETE: NO UNEXPECTED FAILURES DETECTED *******"
-
- return 0
-
-if __name__ == '__main__':
- sys.exit(main())
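For illustration, here is a minimal standalone sketch of the phase-selection logic from load_options() in the script above: platform models (build, host, target, run) are mapped onto build phases, defaulting to the host model when nothing is selected on the command line. The Config class and its model values below are hypothetical stand-ins for the project's real config module, chosen to match the "building on Linux for MinGW" scenario the original comment describes.

# --- sketch: phase selection from platform models (illustrative only) ---
class Config(object):
    # Hypothetical example values: building on Linux for MinGW, targeting win32.
    build_model = 'linux'
    host_model = 'mingw'
    target_model = 'win32'
    run_model = 'win32'

def select_phases(config, selected_models, selected_phases):
    """Map platform models to build phases, mirroring the deleted load_options()."""
    # If the user named neither phases nor models, default to the host model.
    if not selected_phases and not selected_models:
        selected_models = [config.host_model]

    for model in selected_models:
        if model == config.build_model and 'build' not in selected_phases:
            selected_phases.append('build')
        if model == config.host_model and 'host' not in selected_phases:
            selected_phases.append('host')
        if model == config.target_model and 'target' not in selected_phases:
            selected_phases.append('target')
        if model == config.run_model and 'run' not in selected_phases:
            selected_phases.append('run')
    return selected_phases

print(select_phases(Config(), [], []))         # ['host']
print(select_phases(Config(), ['win32'], []))  # ['target', 'run']
# --- end sketch ---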
diff --git a/fbuild_old/bin/make_config.py b/fbuild_old/bin/make_config.py
deleted file mode 100755
index 67e3bb7..0000000
--- a/fbuild_old/bin/make_config.py
+++ /dev/null
@@ -1,600 +0,0 @@
-#!/usr/bin/env python
-
-import os
-import sys
-import time
-import glob
-from optparse import OptionParser, make_option
-import shutil
-
-sys.path.append(
- os.path.join(os.path.dirname(sys.argv[0]), '..', 'lib')
-)
-
-os.environ['PYTHONPATH'] = os.pathsep.join([
- os.path.join(os.path.dirname(sys.argv[0]), '..', 'lib'),
- os.environ.get('PYTHONPATH', ''),
-])
-
-from fbuild.flxbuild.flxutil import xqt, ExecutionError
-from fbuild.flxbuild.config_support import pr, pa
-
-import version
-
-# -----------------------------------------------------------------------------
-
-# supported platforms
-
-platforms = [
- "posix",
- "cygwin",
- "nocygwin",
- "mingw",
- "win32",
- "win64",
- "osx",
- "detect"
- "solaris",
- "bsd",
- "linux"
- ]
-
-# map other names for them, obtained from
-# various place like os.platform, os.system("mname -u"), etc
-
-archmap = {
- "irix":"posix",
- "irix64":"posix",
- "unix":"posix",
- "posix":"posix",
- "linux":"linux",
- "gnu/linux":"linux",
- "solaris":"solaris",
- "sunos":"solaris",
- "cygwin":"cygwin",
- "nocygwin":"nocygwin",
- "mingw":"mingw",
- "windows":"win32",
- "nt":"win32",
- "win32":"win32",
- "win64":"win64",
- "darwin":"osx",
- "freebsd":"bsd",
- "netbsd":"bsd",
- "openbsd":"bsd",
- "osx":"osx",
- "detect":"detect"
- }
-
-ALL_PHASES = ["build", "host", "target", "run"]
-
-# -----------------------------------------------------------------------------
-
-def check_model(m):
- try:
- m = archmap[m]
- except KeyError:
- print "Unknown model '"+m+"' please choose one of:"
- for m in platforms: print " * " + m
- sys.exit(1)
- return m
-
-def load_options():
- parser = OptionParser()
- parser.add_options([
- make_option('-v', '--verbose',
- action='count',
- default=0,
- help='print out extra debugging info'),
- make_option('-q', '--quiet',
- dest='verbose',
- action='store_const',
- const=0,
- help='do not print out extra debugging info'),
- make_option('--prefix',
- help='install into this prefixed directory',
- default=os.environ.get('PREFIX', '/usr/local')),
- make_option('-I', '--include-path',
- metavar='INCLUDE_PATH',
- dest='include_paths',
- action='append',
- help='additionally search these paths for headers'),
- make_option('-L', '--lib-path',
- metavar='LIB_PATH',
- dest='lib_paths',
- action='append',
- help='additionally search these paths for libraries'),
- make_option('--build',
- dest='build_model',
- help='specify the build model'),
- make_option('--host',
- dest='host_model',
- help='specify the host model'),
- make_option('--target',
- dest='target_model',
- help='specify the target model'),
- make_option('--run',
- dest='run_model',
- help='specify the run model'),
- make_option('--buildcc',
- metavar='CC',
- help='specify the build c compiler'),
- make_option('--hostcc',
- metavar='CC',
- help='specify the host c compiler'),
- make_option('--targetcc',
- metavar='CC',
- help='specify the target c compiler'),
- make_option('--buildcxx',
- metavar='CXX',
- help='specify the build c++ compiler'),
- make_option('--hostcxx',
- metavar='CXX',
- help='specify the host c++ compiler'),
- make_option('--targetcxx',
- metavar='CXX',
- help='specify the target c++ compiler'),
- make_option('--boot',
- dest='bootfile',
- help='add a config bootfile for additional config modification'),
- make_option('--src-dir',
- metavar='PATH',
- default=os.environ.get("SRC_DIR", os.curdir),
- help='specify the source directory'),
- make_option('--lparchive',
- metavar='PATH',
- default=os.environ.get("FLX_LPARCHIVE", os.curdir),
- help='specify the location of the interscript files'),
- make_option('--phase',
- dest='phases',
- action='append',
- default=[],
- help='specify which phases to configure'),
- ])
-
- options, args = parser.parse_args()
-
- if not options.verbose:
- options.quiet = 2
- else:
- options.quiet = 0
-
- if options.build_model:
- print "Specified build model", options.build_model
- options.build_model = check_model(options.build_model)
- else:
- # attempt to find the Felix name for the build OS
- # using uname -s, or, if that fails, Python os.name
- # if the final result isn't a name we recognize
- # set the build_model to 'detect' to indicate C level
- # testing is to be used. Note that these C tests are
- # done anyhow, and may verify, refine, or otherwise
- # munge this result .. however we need some initial
- # indication HOW to perform these tests.
-
- try:
- output = xqt('uname', '-s')
- except ExecutionError:
- try:
- options.build_model = archmap[os.name]
- except KeyError:
- print "uname -s and Python returns unknown OS type, assuming 'detect'"
- options.build_model = "detect"
- else:
- output = output[0].strip().lower()
- options.build_model = archmap[output]
-
- # attempt to find the Felix name for the build OS
- # using uname -s, or, if that fails, Python os.name
- # if the final result isn't a name we recognize
- # set the build_model to 'detect' to indicate C level
- # testing is to be used. Note that these C tests are
- # done anyhow, and may verify, refine, or otherwise
- # munge this result .. however we need some initial
- # indication HOW to perform these tests.
-
- try:
- output = xqt('uname', '-s')
- except ExecutionError:
- try:
- options.build_model = archmap[os.name]
- except KeyError:
- print "uname -s and Python returns unknown OS type, assuming 'detect'"
- options.build_model = "detect"
- else:
- output = output[0].strip().lower()
- options.build_model = archmap[output]
-
- print "Build platform: " + options.build_model
-
- if options.host_model:
- print "Specified host model", options.host_model
- options.host_model = check_model(options.host_model)
-
- if options.target_model:
- print "Specified target model", options.target_model
- options.target_model = check_model(options.target_model)
-
- if options.run_model:
- print "Specified run model", options.run_model
- options.run_model = check_model(options.run_model)
-
- for phase in options.phases:
- if phase not in ALL_PHASES:
- print "UNKNOWN PHASE", phase,"not in", ALL_PHASES
- sys.exit(1)
-
- if not options.phases:
- options.phases = ALL_PHASES
-
- if options.bootfile:
- try:
- execfile(options.bootfile)
- print "Loaded", options.bootfile
- except:
- print "Cannot execute specified bootstrap file: ", options.bootfile
- sys.exit(1)
-
- return options, args
-
-#---------------------------------------------------
-# Discover C/C++ compilers, linker, and other 'binutils'
-
-import fbuild.flxbuild.ocaml_class
-import fbuild.flxbuild.gcc_class
-import fbuild.flxbuild.gxx_class
-import fbuild.flxbuild.msvcc_class
-import fbuild.flxbuild.msvcxx_class
-
-#
-# Detect the native build model
-#
-# This model has two uses: first, to build any build time
-# tools needed to assist in generating the sources
-# and second, to aid in selecting the options to cross-compile
-# for the chosen host and target
-#
-
-def load_compiler(options, compiler, cc, input_cc, phase, model, suffix):
- CC = compiler(verbose=options.verbose, quiet=options.quiet)
-
- CC.set_options(
- COM=cc,
- include_paths=options.include_paths,
- lib_paths=options.lib_paths,
- use=phase,
- model=model,
- build=options.build_model,
- )
-
- if input_cc and (
- input_cc.options.COM == CC.options.COM and
- input_cc.options.model == CC.options.model and
- input_cc.options.build == CC.options.build):
- print 'using the', input_cc.options.use, 'CC options for the', phase, 'CC'
- CC.load_options(os.path.join('config', input_cc.options.use + suffix))
- CC.options.use = phase
- else:
- CC.check_options()
- CC.report_config()
- CC.save_options(os.path.join('config', phase + suffix))
-
- return CC
-
-
-
-def load_compilers(options, phase, model, cc, cxx,
- input_cc=None,
- input_cxx=None):
- print
- print "++++++++++++++++++++++++++++++++++++++++++++++++++++++++"
- print "Checking", phase, "MODEL", model
- print "++++++++++++++++++++++++++++++++++++++++++++++++++++++++"
- print
-
- if model in ["win32","win64"]:
- CC = fbuild.flxbuild.msvcc_class.msvcc
- CXX = fbuild.flxbuild.msvcxx_class.msvcxx
- else:
- CC = fbuild.flxbuild.gcc_class.gcc
- CXX = fbuild.flxbuild.gxx_class.gxx
-
- CC = load_compiler(options, CC, cc, input_cc, phase, model, '_cc.py')
- CXX = load_compiler(options, CXX, cxx, input_cxx, phase, model, '_cxx.py')
-
- return CC, CXX
-
-# -----------------------------------------------------------------------------
-
-def configure_build(options):
- BUILD_CC, BUILD_CXX = load_compilers(options,
- phase='build',
- model=options.build_model,
- cc=options.buildcc,
- cxx=options.buildcxx,
- )
-
- BUILD_CC.check_options()
- BUILD_CC.report_config()
- BUILD_CC.save_options("config/build_cc.py")
- options.build_model = BUILD_CC.options.model
-
- BUILD_CXX.check_options()
- BUILD_CXX.report_config()
- BUILD_CXX.save_options("config/build_cxx.py")
- options.build_model = BUILD_CXX.options.model
-
- cpkgs = glob.glob("cpkgs"+os.sep+"build"+os.sep+"*.py")
- for cpkgf in cpkgs:
- cpkg = os.path.splitext(os.path.basename(cpkgf))[0]
- print "build CONFIGURING", cpkg
- __import__('cpkgs.build.' + cpkg)
-
- return BUILD_CC, BUILD_CXX
-
-
-def configure_host(options, BUILD_CC, BUILD_CXX):
- #
- # Now create the host model: the compiler has to run
- # on the build machine, but can cross compile for
- # the host (if so, we can build but not test Felix)
- #
- # Cross compilation of the host tools may prevent any
- # testing of the tools
- #
-
- if not options.host_model:
- options.host_model = options.build_model
- print "Defaulting host model to build model:", options.host_model
-
- HOST_CC, HOST_CXX = load_compilers(options,
- phase='host',
- model=options.host_model,
- cc=options.hostcc,
- cxx=options.hostcxx,
- input_cc=BUILD_CC,
- input_cxx=BUILD_CXX,
- )
-
- HOST_OCAML = fbuild.flxbuild.ocaml_class.ocaml(verbose=options.verbose, quiet=options.quiet)
-
- camllinkopts = ""
- if HOST_CXX.options.CYGWIN:
- camllinkopts = '-ccopt "-Wl,--stack -Wl,10485760" '
-
- HOST_OCAML.autodetect(camllinkopts)
- HOST_OCAML.report_config()
- HOST_OCAML.save_options("config/ocaml_config.py")
-
- options.host_model = HOST_CXX.options.model
-
- cpkgs = glob.glob("cpkgs"+os.sep+"host"+os.sep+"*.py")
- for cpkgf in cpkgs:
- cpkg = os.path.splitext(os.path.basename(cpkgf))[0]
- print "host CONFIGURING", cpkg
- __import__('cpkgs.host.' + cpkg)
-
- return HOST_CC, HOST_CXX, HOST_OCAML
-
-
-def configure_target(options, HOST_CC, HOST_CXX):
- #
- # Now create the target model: the compiler has to run
- # on the build machine, but can cross compile for
- # the target
- #
- # cross compilation of C++ generated by Felix allows us to
- # check the generated code compiles, but not that it runs
- # [but the output is largely portable so we can still try]
- #
-
- if not options.target_model:
- options.target_model = options.host_model
- print "Defaulting target model to host model:", options.target_model
-
- TARGET_CC, TARGET_CXX = load_compilers(options,
- phase='target',
- model=options.target_model,
- cc=options.targetcc,
- cxx=options.targetcxx,
- input_cc=HOST_CC,
- input_cxx=HOST_CXX,
- )
-
- options.target_model = TARGET_CXX.options.model
-
- return TARGET_CC, TARGET_CXX
-
-
-def configure_run(options):
- if not options.run_model:
- options.run_model = options.target_model
- print "Defaulting run model to target model:", options.run_model
-
-# -----------------------------------------------------------------------------
-
-def write_config(options, CONFIG_TIME,
- BUILD_CC, BUILD_CXX,
- HOST_CC, HOST_CXX, HOST_OCAML,
- TARGET_CC, TARGET_CXX):
- try:
- print "Writing main config file"
- f = open("config"+os.sep+"__init__.py","w")
- pr(f,'import sys')
- pr(f,"if '' not in sys.path: sys.path = [''] + sys.path")
- pr(f,'#'+CONFIG_TIME)
- pr(f,"CONFIG_TIME = " + repr(CONFIG_TIME))
- pr(f,"flx_version = " + repr(version.flx_version))
- pr(f,"flx_version_major = " + repr(version.flx_version_major))
- pr(f,"godi_revision = " + repr(version.godi_revision))
- pr(f,"debian_revision = " + repr(version.debian_revision))
- if options.bootfile:
- pr(f,"try:")
- pr(f," execfile('config/config_bootstrap.py')")
- pr(f,"except: pass")
- pr(f,"import fbuild.flxbuild.gcc_class")
- pr(f,"import fbuild.flxbuild.msvcc_class")
- pr(f,"import fbuild.flxbuild.gxx_class")
- pr(f,"import fbuild.flxbuild.msvcxx_class")
- pr(f,"import fbuild.flxbuild.ocaml_class")
- pr(f,"from fbuild.flxbuild.config_support import *")
-
- pr(f,"")
- pr(f,"#User configurable section")
- pr(f,"SUPPORT_DYNAMIC_LOADING = " + repr(TARGET_CXX.options.SUPPORT_DYNAMIC_LOADING))
- pr(f,"SUPPORT_STATIC_LINKAGE = 1")
- if TARGET_CXX.options.SUPPORT_DYNAMIC_LOADING:
- pr(f,"DEFAULT_LINK_MODEL = 'dynamic'")
- else:
- pr(f,"DEFAULT_LINK_MODEL = 'static'")
- pr(f,"build_model = " + repr(options.build_model))
- pr(f,"host_model = " + repr(options.host_model))
- pr(f,"target_model = " + repr(options.target_model))
- pr(f,"run_model = " + repr(options.run_model))
-
- # target model switches
- pr(f,"CYGWIN = " + repr(TARGET_CXX.options.CYGWIN))
- pr(f,"MACOSX = " + repr(TARGET_CXX.options.MACOSX))
- pr(f,"WIN32 = " + repr(TARGET_CXX.options.WIN32))
- pr(f,"WIN64 = " + repr(TARGET_CXX.options.WIN64))
- pr(f,"POSIX = " + repr(TARGET_CXX.options.POSIX))
- pr(f,"SOLARIS = " + repr(TARGET_CXX.options.SOLARIS))
- pr(f,"BSD = " + repr(TARGET_CXX.options.BSD))
- pr(f,"LINUX = " + repr(TARGET_CXX.options.LINUX))
- pr(f,"PREFIX = " + repr(options.prefix))
- pr(f,"src_dir = " + repr(options.src_dir))
- pr(f,"FLX_LPARCHIVE = " + repr(options.lparchive))
- pr(f,"FLX_RTL_DIR = " + repr(os.path.join('lib', 'rtl')))
- pr(f,"FLX_HOST_CONFIG_DIR = " + repr(os.path.join('config', 'host')))
- pr(f,"FLX_TARGET_CONFIG_DIR = " + repr(os.path.join('config', 'target')))
- pr(f,"")
-
- pr(f,"HOST_OCAML = fbuild.flxbuild.ocaml_class.ocaml()")
- pr(f,"HOST_OCAML.load_options("+repr('config'+os.sep+'ocaml_config.py')+")")
-
- cc = HOST_CC.__class__.__name__
- pr(f,"HOST_CC = fbuild.flxbuild."+cc+"_class."+cc+"()")
- pr(f,"HOST_CC.load_options("+repr('config'+os.sep+'host_cc.py')+")")
- cc = TARGET_CC.__class__.__name__
- pr(f,"TARGET_CC = fbuild.flxbuild."+cc+"_class."+cc+"()")
- pr(f,"TARGET_CC.load_options("+repr('config'+os.sep+'target_cc.py')+")")
-
- cxx = HOST_CXX.__class__.__name__
- pr(f,"HOST_CXX = fbuild.flxbuild."+cxx+"_class."+cxx+"()")
- pr(f,"HOST_CXX.load_options("+repr('config'+os.sep+'host_cxx.py')+")")
- cxx = TARGET_CXX.__class__.__name__
- pr(f,"TARGET_CXX = fbuild.flxbuild."+cxx+"_class."+cxx+"()")
- pr(f,"TARGET_CXX.load_options("+repr('config'+os.sep+'target_cxx.py')+")")
- pr(f,"")
-
- if options.target_model in ["win32","win64"]:
- HAVE_MSVC = 1
- HAVE_GNU = 0
- DIFF = 'FC /L /W'
- else:
- HAVE_MSVC = 0
- HAVE_GNU = 1
-
- #DIFF = "diff -a -b " # build system is Unix Python
- # RF - trying out args that work on solaris (-a = not cool)
- # could use that sys type stuff here?
- DIFF = 'diff -b'
-
- pr(f,"HAVE_GNU = " + repr(HAVE_GNU))
- pr(f,"FLXCC_CPP='cpp '")
- pr(f,"HAVE_MSVC = " + repr(HAVE_MSVC))
- pr(f,"DIFF = " + repr(DIFF))
-
- ISCR = sys.executable + ' ' + \
- os.path.join(options.src_dir, 'interscript', 'bin', 'iscr.py') + \
- ' --cache-prefix=lpsrc-cache'
- pr(f,"ISCR = " + repr(ISCR))
-
- # ---------------------------------------------------
-
- # RF: noone seems to be using the results of this
- # JS: Not yet: policy is to test it out anyhow, in case needed
- #
- # uname -s: kernel name "linux" on linux
- # uname -n: network node name "rosella" on JS box
- # uname -r: kernel-release "2.6.12-10-amd64-k8" on JS box
- # uname -v: kernel-version " #1 Thu Dec 22 11:12:06 UTC 2005" on JS box
- # uname -m: machine hardware name: "x86_64" on JS box
- # uname -o: operating system: "GNU/Linux" on JS box
- # uname -p: OSX only? on osx reports broad cpu type (e.g. powerpc)
- # not sure what it reports on intel macs.
- # machine command reports very specific cpu type, e.g. ppc7450, ppc7400
-
- try:
- output = xqt('uname', '-m')
- except ExecutionError:
- ARCH = "unknown"
- else:
- ARCH = output[0].strip().lower()
- pr(f,"ARCH = " + repr(ARCH))
-
- try:
- if options.build_model == 'osx':
- output = xqt('uname -p')
- else:
- output = xqt('uname -o')
- except ExecutionError:
- OS = 'unknown'
- else:
- OS = output[0].strip().lower()
-
- pr(f,"OS = " + repr(OS))
-
- pr(f,"")
- pr(f,"# HACK to get all the target variables into global namespace")
- f.close()
- if options.bootfile:
- print "Copying bootfile :", options.bootfile
- shutil.copy(options.bootfile, os.path.join('config', 'config_bootstrap.py'))
- except EnvironmentError:
- print "Unable to create config"+os.sep+"__init__.py"
- sys.exit(1)
-
- print "Created config"+os.sep+"__init__.py"
- print "Edit this file to set your preferences"
- print "This file will not be clobbered by the Felix build process"
-
- cpkgs = glob.glob("cpkgs"+os.sep+"target"+os.sep+"*.py")
- for cpkgf in cpkgs:
- cpkg = os.path.splitext(os.path.basename(cpkgf))[0]
- print "target CONFIGURING", cpkg
- __import__('cpkgs.target.' + cpkg)
-
-# -----------------------------------------------------------------------------
-
-def main():
- time_stamp_format = "%Y/%m/%d %H:%M:%S UTC"
- config_time = time.gmtime(time.time())
- CONFIG_TIME = time.strftime(time_stamp_format, config_time)
-
- options, args = load_options()
-
- if args:
- print 'Unknown configure args:', args
- return 1
-
- if not os.path.exists('config'):
- os.path.mkdir('config')
-
- BUILD_CC, BUILD_CXX = configure_build(options)
- HOST_CC, HOST_CXX, HOST_OCAML = configure_host(options, BUILD_CC, BUILD_CXX)
- TARGET_CC, TARGET_CXX = configure_target(options, HOST_CC, HOST_CXX)
- configure_run(options)
-
- write_config(options, CONFIG_TIME,
- BUILD_CC, BUILD_CXX,
- HOST_CC, HOST_CXX, HOST_OCAML,
- TARGET_CC, TARGET_CXX,
- )
-
- return 0
-
-# -----------------------------------------------------------------------------
-
-if __name__ == '__main__':
- sys.exit(main())
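The make_config.py script above detects the build platform by running uname -s and mapping the result through archmap, falling back to Python's os.name and finally to the literal model 'detect' when nothing matches. A rough standalone sketch of that detection follows, using the standard subprocess module in place of the deleted xqt() helper and a trimmed copy of the archmap table; names and the trimming are illustrative, not part of the original file.

# --- sketch: uname-based build-model detection (illustrative only) ---
import os
import subprocess

ARCHMAP = {
    'posix': 'posix', 'linux': 'linux', 'gnu/linux': 'linux',
    'sunos': 'solaris', 'darwin': 'osx',
    'freebsd': 'bsd', 'netbsd': 'bsd', 'openbsd': 'bsd',
    'cygwin': 'cygwin', 'windows': 'win32', 'nt': 'win32',
}

def detect_build_model():
    """Return a Felix platform name, falling back to 'detect' when unknown."""
    try:
        out = subprocess.check_output(['uname', '-s'])
        name = out.decode('ascii', 'replace').strip().lower()
    except (OSError, subprocess.CalledProcessError):
        name = os.name  # e.g. 'posix' or 'nt' when uname is unavailable
    return ARCHMAP.get(name, 'detect')

print(detect_build_model())
# --- end sketch ---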
diff --git a/fbuild_old/lib/fbuild/__init__.py b/fbuild_old/lib/fbuild/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/fbuild_old/lib/fbuild/compat/_Queue.py b/fbuild_old/lib/fbuild/compat/_Queue.py
deleted file mode 100644
index e4b9d3a..0000000
--- a/fbuild_old/lib/fbuild/compat/_Queue.py
+++ /dev/null
@@ -1,221 +0,0 @@
-"""A multi-producer, multi-consumer queue."""
-
-from time import time as _time
-
-try:
- from collections import deque
-except ImportError:
- class deque(list):
- def popleft(self):
- return self.pop(0)
-
-__all__ = ['Empty', 'Full', 'Queue']
-
-class Empty(Exception):
- "Exception raised by Queue.get(block=0)/get_nowait()."
- pass
-
-class Full(Exception):
- "Exception raised by Queue.put(block=0)/put_nowait()."
- pass
-
-class Queue:
- """Create a queue object with a given maximum size.
-
- If maxsize is <= 0, the queue size is infinite.
- """
- def __init__(self, maxsize=0):
- try:
- import threading
- except ImportError:
- import dummy_threading as threading
- self._init(maxsize)
- # mutex must be held whenever the queue is mutating. All methods
- # that acquire mutex must release it before returning. mutex
- # is shared between the three conditions, so acquiring and
- # releasing the conditions also acquires and releases mutex.
- self.mutex = threading.Lock()
- # Notify not_empty whenever an item is added to the queue; a
- # thread waiting to get is notified then.
- self.not_empty = threading.Condition(self.mutex)
- # Notify not_full whenever an item is removed from the queue;
- # a thread waiting to put is notified then.
- self.not_full = threading.Condition(self.mutex)
- # Notify all_tasks_done whenever the number of unfinished tasks
- # drops to zero; thread waiting to join() is notified to resume
- self.all_tasks_done = threading.Condition(self.mutex)
- self.unfinished_tasks = 0
-
- def task_done(self):
- """Indicate that a formerly enqueued task is complete.
-
- Used by Queue consumer threads. For each get() used to fetch a task,
- a subsequent call to task_done() tells the queue that the processing
- on the task is complete.
-
- If a join() is currently blocking, it will resume when all items
- have been processed (meaning that a task_done() call was received
- for every item that had been put() into the queue).
-
- Raises a ValueError if called more times than there were items
- placed in the queue.
- """
- self.all_tasks_done.acquire()
- try:
- unfinished = self.unfinished_tasks - 1
- if unfinished <= 0:
- if unfinished < 0:
- raise ValueError('task_done() called too many times')
- self.all_tasks_done.notifyAll()
- self.unfinished_tasks = unfinished
- finally:
- self.all_tasks_done.release()
-
- def join(self):
- """Blocks until all items in the Queue have been gotten and processed.
-
- The count of unfinished tasks goes up whenever an item is added to the
- queue. The count goes down whenever a consumer thread calls task_done()
- to indicate the item was retrieved and all work on it is complete.
-
- When the count of unfinished tasks drops to zero, join() unblocks.
- """
- self.all_tasks_done.acquire()
- try:
- while self.unfinished_tasks:
- self.all_tasks_done.wait()
- finally:
- self.all_tasks_done.release()
-
- def qsize(self):
- """Return the approximate size of the queue (not reliable!)."""
- self.mutex.acquire()
- n = self._qsize()
- self.mutex.release()
- return n
-
- def empty(self):
- """Return True if the queue is empty, False otherwise (not reliable!)."""
- self.mutex.acquire()
- n = self._empty()
- self.mutex.release()
- return n
-
- def full(self):
- """Return True if the queue is full, False otherwise (not reliable!)."""
- self.mutex.acquire()
- n = self._full()
- self.mutex.release()
- return n
-
- def put(self, item, block=True, timeout=None):
- """Put an item into the queue.
-
- If optional args 'block' is true and 'timeout' is None (the default),
- block if necessary until a free slot is available. If 'timeout' is
- a positive number, it blocks at most 'timeout' seconds and raises
- the Full exception if no free slot was available within that time.
- Otherwise ('block' is false), put an item on the queue if a free slot
- is immediately available, else raise the Full exception ('timeout'
- is ignored in that case).
- """
- self.not_full.acquire()
- try:
- if not block:
- if self._full():
- raise Full
- elif timeout is None:
- while self._full():
- self.not_full.wait()
- else:
- if timeout < 0:
- raise ValueError("'timeout' must be a positive number")
- endtime = _time() + timeout
- while self._full():
- remaining = endtime - _time()
- if remaining <= 0.0:
- raise Full
- self.not_full.wait(remaining)
- self._put(item)
- self.unfinished_tasks += 1
- self.not_empty.notify()
- finally:
- self.not_full.release()
-
- def put_nowait(self, item):
- """Put an item into the queue without blocking.
-
- Only enqueue the item if a free slot is immediately available.
- Otherwise raise the Full exception.
- """
- return self.put(item, False)
-
- def get(self, block=True, timeout=None):
- """Remove and return an item from the queue.
-
- If optional args 'block' is true and 'timeout' is None (the default),
- block if necessary until an item is available. If 'timeout' is
- a positive number, it blocks at most 'timeout' seconds and raises
- the Empty exception if no item was available within that time.
- Otherwise ('block' is false), return an item if one is immediately
- available, else raise the Empty exception ('timeout' is ignored
- in that case).
- """
- self.not_empty.acquire()
- try:
- if not block:
- if self._empty():
- raise Empty
- elif timeout is None:
- while self._empty():
- self.not_empty.wait()
- else:
- if timeout < 0:
- raise ValueError("'timeout' must be a positive number")
- endtime = _time() + timeout
- while self._empty():
- remaining = endtime - _time()
- if remaining <= 0.0:
- raise Empty
- self.not_empty.wait(remaining)
- item = self._get()
- self.not_full.notify()
- return item
- finally:
- self.not_empty.release()
-
- def get_nowait(self):
- """Remove and return an item from the queue without blocking.
-
- Only get an item if one is immediately available. Otherwise
- raise the Empty exception.
- """
- return self.get(False)
-
- # Override these methods to implement other queue organizations
- # (e.g. stack or priority queue).
- # These will only be called with appropriate locks held
-
- # Initialize the queue representation
- def _init(self, maxsize):
- self.maxsize = maxsize
- self.queue = deque()
-
- def _qsize(self):
- return len(self.queue)
-
- # Check whether the queue is empty
- def _empty(self):
- return not self.queue
-
- # Check whether the queue is full
- def _full(self):
- return self.maxsize > 0 and len(self.queue) == self.maxsize
-
- # Put a new item in the queue
- def _put(self, item):
- self.queue.append(item)
-
- # Get an item from the queue
- def _get(self):
- return self.queue.popleft()
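The backported Queue above mirrors the standard library's API, so the task_done()/join() protocol its docstrings describe can be exercised with ordinary producer/consumer code. A small usage sketch (using the stdlib queue module, with a Python 2 fallback import; the worker and sentinel are illustrative):

# --- sketch: task_done()/join() usage (illustrative only) ---
try:
    import queue            # Python 3
except ImportError:
    import Queue as queue   # Python 2
import threading

q = queue.Queue()

def worker():
    while True:
        item = q.get()
        if item is None:              # sentinel: shut the worker down
            q.task_done()
            break
        print('processed %d' % item)  # one task_done() per completed get()
        q.task_done()

t = threading.Thread(target=worker)
t.start()
for item in range(3):
    q.put(item)
q.put(None)
q.join()   # unblocks once every put() item has been marked task_done()
t.join()
# --- end sketch ---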
diff --git a/fbuild_old/lib/fbuild/compat/__init__.py b/fbuild_old/lib/fbuild/compat/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/fbuild_old/lib/fbuild/compat/_subprocess.py b/fbuild_old/lib/fbuild/compat/_subprocess.py
deleted file mode 100644
index fa3d683..0000000
--- a/fbuild_old/lib/fbuild/compat/_subprocess.py
+++ /dev/null
@@ -1,1246 +0,0 @@
-# subprocess - Subprocesses with accessible I/O streams
-#
-# For more information about this module, see PEP 324.
-#
-# This module should remain compatible with Python 2.2, see PEP 291.
-#
-# Copyright (c) 2003-2005 by Peter Astrand <ast...@lysator.liu.se>
-#
-# Licensed to PSF under a Contributor Agreement.
-# See http://www.python.org/2.4/license for licensing details.
-
-r"""subprocess - Subprocesses with accessible I/O streams
-
-This module allows you to spawn processes, connect to their
-input/output/error pipes, and obtain their return codes. This module
-intends to replace several other, older modules and functions, like:
-
-os.system
-os.spawn*
-os.popen*
-popen2.*
-commands.*
-
-Information about how the subprocess module can be used to replace these
-modules and functions can be found below.
-
-
-
-Using the subprocess module
-===========================
-This module defines one class called Popen:
-
-class Popen(args, bufsize=0, executable=None,
- stdin=None, stdout=None, stderr=None,
- preexec_fn=None, close_fds=False, shell=False,
- cwd=None, env=None, universal_newlines=False,
- startupinfo=None, creationflags=0):
-
-
-Arguments are:
-
-args should be a string, or a sequence of program arguments. The
-program to execute is normally the first item in the args sequence or
-string, but can be explicitly set by using the executable argument.
-
-On UNIX, with shell=False (default): In this case, the Popen class
-uses os.execvp() to execute the child program. args should normally
-be a sequence. A string will be treated as a sequence with the string
-as the only item (the program to execute).
-
-On UNIX, with shell=True: If args is a string, it specifies the
-command string to execute through the shell. If args is a sequence,
-the first item specifies the command string, and any additional items
-will be treated as additional shell arguments.
-
-On Windows: the Popen class uses CreateProcess() to execute the child
-program, which operates on strings. If args is a sequence, it will be
-converted to a string using the list2cmdline method. Please note that
-not all MS Windows applications interpret the command line the same
-way: The list2cmdline is designed for applications using the same
-rules as the MS C runtime.
-
-bufsize, if given, has the same meaning as the corresponding argument
-to the built-in open() function: 0 means unbuffered, 1 means line
-buffered, any other positive value means use a buffer of
-(approximately) that size. A negative bufsize means to use the system
-default, which usually means fully buffered. The default value for
-bufsize is 0 (unbuffered).
-
-stdin, stdout and stderr specify the executed programs' standard
-input, standard output and standard error file handles, respectively.
-Valid values are PIPE, an existing file descriptor (a positive
-integer), an existing file object, and None. PIPE indicates that a
-new pipe to the child should be created. With None, no redirection
-will occur; the child's file handles will be inherited from the
-parent. Additionally, stderr can be STDOUT, which indicates that the
-stderr data from the applications should be captured into the same
-file handle as for stdout.
-
-If preexec_fn is set to a callable object, this object will be called
-in the child process just before the child is executed.
-
-If close_fds is true, all file descriptors except 0, 1 and 2 will be
-closed before the child process is executed.
-
-if shell is true, the specified command will be executed through the
-shell.
-
-If cwd is not None, the current directory will be changed to cwd
-before the child is executed.
-
-If env is not None, it defines the environment variables for the new
-process.
-
-If universal_newlines is true, the file objects stdout and stderr are
-opened as a text files, but lines may be terminated by any of '\n',
-the Unix end-of-line convention, '\r', the Macintosh convention or
-'\r\n', the Windows convention. All of these external representations
-are seen as '\n' by the Python program. Note: This feature is only
-available if Python is built with universal newline support (the
-default). Also, the newlines attribute of the file objects stdout,
-stdin and stderr are not updated by the communicate() method.
-
-The startupinfo and creationflags, if given, will be passed to the
-underlying CreateProcess() function. They can specify things such as
-appearance of the main window and priority for the new process.
-(Windows only)
-
-
-This module also defines two shortcut functions:
-
-call(*popenargs, **kwargs):
- Run command with arguments. Wait for command to complete, then
- return the returncode attribute.
-
- The arguments are the same as for the Popen constructor. Example:
-
- retcode = call(["ls", "-l"])
-
-check_call(*popenargs, **kwargs):
- Run command with arguments. Wait for command to complete. If the
- exit code was zero then return, otherwise raise
- CalledProcessError. The CalledProcessError object will have the
- return code in the returncode attribute.
-
- The arguments are the same as for the Popen constructor. Example:
-
- check_call(["ls", "-l"])
-
-Exceptions
-----------
-Exceptions raised in the child process, before the new program has
-started to execute, will be re-raised in the parent. Additionally,
-the exception object will have one extra attribute called
-'child_traceback', which is a string containing traceback information
-from the childs point of view.
-
-The most common exception raised is OSError. This occurs, for
-example, when trying to execute a non-existent file. Applications
-should prepare for OSErrors.
-
-A ValueError will be raised if Popen is called with invalid arguments.
-
-check_call() will raise CalledProcessError, if the called process
-returns a non-zero return code.
-
-
-Security
---------
-Unlike some other popen functions, this implementation will never call
-/bin/sh implicitly. This means that all characters, including shell
-metacharacters, can safely be passed to child processes.
-
-
-Popen objects
-=============
-Instances of the Popen class have the following methods:
-
-poll()
- Check if child process has terminated. Returns returncode
- attribute.
-
-wait()
- Wait for child process to terminate. Returns returncode attribute.
-
-communicate(input=None)
- Interact with process: Send data to stdin. Read data from stdout
- and stderr, until end-of-file is reached. Wait for process to
- terminate. The optional input argument should be a string to be
- sent to the child process, or None, if no data should be sent to
- the child.
-
- communicate() returns a tuple (stdout, stderr).
-
- Note: The data read is buffered in memory, so do not use this
- method if the data size is large or unlimited.
-
-The following attributes are also available:
-
-stdin
- If the stdin argument is PIPE, this attribute is a file object
- that provides input to the child process. Otherwise, it is None.
-
-stdout
- If the stdout argument is PIPE, this attribute is a file object
- that provides output from the child process. Otherwise, it is
- None.
-
-stderr
- If the stderr argument is PIPE, this attribute is file object that
- provides error output from the child process. Otherwise, it is
- None.
-
-pid
- The process ID of the child process.
-
-returncode
- The child return code. A None value indicates that the process
- hasn't terminated yet. A negative value -N indicates that the
- child was terminated by signal N (UNIX only).
-
-
-Replacing older functions with the subprocess module
-====================================================
-In this section, "a ==> b" means that b can be used as a replacement
-for a.
-
-Note: All functions in this section fail (more or less) silently if
-the executed program cannot be found; this module raises an OSError
-exception.
-
-In the following examples, we assume that the subprocess module is
-imported with "from subprocess import *".
-
-
-Replacing /bin/sh shell backquote
----------------------------------
-output=`mycmd myarg`
-==>
-output = Popen(["mycmd", "myarg"], stdout=PIPE).communicate()[0]
-
-
-Replacing shell pipe line
--------------------------
-output=`dmesg | grep hda`
-==>
-p1 = Popen(["dmesg"], stdout=PIPE)
-p2 = Popen(["grep", "hda"], stdin=p1.stdout, stdout=PIPE)
-output = p2.communicate()[0]
-
-
-Replacing os.system()
----------------------
-sts = os.system("mycmd" + " myarg")
-==>
-p = Popen("mycmd" + " myarg", shell=True)
-pid, sts = os.waitpid(p.pid, 0)
-
-Note:
-
-* Calling the program through the shell is usually not required.
-
-* It's easier to look at the returncode attribute than the
- exitstatus.
-
-A more real-world example would look like this:
-
-try:
- retcode = call("mycmd" + " myarg", shell=True)
- if retcode < 0:
- print >>sys.stderr, "Child was terminated by signal", -retcode
- else:
- print >>sys.stderr, "Child returned", retcode
-except OSError, e:
- print >>sys.stderr, "Execution failed:", e
-
-
-Replacing os.spawn*
--------------------
-P_NOWAIT example:
-
-pid = os.spawnlp(os.P_NOWAIT, "/bin/mycmd", "mycmd", "myarg")
-==>
-pid = Popen(["/bin/mycmd", "myarg"]).pid
-
-
-P_WAIT example:
-
-retcode = os.spawnlp(os.P_WAIT, "/bin/mycmd", "mycmd", "myarg")
-==>
-retcode = call(["/bin/mycmd", "myarg"])
-
-
-Vector example:
-
-os.spawnvp(os.P_NOWAIT, path, args)
-==>
-Popen([path] + args[1:])
-
-
-Environment example:
-
-os.spawnlpe(os.P_NOWAIT, "/bin/mycmd", "mycmd", "myarg", env)
-==>
-Popen(["/bin/mycmd", "myarg"], env={"PATH": "/usr/bin"})
-
-
-Replacing os.popen*
--------------------
-pipe = os.popen(cmd, mode='r', bufsize)
-==>
-pipe = Popen(cmd, shell=True, bufsize=bufsize, stdout=PIPE).stdout
-
-pipe = os.popen(cmd, mode='w', bufsize)
-==>
-pipe = Popen(cmd, shell=True, bufsize=bufsize, stdin=PIPE).stdin
-
-
-(child_stdin, child_stdout) = os.popen2(cmd, mode, bufsize)
-==>
-p = Popen(cmd, shell=True, bufsize=bufsize,
- stdin=PIPE, stdout=PIPE, close_fds=True)
-(child_stdin, child_stdout) = (p.stdin, p.stdout)
-
-
-(child_stdin,
- child_stdout,
- child_stderr) = os.popen3(cmd, mode, bufsize)
-==>
-p = Popen(cmd, shell=True, bufsize=bufsize,
- stdin=PIPE, stdout=PIPE, stderr=PIPE, close_fds=True)
-(child_stdin,
- child_stdout,
- child_stderr) = (p.stdin, p.stdout, p.stderr)
-
-
-(child_stdin, child_stdout_and_stderr) = os.popen4(cmd, mode, bufsize)
-==>
-p = Popen(cmd, shell=True, bufsize=bufsize,
- stdin=PIPE, stdout=PIPE, stderr=STDOUT, close_fds=True)
-(child_stdin, child_stdout_and_stderr) = (p.stdin, p.stdout)
-
-
-Replacing popen2.*
-------------------
-Note: If the cmd argument to popen2 functions is a string, the command
-is executed through /bin/sh. If it is a list, the command is directly
-executed.
-
-(child_stdout, child_stdin) = popen2.popen2("somestring", bufsize, mode)
-==>
-p = Popen(["somestring"], shell=True, bufsize=bufsize
- stdin=PIPE, stdout=PIPE, close_fds=True)
-(child_stdout, child_stdin) = (p.stdout, p.stdin)
-
-
-(child_stdout, child_stdin) = popen2.popen2(["mycmd", "myarg"], bufsize, mode)
-==>
-p = Popen(["mycmd", "myarg"], bufsize=bufsize,
- stdin=PIPE, stdout=PIPE, close_fds=True)
-(child_stdout, child_stdin) = (p.stdout, p.stdin)
-
-The popen2.Popen3 and popen3.Popen4 basically works as subprocess.Popen,
-except that:
-
-* subprocess.Popen raises an exception if the execution fails
-* the capturestderr argument is replaced with the stderr argument.
-* stdin=PIPE and stdout=PIPE must be specified.
-* popen2 closes all filedescriptors by default, but you have to specify
- close_fds=True with subprocess.Popen.
-
-
-"""
-
-import sys
-mswindows = (sys.platform == "win32")
-
-import os
-import types
-import traceback
-
-# Exception classes used by this module.
-class CalledProcessError(Exception):
- """This exception is raised when a process run by check_call() returns
- a non-zero exit status. The exit status will be stored in the
- returncode attribute."""
- def __init__(self, returncode, cmd):
- self.returncode = returncode
- self.cmd = cmd
- def __str__(self):
- return "Command '%s' returned non-zero exit status %d" % (self.cmd, self.returncode)
-
-
-if mswindows:
- import threading
- import msvcrt
- if 0: # <-- change this to use pywin32 instead of the _subprocess driver
- import pywintypes
- from win32api import GetStdHandle, STD_INPUT_HANDLE, \
- STD_OUTPUT_HANDLE, STD_ERROR_HANDLE
- from win32api import GetCurrentProcess, DuplicateHandle, \
- GetModuleFileName, GetVersion
- from win32con import DUPLICATE_SAME_ACCESS, SW_HIDE
- from win32pipe import CreatePipe
- from win32process import CreateProcess, STARTUPINFO, \
- GetExitCodeProcess, STARTF_USESTDHANDLES, \
- STARTF_USESHOWWINDOW, CREATE_NEW_CONSOLE
- from win32event import WaitForSingleObject, INFINITE, WAIT_OBJECT_0
- else:
- from _subprocess import *
- class STARTUPINFO:
- dwFlags = 0
- hStdInput = None
- hStdOutput = None
- hStdError = None
- wShowWindow = 0
- class pywintypes:
- error = IOError
-else:
- import select
- import errno
- import fcntl
- import pickle
-
-__all__ = ["Popen", "PIPE", "STDOUT", "call", "check_call", "CalledProcessError"]
-
-try:
- MAXFD = os.sysconf("SC_OPEN_MAX")
-except:
- MAXFD = 256
-
-# True/False does not exist on 2.2.0
-try:
- False
-except NameError:
- False = 0
- True = 1
-
-_active = []
-
-def _cleanup():
- for inst in _active[:]:
- if inst.poll(_deadstate=sys.maxint) >= 0:
- try:
- _active.remove(inst)
- except ValueError:
- # This can happen if two threads create a new Popen instance.
- # It's harmless that it was already removed, so ignore.
- pass
-
-PIPE = -1
-STDOUT = -2
-
-
-def call(*popenargs, **kwargs):
- """Run command with arguments. Wait for command to complete, then
- return the returncode attribute.
-
- The arguments are the same as for the Popen constructor. Example:
-
- retcode = call(["ls", "-l"])
- """
- return Popen(*popenargs, **kwargs).wait()
-
-
-def check_call(*popenargs, **kwargs):
- """Run command with arguments. Wait for command to complete. If
- the exit code was zero then return, otherwise raise
- CalledProcessError. The CalledProcessError object will have the
- return code in the returncode attribute.
-
- The arguments are the same as for the Popen constructor. Example:
-
- check_call(["ls", "-l"])
- """
- retcode = call(*popenargs, **kwargs)
- cmd = kwargs.get("args")
- if cmd is None:
- cmd = popenargs[0]
- if retcode:
- raise CalledProcessError(retcode, cmd)
- return retcode
-
-
-def list2cmdline(seq):
- """
- Translate a sequence of arguments into a command line
- string, using the same rules as the MS C runtime:
-
- 1) Arguments are delimited by white space, which is either a
- space or a tab.
-
- 2) A string surrounded by double quotation marks is
- interpreted as a single argument, regardless of white space
- contained within. A quoted string can be embedded in an
- argument.
-
- 3) A double quotation mark preceded by a backslash is
- interpreted as a literal double quotation mark.
-
- 4) Backslashes are interpreted literally, unless they
- immediately precede a double quotation mark.
-
- 5) If backslashes immediately precede a double quotation mark,
- every pair of backslashes is interpreted as a literal
- backslash. If the number of backslashes is odd, the last
- backslash escapes the next double quotation mark as
- described in rule 3.
- """
-
- # See
- # http://msdn.microsoft.com/library/en-us/vccelng/htm/progs_12.asp
- result = []
- needquote = False
- for arg in seq:
- bs_buf = []
-
- # Add a space to separate this argument from the others
- if result:
- result.append(' ')
-
- needquote = (" " in arg) or ("\t" in arg) or arg == ""
- if needquote:
- result.append('"')
-
- for c in arg:
- if c == '\\':
- # Don't know if we need to double yet.
- bs_buf.append(c)
- elif c == '"':
- # Double backspaces.
- result.append('\\' * len(bs_buf)*2)
- bs_buf = []
- result.append('\\"')
- else:
- # Normal char
- if bs_buf:
- result.extend(bs_buf)
- bs_buf = []
- result.append(c)
-
- # Add remaining backspaces, if any.
- if bs_buf:
- result.extend(bs_buf)
-
- if needquote:
- result.extend(bs_buf)
- result.append('"')
-
- return ''.join(result)
-
-
-class Popen(object):
- def __init__(self, args, bufsize=0, executable=None,
- stdin=None, stdout=None, stderr=None,
- preexec_fn=None, close_fds=False, shell=False,
- cwd=None, env=None, universal_newlines=False,
- startupinfo=None, creationflags=0):
- """Create new Popen instance."""
- _cleanup()
-
- self._child_created = False
- if not isinstance(bufsize, (int, long)):
- raise TypeError("bufsize must be an integer")
-
- if mswindows:
- if preexec_fn is not None:
- raise ValueError("preexec_fn is not supported on Windows "
- "platforms")
- if close_fds:
- raise ValueError("close_fds is not supported on Windows "
- "platforms")
- else:
- # POSIX
- if startupinfo is not None:
- raise ValueError("startupinfo is only supported on Windows "
- "platforms")
- if creationflags != 0:
- raise ValueError("creationflags is only supported on Windows "
- "platforms")
-
- self.stdin = None
- self.stdout = None
- self.stderr = None
- self.pid = None
- self.returncode = None
- self.universal_newlines = universal_newlines
-
- # Input and output objects. The general principle is like
- # this:
- #
- # Parent Child
- # ------ -----
- # p2cwrite ---stdin---> p2cread
- # c2pread <--stdout--- c2pwrite
- # errread <--stderr--- errwrite
- #
- # On POSIX, the child objects are file descriptors. On
- # Windows, these are Windows file handles. The parent objects
- # are file descriptors on both platforms. The parent objects
- # are None when not using PIPEs. The child objects are None
- # when not redirecting.
-
- (p2cread, p2cwrite,
- c2pread, c2pwrite,
- errread, errwrite) = self._get_handles(stdin, stdout, stderr)
-
- self._execute_child(args, executable, preexec_fn, close_fds,
- cwd, env, universal_newlines,
- startupinfo, creationflags, shell,
- p2cread, p2cwrite,
- c2pread, c2pwrite,
- errread, errwrite)
-
- # On Windows, you cannot just redirect one or two handles: You
- # either have to redirect all three or none. If the subprocess
- # user has only redirected one or two handles, we are
- # automatically creating PIPEs for the rest. We should close
- # these after the process is started. See bug #1124861.
- if mswindows:
- if stdin is None and p2cwrite is not None:
- os.close(p2cwrite)
- p2cwrite = None
- if stdout is None and c2pread is not None:
- os.close(c2pread)
- c2pread = None
- if stderr is None and errread is not None:
- os.close(errread)
- errread = None
-
- if p2cwrite:
- self.stdin = os.fdopen(p2cwrite, 'wb', bufsize)
- if c2pread:
- if universal_newlines:
- self.stdout = os.fdopen(c2pread, 'rU', bufsize)
- else:
- self.stdout = os.fdopen(c2pread, 'rb', bufsize)
- if errread:
- if universal_newlines:
- self.stderr = os.fdopen(errread, 'rU', bufsize)
- else:
- self.stderr = os.fdopen(errread, 'rb', bufsize)
-
-
- def _translate_newlines(self, data):
- data = data.replace("\r\n", "\n")
- data = data.replace("\r", "\n")
- return data
-
-
- def __del__(self):
- if not self._child_created:
- # We didn't get to successfully create a child process.
- return
- # In case the child hasn't been waited on, check if it's done.
- self.poll(_deadstate=sys.maxint)
- if self.returncode is None and _active is not None:
- # Child is still running, keep us alive until we can wait on it.
- _active.append(self)
-
-
- def communicate(self, input=None):
- """Interact with process: Send data to stdin. Read data from
- stdout and stderr, until end-of-file is reached. Wait for
- process to terminate. The optional input argument should be a
- string to be sent to the child process, or None, if no data
- should be sent to the child.
-
- communicate() returns a tuple (stdout, stderr)."""
-
- # Optimization: If we are only using one pipe, or no pipe at
- # all, using select() or threads is unnecessary.
- if [self.stdin, self.stdout, self.stderr].count(None) >= 2:
- stdout = None
- stderr = None
- if self.stdin:
- if input:
- self.stdin.write(input)
- self.stdin.close()
- elif self.stdout:
- stdout = self.stdout.read()
- elif self.stderr:
- stderr = self.stderr.read()
- self.wait()
- return (stdout, stderr)
-
- return self._communicate(input)
-
-
- if mswindows:
- #
- # Windows methods
- #
- def _get_handles(self, stdin, stdout, stderr):
- """Construct and return tuple with IO objects:
- p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite
- """
- if stdin is None and stdout is None and stderr is None:
- return (None, None, None, None, None, None)
-
- p2cread, p2cwrite = None, None
- c2pread, c2pwrite = None, None
- errread, errwrite = None, None
-
- if stdin is None:
- p2cread = GetStdHandle(STD_INPUT_HANDLE)
- if p2cread is not None:
- pass
- elif stdin is None or stdin == PIPE:
- p2cread, p2cwrite = CreatePipe(None, 0)
- # Detach and turn into fd
- p2cwrite = p2cwrite.Detach()
- p2cwrite = msvcrt.open_osfhandle(p2cwrite, 0)
- elif isinstance(stdin, int):
- p2cread = msvcrt.get_osfhandle(stdin)
- else:
- # Assuming file-like object
- p2cread = msvcrt.get_osfhandle(stdin.fileno())
- p2cread = self._make_inheritable(p2cread)
-
- if stdout is None:
- c2pwrite = GetStdHandle(STD_OUTPUT_HANDLE)
- if c2pwrite is not None:
- pass
- elif stdout is None or stdout == PIPE:
- c2pread, c2pwrite = CreatePipe(None, 0)
- # Detach and turn into fd
- c2pread = c2pread.Detach()
- c2pread = msvcrt.open_osfhandle(c2pread, 0)
- elif isinstance(stdout, int):
- c2pwrite = msvcrt.get_osfhandle(stdout)
- else:
- # Assuming file-like object
- c2pwrite = msvcrt.get_osfhandle(stdout.fileno())
- c2pwrite = self._make_inheritable(c2pwrite)
-
- if stderr is None:
- errwrite = GetStdHandle(STD_ERROR_HANDLE)
- if errwrite is not None:
- pass
- elif stderr is None or stderr == PIPE:
- errread, errwrite = CreatePipe(None, 0)
- # Detach and turn into fd
- errread = errread.Detach()
- errread = msvcrt.open_osfhandle(errread, 0)
- elif stderr == STDOUT:
- errwrite = c2pwrite
- elif isinstance(stderr, int):
- errwrite = msvcrt.get_osfhandle(stderr)
- else:
- # Assuming file-like object
- errwrite = msvcrt.get_osfhandle(stderr.fileno())
- errwrite = self._make_inheritable(errwrite)
-
- return (p2cread, p2cwrite,
- c2pread, c2pwrite,
- errread, errwrite)
-
-
- def _make_inheritable(self, handle):
- """Return a duplicate of handle, which is inheritable"""
- return DuplicateHandle(GetCurrentProcess(), handle,
- GetCurrentProcess(), 0, 1,
- DUPLICATE_SAME_ACCESS)
-
-
- def _find_w9xpopen(self):
- """Find and return absolute path to w9xpopen.exe"""
- w9xpopen = os.path.join(os.path.dirname(GetModuleFileName(0)),
- "w9xpopen.exe")
- if not os.path.exists(w9xpopen):
- # Eeek - file-not-found - possibly an embedding
- # situation - see if we can locate it in sys.exec_prefix
- w9xpopen = os.path.join(os.path.dirname(sys.exec_prefix),
- "w9xpopen.exe")
- if not os.path.exists(w9xpopen):
- raise RuntimeError("Cannot locate w9xpopen.exe, which is "
- "needed for Popen to work with your "
- "shell or platform.")
- return w9xpopen
-
-
- def _execute_child(self, args, executable, preexec_fn, close_fds,
- cwd, env, universal_newlines,
- startupinfo, creationflags, shell,
- p2cread, p2cwrite,
- c2pread, c2pwrite,
- errread, errwrite):
- """Execute program (MS Windows version)"""
-
- if not isinstance(args, types.StringTypes):
- args = list2cmdline(args)
-
- # Process startup details
- if startupinfo is None:
- startupinfo = STARTUPINFO()
- if None not in (p2cread, c2pwrite, errwrite):
- startupinfo.dwFlags |= STARTF_USESTDHANDLES
- startupinfo.hStdInput = p2cread
- startupinfo.hStdOutput = c2pwrite
- startupinfo.hStdError = errwrite
-
- if shell:
- startupinfo.dwFlags |= STARTF_USESHOWWINDOW
- startupinfo.wShowWindow = SW_HIDE
- comspec = os.environ.get("COMSPEC", "cmd.exe")
- args = comspec + " /c " + args
- if (GetVersion() >= 0x80000000L or
- os.path.basename(comspec).lower() == "command.com"):
- # Win9x, or using command.com on NT. We need to
- # use the w9xpopen intermediate program. For more
- # information, see KB Q150956
- # (http://web.archive.org/web/20011105084002/http://support.microsoft.com/support/kb/articles/Q150/9/56.asp)
- w9xpopen = self._find_w9xpopen()
- args = '"%s" %s' % (w9xpopen, args)
- # Not passing CREATE_NEW_CONSOLE has been known to
- # cause random failures on win9x. Specifically a
- # dialog: "Your program accessed mem currently in
- # use at xxx" and a hopeful warning about the
- # stability of your system. Cost is Ctrl+C won't
- # kill children.
- creationflags |= CREATE_NEW_CONSOLE
-
- # Start the process
- try:
- hp, ht, pid, tid = CreateProcess(executable, args,
- # no special security
- None, None,
- # must inherit handles to pass std
- # handles
- 1,
- creationflags,
- env,
- cwd,
- startupinfo)
- except pywintypes.error, e:
- # Translate pywintypes.error to WindowsError, which is
- # a subclass of OSError. FIXME: We should really
- # translate errno using _sys_errlist (or similar), but
- # how can this be done from Python?
- raise WindowsError(*e.args)
-
- # Retain the process handle, but close the thread handle
- self._child_created = True
- self._handle = hp
- self.pid = pid
- ht.Close()
-
- # Child is launched. Close the parent's copy of those pipe
- # handles that only the child should have open. You need
- # to make sure that no handles to the write end of the
- # output pipe are maintained in this process or else the
- # pipe will not close when the child process exits and the
- # ReadFile will hang.
- if p2cread is not None:
- p2cread.Close()
- if c2pwrite is not None:
- c2pwrite.Close()
- if errwrite is not None:
- errwrite.Close()
-
-
- def poll(self, _deadstate=None):
- """Check if child process has terminated. Returns returncode
- attribute."""
- if self.returncode is None:
- if WaitForSingleObject(self._handle, 0) == WAIT_OBJECT_0:
- self.returncode = GetExitCodeProcess(self._handle)
- return self.returncode
-
-
- def wait(self):
- """Wait for child process to terminate. Returns returncode
- attribute."""
- if self.returncode is None:
- obj = WaitForSingleObject(self._handle, INFINITE)
- self.returncode = GetExitCodeProcess(self._handle)
- return self.returncode
-
-
- def _readerthread(self, fh, buffer):
- buffer.append(fh.read())
-
-
- def _communicate(self, input):
- stdout = None # Return
- stderr = None # Return
-
- if self.stdout:
- stdout = []
- stdout_thread = threading.Thread(target=self._readerthread,
- args=(self.stdout, stdout))
- stdout_thread.setDaemon(True)
- stdout_thread.start()
- if self.stderr:
- stderr = []
- stderr_thread = threading.Thread(target=self._readerthread,
- args=(self.stderr, stderr))
- stderr_thread.setDaemon(True)
- stderr_thread.start()
-
- if self.stdin:
- if input is not None:
- self.stdin.write(input)
- self.stdin.close()
-
- if self.stdout:
- stdout_thread.join()
- if self.stderr:
- stderr_thread.join()
-
- # All data exchanged. Translate lists into strings.
- if stdout is not None:
- stdout = stdout[0]
- if stderr is not None:
- stderr = stderr[0]
-
- # Translate newlines, if requested. We cannot let the file
- # object do the translation: It is based on stdio, which is
- # impossible to combine with select (unless forcing no
- # buffering).
- if self.universal_newlines and hasattr(file, 'newlines'):
- if stdout:
- stdout = self._translate_newlines(stdout)
- if stderr:
- stderr = self._translate_newlines(stderr)
-
- self.wait()
- return (stdout, stderr)
-
- else:
- #
- # POSIX methods
- #
- def _get_handles(self, stdin, stdout, stderr):
- """Construct and return tuple with IO objects:
- p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite
- """
- p2cread, p2cwrite = None, None
- c2pread, c2pwrite = None, None
- errread, errwrite = None, None
-
- if stdin is None:
- pass
- elif stdin == PIPE:
- p2cread, p2cwrite = os.pipe()
- elif isinstance(stdin, int):
- p2cread = stdin
- else:
- # Assuming file-like object
- p2cread = stdin.fileno()
-
- if stdout is None:
- pass
- elif stdout == PIPE:
- c2pread, c2pwrite = os.pipe()
- elif isinstance(stdout, int):
- c2pwrite = stdout
- else:
- # Assuming file-like object
- c2pwrite = stdout.fileno()
-
- if stderr is None:
- pass
- elif stderr == PIPE:
- errread, errwrite = os.pipe()
- elif stderr == STDOUT:
- errwrite = c2pwrite
- elif isinstance(stderr, int):
- errwrite = stderr
- else:
- # Assuming file-like object
- errwrite = stderr.fileno()
-
- return (p2cread, p2cwrite,
- c2pread, c2pwrite,
- errread, errwrite)
-
-
- def _set_cloexec_flag(self, fd):
- try:
- cloexec_flag = fcntl.FD_CLOEXEC
- except AttributeError:
- cloexec_flag = 1
-
- old = fcntl.fcntl(fd, fcntl.F_GETFD)
- fcntl.fcntl(fd, fcntl.F_SETFD, old | cloexec_flag)
-
-
- def _close_fds(self, but):
- for i in xrange(3, MAXFD):
- if i == but:
- continue
- try:
- os.close(i)
- except:
- pass
-
-
- def _execute_child(self, args, executable, preexec_fn, close_fds,
- cwd, env, universal_newlines,
- startupinfo, creationflags, shell,
- p2cread, p2cwrite,
- c2pread, c2pwrite,
- errread, errwrite):
- """Execute program (POSIX version)"""
-
- if isinstance(args, types.StringTypes):
- args = [args]
- else:
- args = list(args)
-
- if shell:
- args = ["/bin/sh", "-c"] + args
-
- if executable is None:
- executable = args[0]
-
- # For transferring possible exec failure from child to parent
- # The first char specifies the exception type: 0 means
- # OSError, 1 means some other error.
- errpipe_read, errpipe_write = os.pipe()
- self._set_cloexec_flag(errpipe_write)
-
- self.pid = os.fork()
- self._child_created = True
- if self.pid == 0:
- # Child
- try:
- # Close parent's pipe ends
- if p2cwrite:
- os.close(p2cwrite)
- if c2pread:
- os.close(c2pread)
- if errread:
- os.close(errread)
- os.close(errpipe_read)
-
- # Dup fds for child
- if p2cread:
- os.dup2(p2cread, 0)
- if c2pwrite:
- os.dup2(c2pwrite, 1)
- if errwrite:
- os.dup2(errwrite, 2)
-
- # Close pipe fds. Make sure we don't close the same
- # fd more than once, or standard fds.
- if p2cread and p2cread not in (0,):
- os.close(p2cread)
- if c2pwrite and c2pwrite not in (p2cread, 1):
- os.close(c2pwrite)
- if errwrite and errwrite not in (p2cread, c2pwrite, 2):
- os.close(errwrite)
-
- # Close all other fds, if asked for
- if close_fds:
- self._close_fds(but=errpipe_write)
-
- if cwd is not None:
- os.chdir(cwd)
-
- if preexec_fn:
- apply(preexec_fn)
-
- if env is None:
- os.execvp(executable, args)
- else:
- os.execvpe(executable, args, env)
-
- except:
- exc_type, exc_value, tb = sys.exc_info()
- # Save the traceback and attach it to the exception object
- exc_lines = traceback.format_exception(exc_type,
- exc_value,
- tb)
- exc_value.child_traceback = ''.join(exc_lines)
- os.write(errpipe_write, pickle.dumps(exc_value))
-
- # This exitcode won't be reported to applications, so it
- # really doesn't matter what we return.
- os._exit(255)
-
- # Parent
- os.close(errpipe_write)
- if p2cread and p2cwrite:
- os.close(p2cread)
- if c2pwrite and c2pread:
- os.close(c2pwrite)
- if errwrite and errread:
- os.close(errwrite)
-
- # Wait for exec to fail or succeed; possibly raising exception
- data = os.read(errpipe_read, 1048576) # Exceptions limited to 1 MB
- os.close(errpipe_read)
- if data != "":
- os.waitpid(self.pid, 0)
- child_exception = pickle.loads(data)
- raise child_exception
-
-
- def _handle_exitstatus(self, sts):
- if os.WIFSIGNALED(sts):
- self.returncode = -os.WTERMSIG(sts)
- elif os.WIFEXITED(sts):
- self.returncode = os.WEXITSTATUS(sts)
- else:
- # Should never happen
- raise RuntimeError("Unknown child exit status!")
-
-
- def poll(self, _deadstate=None):
- """Check if child process has terminated. Returns returncode
- attribute."""
- if self.returncode is None:
- try:
- pid, sts = os.waitpid(self.pid, os.WNOHANG)
- if pid == self.pid:
- self._handle_exitstatus(sts)
- except os.error:
- if _deadstate is not None:
- self.returncode = _deadstate
- return self.returncode
-
-
- def wait(self):
- """Wait for child process to terminate. Returns returncode
- attribute."""
- if self.returncode is None:
- pid, sts = os.waitpid(self.pid, 0)
- self._handle_exitstatus(sts)
- return self.returncode
-
-
- def _communicate(self, input):
- read_set = []
- write_set = []
- stdout = None # Return
- stderr = None # Return
-
- if self.stdin:
- # Flush stdio buffer. This might block, if the user has
- # been writing to .stdin in an uncontrolled fashion.
- self.stdin.flush()
- if input:
- write_set.append(self.stdin)
- else:
- self.stdin.close()
- if self.stdout:
- read_set.append(self.stdout)
- stdout = []
- if self.stderr:
- read_set.append(self.stderr)
- stderr = []
-
- input_offset = 0
- while read_set or write_set:
- rlist, wlist, xlist = select.select(read_set, write_set, [])
-
- if self.stdin in wlist:
- # When select has indicated that the file is writable,
- # we can write up to PIPE_BUF bytes without risk of
- # blocking. POSIX defines PIPE_BUF >= 512
- bytes_written = os.write(self.stdin.fileno(), buffer(input, input_offset, 512))
- input_offset += bytes_written
- if input_offset >= len(input):
- self.stdin.close()
- write_set.remove(self.stdin)
-
- if self.stdout in rlist:
- data = os.read(self.stdout.fileno(), 1024)
- if data == "":
- self.stdout.close()
- read_set.remove(self.stdout)
- stdout.append(data)
-
- if self.stderr in rlist:
- data = os.read(self.stderr.fileno(), 1024)
- if data == "":
- self.stderr.close()
- read_set.remove(self.stderr)
- stderr.append(data)
-
- # All data exchanged. Translate lists into strings.
- if stdout is not None:
- stdout = ''.join(stdout)
- if stderr is not None:
- stderr = ''.join(stderr)
-
- # Translate newlines, if requested. We cannot let the file
- # object do the translation: It is based on stdio, which is
- # impossible to combine with select (unless forcing no
- # buffering).
- if self.universal_newlines and hasattr(file, 'newlines'):
- if stdout:
- stdout = self._translate_newlines(stdout)
- if stderr:
- stderr = self._translate_newlines(stderr)
-
- self.wait()
- return (stdout, stderr)
-
-
-def _demo_posix():
- #
- # Example 1: Simple redirection: Get process list
- #
- plist = Popen(["ps"], stdout=PIPE).communicate()[0]
- print "Process list:"
- print plist
-
- #
- # Example 2: Change uid before executing child
- #
- if os.getuid() == 0:
- p = Popen(["id"], preexec_fn=lambda: os.setuid(100))
- p.wait()
-
- #
- # Example 3: Connecting several subprocesses
- #
- print "Looking for 'hda'..."
- p1 = Popen(["dmesg"], stdout=PIPE)
- p2 = Popen(["grep", "hda"], stdin=p1.stdout, stdout=PIPE)
- print repr(p2.communicate()[0])
-
- #
- # Example 4: Catch execution error
- #
- print
- print "Trying a weird file..."
- try:
- print Popen(["/this/path/does/not/exist"]).communicate()
- except OSError, e:
- if e.errno == errno.ENOENT:
- print "The file didn't exist. I thought so..."
- print "Child traceback:"
- print e.child_traceback
- else:
- print "Error", e.errno
- else:
- print >>sys.stderr, "Gosh. No error."
-
-
-def _demo_windows():
- #
- # Example 1: Connecting several subprocesses
- #
- print "Looking for 'PROMPT' in set output..."
- p1 = Popen("set", stdout=PIPE, shell=True)
- p2 = Popen('find "PROMPT"', stdin=p1.stdout, stdout=PIPE)
- print repr(p2.communicate()[0])
-
- #
- # Example 2: Simple execution of program
- #
- print "Executing calc..."
- p = Popen("calc")
- p.wait()
-
-
-if __name__ == "__main__":
- if mswindows:
- _demo_windows()
- else:
- _demo_posix()
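Beyond the demos above, the most common pattern with this class is feeding the child's stdin and collecting its stdout in one call. A small POSIX sketch, assuming the bundled module is importable as subprocess (Python 2):

    from subprocess import Popen, PIPE

    # communicate() writes the input, reads both streams to end-of-file and
    # reaps the child, avoiding the pipe deadlocks hand-rolled loops hit.
    p = Popen(["grep", "ban"], stdin=PIPE, stdout=PIPE)
    out, err = p.communicate("apple\nbanana\ncherry\n")
    print repr(out)        # 'banana\n'
    print p.returncode     # 0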
diff --git a/fbuild_old/lib/fbuild/flxbuild/__init__.py b/fbuild_old/lib/fbuild/flxbuild/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/fbuild_old/lib/fbuild/flxbuild/c_base.py b/fbuild_old/lib/fbuild/flxbuild/c_base.py
deleted file mode 100644
index f119a57..0000000
--- a/fbuild_old/lib/fbuild/flxbuild/c_base.py
+++ /dev/null
@@ -1,31 +0,0 @@
-import os
-
-from fbuild.flxbuild.c_cxx_base import c_cxx_base
-
-class c_base(c_cxx_base):
- def __init__(self, *args, **kwds):
- super(c_base, self).__init__(*args, **kwds)
-
- self.options.EXT_SRC_MAIN = ".c"
- self.options.EXT_SRC_LIB = ".c"
-
- def report_isnan(self):
- opt = self.options
-
- if opt.HAVE_C99_ISNAN_IN_MATH:
- print "C99 NaN Support in : <math.h>"
- elif opt.HAVE_BSD_ISNAN_IN_MATH:
- print "BSD NaN Support in : <math.h>"
- elif opt.HAVE_BSD_ISNAN_IN_IEEEFP:
- print "BSD NaN Support in : <ieeefp.h>"
- else:
- print "NaN : EMULATED"
-
- if opt.HAVE_C99_ISINF_IN_MATH:
- print "C99 INF Support in : <math.h>"
- elif opt.HAVE_BSD_ISINF_IN_MATH:
- print "BSD INF Support in : <math.h>"
- elif opt.HAVE_BSD_ISINF_IN_IEEEFP:
- print "BSD INF Support in : <ieeefp.h>"
- else:
- print "INF : EMULATED"
diff --git a/fbuild_old/lib/fbuild/flxbuild/c_cxx_base.py b/fbuild_old/lib/fbuild/flxbuild/c_cxx_base.py
deleted file mode 100644
index 7881196..0000000
--- a/fbuild_old/lib/fbuild/flxbuild/c_cxx_base.py
+++ /dev/null
@@ -1,1180 +0,0 @@
-import os
-import sys
-
-from fbuild.flxbuild.flxutil import mkdirs, ExecutionError
-from fbuild.flxbuild.compiler_base import compiler_base
-
-class c_cxx_base(compiler_base):
- DEFAULT_COM = None
- DEFAULT_AR = None
- DEFAULT_RANLIB = None
-
- def set_options(self,
- COM=None,
- AR=None,
- RANLIB=None,
- build="posix",
- model="detect",
- use="generic",
- include_paths=None,
- lib_paths=None,
- **kwds):
- super(c_cxx_base, self).set_options(**kwds)
-
- opt = self.options
-
- # RF: this looks like in most cases it replaces None with None...
- if COM is None: COM = self.DEFAULT_COM
- if AR is None: AR = self.DEFAULT_AR
- if RANLIB is None: RANLIB = self.DEFAULT_RANLIB
- if include_paths is None: include_paths = []
- if lib_paths is None: lib_paths = []
-
- assert COM, 'need to specify a compiler'
- assert AR, 'need to specify a static library linker'
-
- opt.COM = COM
- opt.AR = AR
- opt.RANLIB = RANLIB
- opt.build = build
- opt.model = model
- opt.use = use
- opt.include_paths = include_paths
- opt.lib_paths = lib_paths
-
- # defaults for gcc specific stuff
- opt.HAVE_GNU_X86 = False
- opt.HAVE_GNU_X86_64 = False
- opt.USE_REGPARM3 = False
- opt.HAVE_CGOTO = False
- opt.HAVE_ASM_LABELS = False
- opt.HAVE_STL_GNU_CXX = False
- opt.HAVE_GNU_BUILTIN_EXPECT = False
-
- opt.HAVE_PIC = False
- opt.HAVE_STATIC_OPENMP = False
- opt.HAVE_SHARED_OPENMP = False
-
- print "COM =", opt.COM
-
- ########
-
- # RF: would be nice to know which kind of source was being written
- def write_src(self, data, basename=os.path.join('tmp', 'tmp')):
- mkdirs(os.path.dirname(basename))
-
- f = open(basename + self.options.EXT_SRC_MAIN, "w")
- try:
- f.write(data + '\n')
- finally:
- f.close()
-
- return basename
-
- # RF: temporarily added this to get the dll building config tests working.
- # previously write_src was creating .cpp files and the build_dll code
- # was looking for .cxx. This is a workaround.
- def write_lib_src(self, data, basename=os.path.join('tmp', 'tmp')):
- mkdirs(os.path.dirname(basename))
-
- f = open(basename + self.options.EXT_SRC_LIB, "w")
- try:
- f.write(data + '\n')
- finally:
- f.close()
-
- return basename
-
- ########
-
- def compile_thing(self, COM, EXT_SRC, EXT_DST, basenames,
- outdir='',
- include_paths=[],
- macros=[],
- optimise=False,
- debug=False,
- CFLAGS=None,
- log=None,
- ):
- opt = self.options
-
- objects = []
- for basename in basenames:
- src = self.find_in_src_dir(basename + EXT_SRC)
- dst = os.path.join(outdir, basename + EXT_DST)
- mkdirs(os.path.dirname(dst))
-
- # compile for a dll: position independent code etc
- cmd = [COM]
-
- # debug symbols
- if debug: cmd.append(opt.DEBUG_FLAGS)
-
- # optimisation
- if optimise: cmd.append(opt.OPTIMISE)
-
- # include path
- for i in include_paths: cmd.extend((opt.SPEC_INCLUDE, i))
- for i in opt.include_paths: cmd.extend((opt.SPEC_INCLUDE, i))
-
- # output file
- cmd.append(opt.SPEC_OBJ_FILENAME + dst)
-
- #macros
- for i in macros: cmd.append(opt.SPEC_DEFINE + i)
-
- if opt.use == "host": cmd.append(opt.SPEC_DEFINE + "HOST_BUILD")
- elif opt.use == "target": cmd.append(opt.SPEC_DEFINE + "TARGET_BUILD")
-
- if CFLAGS: cmd.append(CFLAGS)
-
- #input file
- cmd.append(src)
-
- # compile it
- self.shell(*cmd, **dict(log=log))
-
- objects.append(dst)
-
- return objects
-
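To make the flag assembly above concrete, here is a standalone sketch of the same loop body with hypothetical gcc-style option values (the real values come from the configuration phase, not from this code):

    # Hypothetical stand-ins for the SPEC_*/DEBUG_FLAGS options:
    COM, DEBUG_FLAGS = 'g++ -c', '-g'
    SPEC_INCLUDE, SPEC_OBJ_FILENAME, SPEC_DEFINE = '-I', '-o ', '-D'

    cmd = [COM, DEBUG_FLAGS]
    for inc in ['src/rtl']:                              # include path
        cmd.extend((SPEC_INCLUDE, inc))
    cmd.append(SPEC_OBJ_FILENAME + 'build/rtl/gc.o')     # output file
    cmd.append(SPEC_DEFINE + 'TARGET_BUILD')             # macros
    cmd.append('src/rtl/gc.cpp')                         # input file
    print ' '.join(cmd)
    #  -> g++ -c -g -I src/rtl -o build/rtl/gc.o -DTARGET_BUILD src/rtl/gc.cpp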
- ####
-
- def compile_static_thing(self, COM, EXT_SRC, *args, **kwds):
- if self.options.HAVE_STATIC_OPENMP: COM += ' ' + self.options.OPENMP
-
- return self.compile_thing(COM, EXT_SRC, self.options.EXT_STATIC_OBJ,
- *args, **kwds)
-
-
- # compile a file to an object suitable for inclusion in a static link
- # version of the RTL
- def compile_static_rtl(self, *args, **kwds):
- opt = self.options
- COM = opt.__dict__.get('CCOBJ_STATIC_RTL', opt.COM + ' ' + opt.SPEC_COMPILE_OBJ)
- return self.compile_static_thing(COM, opt.EXT_SRC_LIB, *args, **kwds)
-
-
- def compile_static_main(self, *args, **kwds):
- opt = self.options
- COM = opt.__dict__.get('CCOBJ_STATIC_MAIN', opt.COM + ' ' + opt.SPEC_COMPILE_OBJ)
- return self.compile_static_thing(COM, opt.EXT_SRC_MAIN, *args, **kwds)
-
-
- def compile_felix_static(self, *args, **kwds):
- opt = self.options
- COM = opt.__dict__.get('CCOBJ_STATIC_FLX', opt.COM + ' ' + opt.SPEC_COMPILE_OBJ)
- return self.compile_static_thing(COM, opt.EXT_SRC_LIB, *args, **kwds)
-
- ####
-
- def compile_shared_thing(self, COM, EXT_SRC, *args, **kwds):
- if self.options.HAVE_PIC: COM += ' ' + self.options.PIC
-
- # JS: openmp support now detected, and the right compiler option applied
- # Windows prof. ed. 2005 supports openMP for dynamic link only
- # Express doesn't support openMP
- # Xbox also supports static link, but we don't handle that here
- #
- # So far, we don't link the right library.. can't see at present how
- # to get the library in, and not sure that /openmp doesn't link it
- # automatically.
- # It's called vcomp.lib/vcomp.dll.
- # this has to be specified with LDFLAGS to go after the 'link' directive
- # assuming the /openmp switch doesn't force the right linkage
-
- if self.options.HAVE_SHARED_OPENMP: COM += ' ' + self.options.OPENMP
-
- # add shared flag, then execute
- return self.compile_thing(COM, EXT_SRC, self.options.EXT_SHARED_OBJ,
- *args, **kwds)
-
-
- # compile a file to an object suitable for inclusion in a mainline
- # which links to shared libraries
- def compile_shared_rtl(self, *args, **kwds):
- opt = self.options
- COM = opt.__dict__.get('CCOBJ_DYNAMIC_RTL', opt.COM + ' ' + opt.SPEC_COMPILE_OBJ)
- return self.compile_shared_thing(COM, opt.EXT_SRC_LIB, *args, **kwds)
-
-
- def compile_shared_main(self, *args, **kwds):
- opt = self.options
- COM = opt.__dict__.get('CCOBJ_DYNAMIC_MAIN', opt.COM + ' ' + opt.SPEC_COMPILE_OBJ)
- return self.compile_shared_thing(COM, opt.EXT_SRC_MAIN, *args, **kwds)
-
-
- def compile_felix_dll(self, *args, **kwds):
- opt = self.options
- COM = opt.__dict__.get('CCOBJ_DYNAMIC_FLX', opt.COM + ' ' + opt.SPEC_COMPILE_OBJ)
- return self.compile_shared_thing(COM, opt.EXT_SRC_LIB, *args, **kwds)
-
- ########
-
- def link_thing(self, LINK, EXT_DST, objects, outfile,
- lib_paths=[],
- libs=[],
- LDFLAGS="",
- log=None,
- ):
- dst = outfile + EXT_DST
-
- opt = self.options
- cmd = [LINK]
- cmd.append(opt.SPEC_EXE_FILENAME + dst)
- cmd.extend(objects)
-
- # RF: this is a hack to make sure that /link comes before all link flags
- # under visual studio. I tried making sure that LDFLAGS had /link first
- # and moved it to just after the obj files, but that broke the gcc build
- # as some folk pass the libs in "libs" and others pass them directly
- # as LDFLAGS args. So much for encapsulation, eh?
- if opt.PRE_LINK_FLAGS: cmd.append(opt.PRE_LINK_FLAGS)
-
- for i in lib_paths: cmd.append(opt.SPEC_LIBPATH + i)
- for i in opt.lib_paths: cmd.append(opt.SPEC_LIBPATH + i)
-
- for i in libs:
- if opt.POSIX and i.startswith('lib'):
- i = i[3:]
-
- cmd.append(opt.SPEC_LIB + i)
-
- cmd.append(LDFLAGS)
-
- mkdirs(os.path.dirname(outfile))
-
- self.shell(*cmd, **dict(log=log))
-
- return dst
-
- ####
-
- def link_static_thing(self, COM, EXT_DST, *args, **kwds):
- return self.link_thing(COM, EXT_DST, *args, **kwds)
-
-
- def link_static_rtl(self, objects, outfile,
- lib_paths=[],
- libs=[],
- LDFLAGS="",
- log=None):
- opt = self.options
-
- filename = outfile + opt.EXT_LIB
-
- mkdirs(os.path.dirname(outfile))
-
- self.shell(opt.AR, opt.SPEC_AR_OUT_FILENAME + filename,
- *objects, **dict(log=log))
-
- if opt.RANLIB:
- self.shell(opt.RANLIB, filename, log=log)
-
- return filename
-
-
- def link_static_program(self, *args, **kwds):
- opt = self.options
- return self.link_static_thing(opt.CCLINK_STATIC, opt.EXT_EXE, *args, **kwds)
-
- ####
-
- def link_shared_thing(self, COM, EXT_DST, *args, **kwds):
- return self.link_thing(COM, EXT_DST, *args, **kwds)
-
-
- def link_shared_rtl(self, *args, **kwds):
- opt = self.options
- return self.link_shared_thing(opt.CCLINK_DYNAMIC_RTL, opt.EXT_DYLIB,
- *args, **kwds)
-
-
- def link_shared_dll(self, *args, **kwds):
- opt = self.options
- return self.link_shared_thing(opt.CCLINK_DYNAMIC_FLX, opt.EXT_SHLIB,
- *args, **kwds)
-
-
- def link_shared_program(self, *args, **kwds):
- opt = self.options
- return self.link_shared_thing(opt.CCLINK_DYNAMIC_MAIN, opt.EXT_EXE,
- *args, **kwds)
-
- ########
-
- def build_thing(self, compile_fn, link_fn, basenames,
- outfile=None,
- outdir='',
- objects=[],
- include_paths=[],
- macros=[],
- optimise=False,
- debug=False,
- CFLAGS="",
- lib_paths=[],
- libs=[],
- LDFLAGS="",
- log=None,
- ):
- if type(basenames) != type([]):
- basenames = [basenames]
-
- assert basenames
-
- objects = objects + compile_fn(basenames,
- outdir=outdir,
- include_paths=include_paths,
- macros=macros,
- optimise=optimise,
- debug=debug,
- CFLAGS=CFLAGS,
- log=log,
- )
-
- if outfile is None:
- outfile = basenames[0]
-
- return link_fn(objects, outfile,
- lib_paths=lib_paths,
- libs=libs,
- LDFLAGS=LDFLAGS,
- log=log,
- )
-
-
- def build_static_rtl(self, *args, **kwds):
- return self.build_thing(self.compile_static_rtl, self.link_static_rtl,
- *args, **kwds)
-
- def build_felix_static(self, *args, **kwds):
- return self.build_thing(self.compile_felix_static, self.link_static_program,
- *args, **kwds)
-
- def build_static_program(self, *args, **kwds):
- return self.build_thing(self.compile_static_main, self.link_static_program,
- *args, **kwds)
-
- def build_shared_rtl(self, *args, **kwds):
- return self.build_thing(self.compile_shared_rtl, self.link_shared_rtl,
- *args, **kwds)
-
- def build_shared_dll(self, *args, **kwds):
- return self.build_thing(self.compile_felix_dll, self.link_shared_dll,
- *args, **kwds)
-
- def build_shared_program(self, *args, **kwds):
- return self.build_thing(self.compile_shared_main, self.link_shared_program,
- *args, **kwds)
-
- ########
-
- def run_static_program(self, *args, **kwds):
- filename = self.build_static_program(*args, **kwds)
- return self.shell(filename)
-
- def run_shared_program(self, *args, **kwds):
- filename = self.build_shared_program(*args, **kwds)
- return self.shell(filename)
-
- ########
-
- def run_static_string_program(self, data, basename, *args, **kwds):
- basename = self.write_src(data, basename)
- return self.run_static_program(basename, *args, **kwds)
-
-
- def run_shared_string_program(self, data, basename, *args, **kwds):
- basename = self.write_src(data, basename)
- return self.run_shared_program(basename, *args, **kwds)
-
- ########
-
- def build_string_program(self, data, basename='tmp', **kwds):
- filename = self.write_src(data, basename)
- return self.build_static_program(filename, **kwds)
-
- ####
-
- def compile_dummy_main(self, **kwds):
- basename = 'tmp' + os.sep + 'dummy'
- filename = self.write_src(
- 'int main(int argc, char** argv) { return 0; }', basename)
- return self.compile_static_main([filename], **kwds)
-
-
- def compile_dummy_lib_program(self, **kwds):
- basename = 'tmp' + os.sep + 'dummy_lib'
- # RF: This can be compiled as both c and c++ these days it seems
- # hence conditional extern "C"
- # P.S. This lovingly hand crafted function doesn't seem to be called
- sys.exit(1234) # let's see!
- proggy = """
-#ifdef __cplusplus
-extern "C"
-#endif
-int fred(int argc, char** argv) { return 0; }
-"""
-
- filename = self.write_src(proggy, basename)
-
- return self.build_static_rtl([filename], **kwds)
-
- ########
-
- def check_macro_defined(self, macro, header=''):
- if header:
- header = '#include <%s>' % header
-
- filename = "tmp" + os.sep + "mchk"
- self.write_src("""
-%s
-#ifndef %s
-#error %s
-#endif
-int main(int argc, char** argv) {return 0;}
-""" % (header, macro, macro), filename)
-
- try:
- self.compile_static_main([filename])
- except ExecutionError:
- if header:
- print "NOT defined %s in %s" % (macro, header)
- else:
- print "NOT defined", macro
- return False
- else:
- if header:
- print "#defined %s in %s" % (macro, header)
- else:
- print "#defined", macro
- return True
-
-
- def check_header_exists(self, name):
- basename = "tmp" + os.sep + "hchk"
- filename = self.write_src('#include <%s>\nint main(int argc, char** argv) { return 0; }' % name, basename)
- try:
- self.compile_static_main([filename])
- print "#include <%s>" % name
- return True
- except ExecutionError:
- print "NO HEADER <%s>" % name
- return False
-
- ########
-
- def get_type_size(self, typedef, header=''):
- if header:
- header = '#include <%s>' % header
-
- if typedef[:7] != 'typedef':
- t = 'typedef %s t;' % typedef
- else:
- t = typedef
-
- filename = "tmp" + os.sep + "type_size"
- self.write_src("""
-#include <stddef.h>
-#include <stdio.h>
-%s
-
-%s
-
-int main(int argc, char** argv) {
- printf("%%d\\n",(int)sizeof(t));
- return 0;
-}
-""" % (header, t), filename)
-
- try:
- lines = self.run_static_program(filename)
- except ExecutionError:
- return None
-
- size = int(lines[0])
- return size
-
-
- def get_type_align(self, typedef, header=''):
- """the typedef defines alias 't' for the type"""
- if header:
- header = '#include <%s>' % header
-
- if typedef[:7] != 'typedef':
- t = 'typedef %s t;' % typedef
- else:
- t = typedef
-
- filename = "tmp" + os.sep + "type_align"
- self.write_src("""
-#include <stddef.h>
-#include <stdio.h>
-%s
-
-%s
-
-struct TEST {
- char c;
- t mem;
-};
-
-int main(int argc, char** argv) {
- printf("%%d\\n",(int)offsetof(struct TEST,mem));
- return 0;
-}
-""" % (header, t), filename)
-
- try:
- lines = self.run_static_program(filename)
- except ExecutionError:
- return None
-
- align = int(lines[0])
- print '%s: align: %s' % (typedef, align)
- return align
-
-
- def get_type_size_sign(self, typedef1, typedef2=None, header=''):
- if header:
- header = '#include <%s>' % header
-
- if typedef2 is None:
- typedef2 = typedef1
- expr1 = '(t1)0'
- expr2 = '(t1)~3 < ' + expr1
- else:
- expr1 = '(t1)0 + (t2)0'
- expr2 = '(t1)~3 + (t2)1 < ' + expr1
-
- filename = "tmp" + os.sep + "type_size_sign"
- self.write_src("""
-#include <stddef.h>
-#include <stdio.h>
-%s
-
-typedef %s t1;
-typedef %s t2;
-
-int main(int argc, char** argv) {
- printf("%%d\\n",(int)sizeof(%s));
- printf("%%d\\n", %s);
-
- return 0;
-}
-""" % (header, typedef1, typedef2, expr1, expr2), filename)
-
- try:
- lines = self.run_static_program(filename)
- except ExecutionError:
- return None, None
- size = int(lines[0])
- sign = int(lines[1])
- if typedef1 == typedef2:
- print '(%s)0: sign: %s' % (typedef1, sign)
- else:
- print '(%s)0 + (%s)0: sign: %s' % (typedef1, typedef2, sign)
-
- return size, sign
-
- ########
-
- native_int_types = [ \
- ('SCHAR', 'signed char'),
- ('UCHAR', 'unsigned char'),
- ('CHAR', 'char'),
- ('SHORT', 'short'),
- ('USHORT', 'unsigned short'),
- ('INT', 'int'),
- ('UINT', 'unsigned int'),
- ('LONG', 'long'),
- ('ULONG', 'unsigned long'),
- ('LONGLONG', 'long long'),
- ('ULONGLONG', 'unsigned long long'),
- ('BOOL', 'bool'),
- ]
-
-
- def detect_type_data(self, m, t, header=''):
- size = self.get_type_size(t, header)
-
- opt = self.options
-
- if size is None:
- if header:
- print "NO TYPE %s in <%s>" % (t, header)
- else:
- print "NO TYPE %s" % t
-
- opt.__dict__['HAVE_' + m] = False
- else:
- print '%s: size: %s' % (t, size)
-
- align = self.get_type_align(t, header)
-
- opt.__dict__['HAVE_' + m] = True
- opt.__dict__['SIZEOF_' + m] = size
- opt.__dict__['ALIGNOF_' + m] = align
-
-
- def detect_int_data(self):
- opt = self.options
-
- # find if we have stdint.h
- opt.HAVE_STDINT = self.check_header_exists("stdint.h")
-
- opt.sizesign2type = {}
-
- for m, t in self.native_int_types:
- self.detect_type_data(m, t)
- sizesign = self.get_type_size_sign(t)
-
- opt.sizesign2type[sizesign] = opt.sizesign2type.get(sizesign, t)
-
-
- def find_alias(self, t1, t2=None, header=''):
- size, sign = self.get_type_size_sign(t1, t2, header)
- if size is None:
- return None
- return self.options.sizesign2type[size, sign]
-
-
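The (size, signedness) lookup above is easier to follow with concrete numbers. A hand-worked sketch for a typical LP64 platform; the sizes are hypothetical, since the real table is built by probing the compiler:

    # What detect_int_data() might record: (bytes, is_signed) -> native C type
    sizesign2type = {
        (4, 1): 'int',  (4, 0): 'unsigned int',
        (8, 1): 'long', (8, 0): 'unsigned long',
    }

    # find_alias('size_t') probes size_t -- say 8 bytes, unsigned here --
    # and reduces it to a canonical native type:
    print sizesign2type[(8, 0)]    # -> unsigned long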
- def detect_aliases(self):
- opt = self.options
- opt.arith_conv = {}
-
- for m1, t1 in self.native_int_types:
- for m2, t2 in self.native_int_types:
- alias = self.find_alias(t1, t2)
- if alias is not None:
- opt.arith_conv[(t1, t2)] = alias
-
- for t in ['ptrdiff_t', 'size_t', 'wchar_t']:
- alias = self.find_alias(t)
- if alias:
- opt.__dict__['ALIAS_' + t] = alias
-
- if opt.HAVE_STDINT:
- for t in [
- 'int8_t', 'uint8_t',
- 'int16_t', 'uint16_t',
- 'int32_t', 'uint32_t',
- 'int64_t', 'uint64_t',
- 'intmax_t', 'uintmax_t',
- 'intptr_t', 'uintptr_t',
- ]:
- alias = self.find_alias(t, header='stdint.h')
- if alias:
- opt.__dict__['ALIAS_' + t] = alias
-
- if not getattr(opt,"ALIAS_int8_t",None):
- opt.ALIAS_int8_t="signed char"
-
- if not getattr(opt,"ALIAS_uint8_t",None):
- opt.ALIAS_uint8_t="unsigned char"
-
- sizes = {
- opt.SIZEOF_SHORT*8 : "short",
- opt.SIZEOF_INT*8: "int",
- opt.SIZEOF_LONG*8 : "long",
- }
-
- if opt.HAVE_LONGLONG:
- sizes[opt.SIZEOF_LONGLONG*8]="long long"
-
- if not getattr(opt,"ALIAS_intmax_t",None):
- opt.ALIAS_intmax_t="long"
- opt.ALIAS_uintmax_t="unsigned long"
- if opt.HAVE_LONGLONG:
- opt.ALIAS_intmax_t="long long"
- opt.ALIAS_uintmax_t="unsigned long long"
-
- for size in [16,32,64]:
- if not getattr(opt,"ALIAS_int"+str(size)+"_t",None):
- try:
- t = sizes[size]
- opt.__dict__["ALIAS_int"+str(size)+"_t"]=t
- opt.__dict__["ALIAS_uint"+str(size)+"_t"]="unsigned " + t
- except KeyError:
- opt.__dict__["ALIAS_int"+str(size)+"_t"]="emul_int"+str(size)
- opt.__dict__["ALIAS_uint"+str(size)+"_t"]="emul_uint"+str(size)
-
- if not getattr(opt,"ALIAS_intptr_t",None):
- try:
- opt.ALIAS_intptr_t=sizes[opt.SIZEOF_VOIDP*8]
- opt.ALIAS_uintptr_t="unsigned "+sizes[opt.SIZEOF_VOIDP*8]
- except:
- print "NO INTEGER THE SIZE OF A VOID*!"
- sys.exit(1)
-
- def detect_c_type_data(self):
- self.detect_int_data()
-
- std_dtypes = [
- ('BOOL', 'bool', ''),
- ('FLOAT', 'float', ''),
- ('DOUBLE', 'double', ''),
- ('LONGDOUBLE', 'long double', ''),
- ('ENUM', 'typedef enum enum_t {tag} t;', ''),
- ('VOIDP', 'void *', ''),
- ('FUNCP', 'typedef void (*t)(void);', ''),
- ('CBOOL', '_Bool', ''),
- ('WCHAR', 'wchar_t', 'stddef.h'),
- ('PTRDIFF', 'ptrdiff_t', 'stddef.h'),
- ('INTPTR', 'intptr_t', 'stdint.h'),
- ('UINTPTR', 'uintptr_t', 'stdint.h'),
- ('INTMAX', 'intmax_t', 'stdint.h'),
- ('UINTMAX', 'uintmax_t', 'stdint.h'),
- ('SIZE', 'size_t', 'stddef.h'),
- ('COMPLEX', 'float _Complex', ''),
- ('DOUBLECOMPLEX', 'double _Complex', ''),
- ('LONGDOUBLECOMPLEX', 'long double _Complex', ''),
- ('IMAGINARY', 'float _Imaginary', ''),
- ('DOUBLEIMAGINARY', 'double _Imaginary', ''),
- ('LONGDOUBLEIMAGINARY', 'long double _Imaginary', ''),
- ('INT8', 'int8_t', 'stdint.h'),
- ('INT16', 'int16_t', 'stdint.h'),
- ('INT32', 'int32_t', 'stdint.h'),
- ('INT64', 'int64_t', 'stdint.h'),
- ('UINT8', 'uint8_t', 'stdint.h'),
- ('UINT16', 'uint16_t', 'stdint.h'),
- ('UINT32', 'uint32_t', 'stdint.h'),
- ('UINT64', 'uint64_t', 'stdint.h'),
- ]
-
- for m, t, f in std_dtypes:
- self.detect_type_data(m, t, header=f)
-
- self.detect_aliases()
-
- opt = self.options
-
-
- def detect_win32(self):
- return self.check_macro_defined("_WIN32")
-
- def detect_win64(self):
- return self.check_macro_defined("_WIN64")
-
- def detect_posix(self):
- if self.check_macro_defined("_WIN32"):
- return False
- else:
- return True
-
- def detect_cygwin(self):
- return self.check_macro_defined("__CYGWIN__")
-
- def detect_osx(self):
- return self.check_macro_defined("__APPLE__")
-
- def detect_osx_version(self):
- if not self.options.MACOSX:
- return None
- else:
- # query gestalt to determine os version
- try:
- from gestalt import gestalt
- import MacOS
- except ImportError:
- return None
-
- try:
- sysv = gestalt('sysv')
- except (RuntimeError, MacOS.Error):
- return None
-
- major = (sysv & 0xff00) >> 8
- minor = (sysv & 0x00f0) >> 4
- patch = (sysv & 0x000f)
-
- # convert into decimal
- major = int(hex(major)[2:])
-
- version = major*100 + minor*10 + patch
-
- print 'MACOSX VERSION:', version
-
- return version
-
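A worked instance of the gestalt decoding above, for a hypothetical Mac OS X 10.6.4 machine where gestalt('sysv') reports the BCD value 0x1064:

    sysv = 0x1064                     # hypothetical gestalt('sysv') result
    major = (sysv & 0xff00) >> 8      # 0x10
    minor = (sysv & 0x00f0) >> 4      # 6
    patch = (sysv & 0x000f)           # 4
    major = int(hex(major)[2:])       # BCD 0x10 read back as decimal 10
    print major * 100 + minor * 10 + patch    # -> 1064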
- def detect_bsd(self):
- return self.check_macro_defined("BSD")
-
- def detect_solaris(self):
- # can't find a symbol for solaris...
- # return self.check_macro_defined("__SOLARIS__")
- try:
- # print SunOS on solaris
- return self.shell("uname", verbose=False) == 'SunOS\n'
- except ExecutionError:
- return False
-
- def detect_linux(self):
- return self.check_macro_defined("__linux__")
-
- # resolves the model to one of: posix, linux, solaris, bsd, osx, cygwin, win32, win64
- # nocygwin = mingw = win32
- def detect_model(self):
- opt = self.options
-
- # check that we can use the compiler
- self.compile_dummy_main()
-
- opt.MACOSX = self.detect_osx()
- opt.MACOSX_VERSION = self.detect_osx_version()
- opt.BSD = self.detect_bsd()
- opt.SOLARIS = self.detect_solaris()
- opt.LINUX= self.detect_linux()
- opt.WIN32 = self.detect_win32()
- opt.WIN64 = self.detect_win64()
- opt.CYGWIN = self.detect_cygwin()
- opt.POSIX = self.detect_posix()
-
- sum = opt.MACOSX + opt.WIN32 + opt.CYGWIN
- if sum > 1:
- print "INCOMPATIBLE MODELS DETECTED"
- print "MACOSX",opt.MACOSX
- print "CYGWIN",opt.CYGWIN
- print "WIN32",opt.WIN32
- else:
- if opt.model == "detect":
- if opt.CYGWIN: opt.model = "cygwin"
- if opt.MACOSX: opt.model = "osx"
- if opt.WIN32: opt.model = "win32"
- if opt.WIN64: opt.model = "win64"
- if opt.SOLARIS: opt.model = "solaris"
- if opt.LINUX: opt.model = "linux"
- if opt.BSD: opt.model = "bsd"
- if opt.model == "detect": opt.model = "posix"
- if opt.model in ["mingw", "nocygwin"]: opt.model = "win32"
- if opt.model in ["posix"] and opt.CYGWIN: opt.model = "cygwin"
-
- if opt.model not in ["posix", "linux", "solaris", "bsd", "osx", "cygwin", "win32", "win64"]:
- print "UNKNOWN MODEL", opt.model
- sys.exit(1)
-
- print "MODEL=", self.options.model
-
- ########
-
- def detect_intsizes(self):
- """find misc info about char signedness and endianness"""
-
- opt = self.options
-
- filename = "tmp" + os.sep + "intsizes"
-
- try:
- output = self.run_static_string_program(r"""
-#include <stdio.h>
-#include <stddef.h>
-
-enum enum_t {e_tag};
-typedef void (*fp_t)(void);
-
-union endian_t {
- unsigned long x;
- unsigned char y[sizeof(unsigned long)];
-} endian;
-
-int main(int argc, char** argv) {
- printf("CHAR_IS_UNSIGNED=%d\n",((char)0xFF)>0?1:0);
- endian.x = 1ul;
- printf("LITTLE_ENDIAN=%d\n", endian.y[0]);
- printf("BIG_ENDIAN=%d\n", endian.y[sizeof(unsigned long)-1]);
-
- return 0;
-}
-""", filename)
- except ExecutionError:
- print "FATAL: can't determine sizes of ints"
- raise
- ## THIS CALL CANNOT USE THE SHELL BECAUSE IT REDIRECTS OUTPUT
- os.system("tmp"+os.sep+"intsizes > tmp"+os.sep+"intsizes.py")
- f = open("tmp"+os.sep+"intsizes.py")
- try:
- exec f in opt.__dict__
- finally:
- f.close()
- # RF (zzz): getting \r\n output from target/xcompile stage,
- # under nocygwin which is confusing 'exec' which flags them
- # as syntax errors. re-enabling the old code (not sure why it was
- # changed as it sure as hell weren't broke).
-
- # exec string.join(output, '\n') in opt.__dict__
-
- if opt.CHAR_IS_UNSIGNED:
- print "char is unsigned"
- else:
- print "char is signed"
-
- if opt.BIG_ENDIAN: print "Big Endian byte order detected"
- if opt.LITTLE_ENDIAN: print "Little Endian byte order detected"
-
-
- def detect_alignment(self):
- #calculate alignment tables
- vbls = [
- ("ALIGNOF_CBOOL","_Bool"),
- ("ALIGNOF_BOOL","bool"),
- ("ALIGNOF_SHORT","short"),
- ("ALIGNOF_INT","int"),
- ("ALIGNOF_LONG","long"),
- ("ALIGNOF_LONGLONG","long long"),
-
- ("ALIGNOF_FLOAT","float"),
- ("ALIGNOF_DOUBLE","double"),
- ("ALIGNOF_LONGDOUBLE","long double"),
-
- ("ALIGNOF_WCHAR","wchar_t"),
- ("ALIGNOF_VOIDP","void*"),
- ]
-
- opt = self.options
-
- opt.MAX_ALIGN = 1
- opt.flx_aligns = {}
- for k, t in vbls:
- try:
- v = opt.__dict__[k]
- opt.flx_aligns[v]=t
- except KeyError:
- pass
- else:
- if v > opt.MAX_ALIGN:
- opt.MAX_ALIGN = v
- opt.flx_aligns[1] = "char"
-
-
- def detect_isnan(self):
- # find if we have C99 isnan in <math.h>
- try:
- self.build_string_program(r"""
-#include <math.h>
-
-int main(int argc,char** argv) {
- float f = 0.0;
- isnan(f);
- return 0;
-}
-""", 'tmp' + os.sep + 'nan_math')
- self.options.HAVE_C99_ISNAN_IN_MATH = True
- print "C99 isnan found in <math.h>"
- except ExecutionError:
- self.options.HAVE_C99_ISNAN_IN_MATH = False
-
- # find if we have BSD isnanf in <math.h> (NAUGHTY!)
- try:
- self.build_string_program(r"""
-#include <math.h>
-
-int main(int argc,char** argv) {
- float f = 0.0;
- isnanf(f);
- return 0;
-}
-""", 'tmp' + os.sep + 'nan_math')
- self.options.HAVE_BSD_ISNAN_IN_MATH = True
- print "BSD isnanf found in <math.h>"
- except ExecutionError:
- self.options.HAVE_BSD_ISNAN_IN_MATH = False
-
- # find if we have BSD isnanf in <ieeefp.h> (NAUGHTY!)
- try:
- self.build_string_program(r"""
-#include <ieeefp.h>
-
-int main(int argc,char** argv) {
- float f = 0.0;
- isnanf(f);
- return 0;
-}
-""", 'tmp' + os.sep + 'nan_ieeefp')
- self.options.HAVE_BSD_ISNAN_IN_IEEEFP = True
- print "BSD isnanf found in <ieeefp.h>"
- except ExecutionError:
- self.options.HAVE_BSD_ISNAN_IN_IEEEFP = False
-
- # find if we have C99 isinf in <math.h>
- try:
- self.build_string_program(r"""
-#include <math.h>
-
-int main(int argc,char** argv) {
- float f = 0.0;
- isinf(f);
- return 0;
-}
-""", 'tmp' + os.sep + 'nan_math')
- self.options.HAVE_C99_ISINF_IN_MATH = True
- print "C99 isinf found in <math.h>"
- except ExecutionError:
- self.options.HAVE_C99_ISINF_IN_MATH = False
-
- # find if we have BSD isinff in <math.h> (NAUGHTY!)
- try:
- self.build_string_program(r"""
-#include <math.h>
-
-int main(int argc,char** argv) {
- float f = 0.0;
- isinff(f);
- return 0;
-}
-""", 'tmp' + os.sep + 'nan_math')
- self.options.HAVE_BSD_ISINF_IN_MATH = True
- print "BSD isinf found in <math.h>"
- except ExecutionError:
- self.options.HAVE_BSD_ISINF_IN_MATH = False
-
- # find if we have BSD isinff in <ieeefp.h> (NAUGHTY!)
- try:
- self.build_string_program(r"""
-#include <ieeefp.h>
-
-int main(int argc,char** argv) {
- float f = 0.0;
- isinff(f);
- return 0;
-}
-""", 'tmp' + os.sep + 'nan_ieeefp')
- self.options.HAVE_BSD_ISINF_IN_IEEEFP = True
- print "BSD isinf found in <ieeefp.h>"
- except ExecutionError:
- self.options.HAVE_BSD_ISINF_IN_IEEEFP = False
-
- # find if we have C99 isfinite in <math.h>
- try:
- self.build_string_program(r"""
-#include <math.h>
-
-int main(int argc,char** argv) {
- float f = 0.0;
- isfinite(f);
- return 0;
-}
-""", 'tmp' + os.sep + 'nan_math')
- self.options.HAVE_C99_ISFINITE_IN_MATH = True
- print "C99 isfinite found in <math.h>"
- except ExecutionError:
- self.options.HAVE_C99_ISFINITE_IN_MATH = False
-
- # find if we have BSD finitef in <math.h> (NAUGHTY!)
- try:
- self.build_string_program(r"""
-#include <math.h>
-
-int main(int argc,char** argv) {
- float f = 0.0;
- finitef(f);
- return 0;
-}
-""", 'tmp' + os.sep + 'nan_math')
- self.options.HAVE_BSD_FINITE_IN_MATH = True
- print "BSD finitef found in <math.h>"
- except ExecutionError:
- self.options.HAVE_BSD_FINITE_IN_MATH = False
-
- # find if we have BSD finitef in <ieeefp.h> (NAUGHTY!)
- try:
- self.build_string_program(r"""
-#include <ieeefp.h>
-
-int main(int argc,char** argv) {
- float f = 0.0;
- finitef(f);
- return 0;
-}
-""", 'tmp' + os.sep + 'nan_ieeefp')
- self.options.HAVE_BSD_FINITE_IN_IEEEFP = True
- print "BSD finitef found in <ieeefp.h>"
- except ExecutionError:
- self.options.HAVE_BSD_FINITE_IN_IEEEFP = False
-
-
- def detect_vsnprintf(self):
- opt = self.options
-
- filename = self.write_src(r"""
-#include <stdio.h>
-#include <stdarg.h>
-
-int check(char const*fmt,...)
-{
- va_list ap;
- va_start(ap,fmt);
- int n = vsnprintf(NULL,0,fmt,ap);
- va_end(ap);
- return n!=3;
-}
-
-int main(int argc,char** argv) {
- return check("%s","XXX"); // 0 means pass
-}
-""", 'tmp' + os.sep + 'vsnprintf')
-
- try:
- lines = self.run_static_program(filename)
- except ExecutionError:
- opt.HAVE_VSNPRINTF = False
- else:
- opt.HAVE_VSNPRINTF = True
-
- if opt.HAVE_VSNPRINTF:
- print "vsnprintf() supported"
- else:
- print "vsnprintf() NOT supported"
-
- ########
-
- def detect_compiler_options(self):
- pass
-
- def check_options(self):
- self.detect_model()
-
- self.detect_compiler_options()
-
- self.detect_intsizes()
- self.detect_c_type_data()
- self.detect_alignment()
- self.detect_isnan()
- self.detect_vsnprintf()
-
- # would like to know if we have SDL_opengl. that's done by compiling
- # and running sdl_opengl.cxx. It can be compiled like
- # g++ `sdl-config --cflags` sdl_opengl.cxx `sdl-config --libs`
- # but how is that done portably? Does win32 even have sdl-config?
-
-
- def report_config(self):
- opt = self.options
- print "**********************************************"
- print opt.COM, opt.use, "configuration"
- print "**********************************************"
- print "model=", opt.model
- print "static library tool #1 :", opt.AR
- print "static library tool #2 :", opt.RANLIB
- print
- print "Command to compile static Felix rtl :", opt.CCOBJ_STATIC_RTL
- print "Command to compile shared Felix rtl :", opt.CCOBJ_DYNAMIC_RTL
- print "Command to link shared Felix rtl :", opt.CCLINK_DYNAMIC_RTL
- print
- print "Command to compile static Felix driver :", opt.CCOBJ_STATIC_MAIN
- print "Command to compile dynamic Felix driver :", opt.CCOBJ_DYNAMIC_MAIN
- print "Command to link dynamic Felix driver :", opt.CCLINK_DYNAMIC_MAIN
- print
- print "Command to compile static Felix object :", opt.CCOBJ_STATIC_FLX
- print "Command to compile loadable Felix object:", opt.CCOBJ_DYNAMIC_FLX
- print "Command to link loadable Felix object :", opt.CCLINK_DYNAMIC_FLX
- print
- print "Extension for static object file :", opt.EXT_STATIC_OBJ
- print "Extension for shared object file :", opt.EXT_SHARED_OBJ
- print "Extension for static archive :", opt.EXT_LIB
- print "Extension for loadable RTL :", opt.EXT_DYLIB
- print "Extension for flx modules :", opt.EXT_SHLIB
- print "Extension for executable :", opt.EXT_EXE
- print "RTL in directory :", opt.SHLIB_DIR
-
- print
- self.report_isnan()
diff --git a/fbuild_old/lib/fbuild/flxbuild/compiler_base.py b/fbuild_old/lib/fbuild/flxbuild/compiler_base.py
deleted file mode 100644
index 9cdfa20..0000000
--- a/fbuild_old/lib/fbuild/flxbuild/compiler_base.py
+++ /dev/null
@@ -1,51 +0,0 @@
-import os
-import sys
-
-from fbuild.flxbuild.flxutil import xqt
-
-class compiler_base(object):
- def __init__(self, verbose=False, quiet=False):
- class Options:
- pass
- self.options = Options()
- self.verbose = verbose
- self.quiet = quiet
-
- def shell(self, *args, **kwds):
- kwds.setdefault('verbose', self.verbose)
- kwds.setdefault('quiet', self.quiet)
-
- return xqt(*args, **kwds)
-
- def set_options(self):
- pass
-
- def save_options(self, filename):
- f = open(filename, "w")
- ks = self.options.__dict__.keys()
- ks.sort()
- for k in ks:
- if k[0] != '_': # do not save __builtins__
- v = self.options.__dict__[k]
- f.write(k+'='+repr(v) + "\n")
- f.close()
-
-
- def load_options(self,filename):
- f = open(filename)
- exec f in self.options.__dict__
- f.close()
-
- def find_in_src_dir(self, filename):
- # check first if the file exists in the felix directory
- # if it does, use it instead of the one in the local directory
- try:
- from config import src_dir
- except ImportError:
- pass
- else:
- f = os.path.join(src_dir, filename)
- if os.path.exists(f):
- return f
-
- return filename
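The save_options/load_options pair above is just a name=repr(value) round trip. A self-contained sketch of the same idea; the file name and option values are made up:

    opts = {'COM': 'g++', 'SIZEOF_INT': 4, 'HAVE_STDINT': True}

    # save: one "NAME=repr(value)" line per option (keys sorted, as above)
    f = open('tmp_options.py', 'w')
    for k in sorted(opts):
        f.write(k + '=' + repr(opts[k]) + '\n')
    f.close()

    # load: exec the file back into a fresh namespace
    restored = {}
    exec open('tmp_options.py').read() in restored
    print restored['COM'], restored['SIZEOF_INT'], restored['HAVE_STDINT']
    #  -> g++ 4 True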
diff --git a/fbuild_old/lib/fbuild/flxbuild/config_support.py b/fbuild_old/lib/fbuild/flxbuild/config_support.py
deleted file mode 100644
index b3033c9..0000000
--- a/fbuild_old/lib/fbuild/flxbuild/config_support.py
+++ /dev/null
@@ -1,66 +0,0 @@
-import os
-
-from fbuild.flxbuild.flxutil import xqt, ExecutionError
-
-def pr(f,x):
- print x
- print >> f, x
-
-def pa(f,this,s):
- try:
- x = s + "=" + repr(this[s])
- except (KeyError, EnvironmentError):
- print 'UNABLE TO FIND VALUE FOR ATTRIBUTE:', repr(s)
- print >> f, s, '= None # EDIT ME!!'
- else:
- pr(f,x)
-
-def pne(f,s):
- try:
- x = s + "=" + repr(this[s])
- except EnvironmentError:
- print "UNABLE TO FIND VALUE FOR ATTRIBUTE '"+s+"'"
- print >> f, s, '= None # EDIT ME!!'
- else:
- print >> f, x
-
-
-def cwrite(c):
- f = "config"+os.sep+""+c+"_config.py"
- print "--------------------------"
- print "Creating "+f
- f= open(f,"w")
- return f
-
-# needs to be conditionalised on Unix
-def locate_file(fname):
- print "Try to find",fname
- n = len(fname)
- cmd = "locate " + fname
- try:
- lines = xqt(cmd)
- except ExecutionError:
- print "Cannot execute Unix locate command to find file",fname
- return None
-
- candidates = []
- for line in lines:
- candidate = line.strip()
- if candidate[-n:] == fname:
- candidates.append(candidate[0:-n])
-
- if len(candidates) == 0:
- print "Cannot find directory containing file",fname
- return None
-
- if len(candidates) == 1:
- print "Found unique directory ",candidates[0],"containing file",fname
- return candidates[0]
- else:
- print "Found multiple directories containing file",fname
- s = candidates[0]
- for k in candidates:
- print "Dir: ",k
- if len(k) < len(s): s = k
- print "Using shortest one:",s
- return s
diff --git a/fbuild_old/lib/fbuild/flxbuild/cxx_base.py b/fbuild_old/lib/fbuild/flxbuild/cxx_base.py
deleted file mode 100644
index 3d2aff2..0000000
--- a/fbuild_old/lib/fbuild/flxbuild/cxx_base.py
+++ /dev/null
@@ -1,104 +0,0 @@
-import os
-
-from fbuild.flxbuild.c_cxx_base import c_cxx_base
-from fbuild.flxbuild.flxutil import ExecutionError
-
-class cxx_base(c_cxx_base):
- def __init__(self, *args, **kwds):
- super(cxx_base, self).__init__(*args, **kwds)
-
- self.options.EXT_SRC_MAIN = ".cxx"
- self.options.EXT_SRC_LIB = ".cpp"
-
-
- def detect_static_initialization(self):
- # find if we have static const init in class
- # [not so much an extension as a bug if we don't]
- try:
- self.build_string_program(r"""
-struct X {
- static const int i = 1;
-};
-
-int main(int argc, char** argv) {
- return 0;
-}
-""", 'tmp' + os.sep + 'check_inclass')
- self.options.HAVE_INCLASS_MEMBER_INITIALIZATION = True
- print "Inclass member initialisation supported"
- except ExecutionError:
- self.options.HAVE_INCLASS_MEMBER_INITIALIZATION = False
- print "Inclass member initialisation NOT supported"
-
-
- def detect_cmath_isnan(self):
- # find if we have BSD isnan in <cmath> (NAUGHTY!)
- try:
- self.build_string_program(r"""
-#include <cmath>
-
-int main(int argc, char** argv) {
- float f = 0.0;
- std::isnan(f);
- return 0;
-}
-""", 'tmp' + os.sep + 'nan_cmath')
- self.options.HAVE_CXX_ISNAN_IN_CMATH = True
- print "C++ isnan found in <cmath>"
- except ExecutionError:
- self.options.HAVE_CXX_ISNAN_IN_CMATH = False
-
- # find if we have BSD isinf in <cmath> (NAUGHTY!)
- try:
- self.build_string_program(r"""
-#include <cmath>
-
-int main(int argc, char** argv) {
- float f = 0.0;
- std::isinf(f);
- return 0;
-}
-""", 'tmp' + os.sep + 'nan_cmath')
- self.options.HAVE_CXX_ISINF_IN_CMATH = True
- print "C++ isinf found in <cmath>"
- except ExecutionError:
- self.options.HAVE_CXX_ISINF_IN_CMATH = False
-
- # find if we have BSD isfinite in <cmath> (NAUGHTY!)
- try:
- self.build_string_program(r"""
-#include <cmath>
-
-int main(int argc, char** argv) {
- float f = 0.0;
- std::isfinite(f);
- return 0;
-}
-""", 'tmp' + os.sep + 'nan_cmath')
- self.options.HAVE_CXX_ISFINITE_IN_CMATH = True
- print "C++ isfinite found in <cmath>"
- except ExecutionError:
- self.options.HAVE_CXX_ISFINITE_IN_CMATH = False
-
-
-
- def check_options(self):
- super(cxx_base, self).check_options()
-
- self.detect_static_initialization()
- self.detect_cmath_isnan()
-
- def report_isnan(self):
- opt = self.options
-
- if opt.HAVE_CXX_ISNAN_IN_CMATH:
- print "C++ isnan Support in : <cmath>"
- else:
- print "C++ isnan : EMULATED"
-
- if opt.HAVE_CXX_ISINF_IN_CMATH:
- print "C++ isinf Support in : <cmath>"
- elif opt.HAVE_CXX_ISFINITE_IN_CMATH:
- print "C++ isfinite Support in : <cmath>"
- else:
- print "C++ isinf : EMULATED"
diff --git a/fbuild_old/lib/fbuild/flxbuild/flxutil.py b/fbuild_old/lib/fbuild/flxbuild/flxutil.py
deleted file mode 100644
index 7dd5a88..0000000
--- a/fbuild_old/lib/fbuild/flxbuild/flxutil.py
+++ /dev/null
@@ -1,234 +0,0 @@
-# build system utility module
-import os
-import sys
-import glob
-import time
-import StringIO
-import shutil
-
-def filetime(f):
- try:
- return os.path.getmtime(f)
- except EnvironmentError:
- return 0
-
-# returns the time of the newest file of a set
-# if a file is missing, the time is in the future
-# (since we have no record of when it was deleted,
-# we assume it was very recently :)
-
-def newest_filetime(fs):
- m = 0
- for f in fs:
- x = filetime(f)
- if x == 0: return time.time()+1000.0
- m = max(m,x)
- return m
-
-# returns the time of the oldest file of a set
-# if a file is missing, the time is before the
-# birth of the universe .. well PC's anyhow :)
-
-def oldest_filetime(fs):
- m = 0
- for f in fs:
- x = filetime(f)
- if x == 0: raise MakeError # missing files not allowed
- m = max(m,x)
- return m
-
-def fmtime(t):
- s = "%04d %02d %02d %02d %02d %02d" % (time.localtime(t)[:6])
- return s
-
-def append_unique(s,x):
- if x not in s: return s+[x]
- else: return s
-
-def closure1(d,i,o):
- if i not in o:
- o = o + [i]
- e = d.get(i,[])
- for k in e:
- if k not in o:
- o = closure1(d,k,o)
- return o
-
-# d is a dictionary T -> T list
-# s is a list
-# closure (d,s) returns the closure of s wrt d
-#
-# normally d is a dependency map for packages
-# and s is set of root packages to be rebuilt
-# result is all the packages that need rebuild
-
-def closure(d,s):
- o = []
- for i in s:
- o = closure1(d,i,o)
- return o
-
-# given a map T -> T list
-# return the inverse map
-
-def invert(d):
- m = {}
- for k in d.keys():
- for v in d[k]:
- m[v] = append_unique(m.get(v,[]),k)
- return m
-
-def erasefile(f):
- try: os.unlink(f)
- except EnvironmentError: pass
-
-def unix2native(f):
- if os.path.splitdrive(f)[0] or f.startswith(os.sep):
- return f
-
- return os.path.join(*f.split('/'))
-
-def deletefile(f):
- erasefile(unix2native(f))
-
-def mkdirs(x):
- if x and not os.path.exists(x):
- os.makedirs(x)
-
-def erasedir(d):
- fs = glob.glob(d+os.sep+"*")
- for f in fs: erasefile(f)
- try: os.rmdir(d)
- except EnvironmentError: pass
-
-def execute(cmd, verbose=False, quiet=False, invert_result=False, log=None):
- if log is None: log = sys.stdout
-
- cmd = ' '.join(cmd)
-
- if verbose and not quiet: print >> log, '>', cmd
- log.flush()
-
- fout = os.popen(cmd, 'r')
- stdout = []
- try:
- for line in fout:
- stdout.append(line)
-
- if verbose and not quiet:
- log.write(line)
- finally:
- result = fout.close()
-
- log.flush()
-
- if invert_result:
- if result:
- result = 0
- else:
- result = 1
-
- if quiet < 2:
- if result and not verbose:
- print >> log, cmd, 'failed!'
-
- if result and not invert_result:
- print >> log, ' .. ERROR CODE', hex(result), ':', cmd
-
- log.flush()
-
- if result:
- raise ExecutionError(cmd, result)
-
- return stdout
-
-def xqt(*commands, **kwds):
- return execute(commands, **kwds)
-
-def file_exists(f):
- try:
- os.stat(f)
- return 1
- except EnvironmentError: return 0
-
-class Tee(object):
- def __init__(self, stdout=sys.stdout):
- self.stdout = stdout
- self.file = StringIO.StringIO()
-
- def write(self, s, quiet=0):
- if not quiet:
- self.stdout.write(s)
- self.file.write(s)
-
- def flush(self):
- self.stdout.flush()
- self.file.flush()
-
- def getvalue(self):
- return self.file.getvalue()
-
-def tee_cmd(cmd, stdout, bufsize=1024):
- # we try to use subprocess because we can get the exit code on windows
- try:
- import subprocess
- except ImportError:
- p = os.popen(' '.join(cmd), 'r', bufsize)
- while 1:
- buf = os.read(p.fileno(), bufsize)
- if buf:
- stdout.write(buf)
- else:
- break
- return p.close()
- else:
- p = subprocess.Popen(cmd,
- stdout=subprocess.PIPE,
- stderr=subprocess.STDOUT,
- bufsize=bufsize)
- while True:
- buf = os.read(p.stdout.fileno(), bufsize)
- if buf:
- stdout.write(buf)
- else:
- break
-
- return p.wait()
-
-
-class MakeError(EnvironmentError):
- def __init__(self, command=None, stdout=[], stderr=[]):
- self.command = command
- self.stdout = ''.join(stdout)
- self.stderr = ''.join(stderr)
-
- def __str__(self):
- s = []
- if self.command is not None:
- s.append('COMMAND: ' + self.command)
-
- if self.stdout:
- s.append('STDOUT:\n' + self.stdout)
-
- if self.stderr:
- s.append('STDERR:\n' + self.stderr)
-
- return '\n'.join(s)
-
-class ExecutionError(Exception):
- def __init__(self, command, returncode=None):
- self.command = command
- self.returncode = returncode
-
- def __str__(self):
- if self.returncode is None:
- return 'Command failed: %s' % self.command
- else:
- return 'Command failed [%s]: %s' % (self.returncode, self.command)
-
-class MissingFile(Exception):
- def __init__(self, filename):
- self.filename = filename
-
- def __str__(self):
- return 'File not found: ' + self.filename
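
Besides the shell plumbing, flxutil.py carries the dependency arithmetic the rest of the build relies on: closure() expands a set of root packages into everything reachable through a dependency map, and invert() flips a 'depends on' map into a 'needed by' map, so a changed package can drag its dependants into the rebuild. A small usage sketch with made-up package names (the import path is the old module shown above):

    from fbuild.flxbuild.flxutil import closure, invert

    # pkg -> list of packages it depends on (illustrative names)
    deps = {
        'flx_rtl':     ['flx_gc', 'flx_pthread'],
        'flx_gc':      ['flx_pthread'],
        'flx_pthread': [],
    }

    # everything needed to build flx_rtl, including flx_rtl itself
    print(closure(deps, ['flx_rtl']))        # ['flx_rtl', 'flx_gc', 'flx_pthread']

    # everything that must be rebuilt when flx_pthread changes
    print(closure(invert(deps), ['flx_pthread']))  # flx_pthread and its dependants
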
diff --git a/fbuild_old/lib/fbuild/flxbuild/gcc_class.py b/fbuild_old/lib/fbuild/flxbuild/gcc_class.py
deleted file mode 100644
index dc858cd..0000000
--- a/fbuild_old/lib/fbuild/flxbuild/gcc_class.py
+++ /dev/null
@@ -1,9 +0,0 @@
-from fbuild.flxbuild.c_base import c_base
-from fbuild.flxbuild.gnu_mixin import gnu_mixin
-
-class gcc(gnu_mixin, c_base):
- DEFAULT_COM = 'gcc'
-
- def check_options(self):
- c_base.check_options(self)
- gnu_mixin.check_options(self)
diff --git a/fbuild_old/lib/fbuild/flxbuild/gnu_mixin.py b/fbuild_old/lib/fbuild/flxbuild/gnu_mixin.py
deleted file mode 100644
index f107d99..0000000
--- a/fbuild_old/lib/fbuild/flxbuild/gnu_mixin.py
+++ /dev/null
@@ -1,731 +0,0 @@
-import os
-import sys
-
-import fbuild.flxbuild
-from fbuild.flxbuild.flxutil import ExecutionError
-from fbuild.flxbuild.c_cxx_base import c_cxx_base
-
-class gnu_mixin(object):
- DEFAULT_AR = 'ar -rc'
- DEFAULT_RANLIB = 'ranlib'
-
- def set_options(self, STRIP="strip", **kwds):
- """these options are model dependent and have to
- be supplied by the client"""
-
- super(gnu_mixin, self).set_options(**kwds)
-
- opt = self.options
- opt.STRIP = STRIP
-
- self.lang = opt.COM
-
- if opt.model == "nocygwin":
- opt.COM = opt.COM + "-mno-cygwin"
- opt.EXT_STATIC_OBJ = "_static.obj" # temporary hack
- opt.EXT_SHARED_OBJ = "_dynamic.obj" # temporary hack
-
- ########
-
- def detect_model(self):
- opt = self.options
-
- # default options
- opt.EXT_STATIC_OBJ = "_static.o"
- opt.EXT_SHARED_OBJ = "_dynamic.o"
- opt.EXT_LIB = ".a"
- opt.EXT_EXE = ""
- opt.EXT_SHLIB = ".so"
- opt.EXT_DYLIB = ".so"
- opt.SHLIB_DIR = os.path.join("lib", "rtl")
-
- opt.SPEC_COMPILE_OBJ = "-c"
- opt.SPEC_OBJ_FILENAME = "-o "
- opt.SPEC_EXE_FILENAME = "-o "
- opt.SPEC_DEFINE = "-D"
- opt.SPEC_INCLUDE = "-I"
- opt.SPEC_LIBPATH = "-L"
- opt.SPEC_LIB = "-l"
- opt.SPEC_AR_OUT_FILENAME = ""
- opt.DEBUG_FLAGS = "-g"
- #opt.OPTIMISE = "-O3 -fomit-frame-pointer --inline -DNDEBUG"
- opt.OPTIMISE = "-O3 -fomit-frame-pointer --inline"
-
- # RF: a hack to make the VS build work. sort of an "any last words"
- # argument/hook before link directives.
- opt.PRE_LINK_FLAGS = ""
-
- super(gnu_mixin, self).detect_model()
-
- if opt.model in ["cygwin", "win32", "win64"]:
- opt.EXT_EXE = ".exe"
- opt.EXT_SHLIB = ".dll"
- opt.EXT_DYLIB = opt.EXT_SHLIB
- opt.SHLIB_DIR = "bin"
-
- if opt.model in ["win32", "win64"]:
- opt.EXT_STATIC_OBJ = "_static.obj"
- opt.EXT_SHARED_OBJ = "_dynamic.obj"
-
- if opt.model == "osx":
- # flags for rtl & flx executable compilation & linking taken from
- # http://fink.sourceforge.net/doc/porting/shared.php with many thanks
-
- # differentiated now because osx treats dylibs
- # and plugin-type (bundle) libraries differently.
- opt.EXT_DYLIB = ".dylib"
-
-
- def detect_warning_flags(self):
- opt = self.options
-
- # find if we have g++ with -Wno-invalid-offsetof
- #
- # NOTE: this is a hack .. Felix generated C++ triggers offsetof() errors.
- # We HAVE to detect whether the switch to turn them off is available
- # first, and if not use -w; otherwise we use -Wall -Wno-invalid-offsetof,
- # because these errors must be tolerated in Felix generated C++
- #
- # But the error can't occur in C, and specifying the option
- # causes gcc to barf with a warning
- #
- if self.lang == "g++":
- try:
- self.compile_dummy_main(CFLAGS="-Wno-invalid-offsetof")
- opt.NO_INVALID_OFFSETOF_WARNING = "-Wall -Wno-invalid-offsetof"
- print "-Wno-invalid-offsetof supported"
- except ExecutionError:
- opt.NO_INVALID_OFFSETOF_WARNING = "-w"
- else:
- opt.NO_INVALID_OFFSETOF_WARNING = "-Wall"
-
- # find if we have g++ with -Wfatal-errors
- try:
- self.compile_dummy_main(CFLAGS="-Wfatal-errors")
- opt.NO_INVALID_OFFSETOF_WARNING = opt.NO_INVALID_OFFSETOF_WARNING + " -Wfatal-errors"
- print "-Wfatal-errors supported"
- except ExecutionError:
- pass
-
- ########
-
- def detect_gcc_builtin_expect(self):
- opt = self.options
-
- filename = self.write_src(r"""
-int main(int argc, char** argv) {
- if(__builtin_expect(1,1));
- return 0;
-}
-""", 'tmp' + os.sep + 'gnu_builtin')
-
- try:
- self.build_static_program(filename)
- opt.HAVE_GNU_BUILTIN_EXPECT= True
- print "gcc __builtin_expect() support detected"
- except ExecutionError:
- opt.HAVE_GNU_BUILTIN_EXPECT= False
-
- def detect_named_registers(self):
- opt = self.options
-
- filename = self.write_src(r"""
-#include <stdio.h>
-register void *sp __asm__ ("esp");
-
-int main(int argc, char** argv) {
- printf("Sp = %p\n",sp);
- return 0;
-}
-""", 'tmp' + os.sep + 'gnu_x86')
-
- # find if we have gnu on 32 bit x86 platform with named registers
- try:
- self.build_static_program(filename)
- opt.HAVE_GNU_X86 = True
- print "gnu x86 32 bit support detected"
- except ExecutionError:
- opt.HAVE_GNU_X86 = False
-
- filename = self.write_src(r"""
-#include <stdio.h>
-register void *sp __asm__ ("rsp");
-
-int main(int argc, char** argv) {
- printf("Sp = %p\n",sp);
- return 0;
-}
-""", 'tmp' + os.sep + 'gnu_x86_64')
-
- # find if we have gnu on 64 bit x86 platform with named registers
- try:
- self.build_static_program(filename)
- opt.HAVE_GNU_X86_64 = True
- print "gnu x86 64 bit support detected"
- except ExecutionError:
- opt.HAVE_GNU_X86_64 = False
-
- # X86_64 dominates X86
- if opt.HAVE_GNU_X86 and opt.HAVE_GNU_X86_64:
- opt.HAVE_GNU_X86 = False
-
- if opt.HAVE_GNU_X86:
- opt.USE_REGPARM3 = True
- print "regparm3 supported"
- else:
- opt.USE_REGPARM3 = False
-
-
- def detect_computed_gotos(self):
- opt = self.options
-
- filename = self.write_src("""
-int main(int argc, char** argv) {
- void *label = &&label2;
- goto *label;
- label1:
- return 1;
- label2:
- return 0;
-}
-""", 'tmp' + os.sep + 'cgoto')
-
-
- # find if we have g++ supporting computed jumps
- try:
- self.build_static_program(filename)
- opt.HAVE_CGOTO = True
- print "Computed goto supported"
- except ExecutionError:
- opt.HAVE_CGOTO = False
-
- filename = self.write_src("""
-int main(int argc, char** argv) {
- void *label = &&label2;
- __asm__(".global fred");
- __asm__("fred:");
- __asm__(""::"g"(&&label1));
- goto *label;
- label1:
- return 1;
- label2:
- return 0;
-}
-""", 'tmp' + os.sep + 'asm_labels')
-
- # find if we have g++ supporting computed jumps and asm labels
- try:
- self.build_static_program(filename)
- opt.HAVE_ASM_LABELS = True
- print "Asm labels supported"
- except ExecutionError:
- opt.HAVE_ASM_LABELS = False
-
-
- def detect_openmp(self):
- # find if we can use -fopenmp without a warning
- try:
- self.compile_dummy_main(CFLAGS="-Werror -fopenmp")
- except ExecutionError:
- print "OpenMP based parallel programming not supported"
- else:
- print "OpenMP based parallel programming supported"
-
- self.options.HAVE_STATIC_OPENMP = True
- self.options.HAVE_SHARED_OPENMP = True
- self.options.OPENMP = "-fopenmp"
-
-
- def detect_PIC(self):
- # find if we can use -fPIC without a warning
- # if a warning is generated it will say something like
- # 'all code is relocatable on this platform'
- # so we make that into an error, detect it, and say -fPIC only
- # if it would not generate this warning
- try:
- self.compile_dummy_main(CFLAGS="-Werror -fPIC")
- except ExecutionError:
- print "All code is position independent"
-
- self.options.HAVE_PIC = False
- else:
- print "-fPIC supported"
-
- self.options.HAVE_PIC = True
- self.options.PIC = "-fPIC"
-
- def construct_compiler_commands(self):
- opt = self.options
-
- if opt.model == "osx":
- #COMPILE_DYNAMIC_RTL = "-bundle -c"
- COMPILE_DYNAMIC_RTL = opt.SPEC_COMPILE_OBJ + " -fno-common"
- # make a dynamic library (not loadable via APIs like dlcompat)
- LINK_DYNAMIC_RTL = "-dynamiclib"
-
- COMPILE_DYNAMIC_MAIN = opt.SPEC_COMPILE_OBJ
- LINK_DYNAMIC_MAIN = ""
-
- COMPILE_DYNAMIC_FLX = opt.SPEC_COMPILE_OBJ + " -bundle -fno-common"
- LINK_DYNAMIC_FLX = "-bundle"
- else:
- COMPILE_DYNAMIC_RTL = opt.SPEC_COMPILE_OBJ
- LINK_DYNAMIC_RTL = "-shared"
-
- COMPILE_DYNAMIC_MAIN = opt.SPEC_COMPILE_OBJ
- LINK_DYNAMIC_MAIN = ""
-
- COMPILE_DYNAMIC_FLX = opt.SPEC_COMPILE_OBJ
- LINK_DYNAMIC_FLX = "-shared"
-
- COM = opt.COM
-
- opt.CCOBJ_DYNAMIC_FLX = COM + ' ' + COMPILE_DYNAMIC_FLX
- opt.CCLINK_DYNAMIC_FLX = COM + ' ' + LINK_DYNAMIC_FLX
-
- opt.CCOBJ_DYNAMIC_RTL = COM + ' ' + COMPILE_DYNAMIC_RTL
- opt.CCLINK_DYNAMIC_RTL = COM + ' ' + LINK_DYNAMIC_RTL
-
- opt.CCOBJ_DYNAMIC_MAIN = COM + ' ' + COMPILE_DYNAMIC_MAIN
- opt.CCLINK_DYNAMIC_MAIN = COM + ' ' + LINK_DYNAMIC_MAIN
-
- opt.CCOBJ_STATIC_FLX = COM + ' ' + opt.SPEC_COMPILE_OBJ
- opt.CCOBJ_STATIC_RTL = COM + ' ' + opt.SPEC_COMPILE_OBJ
- opt.CCOBJ_STATIC_MAIN = COM + ' ' + opt.SPEC_COMPILE_OBJ
-
- opt.CCOBJ_DYNAMIC_RTL = opt.CCOBJ_DYNAMIC_RTL + ' ' + opt.NO_INVALID_OFFSETOF_WARNING
- opt.CCOBJ_DYNAMIC_FLX = opt.CCOBJ_DYNAMIC_FLX + ' ' + opt.NO_INVALID_OFFSETOF_WARNING
-
- opt.CCOBJ_STATIC_RTL = opt.CCOBJ_STATIC_RTL + ' ' + opt.NO_INVALID_OFFSETOF_WARNING
- opt.CCOBJ_STATIC_FLX = opt.CCOBJ_STATIC_FLX + ' ' + opt.NO_INVALID_OFFSETOF_WARNING
-
- opt.CCLINK_STATIC = COM
-
-
- def detect_pthreads(self):
- opt = self.options
-
- # Note that this pthread fragment actually tests the return value
- # as incorrectly compiled threaded code on solaris links and runs,
- # but returns errors.
- filename = self.write_src("""
-#include <pthread.h>
-
-void* start(void* data)
-{
- return NULL;
-}
-
-int main(int argc, char** argv) {
- pthread_t thr;
- pthread_attr_t attr;
- pthread_attr_init(&attr);
- pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_DETACHED);
- int res = pthread_create(&thr, &attr, start, NULL);
- pthread_attr_destroy(&attr);
- return res;
-}
-""", 'tmp' + os.sep + 'pthreads')
-
- # find out how to do pthreads
- opt.HAVE_PTHREADS = False
- opt.PTHREAD_SWITCH = None
- for switch in [' ', '-lpthread ', '-pthread ', '-pthreads ']:
- try:
- lines = self.run_static_program(filename, LDFLAGS=switch)
- except ExecutionError:
- pass
- else:
- opt.HAVE_PTHREADS = True
- opt.PTHREAD_SWITCH = switch
- break
-
- if opt.HAVE_PTHREADS:
- print "Posix Threads supported with", opt.PTHREAD_SWITCH
- else:
- print "Posix Threads not supported"
-
-
- def detect_mmap(self):
- opt = self.options
-
- opt.HAVE_MMAP = self.check_header_exists('sys/mman.h')
-
- if not opt.HAVE_MMAP:
- print "mmap not supported"
- return
-
- print "mmap supported"
-
- opt.HAVE_PROT_EXEC = self.check_macro_defined('PROT_EXEC', 'sys/mman.h')
- opt.HAVE_PROT_READ = self.check_macro_defined('PROT_READ', 'sys/mman.h')
- opt.HAVE_PROT_WRITE = self.check_macro_defined('PROT_WRITE', 'sys/mman.h')
- opt.HAVE_MAP_DENYWRITE = self.check_macro_defined('MAP_DENYWRITE', 'sys/mman.h')
- opt.HAVE_MAP_ANON = self.check_macro_defined('MAP_ANON', 'sys/mman.h')
- opt.HAVE_MAP_ANONYMOUS = self.check_macro_defined('MAP_ANONYMOUS', 'sys/mman.h')
- opt.HAVE_MAP_FILE = self.check_macro_defined('MAP_FILE', 'sys/mman.h')
- opt.HAVE_MAP_FIXED = self.check_macro_defined('MAP_FIXED', 'sys/mman.h')
- opt.HAVE_MAP_HASSEMAPHORE = self.check_macro_defined('MAP_HASSEMAPHORE', 'sys/mman.h')
- opt.HAVE_MAP_SHARED = self.check_macro_defined('MAP_SHARED', 'sys/mman.h')
- opt.HAVE_MAP_PRIVATE = self.check_macro_defined('MAP_PRIVATE', 'sys/mman.h')
- opt.HAVE_MAP_NORESERVE = self.check_macro_defined('MAP_NORESERVE', 'sys/mman.h')
- opt.HAVE_MAP_LOCKED = self.check_macro_defined('MAP_LOCKED', 'sys/mman.h')
- opt.HAVE_MAP_GROWSDOWN = self.check_macro_defined('MAP_GROWSDOWN', 'sys/mman.h')
- opt.HAVE_MAP_32BIT = self.check_macro_defined('MAP_32BIT', 'sys/mman.h')
- opt.HAVE_MAP_POPULATE = self.check_macro_defined('MAP_POPULATE', 'sys/mman.h')
- opt.HAVE_MAP_NONBLOCK = self.check_macro_defined('MAP_NONBLOCK', 'sys/mman.h')
-
- try:
- output = self.run_static_string_program("""
-#include <sys/mman.h>
-#include <fcntl.h>
-#include <stdio.h>
-#include <errno.h>
-#include <stdlib.h>
-
-int main(int argc, char** argv)
-{
- size_t n = 10000;
- int fd;
- fd = open("/dev/zero", O_RDWR);
- if (fd == -1) {
- return 1;
- }
- void *data =
- mmap
- (
- NULL,n,
- PROT_WRITE | PROT_READ,
- MAP_PRIVATE,
- fd,0
- )
- ;
- if (data == MAP_FAILED)
- {
- return 1;
- }
- int res = munmap(data,n);
- if (res != 0)
- {
- return 1;
- }
- return 0;
-}
-""", 'tmp'+os.sep+'mmap')
- except ExecutionError:
- pass
- else:
- opt.HAVE_MMAP_DEV_ZERO = True
-
-
- def detect_win_dynamic_loading(self):
- opt = self.options
-
- # check if can get LoadLibrary to work
- basename = 'tmp' + os.sep + 'win32_dummy_lib'
- # RF: This can be compiled as both c and c++ these days it seems
- # hence conditional extern "C"
- dummy_lib_filename = self.write_lib_src("""
-#ifdef __cplusplus
-extern "C"
-#endif
-__declspec(dllexport) int fred(int argc,char** argv) { return 0; }
-""", basename)
-
- basename = 'tmp' + os.sep + 'win32_dummy_main'
- dummy_main_filename = self.write_src("""
-#include <windows.h>
-#include <stdlib.h>
-
-int main(int argc,char** argv) {
- HMODULE lib = LoadLibrary(argv[1]);
- void *fred;
- if(!lib) exit(1);
- fred = (void*)GetProcAddress(lib,"fred");
- if(!fred) exit(1);
- return 0;
-}
-""", basename)
-
- try:
- dll = self.build_shared_dll(dummy_lib_filename)
- exe = self.build_shared_program(dummy_main_filename)
-
- self.shell(exe, dll)
- except ExecutionError:
- pass
- else:
- opt.SUPPORT_DYNAMIC_LOADING = True
- opt.HAVE_LOADLIBRARY = True
- print "Dynamic Loading Supported (with LoadLibrary)"
-
-
- def detect_posix_dynamic_loading(self):
- opt = self.options
-
- basename = 'tmp' + os.sep + 'dummy_lib'
- # RF: This can be compiled as both c and c++ these days it seems
- # hence conditional extern "C"
- # P.S. This lovingly hand crafted function doesn't seem to be called
- proggy = """
-#ifdef __cplusplus
-extern "C"
-#endif
-int fred(int argc, char** argv) { return 0; }
-"""
-
- dummy_lib_filename = self.write_lib_src( proggy, basename)
-
- basename = 'tmp' + os.sep + 'dummy_main'
- dummy_main_filename = self.write_src(r"""
-#include <dlfcn.h>
-#include <stdlib.h>
-
-int main(int argc, char** argv) {
- void *lib = dlopen(argv[1],RTLD_NOW);
- void *fred = 0;
- if(!lib) exit(1);
- fred = dlsym(lib,"fred");
- if(!fred) exit(1);
- return 0;
-}
-""", basename)
-
- try:
- dll = self.build_shared_dll(dummy_lib_filename)
- exe = self.build_shared_program(dummy_main_filename)
- self.shell(exe, dll)
- opt.SUPPORT_DYNAMIC_LOADING = True
- opt.HAVE_DLOPEN = True
- except ExecutionError:
- try: # nope, try with -ldl
- exe = self.build_shared_program([dummy_main_filename], libs=['dl'])
- opt.HAVE_DLOPEN = True
- opt.SUPPORT_DYNAMIC_LOADING = True
- opt.DLLIB = "dl"
- print "Dynamic Loading Supported (with -ldl)"
- except ExecutionError:
- if opt.model == "osx":
- opt.SUPPORT_DYNAMIC_LOADING = True # pre 10.3, we do our own dlopen
-
- def detect_dynamic_loading(self):
- opt = self.options
-
- #check if we can get dlopen to work without -ldl (BSD, Cygwin don't need)
- opt.SUPPORT_DYNAMIC_LOADING = False
- opt.HAVE_DLOPEN = False
- opt.HAVE_LOADLIBRARY = False
- opt.DLLIB = ""
-
- if opt.model in ["win32", "win64"]:
- self.detect_win_dynamic_loading()
- else:
- self.detect_posix_dynamic_loading()
-
- if not opt.SUPPORT_DYNAMIC_LOADING:
- print "DYNAMIC LOADING NOT SUPPORTED"
- print "Temporarily this is mandatory [during config debugging]"
- sys.exit(1)
-
-
- def detect_sockets(self):
- opt = self.options
-
- filename = self.write_src(r"""
-#include <sys/types.h>
-#include <sys/socket.h>
-extern "C" int accept(int s, struct sockaddr *addr, socklen_t *addrlen);
-int main(int argc, char** argv) { return 0; }
-""", 'tmp' + os.sep + 'have_socketlen_t')
-
- try:
- self.compile_static_main([filename])
- opt.FLX_SOCKLEN_T = "socklen_t"
- except ExecutionError:
- filename = self.write_src(r"""
-#include <sys/types.h>
-#include <sys/socket.h>
-extern "C" int accept(int s, struct sockaddr *addr, unsigned int *addrlen);
-int main(int argc, char** argv) { return 0; }
-""", 'tmp' + os.sep + 'have_socketlen_t_is_uint')
-
- try:
- self.compile_static_main([filename])
- opt.FLX_SOCKLEN_T = "unsigned int"
- except ExecutionError:
- filename = self.write_src(r"""
-#include <sys/types.h>
-#include <sys/socket.h>
-extern "C" int accept(int s, struct sockaddr *addr, int *addrlen);
-int main(int argc, char** argv) { return 0; }
-""", 'tmp' + os.sep + 'have_socketlen_t_is_int')
-
- try:
- self.compile_static_main([filename])
- opt.FLX_SOCKLEN_T = "int"
- except ExecutionError:
- opt.FLX_SOCKLEN_T = "int"
- print "socklen_t =", opt.FLX_SOCKLEN_T
-
-
- def detect_kqueues(self):
- opt = self.options
-
- filename = self.write_src(r"""
-#include <sys/types.h> // from the kqueue manpage
-#include <sys/event.h> // kernel events
-#include <sys/time.h> // timespec (kevent timeout)
-
-int
-main(int argc, char** argv) {
- int kq = kqueue();
- return (-1 == kq) ? 1 : 0;
-}
-""", 'tmp' + os.sep + 'kqt')
-
- # see what sort of demuxers we support. right now just testing for
- # kqueues, to unbreak the osx 10.2.8 build. I need demux to be extracted
- # by this point, so I've added a line to configure to do that. That
- # required me to remove the flx_demux.pak's dependence on the config
- # directory (not created at configure time). also had to create a fake
- # flx_rtl_config.h in tmp/
-
- # now that kqueue demuxer uses condition vars and locks for a clean
- # takedown using this method of config is a nightmare before config
- # because the pthread pak file depends on the config results. for now
- # I've replaced the whole lot with a simple kqueue+main programme.
- try:
- # basically a un*x test, note the non portable path separators and
- # gcc style switches
- self.compile_static_main([filename])
- opt.HAVE_KQUEUE_DEMUXER = True
- except ExecutionError:
- opt.HAVE_KQUEUE_DEMUXER = False
-
- print "HAVE_KQUEUE_DEMUXER =", opt.HAVE_KQUEUE_DEMUXER
-
- def detect_epoll(self):
- opt = self.options
-
- filename = self.write_src(r"""
-#include <sys/epoll.h>
-
-int
-main(int argc, char** argv) {
- int efd = epoll_create(20);
- return (-1 == efd) ? 1 : 0;
-}
-""", 'tmp' + os.sep + 'epolltest')
-
- try:
- lines = self.run_static_program(filename)
- except ExecutionError:
- opt.HAVE_EPOLL = False
- else:
- opt.HAVE_EPOLL = True
-
- print "HAVE_EPOLL=", opt.HAVE_EPOLL
-
- def detect_strip(self):
- opt = self.options
-
- # see if we have strip: it isn't considered essential
- filename = self.compile_dummy_main()
- if opt.STRIP:
- try:
- self.shell(opt.STRIP, filename)
- except ExecutionError:
- opt.STRIP = "true strip"
- else:
- opt.STRIP = "true strip"
-
-
- def detect_ar(self):
- opt = self.options
-
- # see if we have ar
- filenames = self.compile_dummy_main()
- self.shell(opt.AR, os.path.join("tmp", "dummy.a"), *filenames)
-
- # see if we have ranlib, it isn't considered essential
- # (a totally brain dead Unix idea: AR should do this)
- try:
- self.shell(opt.RANLIB, os.path.join("tmp", "dummy.a"))
- except ExecutionError:
- opt.RANLIB = "true ranlib"
-
-
- def detect_compiler_options(self):
- self.detect_warning_flags()
- self.detect_PIC()
- self.construct_compiler_commands()
- self.detect_dynamic_loading()
- self.detect_openmp()
-
-
- def check_options(self):
- self.detect_gcc_builtin_expect()
- self.detect_named_registers()
- self.detect_computed_gotos()
- self.detect_pthreads()
- self.detect_mmap()
- self.detect_sockets()
- # could just replace this with header_exists("sys/event.h") but
- # it ain't broke.
- self.detect_kqueues()
- self.options.HAVE_POLL = self.check_header_exists("poll.h")
- self.detect_epoll()
- # nice one, Sun, no one else would ever call a header file "port.h"
- self.options.HAVE_EVTPORTS = self.check_header_exists("port.h")
- print "HAVE_POLL =", self.options.HAVE_POLL
- print "HAVE_EPOLL =", self.options.HAVE_EPOLL
- print "HAVE_EVTPORTS =", self.options.HAVE_EVTPORTS
-
- self.detect_strip()
- self.detect_ar()
-
- ####
-
- def link_thing(self, *args, **kwds):
- opt = self.options
-
- # strip off the lib from the start of the libraries
- libs = []
- for lib in kwds.get('libs', []):
- if lib[0:3] == 'lib': lib = lib[3:]
- libs.append(lib)
- kwds['libs'] = libs
-
- if opt.__dict__.get('HAVE_PTHREADS', False):
- if 'LDFLAGS' in kwds:
- kwds['LDFLAGS'] += ' ' + opt.PTHREAD_SWITCH
- else:
- kwds['LDFLAGS'] = opt.PTHREAD_SWITCH
-
- return super(gnu_mixin, self).link_thing(*args, **kwds)
-
-
- def link_shared_thing(self, *args, **kwds):
- if self.options.DLLIB:
- libs = kwds.get('libs', [])[:]
- libs.append(self.options.DLLIB)
- kwds['libs'] = libs
-
- return super(gnu_mixin, self).link_thing(*args, **kwds)
-
-
- def report_config(self):
- super(gnu_mixin, self).report_config()
- opt = self.options
-
- print
- if opt.SUPPORT_DYNAMIC_LOADING:
- if opt.HAVE_DLOPEN:
- if opt.DLLIB:
- print "Dynamic Loading Supported : with dlopen() in -l", opt.DLLIB
- else:
- print "Dynamic Loading Supported : with dlopen() [native]"
- if opt.HAVE_LOADLIBRARY:
- print "Dynamic Loading Supported : with LoadLibrary"
- else:
- print "Dynamic Loading : NOT SUPPORTED"
- print
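
A recurring shape in the probes above (pthreads, -fPIC, dlopen) is "first switch that works wins": try candidate flags in order and keep the first one with which the probe program both links and runs. A reduced standalone version of the pthread case, using subprocess in place of the old run_static_program helper (the compiler name, probe source path and output name are illustrative):

    import subprocess

    def first_working_switch(cc, src, switches):
        # return the first link switch with which src builds and the
        # resulting program exits with status 0, or None if none work
        for sw in switches:
            cmd = [cc, src, '-o', 'pthread_probe'] + sw.split()
            if subprocess.call(cmd) != 0:
                continue
            if subprocess.call(['./pthread_probe']) == 0:
                return sw
        return None

    switch = first_working_switch('gcc', 'pthreads.c',
                                  ['', '-lpthread', '-pthread', '-pthreads'])
    print('Posix Threads supported with %r' % switch)
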
diff --git a/fbuild_old/lib/fbuild/flxbuild/gxx_class.py b/fbuild_old/lib/fbuild/flxbuild/gxx_class.py
deleted file mode 100644
index 162cfb8..0000000
--- a/fbuild_old/lib/fbuild/flxbuild/gxx_class.py
+++ /dev/null
@@ -1,35 +0,0 @@
-import os
-import sys
-
-import fbuild.flxbuild
-from fbuild.flxbuild.cxx_base import cxx_base
-from fbuild.flxbuild.gnu_mixin import gnu_mixin
-from fbuild.flxbuild.flxutil import ExecutionError
-
-class gxx(gnu_mixin, cxx_base):
- DEFAULT_COM = 'g++'
-
- def check_options(self):
- cxx_base.check_options(self)
- gnu_mixin.check_options(self)
-
- self.detect_gxx_template_extensions()
-
-
- def detect_gxx_template_extensions(self):
- # find if we have g++ support for the ext/ STL extensions
- try:
- self.build_string_program(r"""
-#include <iostream>
-
-// we only bother to check the include file exists
-#include <ext/hash_map>
-using namespace __gnu_cxx;
-
-int main(int argc,char** argv) {
- return 0;
-}
-""", 'tmp' + os.sep + 'gnu_hash')
- self.options.HAVE_STL_GNU_CXX = True
- print "Gnu ext/ templates supported"
- except ExecutionError:
- self.options.HAVE_STL_GNU_CXX = False
diff --git a/fbuild_old/lib/fbuild/flxbuild/iscrutil.py b/fbuild_old/lib/fbuild/flxbuild/iscrutil.py
deleted file mode 100644
index f14285f..0000000
--- a/fbuild_old/lib/fbuild/flxbuild/iscrutil.py
+++ /dev/null
@@ -1,185 +0,0 @@
-import sys
-import os
-import pickle
-
-from fbuild.flxbuild.flxutil import unix2native
-import interscript.frames.inputf
-
-class setup_test:
- def __init__(self, iframe, root,
- zfill_pattern=[1],
- zfill_default=2):
- self.iframe = iframe
- self.root = root
- self.zfill_pattern = zfill_pattern
- self.zfill_default = zfill_default
-
- self.native_root = unix2native(self.root)
- self.levels = []
- self.testcount = 0
- self.argcount = 0
-
- self.names_written = {}
-
- self.katfile = self.root + 'categories'
-
- if os.path.exists(self.katfile):
- f = open(self.katfile)
- try:
- self.registry_in = pickle.load(f)
- finally:
- f.close()
- else:
- self.registry_in = {}
-
- self.registry_out = {}
-
-
- def head(self, level, title=None):
- if not self.levels and level > 1:
- self.levels = [1]
-
- self.testcount = 0
- self.argcount = 0
-
- while len(self.levels) < level:
- self.levels.append(0)
-
- self.levels[level - 1] = self.levels[level - 1] + 1
- self.levels = self.levels[:level]
-
- if title is None:
- title = self.root + self.level_str()
-
- return self.iframe.head(level, title)
-
-
- def level_str(self):
- levels = []
- for i in range(len(self.levels)):
- try:
- z = self.zfill_pattern[i]
- except IndexError:
- z = self.zfill_default
-
- levels.append(str(self.levels[i]).zfill(z))
-
- return '.'.join(levels)
-
-
- def filename(self):
- return self.root + self.level_str()
-
-
- def tangler(self, name,
- extension='',
- filetype=interscript.frames.inputf.deduce):
-
- path = name + extension
- if path in self.names_written:
- print 'file:', path, 'already created!'
-
- # XXX: NEED TO GENERATE A NEW FILE NAME
- # XXX: what's the proper way to error out an interscript file?
- sys.exit(1)
-
- h = self.iframe.tangler(path, filetype)
- self.names_written[path] = h
-
- return h
-
-
- def test(self, *args, **kwds):
- name = '%s-%s' % (self.filename(), self.testcount)
-
-
- # if categories are specified, write them out
- categories = []
- if 'categories' in kwds:
- categories = kwds['categories']
- del kwds['categories']
-
- # construct the tangler
- tangler = apply(self.tangler, (name,) + args, kwds)
- for cat in categories: self.kat(tangler,cat)
-
- self.testcount = self.testcount + 1
- return tangler
-
- def expect(self):
- name = '%s-%s' % (self.filename(), self.testcount - 1)
-
- return self.tangler(name, '.expect', 'data')
-
-
- def args(self, arguments):
- name = '%s-%s-%s' % (self.filename(), self.testcount - 1, self.argcount)
- self.argcount = self.argcount + 1
-
- a = self.tangler(name, '.args', 'data')
-
- select(a)
- tangle(arguments)
- doc()
-
- return a
-
-
- def test_args(self, arglist, *args, **kwds):
- t = apply(self.test, args, kwds)
-
- for arguments in arglist:
- self.args(arguments)
-
- return t
-
-
- def expect_args(self, arguments):
- self.args(arguments)
-
- name = '%s-%s-%s' % (self.filename(), self.testcount - 1, self.argcount - 1)
-
- return self.tangler(name, '.argexpect', 'data')
-
-
- def kat(self, tangler, code):
- tangler.writeline(
- "//Check " + code,
- self.iframe.original_filename,
- self.iframe.original_count
- )
-
- f = tangler.sink.filename
- ff = f.split('/')[-1][:-4]
- v = self.registry_out.get(code,[])
- if ff not in v:
- self.iframe.set_anchor(ff)
- v.append(ff)
- self.registry_out[code]=v
-
-
- def emit_katlist(self):
- self.iframe.begin_list("keyed")
- keys = self.registry_in.keys()
- keys.sort()
- for k in keys:
- v = self.registry_in[k]
- self.iframe.item(k)
- first = 1
- for i in v:
- if first: first = 0
- else: self.iframe.weave(", ")
- self.iframe.ref_anchor(i)
- self.iframe.end_list()
-
-
- def write_katfile(self):
- dirname = os.path.split(self.katfile)[0]
- if not os.path.exists(dirname):
- os.makedirs(dirname)
-
- f = open(self.katfile, 'w')
- try:
- pickle.dump(self.registry_out, f)
- finally:
- f.close()
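
Most of setup_test is bookkeeping for predictable test-case names: heading levels are tracked in a list and joined with zero padding, so the generated files sort in document order. The numbering on its own, as a sketch (the widths mirror the zfill_pattern and zfill_default defaults above):

    def level_str(levels, zfill_pattern=(1,), zfill_default=2):
        # pad each heading level; the first level gets width 1, the rest width 2
        parts = []
        for i, n in enumerate(levels):
            try:
                z = zfill_pattern[i]
            except IndexError:
                z = zfill_default
            parts.append(str(n).zfill(z))
        return '.'.join(parts)

    print(level_str([1]))          # 1
    print(level_str([1, 1]))       # 1.01
    print(level_str([3, 12, 4]))   # 3.12.04
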
diff --git a/fbuild_old/lib/fbuild/flxbuild/msvc_mixin.py b/fbuild_old/lib/fbuild/flxbuild/msvc_mixin.py
deleted file mode 100644
index 839acab..0000000
--- a/fbuild_old/lib/fbuild/flxbuild/msvc_mixin.py
+++ /dev/null
@@ -1,215 +0,0 @@
-import os
-import sys
-import re
-
-import fbuild.flxbuild
-from fbuild.flxbuild.c_cxx_base import c_cxx_base
-from fbuild.flxbuild.flxutil import ExecutionError
-
-class msvc_mixin(object):
- DEFAULT_COM = 'cl'
- DEFAULT_AR = 'lib'
-
- def set_options(self, **kwds):
- """these options are model dependent and have to
- be supplied by the client"""
-
- super(msvc_mixin, self).set_options(**kwds)
-
- opt = self.options
-
- # RF: add /GR to generate RTTI information; if this isn't on, we get
- # access violations when doing dynamic_casts. Of course, these access
- # violations are thrown as exceptions, which can be caught with
- # catch(std::exception& e). We also don't want the logo.
- # OF COURSE /GR makes no sense if this happens to be a
- # c compiler. let's hope it doesn't error out.
- opt.COM = self.DEFAULT_COM + " /nologo /GR"
- opt.AR = self.DEFAULT_AR + " /nologo"
-
- ########
-
- def detect_model(self):
- super(msvc_mixin, self).detect_model()
-
- if self.options.model == "detect":
- self.options.model = "win32"
-
-
- def detect_compiler_options(self):
- self.detect_compiler_version()
- self.detect_warning_flags()
- self.construct_compiler_commands()
- self.detect_openmp()
-
-
- def detect_compiler_version(self):
- w, r, e = os.popen3('cl')
- output = e.read()
-
- match = re.search(r'Optimizing Compiler Version ([\d.]+) for', output)
-
- if match:
- self.options.VERSION = [int(i) for i in match.group(1).split('.')]
- else:
- self.options.VERSION = [0]
-
- print 'COMPILER VERSION:', self.options.VERSION
-
-
- # CHECK_OPTIONS FOR CL.EXE IN CASE YOU WERE WONDERING
- # RIGHT NOW WE'RE IN THE FILE msvc_mixin.py, SOMETIMES THAT CAN
- # BE CONFUSING.
- def check_options(self):
- opt = self.options
- COM = opt.COM
-
- # RF: Does windows have mmap? Let's say no.
- opt.HAVE_MMAP = False
- opt.RANLIB = "@rem nothing" # RF: better than none.
- opt.SUPPORT_DYNAMIC_LOADING = True
- opt.SPEC_COMPILE_OBJ = "/c"
- opt.SPEC_OBJ_FILENAME = "/Fo"
- opt.SPEC_EXE_FILENAME = "/Fe"
- opt.SPEC_DEFINE = "/D"
- opt.SPEC_INCLUDE = "/I"
- opt.SPEC_LIBPATH = "/LIBPATH:"
- opt.SPEC_LIB = "/DEFAULTLIB:"
- opt.SPEC_AR_OUT_FILENAME = "/OUT:"
- #opt.DEBUG_FLAGS = "/Yd /Zi /RTC"
- # /Yd deprecated, /RTC not recognized with VS2005
- opt.DEBUG_FLAGS = "/Zi"
- #opt.OPTIMISE = "/Ox /DNDEBUG"
- opt.OPTIMISE = "/Ox"
-
- opt.EXT_LIB = ".lib"
- opt.EXT_EXE= ".exe"
- opt.EXT_SHLIB = ".dll"
- opt.EXT_DYLIB = opt.EXT_SHLIB
- opt.EXT_STATIC_OBJ = "_static.obj"
- opt.EXT_SHARED_OBJ = "_dynamic.obj"
-
- opt.HAVE_DLOPEN = False
- opt.DLLIB = ""
-
- opt.FLX_SOCKLEN_T = 'int'
- opt.HAVE_PTHREADS = False
- opt.HAVE_KQUEUE_DEMUXER = False
- opt.HAVE_POLL = False
- opt.HAVE_EPOLL = False
- opt.HAVE_EVTPORTS = False
-
- # where to put the rtl: Cygwin requires the dll be in the PATH
- opt.SHLIB_DIR = "bin"
- print "rtl located in bin directory"
-
- def detect_openmp(self):
- # find if we can use /openmp without a warning
- try:
- self.run_shared_string_program(
- 'int main(int argc, char** argv) { return 0; }',
- os.path.join('tmp', 'openmp'), CFLAGS='/openmp')
- except ExecutionError:
- print "OpenMP based parallel programming not supported"
- else:
- print "OpenMP based parallel programming supported"
-
- self.options.HAVE_SHARED_OPENMP = True
- self.options.OPENMP = "/openmp"
-
-
- def detect_warning_flags(self):
- # RF: silencing all warnings /w has masked some really insidious bugs
- # (like dynamic_casts whilst RTTI was disabled). does the offset warning
- # even apply to vs toolchain? I don't think so, however, there is one
- # warning, the one about a missing delete for the custom operator new;
- # the following /wd<n> should silence it.
- # self.options.NO_INVALID_OFFSETOF_WARNING = "/w"
- self.options.NO_INVALID_OFFSETOF_WARNING = "/wd4291"
-
-
- def construct_compiler_commands(self):
- opt = self.options
- COM = opt.COM
- # RF: /MT (link with multithreaded clib, LIBCMT.LIB) for static builds
- # and /MD for dynamic (multithreaded dynamic clib, MSVCRT.LIB).
- # This last one's important as it means that not only is malloc threadsafe,
- # but the SAME allocator is shared between the app and its dynamic libs.
- # Without this, pushing an fthread in flx_run and popping it in
- # flxdynamic_lib is actually an insidious error. Note that /M* flags are
- # not just for link time, they seem to need to be passed to the compilation
- # phase as well. To run with debug versions of clib, try /MDd, /MTd and
- # /LDd when linking dynamic libs. Phew.
- # RF: update: Max suggested trying /MD (threadsafe dll clib) for all
- # builds, including static. I forget why, perhaps for uniformity. Anyway,
- # it works fine. It might have implications for folks who link against
- # static libs (folks like me), but I'm using nocygwin, so, yknow, eh.
- # P.S. Erick, if ever again you checkin any changes to this that you
- # haven't first tested, I'll kill you.
-
- COMPILE_DYNAMIC_RTL = "/MD /c /EHs"
- LINK_DYNAMIC_RTL = "/MD /LD"
-
- COMPILE_DYNAMIC_MAIN = "/MD /c /EHs"
- LINK_DYNAMIC_MAIN = "/MD"
-
- COMPILE_DYNAMIC_FLX = "/MD /c /EHs"
- LINK_DYNAMIC_FLX = "/MD /LD"
-
- opt.CCOBJ_DYNAMIC_FLX = COM + ' ' + COMPILE_DYNAMIC_FLX
- opt.CCLINK_DYNAMIC_FLX = COM + ' ' + LINK_DYNAMIC_FLX
-
- opt.CCOBJ_DYNAMIC_RTL = COM + ' ' + COMPILE_DYNAMIC_RTL
- opt.CCLINK_DYNAMIC_RTL = COM + ' ' + LINK_DYNAMIC_RTL
-
- opt.CCOBJ_DYNAMIC_MAIN = COM + ' ' + COMPILE_DYNAMIC_MAIN
- opt.CCLINK_DYNAMIC_MAIN = COM + ' ' + LINK_DYNAMIC_MAIN
-
- opt.CCOBJ_STATIC_FLX = COM + " /MD /c /EHs"
- opt.CCOBJ_STATIC_RTL = COM + " /MD /c /EHs"
- opt.CCOBJ_STATIC_MAIN = COM + " /MD /c /EHs"
-
- opt.CCOBJ_DYNAMIC_RTL = opt.CCOBJ_DYNAMIC_RTL + ' ' + opt.NO_INVALID_OFFSETOF_WARNING
- opt.CCOBJ_DYNAMIC_FLX = opt.CCOBJ_DYNAMIC_FLX + ' ' + opt.NO_INVALID_OFFSETOF_WARNING
- opt.CCOBJ_STATIC_RTL = opt.CCOBJ_STATIC_RTL + ' ' + opt.NO_INVALID_OFFSETOF_WARNING
- opt.CCOBJ_STATIC_FLX = opt.CCOBJ_STATIC_FLX + ' ' + opt.NO_INVALID_OFFSETOF_WARNING
- opt.CCLINK_STATIC = COM + " /MD"
-
- ########
-
- def link_thing(self, *args, **kwds):
- kwds['LDFLAGS'] = kwds.get('LDFLAGS', '')
- # RF: hack to get /link before all other link directives,
- # including the libraries themselves and their paths.
- self.options.PRE_LINK_FLAGS = '/link'
-
- return super(msvc_mixin, self).link_thing(*args, **kwds)
-
-
- def link_shared_thing(self, *args, **kwds):
- lib = super(msvc_mixin, self).link_shared_thing(*args, **kwds)
-
- # msvc 8 and above need the manifest baked into the dll
- if self.options.VERSION > [14, 0, 0, 0]:
- self.shell('mt',
- '/nologo',
- '/manifest', lib + '.manifest',
- '/outputresource:' + lib + ';#2',
- )
-
- return lib
-
- ########
-
- def report_config(self):
- c_cxx_base.report_config(self)
- opt = self.options
-
- print
- if opt.SUPPORT_DYNAMIC_LOADING:
- print "Dynamic Loading Supported : [Windows native]"
- else:
- print "Dynamic Loading : NOT SUPPORTED"
-
- print
- self.report_isnan()
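
The link_shared_thing override above records a real MSVC quirk: from Visual C++ 8 onwards (the VERSION > [14, 0, 0, 0] check) the manifest emitted by the linker has to be merged back into the DLL with the mt tool, or the CRT dependency may not resolve when the library is loaded. The post-link step in isolation, as a sketch (the DLL name is illustrative; mt ships with Visual Studio):

    import subprocess

    def embed_manifest(dll):
        # merge foo.dll.manifest into foo.dll as resource #2
        # (#2 is the convention for DLLs; executables use #1)
        subprocess.check_call([
            'mt', '/nologo',
            '/manifest', dll + '.manifest',
            '/outputresource:' + dll + ';#2',
        ])

    embed_manifest('flx_dynamic.dll')
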
diff --git a/fbuild_old/lib/fbuild/flxbuild/msvcc_class.py b/fbuild_old/lib/fbuild/flxbuild/msvcc_class.py
deleted file mode 100644
index 5fd9990..0000000
--- a/fbuild_old/lib/fbuild/flxbuild/msvcc_class.py
+++ /dev/null
@@ -1,13 +0,0 @@
-from fbuild.flxbuild.c_base import c_base
-from fbuild.flxbuild.msvc_mixin import msvc_mixin
-
-class msvcc(msvc_mixin, c_base):
- DEFAULT_COM = 'cl'
-
- def check_options(self):
- msvc_mixin.check_options(self)
- c_base.check_options(self)
-
- def compile_thing(self, COM, *args, **kwds):
- # make sure we tell the compiler to compile code as a c file
- return super(msvcc, self).compile_thing(COM + ' /TC', *args, **kwds)
diff --git a/fbuild_old/lib/fbuild/flxbuild/msvcxx_class.py b/fbuild_old/lib/fbuild/flxbuild/msvcxx_class.py
deleted file mode 100644
index afe2c1e..0000000
--- a/fbuild_old/lib/fbuild/flxbuild/msvcxx_class.py
+++ /dev/null
@@ -1,10 +0,0 @@
-from fbuild.flxbuild.cxx_base import cxx_base
-from fbuild.flxbuild.msvc_mixin import msvc_mixin
-
-class msvcxx(msvc_mixin, cxx_base):
- # RF: this might be a good place to put the RTTI switch zzz
- DEFAULT_COM = 'cl'
-
- def check_options(self):
- msvc_mixin.check_options(self)
- cxx_base.check_options(self)
diff --git a/fbuild_old/lib/fbuild/flxbuild/ocaml_class.py b/fbuild_old/lib/fbuild/flxbuild/ocaml_class.py
deleted file mode 100644
index a49f6a7..0000000
--- a/fbuild_old/lib/fbuild/flxbuild/ocaml_class.py
+++ /dev/null
@@ -1,366 +0,0 @@
-# discover which ocaml compilers are available
-import os
-import sys
-import shutil
-
-from fbuild.flxbuild.flxutil import mkdirs, erasedir, ExecutionError
-from fbuild.flxbuild.compiler_base import compiler_base
-
-class ocaml(compiler_base):
-
- def autodetect(self, linkopts, log=None):
- opt = self.options
-
- if os.name == 'nt' or sys.platform == 'cygwin':
- opt.EXT_EXE = '.exe'
- # ocamlbuild doesn't work under windows
- opt.HAVE_OCAMLBUILD = False
- else:
- opt.EXT_EXE = ''
-
- try: # check if ocamlbuild exists
- self.shell('ocamlbuild', '-version', log=log)
- opt.OCAMLBUILD = 'ocamlbuild -classic-display'
- #opt.HAVE_OCAMLBUILD = True
- # ocaml build doesn't work properly at the moment
- # due to hygiene checks or something .. so i have
- # to disable it
- opt.HAVE_OCAMLBUILD = False
- except ExecutionError:
- opt.HAVE_OCAMLBUILD = False
-
- try: # check if the client bootstrapped the native code compiler
- self.shell('ocamlopt.opt', log=log)
- opt.OCAMLCC = 'ocamlopt.opt' + (linkopts and ' ' + linkopts or '')
- opt.OCAMLLEX = 'ocamllex.opt'
- opt.OCAMLYACC = 'ocamlyacc'
- opt.NATIVE_CODE_COMPILER = True
- except ExecutionError:
- try: # check if the client has unbootstrapped native code compiler
- self.shell('ocamlopt', log=log)
- opt.OCAMLCC = 'ocamlopt' + (linkopts and ' ' + linkopts or '')
- opt.OCAMLLEX = 'ocamllex'
- opt.OCAMLYACC = 'ocamlyacc'
- opt.NATIVE_CODE_COMPILER = True
- except ExecutionError:
- opt.NATIVE_CODE_COMPILER = False
- try: # check if the client has ocaml at all ..
- self.shell('ocamlc', log=log)
- opt.OCAMLCC = 'ocamlc'
- opt.OCAMLLEX = 'ocamllex'
- opt.OCAMLYACC = 'ocamlyacc'
- opt.OCAMLDOC = 'ocamldoc'
- except ExecutionError:
- print "WARNING: CANT FIND OCAML TOOLS (ocamlc, ocamllex, ocamlyacc)"
- opt.OCAMLCC = 'false ocamlc'
- opt.OCAMLLEX = 'false ocamllex'
- opt.OCAMLYACC = 'false ocamlyacc'
-
- try: # check if there is a native code version of the bytecode compiler
- self.shell('ocamlc.opt', log=log)
- opt.OCAMLB = 'ocamlc.opt'
- except ExecutionError:
- try: # check if the client has ocaml at all
- self.shell('ocamlc', log=log)
- opt.OCAMLB = 'ocamlc'
- except ExecutionError:
- print "WARNING: CANT FIND OCAML TOOL 'ocamlc'"
- opt.OCAMLB = 'false ocamlc'
-
- try:
- self.shell('ocamldoc.opt', log=log)
- opt.OCAMLDOC = 'ocamldoc.opt'
- except ExecutionError:
- try:
- self.shell('ocamldoc', log=log)
- opt.OCAMLDOC = 'ocamldoc'
- except ExecutionError:
- print "WARNING: CANT FIND OCAML TOOL 'ocamldoc'"
- opt.OCAMLDOC = 'false ocamldoc'
-
- # set the default ocaml compiler
- if opt.OCAMLCC[0] != '#':
- opt.OCAMLC = opt.OCAMLCC
- else:
- opt.OCAMLC = opt.OCAMLB
-
- opt.OCAMLCP = "ocamlcp"
- # where the ocaml is installed
- try:
- output = self.shell(opt.OCAMLC, '-where', verbose=False)
- except ExecutionError:
- print "Woops, Can't run", opt.OCAMLC
- sys.exit(1)
- opt.OCAML_INCLUDE_DIRECTORY = output[0].strip()
-
- try:
- output = self.shell(opt.OCAMLB, '-v', verbose=False)
- except ExecutionError:
- print "Woops, Can't run", opt.OCAMLB
- sys.exit(1)
- opt.OCAMLB_DESCRIPTION, ocamlb_version = output[0].strip().split(', ')
-
- try:
- output = self.shell(opt.OCAMLC, '-v', verbose=False)
- except ExecutionError:
- print "Woops, Can't run", opt.OCAMLC
- sys.exit(1)
- opt.OCAMLC_DESCRIPTION, ocamlc_version = output[0].strip().split(', ')
- if ocamlb_version != ocamlc_version:
- print "Inconsistent Ocaml tool versions"
- print "Ocaml: bytecode compiler", ocamlb_version
- print "Ocaml: compiler ", ocamlc_version
- sys.exit(1)
- else:
- opt.OCAML_VERSION = ocamlb_version
-
- warn=' -w yzex -warn-error FDPSU'
- f = open('tmp'+os.sep+'camldummy.ml','w')
- try:
- f.write('print_endline "OK";;\n')
- finally:
- f.close()
-
- try:
- self.shell(opt.OCAMLC,
- warn,
- os.path.join('tmp', 'camldummy.ml'),
- verbose=False,
- )
- except ExecutionError:
- pass
- else:
- opt.OCAMLB = opt.OCAMLB + warn
- opt.OCAMLC = opt.OCAMLC + warn
-
- try:
- self.shell(opt.OCAMLC, '-g',
- os.path.join('tmp', 'camldummy.ml'),
- verbose=False,
- )
- except ExecutionError:
- opt.OCAMLC_SUPPORTS_DEBUG = False
- else:
- opt.OCAMLC_SUPPORTS_DEBUG = True
-
- def report_config(self):
- opt = self.options
-
- print
- print "OCAML tool configuration"
- print "************************"
- print
- print "Ocaml Version",opt.OCAML_VERSION
- if opt.NATIVE_CODE_COMPILER:
- print "Using Native code Compiler"
- else:
- print "Using Bytecode Compiler"
- print "Lexer generator .............. ",opt.OCAMLLEX
- print "Parser generator ............. ",opt.OCAMLYACC
- print "Bytecode and Interface compiler",opt.OCAMLB
- print " ",opt.OCAMLB_DESCRIPTION
- print "Compiler ..................... ",opt.OCAMLC
- print " ",opt.OCAMLC_DESCRIPTION
- print "Profiling Compiler ........... ",opt.OCAMLCP
- print "Document Generator ........... ",opt.OCAMLDOC
- print "C include directory .......... ",opt.OCAML_INCLUDE_DIRECTORY
- print
-
-
- def compile_thing(self, COM, EXT_SRC, EXT_DST, basenames,
- outdir='',
- include_paths=[],
- packs=[],
- profile=False,
- optimise=False,
- FLAGS=None,
- log=None,
- ):
- objects = []
- for basename in basenames:
- src = self.find_in_src_dir(basename + EXT_SRC)
- dst = os.path.join(outdir, basename + EXT_DST)
-
- # search for generated files first
- f = os.path.join(outdir, src)
- if os.path.exists(f):
- src = f
-
- mkdirs(os.path.dirname(dst))
-
- cmd = [COM]
- for i in include_paths: cmd.extend(('-I', i))
- cmd.extend(('-I', os.path.dirname(dst)))
-
- if optimise: cmd.extend(('-unsafe', '-noassert'))
-
- for path, packed, parts in packs:
- if basename in parts:
- cmd.extend(('-for-pack', packed))
-
- cmd.extend(('-o', dst))
- cmd.extend(('-c', src))
-
- # compile it
- self.shell(*cmd, **dict(log=log))
-
- objects.append(dst)
-
- return objects
-
-
- def compile_bytecode_thing(self, *args, **kwds):
- if kwds.pop('profile', False):
- compiler = self.options.OCAMLCP
- else:
- compiler = self.options.OCAMLB
-
- if kwds.pop('debug', False): compiler += ' -g'
-
- return self.compile_thing(compiler, *args, **kwds)
-
-
- def compile_native_thing(self, *args, **kwds):
- compiler = self.options.OCAMLC
- if kwds.pop('profile', False):
- compiler += ' -p'
-
- if kwds.get('optimise', False):
- compiler += ' -inline 5'
-
- if kwds.pop('debug', False) and self.options.OCAMLC_SUPPORTS_DEBUG:
- compiler += ' -g'
-
- return self.compile_thing(compiler, *args, **kwds)
-
- ####
-
- def compile_interface(self, *args, **kwds):
- return self.compile_bytecode_thing('.mli', '.cmi', *args, **kwds)
-
- def compile_bytecode(self, *args, **kwds):
- return self.compile_bytecode_thing('.ml', '.cmo', *args, **kwds)
-
- def compile_native(self, *args, **kwds):
- if not self.options.NATIVE_CODE_COMPILER:
- return self.compile_bytecode(*args, **kwds)
-
- return self.compile_native_thing('.ml', '.cmx', *args, **kwds)
-
- def compile_module(self, *args, **kwds):
- if kwds.pop('bytecode', True):
- return self.compile_bytecode(*args, **kwds)
- else:
- return self.compile_native(*args, **kwds)
-
- ####
-
- def link_thing(self, LINK, EXT_SRC, EXT_DST, objects, outfile,
- outdir='',
- include_paths=[],
- libs=[],
- log=None,
- ):
- dst = os.path.join(outdir, outfile)
- mkdirs(os.path.dirname(dst))
-
- cmd = [LINK]
- for i in include_paths: cmd.extend(('-I', i))
- cmd.extend(('-I', os.path.dirname(dst)))
-
- cmd.extend(('-o', dst))
- cmd.extend([lib + EXT_DST for lib in libs])
- cmd.extend([obj + EXT_SRC for obj in objects])
-
- self.shell(*cmd, **dict(log=log))
-
- return dst
-
- def link_bytecode_lib(self, objects, outfile, *args, **kwds):
- return self.link_thing(
- self.options.OCAMLB + ' -a', '.cmo', '.cma', objects, outfile + '.cma',
- *args, **kwds)
-
- def link_native_lib(self, objects, outfile, *args, **kwds):
- if not self.options.NATIVE_CODE_COMPILER:
- return self.link_bytecode_lib(objects, outfile, *args, **kwds)
-
- return self.link_thing(
- self.options.OCAMLC + ' -a', '.cmx', '.cmxa', objects, outfile + '.cmxa',
- *args, **kwds)
-
- def link_lib(self, *args, **kwds):
- if kwds.pop('bytecode', True):
- return self.link_bytecode_lib(*args, **kwds)
- else:
- return self.link_native_lib(*args, **kwds)
-
- def link_bytecode_exe(self, *args, **kwds):
- opt = self.options
- return self.link_thing(opt.OCAMLB, '.cmo', '.cma', *args, **kwds)
-
- def link_native_exe(self, *args, **kwds):
- if not self.options.NATIVE_CODE_COMPILER:
- return self.link_bytecode_exe(*args, **kwds)
-
- opt = self.options
- return self.link_thing(opt.OCAMLC, '.cmx', '.cmxa', *args, **kwds)
-
- def link_exe(self, *args, **kwds):
- if kwds.pop('bytecode', True):
- return self.link_bytecode_exe(*args, **kwds)
- else:
- return self.link_native_exe(*args, **kwds)
-
- ####
-
- def gen_thing(self, COM, EXT_SRC, basenames,
- outdir='',
- FLAGS=[],
- log=None):
- for basename in basenames:
- src = self.find_in_src_dir(basename + EXT_SRC)
- dst = os.path.join(outdir, basename + EXT_SRC)
-
- mkdirs(os.path.dirname(dst))
- shutil.copy(src, dst)
-
- cmd = [COM]
- cmd.extend(FLAGS)
- cmd.append(dst)
-
- self.shell(*cmd, **dict(log=log))
-
-
- def gen_lexer(self, *args, **kwds):
- return self.gen_thing(self.options.OCAMLLEX, '.mll', *args, **kwds)
-
-
- def gen_parser(self, *args, **kwds):
- return self.gen_thing(self.options.OCAMLYACC, '.mly',
- *args, **kwds)
-
-
- def gen_pgen_parser(self, *args, **kwds):
- return self.gen_thing(os.path.join('bin', 'pgen'), '.dyp',
- *args, **kwds)
-
- def gen_dypgen_parser(self, *args, **kwds):
- return self.gen_thing(os.path.join('bin', 'dypgen'), '.dyp',
- *args, **kwds)
-
- ####
-
- def ocamldoc(self, filenames, outdir,
- include_paths=[],
- log=None,
- ):
- erasedir(outdir)
- mkdirs(outdir)
-
- cmd = [self.options.OCAMLDOC, '-html']
- cmd.extend(('-d', outdir))
- for i in include_paths: cmd.extend(('-I', i))
- cmd.extend(filenames)
-
- self.shell(*cmd, **dict(log=log))
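
autodetect() above is essentially a preference cascade over the OCaml toolchain: use ocamlopt.opt if the native-code compiler was bootstrapped, fall back to ocamlopt, then to the bytecode ocamlc, and finally to a 'false ...' placeholder so a missing toolchain fails loudly later. The cascade as a small helper, assuming subprocess in place of the old shell() wrapper (the -version probe is just a cheap way to see whether the binary exists):

    import subprocess

    def first_available(candidates, fallback):
        # return the first tool that can actually be invoked
        for tool in candidates:
            try:
                subprocess.call([tool, '-version'],
                                stdout=subprocess.PIPE, stderr=subprocess.PIPE)
                return tool
            except OSError:
                continue
        return fallback

    OCAMLCC = first_available(['ocamlopt.opt', 'ocamlopt', 'ocamlc'], 'false ocamlc')
    print('OCaml compiler: ' + OCAMLCC)
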
diff --git a/fbuild_old/lib/fbuild/flxbuild/package.py b/fbuild_old/lib/fbuild/flxbuild/package.py
deleted file mode 100644
index bab3dd6..0000000
--- a/fbuild_old/lib/fbuild/flxbuild/package.py
+++ /dev/null
@@ -1,4 +0,0 @@
-# global package registry
-pkgd = {}
-pkgreqs = {}
-pkgdeps = {}
diff --git a/fbuild_old/lib/fbuild/flxbuild/process.py b/fbuild_old/lib/fbuild/flxbuild/process.py
deleted file mode 100644
index ccdce70..0000000
--- a/fbuild_old/lib/fbuild/flxbuild/process.py
+++ /dev/null
@@ -1,82 +0,0 @@
-import os
-import glob
-import time
-
-# forward import of set
-try:
- set
-except NameError:
- from sets import Set as set
-
-from fbuild.flxbuild.flxutil import xqt, newest_filetime, filetime, fmtime, mkdirs
-from fbuild.path import glob_paths
-import config
-
-# global process registry
-processes = {}
-
-class Process(object):
- help = ''
-
- def __init__(self,
- verbose=False,
- quiet=False,
- optimise=False,
- debug=False,
- force=False,
- options=[]):
- self.failures = []
- self.successes = []
- self.dummy = 0
- self.used = False
- self.verbose = verbose
- self.quiet = quiet
- self.optimise = optimise
- self.debug = debug
- self.force = force
- self.options = options
-
- def __call__(self, *args, **kwds):
- return self.runme(*args, **kwds)
-
- def shell(self, *args, **kwds):
- kwds.setdefault('verbose', self.verbose)
- kwds.setdefault('quiet', self.quiet)
-
- return xqt(*args, **kwds)
-
- def __str__(self):
- return self.__class__.__name__
-
-
-def get_latest_src_time(pkgdict):
- filenames = set()
-
- # check out iscr files
- for iscr in pkgdict.get("iscr_source",[]):
- base = os.path.basename(os.path.splitext(iscr)[0])
- f = open(os.path.join('manifests', base + '.deps'))
- try:
- filenames.update([line.strip() for line in f])
- finally:
- f.close()
-
- # check out xfiles
- root = pkgdict.get('root', config.src_dir)
- xfiles = pkgdict.get('xfiles', [])
-
- filenames.update(glob_paths(xfiles, root))
-
- return newest_filetime(filenames)
-
-
-def enstamp(stamp, quiet):
- mkdirs(os.path.split(stamp)[0])
- f = open(stamp,"w")
- try:
- print >> f, fmtime(time.time())
- finally:
- f.close()
-
- if not quiet:
- print 'Writing Stamp File:', stamp
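
enstamp() and get_latest_src_time() are the two halves of the build system's only dependency rule: a package is rebuilt when the newest of its sources is newer than its stamp file, and a missing source pushes the "newest" time into the future so the package always rebuilds. The decision itself, reusing the flxutil helpers above (the paths are illustrative):

    from fbuild.flxbuild.flxutil import filetime, newest_filetime

    def needs_rebuild(sources, stamp, force=False):
        # rebuild if forced, never stamped, or any source newer than the stamp
        return force or newest_filetime(sources) >= filetime(stamp)

    if needs_rebuild(['src/rtl/flx_gc.cpp', 'src/rtl/flx_gc.hpp'],
                     'pkg-stamps/flx_gc.stamp'):
        print('rebuilding flx_gc')
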
diff --git a/fbuild_old/lib/fbuild/flxbuild/testprocess.py b/fbuild_old/lib/fbuild/flxbuild/testprocess.py
deleted file mode 100644
index 92501c5..0000000
--- a/fbuild_old/lib/fbuild/flxbuild/testprocess.py
+++ /dev/null
@@ -1,300 +0,0 @@
-import os
-import glob
-
-from fbuild.flxbuild.process import Process, get_latest_src_time, enstamp
-from fbuild.flxbuild.package import pkgd, pkgreqs
-from fbuild.flxbuild.flxutil import Tee, ExecutionError, filetime, fmtime, mkdirs
-from fbuild.path import relativepath, glob_paths
-
-import config
-
-class TestProcess(Process):
- def find_test(self, tl, a, b):
- for x, y, z in tl:
- if x == a and y == b: return True
- return False
-
- def run_tests(self, pkg, pkgdict, pkgsummary, testfiles, deterministic, static, dynamic, title):
- pkgsummary[(pkg, self.__class__.__name__)] = "started"
- if not testfiles:
- pkgsummary[(pkg, self.__class__.__name__)] = "no tests"
- return 1
-
- tests_ran = 0
- tests_failed = 0
-
- # determine the latest time that this package, and all the recursively dependent
- # packages, were last built
- latest_src_time = get_latest_src_time(pkgdict)
-
- for p in pkgreqs.get(pkg, []):
- latest_src_time = max(latest_src_time, get_latest_src_time(pkgd[p]))
-
- root = pkgdict.get('root', '.')
- for testfile in glob_paths(testfiles, root):
- localpath = relativepath(root, testfile)
-
- failed = False
-
- if self.find_test(self.successes, pkg, testfile): continue
- if self.find_test(self.failures, pkg, testfile): continue
-
- stamp = os.path.join("pkg-stamps", localpath + '.test')
-
- # run the tests
- stamptime = filetime(stamp)
- if not self.force and latest_src_time < stamptime:
- self.successes.append((pkg, testfile, "Stamped ok previously on "+fmtime(stamptime)))
- else:
- tests_ran += 1
-
- log = Tee()
- log.write(title % testfile)
- if not self.quiet:
- if stamptime == 0:
- log.write("+++UNTRIED or FAILED\n")
- elif self.force:
- log.write("+++FORCED\n")
- else:
- log.write("+++OUT OF DATE, source changed since "+fmtime(stamptime)+"\n")
-
- # build the felix code
- try:
- outbase = self.build_testfile(root, localpath, log)
- except ExecutionError, e:
- failed = True
- log.write('TESTFILE -- ERROR! %s (compiler)\n' % testfile)
- else:
- # run the dynamic tests
- if dynamic and config.SUPPORT_DYNAMIC_LOADING:
- resfilename = outbase + ".resh"
-
- try:
- driver = self.shell(
- os.path.join('bin', 'flx_pkgconfig'),
- '--path=config',
- '--field=flx_requires_driver',
- '--rec',
- '@' + resfilename,
- log=log,
- )
-
- driver = driver[0].strip()
- if driver == '': driver = "flx_run"
- driver = os.path.join("bin", driver)
- #log.write("Driver: %s\n" % driver)
- testscript = driver + ' ' + outbase + config.TARGET_CXX.options.EXT_SHLIB
-
- self.run_test(log, testscript, os.path.splitext(testfile)[0], outbase, deterministic)
- except ExecutionError, e:
- failed = True
- log.write('TESTFILE -- ERROR! %s (dynamic)\n' % testscript)
-
- # run the static tests
- if static and config.SUPPORT_STATIC_LINKAGE:
- testscript = outbase + config.TARGET_CXX.options.EXT_EXE
-
- try:
- self.run_test(log, testscript, os.path.splitext(testfile)[0], outbase, deterministic)
- except ExecutionError, e:
- failed = True
- log.write('TESTFILE -- ERROR! %s (static)\n' % testscript)
-
- if not failed:
- enstamp(stamp, self.quiet)
- self.successes.append((pkg, testfile, log.getvalue()))
-
- ####
-
- if failed:
- tests_failed += 1
- self.failures.append((pkg, testfile, log.getvalue()))
-
- pkgsummary[(pkg, self.__class__.__name__)] = \
- "Passed %d/%d" % (tests_ran - tests_failed, tests_ran)
-
- return tests_failed == 0
-
-
- def build_testfile(self, root, localpath, log):
- testfile = os.path.join(root, localpath)
-
- optimise_c = self.optimise or "optimise_c" in self.options
- mode = "std"
- if optimise_c: mode = "Optimised"
-
- log.write('TRANSLATING %s\n' % testfile)
-
- buildpath = os.path.join('build', localpath)
- outbase = os.path.splitext(buildpath)[0]
- resfilename = outbase + ".resh"
- incfilename = outbase + ".includes"
-
- builddir = os.path.dirname(buildpath)
- mkdirs(builddir)
-
- try:
- self.shell(os.path.join('bin', 'flxg'),
- '-Ilib',
- '-I' + os.path.dirname(testfile),
- '--cache_dir=' + builddir,
- '--output_dir=' + builddir,
- '--elkhound=' + os.path.join('bin', 'flx_elkhound'),
- '--import=flx.flxh',
- 'std',
- os.path.splitext(os.path.basename(testfile))[0],
- log=log,
- )
- except ExecutionError, e:
- log.write('TESTFILE -- ERROR! %s\n' % testfile)
- raise e
-
- cflags = self.shell(os.path.join('bin', 'flx_pkgconfig'),
- '--path=config',
- '--field=cflags',
- '@' + resfilename,
- log=log,
- )
- cflags = cflags[0].strip() + " "
-
- includes = self.shell(os.path.join('bin', 'flx_pkgconfig'),
- '--path=config',
- '--field=includes',
- '@' + resfilename,
- log=log,
- )
- includes = includes[0].strip().split()
- f = open(incfilename,"w")
- for i in includes:
- f.write("#include " + i + "\n")
- f.close()
-
-
- #log.write('Compiling generated code of %s\n' % testfile)
- try:
- if config.SUPPORT_DYNAMIC_LOADING:
- log.write("COMPILING GENERATED C++ TEST CODE: %s (dynamic)\n" % mode)
- log.flush()
-
- dlibs = self.shell(os.path.join('bin', 'flx_pkgconfig'),
- '-r',
- '--path=config',
- '--field=provides_dlib',
- '--field=requires_dlibs',
- '@' + resfilename,
- log=log,
- )
- dlibs = dlibs[0].strip() + " "
-
- config.TARGET_CXX.build_shared_dll(outbase,
- #outdir='build',
- include_paths=[config.FLX_RTL_DIR, config.FLX_TARGET_CONFIG_DIR],
- optimise=optimise_c,
- debug=self.debug,
- CFLAGS=cflags,
- lib_paths=[config.TARGET_CXX.options.SHLIB_DIR],
- LDFLAGS=dlibs,
- log=log)
-
- if config.SUPPORT_STATIC_LINKAGE:
- log.write("COMPILING GENERATED C++ TEST CODE: %s (static)\n" % mode)
- log.flush()
-
- driver = self.shell(os.path.join('bin', 'flx_pkgconfig'),
- '-r',
- '--keeprightmost',
- '--path=config',
- '--field=flx_requires_driver',
- '@' + resfilename,
- log=log,
- )
- driver = driver[0].strip()
- if driver == '': driver = 'flx_run'
-
- slibs = self.shell(os.path.join('bin', 'flx_pkgconfig'),
- '-r',
- '--keeprightmost',
- '--path=config',
- '--field=provides_slib',
- '--field=requires_slibs',
- driver,
- '@' + resfilename,
- log=log,
- )
- slibs = slibs[0].strip()
- #log.write("slibs=%s\n" % slibs)
-
- driver = os.path.join(
- config.FLX_RTL_DIR,
- driver + config.TARGET_CXX.options.EXT_STATIC_OBJ,
- )
- #log.write('static driver =%s\n' % driver)
-
- config.TARGET_CXX.build_felix_static(outbase,
- #outdir='build',
- objects=[driver],
- include_paths=[config.FLX_RTL_DIR, config.FLX_TARGET_CONFIG_DIR],
- optimise=optimise_c,
- debug=self.debug,
- macros=["FLX_STATIC_LINK"],
- CFLAGS=cflags,
- lib_paths=[config.FLX_RTL_DIR],
- libs=[],
- LDFLAGS=slibs,
- log=log)
-
- #log.write('TESTFILE -- OK! %s\n' % testfile)
- except ExecutionError, e:
- log.write('TESTFILE -- ERROR! %s\n' % testfile)
- raise e
-
- return outbase
-
-
- def run_test(self, log, testscript, inbase, outbase, deterministic):
- log.write('EXECUTING TEST CODE %s\n' % testscript)
- log.flush()
-
- # FIXME
- argfiles = glob.glob(inbase + '*.args')
-
- if not argfiles:
- output = self.shell(testscript, verbose=True, log=log)
-
- f = open(outbase+ '.output', 'w')
- try:
- f.write(''.join(output))
- finally:
- f.close()
-
- if deterministic:
- output = self.shell(config.DIFF,
- inbase + '.expect',
- outbase + '.output',
- verbose=True,
- log=log)
-
- else:
- for argcount, argfile in zip(range(len(argfiles)), argfiles):
- # read the arguments fromm the file
- f = open(argfile)
- try:
- args = f.read().strip()
- finally:
- f.close()
-
- output = self.shell(testscript, args, verbose=True, log=log)
-
- f = open('%s-%s.argoutput' % (outbase, argcount), 'w')
- try:
- f.write(''.join(output))
- finally:
- f.close()
-
- if deterministic:
- output = self.shell(config.DIFF,
- '%s-%s.argexpect' % (inbase, argcount),
- '%s-%s.argoutput' % (outbase, argcount),
- quiet=True,
- log=log,
- )
diff --git a/fbuild_old/lib/fbuild/mkplugins/__init__.py b/fbuild_old/lib/fbuild/mkplugins/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/fbuild_old/lib/fbuild/mkplugins/build_felix_dynamic_drivers.py b/fbuild_old/lib/fbuild/mkplugins/build_felix_dynamic_drivers.py
deleted file mode 100644
index d9f3e00..0000000
--- a/fbuild_old/lib/fbuild/mkplugins/build_felix_dynamic_drivers.py
+++ /dev/null
@@ -1,49 +0,0 @@
-import os
-import shutil
-
-from fbuild.flxbuild.process import Process
-from fbuild.flxbuild.flxutil import mkdirs
-
-import config
-
-class build_felix_dynamic_drivers(Process):
- def runme(self, pkg, pkgdict, *args):
- if not config.SUPPORT_DYNAMIC_LOADING:
- return
-
- DRIVERS = pkgdict.get("dynamic_drivers",[])
-
- if not DRIVERS:
- return
-
- print "COMPILING DRIVERS (dynamic)"
- cflags = pkgdict.get("cflags","")
- dflags = pkgdict.get("dflags","")
- LIBS = pkgdict.get("drivers_require_libs",[])
- includes = pkgdict.get('include_path',[])
- includes = [os.path.join(config.src_dir, i) for i in includes]
-
- libs = []
- for lib in LIBS:
- libs.append(lib+"_dynamic")
-
- for src, outdir in DRIVERS:
- # this is a gross HACK! Don't make dynamic versions
- # of drivers with extra macros, they're for static
- # link of driver without async support
- print 'dynamic Compiling driver object', src
-
- dst = config.TARGET_CXX.build_shared_program(src, os.path.join('build', src),
- outdir='build',
- include_paths=includes+[config.FLX_RTL_DIR, config.FLX_TARGET_CONFIG_DIR],
- optimise=self.optimise,
- debug=self.debug,
- CFLAGS=cflags,
- lib_paths=[config.TARGET_CXX.options.SHLIB_DIR],
- libs=libs,
- LDFLAGS=dflags)
-
- if outdir:
- print 'copying file', dst, '->', outdir
- mkdirs(outdir)
- shutil.copy(dst, outdir)
diff --git a/fbuild_old/lib/fbuild/mkplugins/build_felix_static_drivers.py b/fbuild_old/lib/fbuild/mkplugins/build_felix_static_drivers.py
deleted file mode 100644
index 47e9d3d..0000000
--- a/fbuild_old/lib/fbuild/mkplugins/build_felix_static_drivers.py
+++ /dev/null
@@ -1,37 +0,0 @@
-import os
-import shutil
-
-from fbuild.flxbuild.process import Process
-from fbuild.flxbuild.flxutil import mkdirs
-
-import config
-
-class build_felix_static_drivers(Process):
- def runme(self, pkg, pkgdict, *args):
- if not config.SUPPORT_STATIC_LINKAGE:
- return
-
- DRIVERS = pkgdict.get("static_drivers",[])
- if not DRIVERS:
- return
-
- print "COMPILING DRIVERS (static)"
- cflags = pkgdict.get("cflags","")
- includes = pkgdict.get('include_path',[])
- includes = [os.path.join(config.src_dir, i) for i in includes]
-
- for src, outdir in DRIVERS:
- print 'static Compiling driver object', src
-
- dst = config.TARGET_CXX.compile_static_main([src],
- outdir='build',
- include_paths=includes+[config.FLX_RTL_DIR, config.FLX_TARGET_CONFIG_DIR],
- macros=["FLX_STATIC_LINK"],
- optimise=self.optimise,
- debug=self.debug,
- CFLAGS=cflags)
-
- if outdir:
- print 'copying file', dst[0], '->', outdir
- mkdirs(outdir)
- shutil.copy(dst[0], outdir)
diff --git a/fbuild_old/lib/fbuild/mkplugins/build_host_tools.py b/fbuild_old/lib/fbuild/mkplugins/build_host_tools.py
deleted file mode 100644
index 7bd21d9..0000000
--- a/fbuild_old/lib/fbuild/mkplugins/build_host_tools.py
+++ /dev/null
@@ -1,76 +0,0 @@
-import os
-import shutil
-
-from fbuild.flxbuild.process import Process
-from fbuild.flxbuild.flxutil import unix2native, mkdirs
-
-import config
-
-class build_host_tools(Process):
- def copy_hpp2rtl(self, HPPS):
- for path in HPPS:
- f = os.path.join(config.src_dir, path)
- if os.path.exists(f):
- src = f
- else:
- src = path
- src = unix2native(src)
- dst = os.path.join(config.FLX_RTL_DIR, os.path.basename(path))
-
- if not self.quiet: print 'copying file', src, '->', dst
- mkdirs(os.path.dirname(dst))
- shutil.copyfile(src, dst)
-
-
- def runme(self, pkg, pkgdict, *args):
- HPPS = pkgdict.get("rtl_interfaces",[])
- self.copy_hpp2rtl(HPPS)
-
- CCS = pkgdict.get("host_cc_ccs",[])
- CPPS = pkgdict.get("host_cpp_cpps",[])
- EXES = pkgdict.get("host_exes",[])
- LIBS = pkgdict.get("host_exes_require_libs",[])
- MACROS =pkgdict.get("host_macros",[])
- INCLUDES=pkgdict.get("host_include_path",[])
- INCLUDE_PATH=[os.path.join(config.src_dir, i) for i in INCLUDES]
-
- if len(CCS)+len(CPPS)+len(EXES)+len(LIBS) == 0:
- return
-
- print "BUILDING HOST TOOLS"
- pkglib = None
-
- if CCS:
- print "HOST C COMPILING", pkg
- pkglib = "lib"+pkg+"_host_static"
-
- config.HOST_C.build_static_rtl(CCS, os.path.join('build', 'hostlib', pkglib),
- outdir='build',
- include_paths=INCLUDE_PATH+[config.FLX_RTL_DIR, config.FLX_HOST_CONFIG_DIR, "elk"],
- macros=MACROS+["FLX_STATIC_LINK"],
- )
-
- if CPPS:
- print "HOST C++ COMPILING", pkg
- pkglib = "lib"+pkg+"_host_static"
-
- config.HOST_CXX.build_static_rtl(CPPS, os.path.join('build', 'hostlib', pkglib),
- outdir='build',
- include_paths=INCLUDE_PATH+[config.FLX_RTL_DIR, config.FLX_HOST_CONFIG_DIR, "elk"],
- macros=MACROS+["FLX_STATIC_LINK"],
- )
-
- for x in CCS+CPPS:
- f = x + config.HOST_CXX.options.EXT_STATIC_OBJ
-
- kwds = {\
- 'include_paths': INCLUDE_PATH+[config.FLX_RTL_DIR, config.FLX_HOST_CONFIG_DIR, "elk"],
- 'macros': MACROS+["FLX_STATIC_LINK"],
- }
-
- if pkglib:
- kwds['lib_paths'] = [os.path.join('build', 'hostlib')]
- kwds['libs'] = [pkglib]
-
- for src, bin in EXES:
- config.HOST_CXX.build_static_program(src, bin, **kwds)
diff --git a/fbuild_old/lib/fbuild/mkplugins/build_target_cpp_tools.py b/fbuild_old/lib/fbuild/mkplugins/build_target_cpp_tools.py
deleted file mode 100644
index 632e016..0000000
--- a/fbuild_old/lib/fbuild/mkplugins/build_target_cpp_tools.py
+++ /dev/null
@@ -1,24 +0,0 @@
-import os
-
-from fbuild.flxbuild.process import Process
-
-import config
-
-class build_target_cpp_tools(Process):
- def runme(self, pkg, pkgdict, *args):
- EXES = pkgdict.get("exes",[])
- esflags = pkgdict.get("exes_require_linkflags","")
- if len(EXES)>0:
- print "BUILDING C++ TARGET TOOLS"
- LIBS = pkgdict.get("exes_require_libs",[])
- libs = []
- for lib in LIBS: libs.append(lib+"_static")
- for src,bin in EXES:
- dir = os.path.dirname(src)
- config.TARGET_CXX.build_static_main(src, dir, bin,
- outdir='build',
- include_paths=[config.FLX_RTL_DIR, config.FLX_TARGET_CONFIG_DIR],
- macros=["FLX_STATIC_LINK"],
- lib_paths=[dir],
- libs=libs,
- LDFLAGS=esflags)
diff --git a/fbuild_old/lib/fbuild/mkplugins/build_target_felix_rtl.py b/fbuild_old/lib/fbuild/mkplugins/build_target_felix_rtl.py
deleted file mode 100644
index 644d6d2..0000000
--- a/fbuild_old/lib/fbuild/mkplugins/build_target_felix_rtl.py
+++ /dev/null
@@ -1,25 +0,0 @@
-import os
-import shutil
-
-from fbuild.flxbuild.process import Process
-from fbuild.flxbuild.flxutil import unix2native
-
-import config
-
-class build_target_felix_rtl(Process):
- def runme(self, pkg, pkgdict, *args):
- flxs = pkgdict.get('felix_rtl', [])
- if not flxs:
- return
-
- for path in flxs:
- f = os.path.join(config.src_dir, path)
- if os.path.exists(f):
- src = f
- else:
- src = path
- src = unix2native(src)
- dst = os.path.join('lib', os.path.basename(path))
-
- if not self.quiet: print 'copying file', src, '->', dst
- shutil.copyfile(src, dst)
diff --git a/fbuild_old/lib/fbuild/mkplugins/build_target_felix_tools.py b/fbuild_old/lib/fbuild/mkplugins/build_target_felix_tools.py
deleted file mode 100644
index 779c644..0000000
--- a/fbuild_old/lib/fbuild/mkplugins/build_target_felix_tools.py
+++ /dev/null
@@ -1,34 +0,0 @@
-import os
-
-from fbuild.flxbuild.process import Process
-
-import config
-
-class build_target_felix_tools(Process):
- def runme(self, pkg, pkgdict, *args):
- flxs = pkgdict.get("felix_tools",[])
- if not flxs:
- return
-
- print "BUILDING FELIX TARGET TOOLS"
- libs = [l+'_static' for l in pkgdict.get("exes_require_libs",[])]
- fsflags = pkgdict.get("felix_requires_linkflags","")
- for src,exe in flxs:
- src = os.path.join(config.src_dir, src)
-
- # added 'std' here so flx_pkgconfig builds
- self.shell(os.path.join('bin', 'flxg'),
- '-Ilib',
- '--elkhound=' + os.path.join('bin', 'flx_elkhound'),
- '--import=flx.flxh',
- 'std',
- src,
- )
- config.TARGET_CXX.build_felix_static(src, exe,
- objects=[os.path.join(config.FLX_RTL_DIR, 'flx_run' + config.TARGET_CXX.options.EXT_STATIC_OBJ)],
- include_paths=[config.FLX_RTL_DIR, config.FLX_TARGET_CONFIG_DIR],
- macros=["FLX_STATIC_LINK"],
- lib_paths=[config.FLX_RTL_DIR],
- libs=libs,
- LDFLAGS=fsflags
- )
diff --git a/fbuild_old/lib/fbuild/mkplugins/build_target_rtl_dynamic.py b/fbuild_old/lib/fbuild/mkplugins/build_target_rtl_dynamic.py
deleted file mode 100644
index 5f425ba..0000000
--- a/fbuild_old/lib/fbuild/mkplugins/build_target_rtl_dynamic.py
+++ /dev/null
@@ -1,73 +0,0 @@
-import os
-
-from fbuild.flxbuild.process import Process
-
-import config
-
-class build_target_rtl_dynamic(Process):
- def runme(self, pkg, pkgdict, *args):
- if not config.SUPPORT_DYNAMIC_LOADING:
- return
-
- CCS = pkgdict.get("cc_ccs",[])
- CPPS = pkgdict.get("cpp_cpps",[])
-
- if not CCS and not CPPS:
- return
-
- BUILD_MACRO = pkgdict.get("build_macro","ERROR!")
- EXTRA_CFLAGS = pkgdict.get("cflags","")
- EXTRA_DFLAGS = pkgdict.get("dflags","")
- INCLUDES=pkgdict.get("include_path",[])
- INCLUDE_PATH=[os.path.join(config.src_dir, i) for i in INCLUDES]
- MACROS =pkgdict.get("macros",[])
-
- flibs = pkgdict.get("lib_requires",[])
- needs_libs = []
- for i in flibs:
- needs_libs.append(i+"_dynamic")
-
- lib = pkgdict.get("provides_lib","lib"+pkg)
-
- print " ++ "+pkg+" RTL (dynamic)"
-
- compile_kwds = {
- 'outdir': 'build',
- 'include_paths': INCLUDE_PATH+[config.FLX_RTL_DIR, config.FLX_TARGET_CONFIG_DIR],
- 'optimise': self.optimise,
- 'debug': self.debug,
- 'macros': MACROS+["BUILD_"+BUILD_MACRO],
- 'CFLAGS': EXTRA_CFLAGS,
- }
-
- link_kwds = {
- 'lib_paths': [config.TARGET_CXX.options.SHLIB_DIR],
- 'libs': needs_libs,
- 'LDFLAGS': EXTRA_DFLAGS,
- }
-
- if CCS:
- # RF: THIS SHOULD BE TARGET_CC AND THE OUTPUT DIR SHOULD BE THAT OF
- # THE TARGET.
- objects = config.TARGET_CC.compile_shared_rtl(CCS, **compile_kwds)
- library = config.TARGET_CC.options.SHLIB_DIR+os.sep+lib+'_dynamic'
-
- config.TARGET_CC.link_shared_rtl(objects, library, **link_kwds)
-
- if config.TARGET_CC.options.EXT_DYLIB != config.TARGET_CC.options.EXT_SHLIB:
- config.TARGET_CC.link_shared_dll(objects, library,
- **link_kwds)
-
- if CPPS:
- # RF: THIS SHOULD BE TARGET_CXX AND THE OUTPUT DIR SHOULD BE THAT OF
- # THE TARGET.
- objects = config.TARGET_CXX.compile_shared_rtl(CPPS, **compile_kwds)
- library = config.TARGET_CXX.options.SHLIB_DIR+os.sep+lib+'_dynamic'
-
- config.TARGET_CXX.link_shared_rtl(objects, library, **link_kwds)
-
- if config.TARGET_CXX.options.EXT_DYLIB != config.TARGET_CXX.options.EXT_SHLIB:
- config.TARGET_CXX.link_shared_dll(objects, library, **link_kwds)
-
- for x in CCS + CPPS:
- f = x + config.TARGET_CXX.options.EXT_SHARED_OBJ
diff --git a/fbuild_old/lib/fbuild/mkplugins/build_target_rtl_static.py b/fbuild_old/lib/fbuild/mkplugins/build_target_rtl_static.py
deleted file mode 100644
index e0201b4..0000000
--- a/fbuild_old/lib/fbuild/mkplugins/build_target_rtl_static.py
+++ /dev/null
@@ -1,43 +0,0 @@
-import os
-
-from fbuild.flxbuild.process import Process
-
-import config
-
-class build_target_rtl_static(Process):
- def runme(self, pkg, pkgdict, *args):
- if not config.SUPPORT_STATIC_LINKAGE:
- return
-
- CCS = pkgdict.get("cc_ccs",[])
- CPPS = pkgdict.get("cpp_cpps",[])
-
- if not CCS and not CPPS:
- return
-
- EXTRA_CFLAGS = pkgdict.get("cflags","")
- lib = pkgdict.get("provides_lib","lib"+pkg)
- INCLUDES=pkgdict.get("include_path",[])
- INCLUDE_PATH=[os.path.join(config.src_dir, i) for i in INCLUDES]
- MACROS =pkgdict.get("macros",[])
-
- print " ++ "+pkg+" RTL (static)"
-
- kwds = {
- 'outfile': os.path.join(config.FLX_RTL_DIR, lib + "_static"),
- 'outdir': 'build',
- 'include_paths': INCLUDE_PATH+[config.FLX_RTL_DIR, config.FLX_TARGET_CONFIG_DIR],
- 'macros': MACROS+["FLX_STATIC_LINK"],
- 'optimise': self.optimise,
- 'debug': self.debug,
- 'CFLAGS': EXTRA_CFLAGS,
- }
-
- if CCS:
- config.TARGET_CC.build_static_rtl(CCS, **kwds)
-
- if CPPS:
- config.TARGET_CXX.build_static_rtl(CPPS, **kwds)
-
- for x in CCS + CPPS:
- f = x + config.TARGET_CXX.options.EXT_STATIC_OBJ
diff --git a/fbuild_old/lib/fbuild/mkplugins/clean.py b/fbuild_old/lib/fbuild/mkplugins/clean.py
deleted file mode 100644
index 67c61fa..0000000
--- a/fbuild_old/lib/fbuild/mkplugins/clean.py
+++ /dev/null
@@ -1,19 +0,0 @@
-import os
-import glob
-
-from fbuild.flxbuild.process import Process
-from fbuild.flxbuild.flxutil import erasefile
-
-import config
-
-# cleans products, but not extracted sources
-class clean(Process):
- help = 'remove generated C++ and binaries from test locations'
-
- def runme(self, *args):
- for d in glob.glob(os.path.join("pkg-stamps", "*")):
- erasefile(d)
- for f in glob.glob(os.path.join(config.FLX_LPARCHIVE, "lpsrc", "*.cache")):
- erasefile(f)
- for f in glob.glob(os.path.join(config.FLX_LPARCHIVE, "lpsrc-cache", "*.cache")):
- erasefile(f)
diff --git a/fbuild_old/lib/fbuild/mkplugins/clean_run.py b/fbuild_old/lib/fbuild/mkplugins/clean_run.py
deleted file mode 100644
index 699cbed..0000000
--- a/fbuild_old/lib/fbuild/mkplugins/clean_run.py
+++ /dev/null
@@ -1,23 +0,0 @@
-import os
-import sys
-import glob
-
-from fbuild.flxbuild.process import Process
-from fbuild.flxbuild.flxutil import erasefile
-from fbuild.path import find
-
-import config
-
-# cleans products, but not extracted sources
-class clean_run(Process):
- help = 'remove generated C++ and binaries from test locations'
-
- def runme(self, *args):
- for d in glob.glob(os.path.join("pkg-stamps", "*.run")):
- print "Removing", d
- erasefile(d)
- for d in find('pkg-stamps', '*.test'):
- print "Removing", d
- erasefile(d)
-
- sys.exit(0)
diff --git a/fbuild_old/lib/fbuild/mkplugins/extract_grammar.py b/fbuild_old/lib/fbuild/mkplugins/extract_grammar.py
deleted file mode 100644
index a72985b..0000000
--- a/fbuild_old/lib/fbuild/mkplugins/extract_grammar.py
+++ /dev/null
@@ -1,49 +0,0 @@
-import os
-import sys
-
-from fbuild.flxbuild.process import Process
-
-class extract_grammar(Process):
- help = 'extract the grammar as a text file from ocamlyacc src'
-
- def __init__(self, *args, **kwds):
- super(extract_grammar, self).__init__(*args, **kwds)
-
- self.ran = False
-
- def runme(self, pkg, pkgdict, *args):
- if pkg != 'flx_parser':
- return
- if self.ran:
- return
- self.ran = True
-
- print "GENERATING GRAMMAR"
- try:
- os.mkdir('misc')
- except:
- pass
- self.shell(sys.executable,
- os.path.join('script', 'get_grammar'),
- os.path.join('src', 'compiler', 'flx_parser', 'flx_parse.dyp'),
- '>',
- os.path.join('misc', 'flx_parse.grammar'),
- )
- self.shell(sys.executable,
- os.path.join('script', 'flx_flx_grgen'),
- os.path.join('misc', 'flx_parse.grammar'),
- '>',
- os.path.join('lib', 'flx_grammar.flx'),
- )
- self.shell(sys.executable,
- os.path.join('script', 'elk_flx_lexgen'),
- os.path.join('misc', 'flx_parse.grammar'),
- '>',
- os.path.join('misc', 'elk_flx_lex.cc'),
- )
- self.shell(sys.executable,
- os.path.join('script', 'flx_tokgen'),
- os.path.join('misc', 'flx_parse.grammar'),
- '>',
- os.path.join('lib', 'flx_token.flx'),
- )
diff --git a/fbuild_old/lib/fbuild/mkplugins/extract_iscr.py b/fbuild_old/lib/fbuild/mkplugins/extract_iscr.py
deleted file mode 100644
index 42c4693..0000000
--- a/fbuild_old/lib/fbuild/mkplugins/extract_iscr.py
+++ /dev/null
@@ -1,17 +0,0 @@
-import os
-
-from fbuild.flxbuild.process import Process
-from fbuild.flxbuild.flxutil import unix2native
-
-import config
-
-class extract_iscr(Process):
- def runme(self, pkg, pkgdict, *args):
- srcs = pkgdict.get("iscr_source",[])
- for src in srcs:
- src = unix2native(src)
- if not self.quiet: print "REExtracting", pkg, "from", src
- self.shell(config.ISCR,
- '--break-on-error',
- os.path.join(config.FLX_LPARCHIVE, src),
- )
diff --git a/fbuild_old/lib/fbuild/mkplugins/fcount.py b/fbuild_old/lib/fbuild/mkplugins/fcount.py
deleted file mode 100644
index fd6adf7..0000000
--- a/fbuild_old/lib/fbuild/mkplugins/fcount.py
+++ /dev/null
@@ -1,39 +0,0 @@
-import os
-import sys
-
-from fbuild.flxbuild.process import Process
-
-class fcount(Process):
- def __init__(self, *args, **kwds):
- super(fcount, self).__init__(*args, **kwds)
- self.ran = False
-
- def runme(self, pkg, pkgdict,*args):
- if self.ran:
- return
- self.ran = True
-
- # requires posix
- if os.name == 'nt':
- return
-
- if "inline" in self.options:
- tkind = "inline"
- elif "noinline" in self.options:
- tkind = "noinline"
- else:
- tkind = "std"
-
- self.shell(sys.executable,
- os.path.join('script', 'fcount.py'),
- os.path.join('misc', 'fcounts.stats'),
- tkind,
- '"' + os.path.join('tut', 'examples', '*.hpp') + '"',
- )
-
- self.shell(sys.executable,
- os.path.join('script', 'fcount.py'),
- os.path.join('misc', 'fcounts.stats'),
- tkind,
- '"' + os.path.join('test', '*.hpp') + '"',
- )
diff --git a/fbuild_old/lib/fbuild/mkplugins/gramdoc.py b/fbuild_old/lib/fbuild/mkplugins/gramdoc.py
deleted file mode 100644
index 3275a43..0000000
--- a/fbuild_old/lib/fbuild/mkplugins/gramdoc.py
+++ /dev/null
@@ -1,31 +0,0 @@
-import os
-
-from fbuild.flxbuild.process import Process
-from fbuild.flxbuild.flxutil import ExecutionError
-
-class gramdoc(Process):
- help = 'make the syntax documentation'
-
- def __init__(self, *args, **kwds):
- super(gramdoc, self).__init__(*args, **kwds)
- self.ran = False
-
- def runme(self, pkg, pkgdict, *args):
- if self.ran:
- return
- self.ran = True
-
- print "GENERATING SYNTAX DOCUMENTATION"
- f = open(os.path.join('tmp', 'xx.flx'), 'w')
- f.write("#import <nugram.flxh>\n")
- f.write("open syntax felix\n")
- f.close()
-
- try:
- self.shell(os.path.join('bin', 'flxp'),
- '-Ilib',
- '--document-grammar',
- os.path.join('tmp', 'xx'),
- )
- except ExecutionError:
- pass
diff --git a/fbuild_old/lib/fbuild/mkplugins/help.py b/fbuild_old/lib/fbuild/mkplugins/help.py
deleted file mode 100644
index bb3664d..0000000
--- a/fbuild_old/lib/fbuild/mkplugins/help.py
+++ /dev/null
@@ -1,42 +0,0 @@
-import sys
-
-from fbuild.flxbuild.process import Process, processes
-from fbuild.flxbuild.package import pkgreqs, pkgdeps
-
-class help(Process):
- help = 'print this help message'
-
- def preprocess(self):
- # PRINT PACKAGE DEPENDENCY INFORMATION
- print "REQS: "
- ks=pkgreqs.keys()
- ks.sort()
- length = 0
- for k in ks:
- length = max(len(k), length)
- for k in ks:
- print " ", k.ljust(length), pkgreqs[k]
-
- print
- print "DEPS: "
- ks = pkgdeps.keys()
- ks.sort()
- length = 0
- for k in ks:
- length = max(len(k), length)
- for k in ks:
- print " ", k.ljust(length), pkgdeps[k]
-
- items = []
- max_len = 0
- for name, process in processes.items():
- if process.help:
- max_len = max(max_len, len(name))
- items.append((name, process.help))
-
- items.sort()
-
- for name, help in items:
- print '%s %s' % (name.ljust(max_len), help)
-
- sys.exit(0)
diff --git a/fbuild_old/lib/fbuild/mkplugins/impldoc.py b/fbuild_old/lib/fbuild/mkplugins/impldoc.py
deleted file mode 100644
index 59eabcd..0000000
--- a/fbuild_old/lib/fbuild/mkplugins/impldoc.py
+++ /dev/null
@@ -1,45 +0,0 @@
-import os
-
-from fbuild.flxbuild.process import Process
-from fbuild.flxbuild.flxutil import mkdirs, ExecutionError, MissingFile
-
-import config
-
-class impldoc(Process):
- help = 'make the ocaml compiler documentation'
-
- def runme(self, pkg, pkgdict, *args):
- modules = pkgdict.get('caml_modules', [])
- interfaces = pkgdict.get('caml_interfaces', [])
- includes = pkgdict.get('caml_include_paths', [])
-
- if not (modules or interfaces):
- return
-
- print "GENERATING OCAMLDOC", pkg
-
- mlis = []
-
- for module in interfaces + modules:
- for extension in '.mli', '.ml':
- f = config.HOST_OCAML.find_in_src_dir(module + extension)
- if os.path.exists(f):
- mlis.append(f)
- break
- elif os.path.exists(os.path.join('build', f)):
- mlis.append(os.path.join('build', f))
- break
- else:
- print 'ignoring:', module
- else:
- raise MissingFile(module)
-
- if not mlis:
- return
-
- try:
- config.HOST_OCAML.ocamldoc(mlis,
- outdir=os.path.join('doc', 'impldoc', pkg),
- include_paths=[os.path.join('build', i) for i in includes])
- except ExecutionError:
- pass # well ocamldoc is full of bugs ..
diff --git a/fbuild_old/lib/fbuild/mkplugins/man.py b/fbuild_old/lib/fbuild/mkplugins/man.py
deleted file mode 100644
index b13b8a5..0000000
--- a/fbuild_old/lib/fbuild/mkplugins/man.py
+++ /dev/null
@@ -1,30 +0,0 @@
-import os
-import glob
-
-from fbuild.flxbuild.process import Process
-
-class man(Process):
- help = 'make the man pages'
-
- def runme(self, *args):
- print "GENERATING MAN PAGES"
- mp = glob.glob('man/man1/*.1')
- MAN_PAGES = []
- for i in mp:
- MAN_PAGES.append (i[9:])
- try: os.mkdir("doc/htmlman")
- except: pass
- dtd = ('<!DOCTYPE HTML PUBLIC \\"-//W3C//DTD HTML 4.0 Transitional//EN\\"\\n'+
- ' \\"http://www.w3.org/TR/REC-html40/loose.dtd\\">')
- try:
- for file in MAN_PAGES:
- basename = os.path.splitext(file)[0]
- self.shell(
- "man2html man/man1/" + file +
- '| sed -e "s%<A HREF=\\"[^<]*cgi-bin/man/man2html?1+\(.*\)\\">%<A HREF=\\"\\1_1.html\\">%"' +
- '| sed -e "7d"' +
- '| sed -e "1,3d"' +
- '| sed -e "s%<html>%'+dtd+'\\n<html>%"' +
- ' >' + "doc/htmlman/" + basename+'_1.html',
- )
- except:pass
diff --git a/fbuild_old/lib/fbuild/mkplugins/manifest.py b/fbuild_old/lib/fbuild/mkplugins/manifest.py
deleted file mode 100644
index 6bf753c..0000000
--- a/fbuild_old/lib/fbuild/mkplugins/manifest.py
+++ /dev/null
@@ -1,45 +0,0 @@
-import os
-import glob
-import re
-
-from fbuild.flxbuild.process import Process
-from fbuild.flxbuild.flxutil import filetime, newest_filetime, mkdirs
-
-import config
-
-class manifest(Process):
- def make_manifest(self, pkg, deps_filename):
- regex = re.compile(r"CREATING .* NAMED FILE SOURCE (.*) \[")
-
- mkdirs("manifests")
- f = open(deps_filename, "w")
- try:
- for line in self.shell(config.ISCR, '--trace=sources', '--trace=changes', pkg):
- m = regex.match(line)
- if m:
- dep = m.group(1)
- print "Source File:", dep
- print >> f, os.path.join(config.FLX_LPARCHIVE, 'lpsrc', dep)
- finally:
- f.close()
-
- def preprocess(self):
- paks = glob.glob(os.path.join(config.FLX_LPARCHIVE, "lpsrc", "*.pak"))
- for pak in paks:
- base = os.path.basename(os.path.splitext(pak)[0])
- deps_filename = os.path.join("manifests", base + '.deps')
-
- if not os.path.exists(deps_filename):
- print "New pak", pak, "BUILDING MANIFEST"
- self.make_manifest(pak, deps_filename)
- else:
- manifest_time = filetime(deps_filename)
-
- f = open(deps_filename)
- try:
- src_time = newest_filetime([line.strip() for line in f] + [pak])
- if src_time > manifest_time:
- print "Changed pak", pak, "REBUILDING MANIFEST"
- self.make_manifest(pak, deps_filename)
- finally:
- f.close()
diff --git a/fbuild_old/lib/fbuild/mkplugins/mkdoc.py b/fbuild_old/lib/fbuild/mkplugins/mkdoc.py
deleted file mode 100644
index 204c7eb..0000000
--- a/fbuild_old/lib/fbuild/mkplugins/mkdoc.py
+++ /dev/null
@@ -1,42 +0,0 @@
-import os
-import shutil
-
-from fbuild.flxbuild.process import Process
-from fbuild.flxbuild.flxutil import mkdirs
-
-import config
-
-class mkdoc(Process):
- def __init__(self, *args, **kwds):
- super(mkdoc, self).__init__(*args, **kwds)
-
- self.already_generated = {}
-
- def runme(self, pkg, pkgdict, *args):
- mkdirs("pkg-stamps")
-
- weaver_directory = pkgdict.get("weaver_directory","")
- iscr_source = pkgdict.get("iscr_source",[])
- if iscr_source and weaver_directory:
- print "GENERATING DOCUMENTATION",weaver_directory
-
- iscr_source = filter(lambda x: x not in self.already_generated, iscr_source)
-
- mkdirs(weaver_directory)
- shutil.copy(os.path.join('misc', 'plus.gif'), weaver_directory)
- shutil.copy(os.path.join('misc', 'minus.gif'), weaver_directory)
- shutil.copy(os.path.join('misc', 'dot.gif'), weaver_directory)
- shutil.copy(os.path.join('misc', 'interscript.css'), weaver_directory)
- shutil.copy(os.path.join('misc', 'user.css'), weaver_directory)
-
- for src in iscr_source:
- self.shell(config.ISCR,
- '--language=en',
- '--weaver=web',
- '--weaver=latex',
- '--passes=2',
- '--weaver-directory=' + weaver_directory,
- os.path.join(config.FLX_LPARCHIVE, src),
- )
-
- self.already_generated[src] = 1
diff --git a/fbuild_old/lib/fbuild/mkplugins/performance.py b/fbuild_old/lib/fbuild/mkplugins/performance.py
deleted file mode 100644
index 6c0ab3d..0000000
--- a/fbuild_old/lib/fbuild/mkplugins/performance.py
+++ /dev/null
@@ -1,37 +0,0 @@
-import os
-
-from fbuild.flxbuild.process import Process
-
-import config
-
-class performance(Process):
- help = 'make performance tests'
-
- SPECIAL_TESTS = [
- (os.path.join('bin', 'drivers', 'flx_run'), os.path.join('test', 'drivers', 'flx_run_lib1.flx'),''),
- (os.path.join('test', 'drivers', 'flx_perf_drv1'), os.path.join('test', 'drivers', 'flx_perf_lib1.flx'),'1000'),
- ]
-
- def performance(self):
- for driver,testfile,moreargs in self.SPECIAL_TESTS:
- test_basename = os.path.splitext(testfile)[0]
- drv_basename = os.path.splitext(driver)[0]
- if config.SUPPORT_DYNAMIC_LOADING:
- testscript = "time "+drv_basename+ " "+test_basename+config.TARGET_CXX.options.EXT_SHLIB+" " + moreargs
- print '(dynamic link) Executing ',testscript
- try:
- self.shell(testscript)
- #print 'TESTFILE -- OK!',testscript
- except ExecutionError, e:
- print 'TESTFILE -- ERROR!',testscript
- raise e
-
- if config.SUPPORT_STATIC_LINKAGE:
- testscript = "time "+test_basename+config.TARGET_CXX.options.EXT_EXE+" " + moreargs
- print '(static link) Executing ',testscript
- try:
- self.shell(testscript)
- #print 'TESTFILE -- OK!',testscript
- except ExecutionError, e:
- print 'TESTFILE -- ERROR!',testscript
- raise e
diff --git a/fbuild_old/lib/fbuild/mkplugins/pfcount.py b/fbuild_old/lib/fbuild/mkplugins/pfcount.py
deleted file mode 100644
index f8ab6b8..0000000
--- a/fbuild_old/lib/fbuild/mkplugins/pfcount.py
+++ /dev/null
@@ -1,23 +0,0 @@
-import os
-import sys
-
-from fbuild.flxbuild.process import Process
-
-class pfcount(Process):
- def __init__(self, *args, **kwds):
- super(pfcount, self).__init__(*args, **kwds)
- self.ran = False
-
- def runme(self, pkg, pkgdict,*args):
- if self.ran:
- return
- self.ran = True
-
- # requires posix
- if os.name == 'nt':
- return
-
- self.shell(sys.executable,
- os.path.join('script', 'pfcount.py'),
- os.path.join('misc', 'fcounts.stats'),
- )
diff --git a/fbuild_old/lib/fbuild/mkplugins/pfcount_all.py b/fbuild_old/lib/fbuild/mkplugins/pfcount_all.py
deleted file mode 100644
index 84c7e95..0000000
--- a/fbuild_old/lib/fbuild/mkplugins/pfcount_all.py
+++ /dev/null
@@ -1,24 +0,0 @@
-import os
-import sys
-
-from fbuild.flxbuild.process import Process
-
-class pfcount_all(Process):
- def __init__(self, *args, **kwds):
- super(pfcount_all, self).__init__(*args, **kwds)
- self.ran = False
-
- def runme(self, pkg, pkgdict,*args):
- if self.ran:
- return
- self.ran = True
-
- # requires posix
- if os.name == 'nt':
- return
-
- self.shell(sys.executable,
- os.path.join('script', 'pfcount.py'),
- os.path.join('misc', 'fcounts.stats'),
- 'all',
- )
diff --git a/fbuild_old/lib/fbuild/mkplugins/rtldoc.py b/fbuild_old/lib/fbuild/mkplugins/rtldoc.py
deleted file mode 100644
index f23f7f1..0000000
--- a/fbuild_old/lib/fbuild/mkplugins/rtldoc.py
+++ /dev/null
@@ -1,22 +0,0 @@
-import os
-from fbuild.flxbuild.flxutil import mkdirs
-from fbuild.flxbuild.process import Process
-
-class rtldoc(Process):
- help = 'make the runtime language documentation'
-
- def __init__(self, *args, **kwds):
- super(rtldoc, self).__init__(*args, **kwds)
- self.ran = False
-
- def runme(self, pkg, pkgdict, *args):
- if self.ran:
- return
- self.ran = True
-
- mkdirs(os.path.join("doc","rtl"))
- print "GENERATING RTLDOCS"
- try:
- self.shell('doxygen', os.path.join('misc', 'doxconf.dox'))
- except:
- pass
diff --git a/fbuild_old/lib/fbuild/mkplugins/run_completion_tests.py b/fbuild_old/lib/fbuild/mkplugins/run_completion_tests.py
deleted file mode 100644
index a7a5704..0000000
--- a/fbuild_old/lib/fbuild/mkplugins/run_completion_tests.py
+++ /dev/null
@@ -1,12 +0,0 @@
-from fbuild.flxbuild.testprocess import TestProcess
-
-# these tests are units tests with non-deterministic results
-class run_completion_tests(TestProcess):
- help = 'run tests that just need to finish'
-
- def runme(self, pkg, pkgdict, pkgsummary):
- completion_tests = pkgdict.get("completion_tests",[])
- completion_tests.sort()
-
- return self.run_tests(pkg, pkgdict, pkgsummary, completion_tests, 0, 1, 1,
- "*** COMPLETION (nondet) TESTING %s : %%s ****\n" % pkg)
diff --git a/fbuild_old/lib/fbuild/mkplugins/run_demos.py b/fbuild_old/lib/fbuild/mkplugins/run_demos.py
deleted file mode 100644
index a9bcd98..0000000
--- a/fbuild_old/lib/fbuild/mkplugins/run_demos.py
+++ /dev/null
@@ -1,16 +0,0 @@
-from fbuild.flxbuild.testprocess import TestProcess
-
-class run_demos(TestProcess):
- help = 'run demonstrations'
-
- def runme(self, pkg, pkgdict, pkgsummary):
- demos = pkgdict.get("demos",[])
- demos.sort()
-
- if not demos:
- return
-
- print "***** DEMO PACKAGE", pkg, "************"
-
- return self.run_tests(pkg, pkgdict, pkgsummary, demos, 0, 1, 1,
- "*** DEMO PACKAGE %s : %%s ****\n" % pkg)
diff --git a/fbuild_old/lib/fbuild/mkplugins/run_dynamic_unit_tests.py b/fbuild_old/lib/fbuild/mkplugins/run_dynamic_unit_tests.py
deleted file mode 100644
index d6a3e18..0000000
--- a/fbuild_old/lib/fbuild/mkplugins/run_dynamic_unit_tests.py
+++ /dev/null
@@ -1,11 +0,0 @@
-from fbuild.flxbuild.testprocess import TestProcess
-
-class run_dynamic_unit_tests(TestProcess):
- help = 'run dynamic unit tests'
-
- def runme(self, pkg, pkgdict, pkgsummary):
- dynamic_unit_tests = pkgdict.get("dynamic_unit_tests",[])
- dynamic_unit_tests.sort()
-
- return self.run_tests(pkg, pkgdict, pkgsummary, dynamic_unit_tests, 1, 0, 1,
- "*** DYNAMIC UNIT TESTING %s : %%s ****\n" % pkg)
diff --git a/fbuild_old/lib/fbuild/mkplugins/run_failure_tests.py b/fbuild_old/lib/fbuild/mkplugins/run_failure_tests.py
deleted file mode 100644
index 175208f..0000000
--- a/fbuild_old/lib/fbuild/mkplugins/run_failure_tests.py
+++ /dev/null
@@ -1,49 +0,0 @@
-import os
-
-from fbuild.flxbuild.process import Process
-from fbuild.flxbuild.flxutil import Tee
-from fbuild.path import relativepath, glob_paths
-
-class run_failure_tests(Process):
- help = 'run tests meant to fail'
-
- def runme(self, pkg, pkgdict, pkgsummary):
- #bad_tests = pkgdict.get("failure_tests",[])
- bad_tests=[]
- bad_tests.sort()
-
- failed = 0
-
- root = pkgdict.get('root', '.')
- for testfile in glob_paths(bad_tests):
- log = Tee()
- log.write("**** FAILURE TESTING PACKAGE %s : %s ****\n" % (pkg, testfile))
-
- #log.write('Running Felix code generator on %s\n' % testfile)
- localpath = relativepath(root, testfile)
- builddir = os.path.join('build', os.path.dirname(localpath))
-
- try:
- self.shell(os.path.join('bin', 'flxg'),
- '-e',
- '-Ilib',
- '-I' + os.path.dirname(testfile),
- '--cache_dir=' + builddir,
- '--output_dir=' + builddir,
- '--elkhound=' + os.path.join('bin', 'flx_elkhound'),
- '--import=flx.flxh',
- 'std',
- os.path.splitext(os.path.basename(testfile))[0],
- log=log,
- )
-
- #log.write('TESTFILE -- failed as expected %s\n' % testfile)
- self.successes.append((pkg, testfile, log.getvalue()))
- except ExecutionError, e:
- failed = 1
-
- log.write('TESTFILE -- SUCCEEDED, SHOULD HAVE FAILED! %s\n' % testfile)
-
- self.failures.append((pkg, testfile, log.getvalue()))
-
- return not failed
diff --git a/fbuild_old/lib/fbuild/mkplugins/run_known_failed_tests.py b/fbuild_old/lib/fbuild/mkplugins/run_known_failed_tests.py
deleted file mode 100644
index 96b7b89..0000000
--- a/fbuild_old/lib/fbuild/mkplugins/run_known_failed_tests.py
+++ /dev/null
@@ -1,22 +0,0 @@
-from fbuild.flxbuild.process import Process
-from fbuild.flxbuild.flxutil import Tee
-
-class run_known_failed_tests(Process):
- help = 'mark tests failed that are known to fail without running'
-
- def __init__(self, *args, **kwds):
- super(run_known_failed_tests, self).__init__(*args, **kwds)
- self.dummy = 1
-
- def runme(self, pkg, pkgdict,pkgsummary):
- bad_tests = pkgdict.get("known_failed_tests",[])
- bad_tests.sort()
-
- for testfile in bad_tests:
- log = Tee()
- log.write("**** KNOWN FAILED TESTING PACKAGE %s : %s ****\n" % (pkg, testfile))
-
- # don't even bother to compile
- self.failures.append((pkg, testfile, ""))
-
- return 1 # don't report any failure!
diff --git a/fbuild_old/lib/fbuild/mkplugins/run_static_unit_tests.py b/fbuild_old/lib/fbuild/mkplugins/run_static_unit_tests.py
deleted file mode 100644
index d6c848c..0000000
--- a/fbuild_old/lib/fbuild/mkplugins/run_static_unit_tests.py
+++ /dev/null
@@ -1,11 +0,0 @@
-from fbuild.flxbuild.testprocess import TestProcess
-
-class run_static_unit_tests(TestProcess):
- help = 'run static unit tests'
-
- def runme(self, pkg, pkgdict, pkgsummary):
- static_unit_tests = pkgdict.get("static_unit_tests",[])
- static_unit_tests.sort()
-
- return self.run_tests(pkg, pkgdict, pkgsummary, static_unit_tests, 1, 1, 0,
- "*** STATIC UNIT TESTING %s : %%s ****\n" % pkg)
diff --git a/fbuild_old/lib/fbuild/mkplugins/run_unit_tests.py b/fbuild_old/lib/fbuild/mkplugins/run_unit_tests.py
deleted file mode 100644
index 0306d63..0000000
--- a/fbuild_old/lib/fbuild/mkplugins/run_unit_tests.py
+++ /dev/null
@@ -1,11 +0,0 @@
-from fbuild.flxbuild.testprocess import TestProcess
-
-class run_unit_tests(TestProcess):
- help = 'run unit tests'
-
- def runme(self, pkg, pkgdict, pkgsummary):
- unit_tests = pkgdict.get("unit_tests",[])
- unit_tests.sort()
-
- return self.run_tests(pkg, pkgdict, pkgsummary, unit_tests, 1, 1, 1,
- "*** UNIT TESTING %s : %%s ****\n" % pkg)
diff --git a/fbuild_old/lib/fbuild/mkplugins/speed_tests.py b/fbuild_old/lib/fbuild/mkplugins/speed_tests.py
deleted file mode 100644
index 0f6f277..0000000
--- a/fbuild_old/lib/fbuild/mkplugins/speed_tests.py
+++ /dev/null
@@ -1,38 +0,0 @@
-import os
-import sys
-import shutil
-
-from fbuild.flxbuild.process import Process
-from fbuild.flxbuild.flxutil import mkdirs
-
-import config
-
-class speed_tests(Process):
- help = 'Run comparative tests'
-
- def __init__(self, *args, **kwds):
- super(speed_tests, self).__init__(*args, **kwds)
- self.ran = False
-
- def runme(self, pkg, pkgdict, *args):
- if self.ran:
- return
- self.ran = True
-
- flx_perf = os.path.join(config.FLX_LPARCHIVE, 'flx_perf.pak')
- self.shell(config.ISCR, flx_perf)
-
- import speed.measure
- import speed.panal
-
- self.shell('gnuplot', os.path.join('speed', 'mkjpgs.gpl'))
- self.shell(config.ISCR,
- '--inhibit-sref=1',
- '--language=en',
- '--weaver=web',
- '--passes=2',
- '--weaver-directory=speed/',
- flx_perf,
- )
-
- shutil.copy(os.path.join('misc', 'interscript.css'), 'speed')
diff --git a/fbuild_old/lib/fbuild/mkplugins/typeclassdoc.py b/fbuild_old/lib/fbuild/mkplugins/typeclassdoc.py
deleted file mode 100644
index dc6e8f0..0000000
--- a/fbuild_old/lib/fbuild/mkplugins/typeclassdoc.py
+++ /dev/null
@@ -1,33 +0,0 @@
-import os
-import shutil
-
-from fbuild.flxbuild.process import Process
-from fbuild.flxbuild.flxutil import mkdirs, ExecutionError
-
-class typeclassdoc(Process):
- help = 'make the typeclass documentation'
-
- def __init__(self, *args, **kwds):
- super(typeclassdoc, self).__init__(*args, **kwds)
- self.ran = False
-
- def runme(self, pkg, pkgdict, *args):
- if self.ran:
- return
- self.ran = True
-
- print "GENERATING TYPECLASS and MODULE DOCUMENTATION"
- mkdirs(os.path.join('doc', 'moduledoc'))
- shutil.copy(os.path.join('misc', 'plus.gif'), os.path.join("doc", "moduledoc"))
- shutil.copy(os.path.join('misc', 'minus.gif'), os.path.join("doc", "moduledoc"))
- shutil.copy(os.path.join('misc', 'dot.gif'), os.path.join("doc", "moduledoc"))
-
- try:
- self.shell(os.path.join('bin', 'flxd'),
- '-Ilib',
- '--import=flx.flxh',
- '--document-typeclass',
- os.path.join('misc', 'ldoc'),
- )
- except ExecutionError:
- pass
diff --git a/fbuild_old/lib/fbuild/path.py b/fbuild_old/lib/fbuild/path.py
deleted file mode 100644
index 58a48e3..0000000
--- a/fbuild_old/lib/fbuild/path.py
+++ /dev/null
@@ -1,62 +0,0 @@
-import os
-import fnmatch
-import glob
-import types
-
-def splitall(path):
- paths = []
- old_path = path
-
- while True:
- path, filename = os.path.split(path)
-
- if path == old_path:
- if path:
- paths.append(path)
- break
- else:
- old_path = path
- paths.append(filename)
- paths.reverse()
- return paths
-
-def relativepath(root, path):
- roots = splitall(os.path.abspath(root))
- paths = splitall(os.path.abspath(path))
-
- for i, (r, p) in enumerate(zip(roots, paths)):
- j = i
- if r != p:
- break
- else:
- i += 1
- j = len(roots)
-
- new_paths = ['..'] * (len(roots) - i) + paths[j:]
-
- if not new_paths:
- return '.'
- else:
- return os.path.join(*new_paths)
-
-def find(path, name=None, include_dirs=True):
- for root, dirs, files in os.walk(path):
- if include_dirs:
- files += dirs
-
- for f in files:
- if name is not None and not fnmatch.fnmatch(f, name):
- continue
-
- yield os.path.join(root, f)
-
-def glob_paths(paths, root='.'):
- new_paths = []
- for path in paths:
- # if the path is not a string, assume it's a list of path elements
- if not isinstance(path, types.StringTypes):
- path = os.path.join(*path)
-
- pattern = os.path.join(root, path)
- new_paths.extend(glob.glob(pattern))
- return new_paths
diff --git a/fbuild_old/lib/fbuild/processes/__init__.py b/fbuild_old/lib/fbuild/processes/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/fbuild_old/lib/fbuild/processes/dypgen.py b/fbuild_old/lib/fbuild/processes/dypgen.py
deleted file mode 100644
index 1c89864..0000000
--- a/fbuild_old/lib/fbuild/processes/dypgen.py
+++ /dev/null
@@ -1,28 +0,0 @@
-import os
-import config
-
-def build_grammar(pkg, pkgdict, *args):
- PGENPARSES = pkgdict.get("caml_pgenparses", [])
- DYPARSES = pkgdict.get("caml_dyparses", [])
- MODULES = pkgdict.get("caml_modules", [])
-
- for module in MODULES:
- dyp = config.HOST_OCAML.find_in_src_dir(module + '.dyp')
- if os.path.exists(dyp) or os.path.exists(os.path.join('build', dyp)):
- DYPARSES.append(module)
-
- if not (PGENPARSES or DYPARSES):
- return
-
- print "CAML BUILDING DYPGEN GRAMMAR", pkg
-
- if PGENPARSES:
- config.HOST_OCAML.gen_pgen_parser(PGENPARSES,
- outdir='build',
- )
-
- if DYPARSES:
- config.HOST_OCAML.gen_dypgen_parser(DYPARSES,
- FLAGS=['--prio-pt', '--pv-obj', '--noemit-token-type'],
- outdir='build',
- )
diff --git a/fbuild_old/lib/fbuild/processes/ocaml.py b/fbuild_old/lib/fbuild/processes/ocaml.py
deleted file mode 100644
index fba43e4..0000000
--- a/fbuild_old/lib/fbuild/processes/ocaml.py
+++ /dev/null
@@ -1,151 +0,0 @@
-import os
-import shutil
-
-import config
-from fbuild.flxbuild.process import Process
-from fbuild.flxbuild.flxutil import unix2native, MissingFile
-
-def copy_mli2ml(pkg, pkgdict, *args):
- for f in pkgdict.get('caml_raw_interfaces', []):
- f = unix2native(f)
- src = f+'.mli'
- dst = f+'.ml'
-
- print 'copying file', src, '->', dst
- shutil.copyfile(src, dst)
-
-
-def build_grammar(pkg, pkgdict, *args):
- LEXS = pkgdict.get("caml_lexes", [])
- PARSES = pkgdict.get("caml_parses", [])
- MODULES = pkgdict.get("caml_modules", [])
-
- for module in MODULES:
- mll = config.HOST_OCAML.find_in_src_dir(module + '.mll')
- if os.path.exists(mll) or os.path.exists(os.path.join('build', mll)):
- LEXS.append(module)
-
- mly = config.HOST_OCAML.find_in_src_dir(module + '.mly')
- if os.path.exists(mly) or os.path.exists(os.path.join('build', mly)):
- PARSES.append(module)
-
- if not (LEXS or PARSES):
- return
-
- print "CAML BUILDING GRAMMAR", pkg
-
- if LEXS:
- config.HOST_OCAML.gen_lexer(LEXS,
- outdir='build',
- )
-
- if PARSES:
- config.HOST_OCAML.gen_parser(PARSES,
- outdir='build',
- )
-
-
-class build_modules(Process):
- def runme(self, pkg, pkgdict, *args):
- MODULES = pkgdict.get("caml_modules", [])
- INTERFACES = pkgdict.get("caml_interfaces", [])
- IMPLEMENTATIONS = pkgdict.get("caml_implementations", [])
- PACKS = pkgdict.get("caml_pack", [])
- INCLUDES = pkgdict.get("caml_include_paths", [])
-
- if not (MODULES or INTERFACES or IMPLEMENTATIONS):
- return
-
- print "CAML COMPILING", pkg
-
- kwds = dict(
- outdir='build',
- include_paths=[os.path.join('build', i) for i in INCLUDES],
- packs=PACKS,
- debug=self.debug,
- profile='profile' in self.options,
- optimise='optimise_felix' in self.options,
- )
-
- for module in MODULES:
- mli = config.HOST_OCAML.find_in_src_dir(module + '.mli')
- mli_exists = os.path.exists(mli) or os.path.exists(os.path.join('build', mli))
-
- ml = config.HOST_OCAML.find_in_src_dir(module + '.ml')
- ml_exists = os.path.exists(ml) or os.path.exists(os.path.join('build', ml))
-
- if not mli_exists and not ml_exists:
- raise MissingFile(module)
-
- if mli_exists:
- config.HOST_OCAML.compile_interface([module], **kwds)
-
- if ml_exists:
- config.HOST_OCAML.compile_module([module],
- bytecode='bytecode' in self.options,
- **kwds)
-
-
- config.HOST_OCAML.compile_interface(INTERFACES, **kwds)
-
- config.HOST_OCAML.compile_module(IMPLEMENTATIONS,
- bytecode='bytecode' in self.options,
- **kwds)
-
-
-class build_libs(Process):
- def runme(self, pkg, pkgdict, *args):
- IMPLEMENTATIONS = [os.path.join('build', f)
- for f in pkgdict.get("caml_implementations", [])]
- lib = pkgdict.get('caml_provide_lib', os.path.join('src', pkg + 'lib'))
-
- MODULES = [os.path.join('build', f)
- for f in pkgdict.get("caml_modules", [])]
-
- if 'bytecode' in self.options or not config.HOST_OCAML.options.NATIVE_CODE_COMPILER:
- MODULES = [f for f in MODULES if os.path.exists(f + '.cmo')]
- else:
- MODULES = [f for f in MODULES if os.path.exists(f + '.cmx')]
-
- if not MODULES + IMPLEMENTATIONS:
- return
-
- print "CAML CREATING LIBRARY", lib
- config.HOST_OCAML.link_lib(MODULES + IMPLEMENTATIONS, lib,
- bytecode='bytecode' in self.options,
- outdir='build',
- )
-
-
-class build_exes(Process):
- def runme(self, pkg, pkgdict, *args):
- EXES = pkgdict.get("caml_exes", [])
- OLIBRARIES = pkgdict.get("caml_require_libs", [])
- INCLUDES = pkgdict.get("caml_include_paths", [])
-
- if not EXES:
- return
-
- print "CAML LINKING EXECUTABLES"
-
- kwds = dict(
- outdir='build',
- bytecode='bytecode' in self.options,
- include_paths=[os.path.join('build', i) for i in INCLUDES],
- )
-
- output_exes = []
- for exe in EXES:
- config.HOST_OCAML.compile_module([exe], **kwds)
-
- src = config.HOST_OCAML.link_exe(
- [os.path.join('build', exe)],
- os.path.splitext(exe)[0] + config.HOST_OCAML.options.EXT_EXE,
- libs=OLIBRARIES,
- **kwds)
-
- dst = os.path.join('bin', os.path.basename(src))
-
- if not self.quiet: print 'copying file', src, '->', dst
- shutil.copy(src, dst)
- output_exes.append(dst)
diff --git a/fbuild_old/lib/fbuild/processes/ocamlbuild.py b/fbuild_old/lib/fbuild/processes/ocamlbuild.py
deleted file mode 100644
index 90a82d6..0000000
--- a/fbuild_old/lib/fbuild/processes/ocamlbuild.py
+++ /dev/null
@@ -1,19 +0,0 @@
-import shutil
-
-import config
-from fbuild.flxbuild.process import Process
-
-class build(Process):
- def runme(self, pkg, pkgdict, *args):
- EXES = pkgdict.get('caml_exes', [])
-
- for exe in EXES:
- if BYTECODE:
- bin = exe + '.byte'
- else:
- bin = exe + '.native'
-
- self.shell(config.OCAMLBUILD, bin)
-
- mkdirs('bin')
- shutil.copy(os.path.join('_build', bin), 'bin')
diff --git a/fbuild_old/lib/fbuild/scheduler.py b/fbuild_old/lib/fbuild/scheduler.py
deleted file mode 100644
index 2ec60dc..0000000
--- a/fbuild_old/lib/fbuild/scheduler.py
+++ /dev/null
@@ -1,81 +0,0 @@
-import sys
-import threading
-
-# we need the Queue from python 2.5 or greater
-if sys.version_info >= (2, 5):
- import Queue
-else:
- import fbuild.compat._Queue as Queue
-
-__all__ = ['Scheduler', 'Future', 'Full', 'Empty']
-
-Full = Queue.Full
-Empty = Queue.Empty
-
-class Scheduler(Queue.Queue):
- def __init__(self, maxsize):
- Queue.Queue.__init__(self)
-
- # we subtract one thread because we'll use the main one as well
- for i in range(maxsize - 1):
- thread = threading.Thread(target=self._run)
- thread.setDaemon(True)
- thread.start()
-
- def _run(self):
- while True:
- self._run_one()
-
- def _run_one(self, *args, **kwargs):
- func = self.get(*args, **kwargs)
- try:
- func()
- finally:
- self.task_done()
-
-
- def future(self, function, *args, **kwargs):
- f = Future(self, function, args, kwargs)
- self.put(f.start)
- return f
-
-
-class Future(object):
- def __init__(self, pool, function, args, kwargs):
- self.__pool = pool
- self.__function = function
- self.__args = args
- self.__kwargs = kwargs
- self.__finished = False
- self.__result = None
- self.__exc = None
-
- def __call__(self):
- while not self.__finished:
- # we're going to block anyway, so just run another
- # future
- try:
- self.__pool._run_one(timeout=1.0)
- except Empty:
- pass
-
- if self.__exc:
- raise self.__exc[0], self.__exc[1], self.__exc[2]
-
- return self.__result
-
- def __repr__(self):
- return '<%s: %s, %s, %s>' % (
- self.__class__.__name__,
- self.__function.__name__,
- self.__args,
- self.__kwargs,
- )
-
- def start(self):
- try:
- self.__result = self.__function(*self.__args, **self.__kwargs)
- except:
- self.__exc = sys.exc_info()
-
- self.__finished = True
diff --git a/lpsrc/flx_maker.pak b/lpsrc/flx_maker.pak
index 7678546..ed378e1 100644
--- a/lpsrc/flx_maker.pak
+++ b/lpsrc/flx_maker.pak
@@ -50,95 +50,9 @@ iscr_source = ['lpsrc/flx_maker.pak']

weaver_directory = 'doc/flx/flx_maker/'

-@select(tangler('umk','python'))
-#!/usr/bin/env python
-import os
-import sys
-
-@tangle('src_dir = %r' % config.src_dir)
-os.environ['PYTHONPATH'] = src_dir + os.pathsep + os.environ.get('PYTHONPATH', '')
-
-os.execl(
- sys.executable,
- sys.executable,
- os.path.join(src_dir, 'fbuild_old', 'bin', 'fbuild'),
- *sys.argv[1:])
-@make_executable('umk')
-
-@select(tangler('mk','python'))
-#!/usr/bin/env python
-import os
-import sys
-import time
-
-@tangle('src_dir = %r' % config.src_dir)
-
-sys.path.append(os.path.join(src_dir, 'fbuild_old', 'lib'))
-
-from fbuild.flxbuild.flxutil import Tee, tee_cmd
-
-def main():
- cmd = [sys.executable, 'umk'] + sys.argv[1:]
- stdout = Tee()
- try:
- return tee_cmd(cmd, stdout)
- finally:
- f = open('mk_' + time.strftime('%Y-%m-%d-%H%M') + '.log', 'w')
- print(stdout.getvalue(), file=f)
-
-if __name__ == '__main__':
- sys.exit(main())
-@make_executable('mk')
-
@select(tangler('spkgs/__init__.py'))
@doc()

-@select(tangler('misc/ldoc.flx'))
-include "std";
-include "judy";
-include "pthread";
-include "flx_demux";
-include "flx_faio";
-include "flx_stream";
-include "tre";
-
-include "mmap";
-
-include "glob";
-include "flx_gmp";
-
-include "mpi/mpi";
-
-include "SDL/SDL_active";
-include "SDL/SDL_audio";
-include "SDL/SDL_cdrom";
-include "SDL/SDL_endian";
-include "SDL/SDL_error";
-include "SDL/SDL_events";
-include "SDL/SDL";
-include "SDL/SDL_framerate";
-include "SDL/SDL_gfxPrimitives";
-include "SDL/SDL_gfxPrimitives_font";
-include "SDL/SDL_imageFilter";
-include "SDL/SDL_image";
-include "SDL/SDL_joystick";
-include "SDL/SDL_keyboard";
-include "SDL/SDL_keysym";
-include "SDL/SDL_mixer";
-include "SDL/SDL_mouse";
-include "SDL/SDL_mutex";
-include "SDL/SDL_net";
-include "SDL/SDL_opengl";
-include "SDL/SDL_rotozoom";
-include "SDL/SDL_rwops";
-include "SDL/SDL_sound";
-include "SDL/SDL_timer";
-include "SDL/SDL_ttf";
-include "SDL/SDL_types";
-include "SDL/SDL_version";
-include "SDL/SDL_video";
-@doc()
-
@head(1,'AUTHORS')
@select(tangler('AUTHORS'))
John (Max) Skaller skaller at users dot sourceforge dot net
@@ -2152,6 +2066,5 @@ post-install:
Just cleaning up script now.
@make_executable(os.path.join('bin', 'flx'))
@make_executable(os.path.join('bin', 'flx.py'))
-@make_executable(os.path.join('bin', 'fishcc'))

-----------------------------------------------------------------------

Summary of changes:
fbuild_old/bin/fbuild | 489 --------
fbuild_old/bin/make_config.py | 600 ----------
fbuild_old/lib/fbuild/compat/_Queue.py | 221 ----
fbuild_old/lib/fbuild/compat/_subprocess.py | 1246 --------------------
fbuild_old/lib/fbuild/flxbuild/c_base.py | 31 -
fbuild_old/lib/fbuild/flxbuild/c_cxx_base.py | 1180 ------------------
fbuild_old/lib/fbuild/flxbuild/compiler_base.py | 51 -
fbuild_old/lib/fbuild/flxbuild/config_support.py | 66 -
fbuild_old/lib/fbuild/flxbuild/cxx_base.py | 104 --
fbuild_old/lib/fbuild/flxbuild/flxutil.py | 234 ----
fbuild_old/lib/fbuild/flxbuild/gcc_class.py | 9 -
fbuild_old/lib/fbuild/flxbuild/gnu_mixin.py | 731 ------------
fbuild_old/lib/fbuild/flxbuild/gxx_class.py | 35 -
fbuild_old/lib/fbuild/flxbuild/iscrutil.py | 185 ---
fbuild_old/lib/fbuild/flxbuild/msvc_mixin.py | 215 ----
fbuild_old/lib/fbuild/flxbuild/msvcc_class.py | 13 -
fbuild_old/lib/fbuild/flxbuild/msvcxx_class.py | 10 -
fbuild_old/lib/fbuild/flxbuild/ocaml_class.py | 366 ------
fbuild_old/lib/fbuild/flxbuild/package.py | 4 -
fbuild_old/lib/fbuild/flxbuild/process.py | 82 --
fbuild_old/lib/fbuild/flxbuild/testprocess.py | 300 -----
.../mkplugins/build_felix_dynamic_drivers.py | 49 -
.../fbuild/mkplugins/build_felix_static_drivers.py | 37 -
.../lib/fbuild/mkplugins/build_host_tools.py | 76 --
.../lib/fbuild/mkplugins/build_target_cpp_tools.py | 24 -
.../lib/fbuild/mkplugins/build_target_felix_rtl.py | 25 -
.../fbuild/mkplugins/build_target_felix_tools.py | 34 -
.../fbuild/mkplugins/build_target_rtl_dynamic.py | 73 --
.../fbuild/mkplugins/build_target_rtl_static.py | 43 -
fbuild_old/lib/fbuild/mkplugins/clean.py | 19 -
fbuild_old/lib/fbuild/mkplugins/clean_run.py | 23 -
fbuild_old/lib/fbuild/mkplugins/extract_grammar.py | 49 -
fbuild_old/lib/fbuild/mkplugins/extract_iscr.py | 17 -
fbuild_old/lib/fbuild/mkplugins/fcount.py | 39 -
fbuild_old/lib/fbuild/mkplugins/gramdoc.py | 31 -
fbuild_old/lib/fbuild/mkplugins/help.py | 42 -
fbuild_old/lib/fbuild/mkplugins/impldoc.py | 45 -
fbuild_old/lib/fbuild/mkplugins/man.py | 30 -
fbuild_old/lib/fbuild/mkplugins/manifest.py | 45 -
fbuild_old/lib/fbuild/mkplugins/mkdoc.py | 42 -
fbuild_old/lib/fbuild/mkplugins/performance.py | 37 -
fbuild_old/lib/fbuild/mkplugins/pfcount.py | 23 -
fbuild_old/lib/fbuild/mkplugins/pfcount_all.py | 24 -
fbuild_old/lib/fbuild/mkplugins/rtldoc.py | 22 -
.../lib/fbuild/mkplugins/run_completion_tests.py | 12 -
fbuild_old/lib/fbuild/mkplugins/run_demos.py | 16 -
.../lib/fbuild/mkplugins/run_dynamic_unit_tests.py | 11 -
.../lib/fbuild/mkplugins/run_failure_tests.py | 49 -
.../lib/fbuild/mkplugins/run_known_failed_tests.py | 22 -
.../lib/fbuild/mkplugins/run_static_unit_tests.py | 11 -
fbuild_old/lib/fbuild/mkplugins/run_unit_tests.py | 11 -
fbuild_old/lib/fbuild/mkplugins/speed_tests.py | 38 -
fbuild_old/lib/fbuild/mkplugins/typeclassdoc.py | 33 -
fbuild_old/lib/fbuild/path.py | 62 -
fbuild_old/lib/fbuild/processes/dypgen.py | 28 -
fbuild_old/lib/fbuild/processes/ocaml.py | 151 ---
fbuild_old/lib/fbuild/processes/ocamlbuild.py | 19 -
fbuild_old/lib/fbuild/scheduler.py | 81 --
lpsrc/flx_maker.pak | 87 --
59 files changed, 0 insertions(+), 7652 deletions(-)
delete mode 100755 fbuild_old/bin/fbuild
delete mode 100755 fbuild_old/bin/make_config.py
delete mode 100644 fbuild_old/lib/fbuild/__init__.py
delete mode 100644 fbuild_old/lib/fbuild/compat/_Queue.py
delete mode 100644 fbuild_old/lib/fbuild/compat/__init__.py
delete mode 100644 fbuild_old/lib/fbuild/compat/_subprocess.py
delete mode 100644 fbuild_old/lib/fbuild/flxbuild/__init__.py
delete mode 100644 fbuild_old/lib/fbuild/flxbuild/c_base.py
delete mode 100644 fbuild_old/lib/fbuild/flxbuild/c_cxx_base.py
delete mode 100644 fbuild_old/lib/fbuild/flxbuild/compiler_base.py
delete mode 100644 fbuild_old/lib/fbuild/flxbuild/config_support.py
delete mode 100644 fbuild_old/lib/fbuild/flxbuild/cxx_base.py
delete mode 100644 fbuild_old/lib/fbuild/flxbuild/flxutil.py
delete mode 100644 fbuild_old/lib/fbuild/flxbuild/gcc_class.py
delete mode 100644 fbuild_old/lib/fbuild/flxbuild/gnu_mixin.py
delete mode 100644 fbuild_old/lib/fbuild/flxbuild/gxx_class.py
delete mode 100644 fbuild_old/lib/fbuild/flxbuild/iscrutil.py
delete mode 100644 fbuild_old/lib/fbuild/flxbuild/msvc_mixin.py
delete mode 100644 fbuild_old/lib/fbuild/flxbuild/msvcc_class.py
delete mode 100644 fbuild_old/lib/fbuild/flxbuild/msvcxx_class.py
delete mode 100644 fbuild_old/lib/fbuild/flxbuild/ocaml_class.py
delete mode 100644 fbuild_old/lib/fbuild/flxbuild/package.py
delete mode 100644 fbuild_old/lib/fbuild/flxbuild/process.py
delete mode 100644 fbuild_old/lib/fbuild/flxbuild/testprocess.py
delete mode 100644 fbuild_old/lib/fbuild/mkplugins/__init__.py
delete mode 100644 fbuild_old/lib/fbuild/mkplugins/build_felix_dynamic_drivers.py
delete mode 100644 fbuild_old/lib/fbuild/mkplugins/build_felix_static_drivers.py
delete mode 100644 fbuild_old/lib/fbuild/mkplugins/build_host_tools.py
delete mode 100644 fbuild_old/lib/fbuild/mkplugins/build_target_cpp_tools.py
delete mode 100644 fbuild_old/lib/fbuild/mkplugins/build_target_felix_rtl.py
delete mode 100644 fbuild_old/lib/fbuild/mkplugins/build_target_felix_tools.py
delete mode 100644 fbuild_old/lib/fbuild/mkplugins/build_target_rtl_dynamic.py
delete mode 100644 fbuild_old/lib/fbuild/mkplugins/build_target_rtl_static.py
delete mode 100644 fbuild_old/lib/fbuild/mkplugins/clean.py
delete mode 100644 fbuild_old/lib/fbuild/mkplugins/clean_run.py
delete mode 100644 fbuild_old/lib/fbuild/mkplugins/extract_grammar.py
delete mode 100644 fbuild_old/lib/fbuild/mkplugins/extract_iscr.py
delete mode 100644 fbuild_old/lib/fbuild/mkplugins/fcount.py
delete mode 100644 fbuild_old/lib/fbuild/mkplugins/gramdoc.py
delete mode 100644 fbuild_old/lib/fbuild/mkplugins/help.py
delete mode 100644 fbuild_old/lib/fbuild/mkplugins/impldoc.py
delete mode 100644 fbuild_old/lib/fbuild/mkplugins/man.py
delete mode 100644 fbuild_old/lib/fbuild/mkplugins/manifest.py
delete mode 100644 fbuild_old/lib/fbuild/mkplugins/mkdoc.py
delete mode 100644 fbuild_old/lib/fbuild/mkplugins/performance.py
delete mode 100644 fbuild_old/lib/fbuild/mkplugins/pfcount.py
delete mode 100644 fbuild_old/lib/fbuild/mkplugins/pfcount_all.py
delete mode 100644 fbuild_old/lib/fbuild/mkplugins/rtldoc.py
delete mode 100644 fbuild_old/lib/fbuild/mkplugins/run_completion_tests.py
delete mode 100644 fbuild_old/lib/fbuild/mkplugins/run_demos.py
delete mode 100644 fbuild_old/lib/fbuild/mkplugins/run_dynamic_unit_tests.py
delete mode 100644 fbuild_old/lib/fbuild/mkplugins/run_failure_tests.py
delete mode 100644 fbuild_old/lib/fbuild/mkplugins/run_known_failed_tests.py
delete mode 100644 fbuild_old/lib/fbuild/mkplugins/run_static_unit_tests.py
delete mode 100644 fbuild_old/lib/fbuild/mkplugins/run_unit_tests.py
delete mode 100644 fbuild_old/lib/fbuild/mkplugins/speed_tests.py
delete mode 100644 fbuild_old/lib/fbuild/mkplugins/typeclassdoc.py
delete mode 100644 fbuild_old/lib/fbuild/path.py
delete mode 100644 fbuild_old/lib/fbuild/processes/__init__.py
delete mode 100644 fbuild_old/lib/fbuild/processes/dypgen.py
delete mode 100644 fbuild_old/lib/fbuild/processes/ocaml.py
delete mode 100644 fbuild_old/lib/fbuild/processes/ocamlbuild.py
delete mode 100644 fbuild_old/lib/fbuild/scheduler.py


hooks/post-receive
--
An advanced programming language
