nillerusr
3 years ago
16 changed files with 1795 additions and 284 deletions
@ -0,0 +1,246 @@
@@ -0,0 +1,246 @@
|
||||
# encoding: utf-8 |
||||
# conan.py -- Conan Package Manager integration |
||||
# Copyright (C) 2019 a1batross |
||||
# This program is free software: you can redistribute it and/or modify |
||||
# it under the terms of the GNU General Public License as published by |
||||
# the Free Software Foundation, either version 3 of the License, or |
||||
# (at your option) any later version. |
||||
# |
||||
# This program is distributed in the hope that it will be useful, |
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of |
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
||||
# GNU General Public License for more details. |
||||
|
||||
from waflib import Logs, Utils |
||||
from waflib.Configure import conf |
||||
import os |
||||
import sys |
||||
import subprocess |
||||
import json |
||||
|
||||
def options(opt):
	"""Register Conan-related command line options.

	:param opt: options context
	:type opt: waflib.Options.OptionsContext
	"""
	grp = opt.add_option_group('Conan options')

	grp.add_option('--disable-conan', action='store_true', default=False, dest='NO_CONAN',
		help='completely disable Conan')

	grp.add_option('--force-conan', action='store_true', default=False, dest='CONAN_MANDATORY',
		help='require Conan, useful for testing')

	grp.add_option('--conan-profile', action='store', default=None, dest='CONAN_PROFILE',
		help='set conan profile')
||||
def conan(ctx, args, quiet=False):
	"""Invoke the conan executable inside the build directory.

	:param ctx: context providing env.CONAN, a logger and bldnode
	:param args: argument string or list appended after the conan binary
	:param quiet: when True, capture output instead of inheriting stdio
	:returns: (success, output) tuple; output is empty unless quiet is True
	"""
	argv = [ctx.env.CONAN[0]] + Utils.to_list(args)
	out = b''
	ok = False

	ctx.logger.info('argv: {}'.format(argv))
	if quiet:
		try:
			out = subprocess.check_output(argv, cwd=ctx.bldnode.abspath())
			ctx.logger.info('output: \n{}'.format(out))
		except subprocess.CalledProcessError as e:
			out = e.output
			ctx.logger.info('FAIL!!!\nretval: {}\noutput: \n{}'.format(e.returncode, out))
		else:
			ok = True
	else:
		code = subprocess.call(argv, cwd=ctx.bldnode.abspath())
		if code != 0:
			ctx.logger.info('FAIL!!!\nretval: {}'.format(code))
		else:
			ok = True

	# normalize captured bytes to text with unix line endings
	if sys.version_info > (3, 0):
		out = out.decode('utf-8')
	out = out.strip().replace('\r\n', '\n')

	return (ok, out)
||||
@conf
def add_conan_remote(ctx, name, url):
	"""
	Adds conan remote, verifying that an already-registered remote with
	the same name points to the same URL.

	:param name: name of remote
	:type name: string
	:param url: url path
	:type url: string
	"""
	if not ctx.env.CONAN:
		ctx.fatal("Conan is not installed!")

	ctx.start_msg('Checking if conan has %s remote' % name)
	success, remotes = conan(ctx, 'remote list --raw', quiet=True)

	if not success:
		ctx.end_msg('no')
		ctx.fatal('conan has failed to list remotes')

	found = False
	for line in remotes.splitlines():
		fields = line.split(' ')
		if fields[0] != name:
			continue
		if fields[1] == url:
			found = True
		else:
			ctx.end_msg('no')
			# BUGFIX: message said "You can remote it" instead of "remove it"
			ctx.fatal('''Conan already has %s remote, but it points to another remote!
You can remove it with:\n
$ %s remote remove %s''' % (name, ctx.env.CONAN[0], name))
		break

	if not found:
		ctx.end_msg('no')
		ctx.start_msg('Adding new %s remote to conan' % name)
		# BUGFIX: conan() returns a (success, output) tuple which is always
		# truthy and never compares equal to False; test the success flag
		success, _ = conan(ctx, 'remote add %s %s' % (name, url), quiet=True)
		if not success:
			ctx.end_msg('fail', color='RED')
			ctx.fatal('conan has failed to add %s remote' % name)
		ctx.end_msg('done')
	else:
		ctx.end_msg('yes')
||||
@conf
def parse_conan_json(ctx, name):
	"""Read conanbuildinfo.json from the build directory and merge every
	dependency's paths, libs and flags into the *name* uselib variables.

	:param name: uselib variable suffix (e.g. 'SDL2' -> INCLUDES_SDL2, ...)
	:type name: string
	"""
	def _to_u8(values):
		# python2: waf wants byte strings, so encode every entry
		if sys.version_info > (3, 0):
			return values
		return [v.encode('utf8') for v in values]

	with open(os.path.join(ctx.bldnode.abspath(), 'conanbuildinfo.json')) as jsonfile:
		cfg = json.load(jsonfile)

	ctx.env['HAVE_%s' % name] = True

	for dep in cfg["dependencies"]:
		ctx.env['INCLUDES_%s' % name] += _to_u8(dep['include_paths'])
		ctx.env['LIB_%s' % name] += _to_u8(dep['libs'])
		ctx.env['LIBPATH_%s' % name] += _to_u8(dep['lib_paths'])
		ctx.env['DEFINES_%s' % name] += _to_u8(dep['defines'])
		ctx.env['CFLAGS_%s' % name] += _to_u8(dep['cflags'])
		# NOTE(review): CXXFLAGS also pull from 'cflags' (not 'cxxflags')
		# exactly as the original did -- confirm this is intentional
		ctx.env['CXXFLAGS_%s' % name] += _to_u8(dep['cflags'])
		ctx.env['LINKFLAGS_%s' % name] += _to_u8(dep['sharedlinkflags'])
	return
||||
|
||||
|
||||
def conan_update_profile(ctx, settings, profile):
	"""Write each key=value pair from *settings* into the conan *profile*.

	:param settings: mapping of conan setting name to value
	:type settings: dict
	:param profile: path/name of the conan profile to update
	:type profile: string
	"""
	for key, value in settings.items():
		args = ['profile', 'update', 'settings.%s=%s' % (key, value), profile]
		# BUGFIX: conan() returns a (success, output) tuple which is always
		# truthy and never equal to False; inspect the success flag instead
		success, _ = conan(ctx, args, quiet=True)
		if not success:
			ctx.fatal('Can\'t update profile')
||||
@conf
def add_dependency(ctx, pkg, *k, **kw):
	"""
	Retrieves and adds dependency during configuration stage

	:param pkg: package name in conan format
	:type pkg: string
	:param remote: remote name, optional
	:type remote: string
	:param options: package options, optional
	:type options: dict
	:param uselib_store: set uselib name, optional
	:type uselib_store: string
	"""
	if not ctx.env.CONAN:
		ctx.fatal("Conan is not installed!")

	name = pkg.split('/')[0]
	ctx.msg(msg='Downloading dependency %s' % name, result='in process', color='BLUE')

	args = ['install', pkg, '-g', 'json', '--build=missing', '-pr', ctx.env.CONAN_PROFILE]
	if 'remote' in kw:
		args += ['-r', kw['remote']]

	if 'options' in kw:
		for key, value in kw['options'].items():
			args += ['-o', '%s=%s' % (key, value)]

	# BUGFIX: conan() returns a (success, output) tuple, which is always
	# truthy -- unpack it and test the success flag, otherwise failed
	# installs were treated as successful
	success, _ = conan(ctx, args)
	if success:
		# we just use upper names everywhere unless the caller overrides
		uselib = kw.get('uselib_store', name.upper())
		ctx.parse_conan_json(uselib)
		ctx.msg(msg='Downloading dependency %s' % name, result='ok', color='GREEN')
		return

	ctx.msg(msg='Downloading dependency %s' % name, result='fail', color='RED')
	ctx.fatal('Conan has failed installing dependency %s' % pkg)
||||
def configure(conf):
	"""Detect conan, verify it runs, and prepare a conan profile.

	Honors NO_CONAN / CONAN_MANDATORY options and an optional
	user-supplied profile via --conan-profile.
	"""
	# already configured
	if conf.env.CONAN:
		return

	# respect project settings
	if not conf.env.CONAN_MANDATORY:
		conf.env.CONAN_MANDATORY = conf.options.CONAN_MANDATORY

	# disabled by user request
	if conf.options.NO_CONAN and not conf.env.CONAN_MANDATORY:
		conf.env.CONAN = None
		return

	# BUGFIX: was conf.env.MANDATORY, which is never set anywhere;
	# the intended flag is CONAN_MANDATORY
	conf.find_program('conan', mandatory=conf.env.CONAN_MANDATORY)

	if not conf.env.CONAN:
		return

	conf.start_msg('Checking conan version')
	# BUGFIX: conan() returns a (success, output) tuple which is always
	# truthy -- unpack so failures are detected and only the version
	# string is printed by end_msg below
	success, ver = conan(conf, '--version', quiet=True)
	if not success:
		conf.end_msg('fail')
		if conf.env.CONAN_MANDATORY:
			conf.fatal('Conan has failed! Check your conan installation')
		else:
			Logs.warn('Conan has failed! Check your conan installation. Continuing...')

	if conf.options.CONAN_PROFILE:
		conf.env.CONAN_PROFILE = conf.options.CONAN_PROFILE
	else:
		profile = conf.env.CONAN_PROFILE = os.path.join(conf.bldnode.abspath(), 'temp_profile')
		settings = dict()

		conan(conf, ['profile', 'new', profile, '--detect', '--force'], quiet=True)
		# NOTE: Conan installs even 32-bit runtime packages on x86_64 for now :(
		# it may potentially break system on Linux
		if conf.env.DEST_SIZEOF_VOID_P == 4 and conf.env.DEST_CPU in ['x86', 'x86_64'] and conf.env.DEST_OS != 'linux':
			settings['arch'] = 'x86'

		if conf.env.DEST_OS2 == 'android':
			settings['os'] = 'Android'

		if conf.env.COMPILER_CC == 'msvc':
			settings['compiler.runtime'] = 'MT'

		conan_update_profile(conf, settings, profile)

	# Conan respects environment CC/CXX values, so the compiler is not
	# specified explicitly here
	conf.end_msg(ver)
|
@ -0,0 +1,74 @@
@@ -0,0 +1,74 @@
|
||||
# encoding: utf-8 |
||||
# cxx11.py -- check if compiler can compile C++11 code with lambdas |
||||
# Copyright (C) 2018 a1batross |
||||
# This program is free software: you can redistribute it and/or modify |
||||
# it under the terms of the GNU General Public License as published by |
||||
# the Free Software Foundation, either version 3 of the License, or |
||||
# (at your option) any later version. |
||||
# |
||||
# This program is distributed in the hope that it will be useful, |
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of |
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
||||
# GNU General Public License for more details. |
||||
|
||||
try: from fwgslib import get_flags_by_compiler |
||||
except: from waflib.extras.fwgslib import get_flags_by_compiler |
||||
from waflib import Configure |
||||
|
||||
# Input: |
||||
# CXX11_MANDATORY(optional) -- fail if C++11 not available |
||||
# Output: |
||||
# HAVE_CXX11 -- true if C++11 available, otherwise false |
||||
|
||||
# compiler name -> flags required to enable C++11;
# MSVC needs no switch, everything else gets -std=c++11
modern_cpp_flags = {
	'msvc': [],
	'default': ['-std=c++11']
}

# minimal program exercising a C++11 lambda, including a call to a
# private static member from the lambda body
CXX11_LAMBDA_FRAGMENT='''
class T
{
static void M(){}
public:
void t()
{
auto l = []()
{
T::M();
};
}
};

int main()
{
T t;
t.t();
}
'''
|
||||
@Configure.conf
def check_cxx11(ctx, *k, **kw):
	"""Check whether the C++ compiler accepts the C++11 lambda fragment."""
	kw.setdefault('msg', 'Checking if \'%s\' supports C++11' % ctx.env.COMPILER_CXX)
	kw.setdefault('mandatory', False)

	# not best way, but this check was written for exactly mainui_cpp,
	# where lambdas are mandatory
	return ctx.check_cxx(fragment=CXX11_LAMBDA_FRAGMENT, *k, **kw)
|
||||
def configure(conf):
	"""Probe for C++11 support, retrying with extra flags if needed.

	Sets HAVE_CXX11; aborts when CXX11_MANDATORY is set and the
	compiler cannot build C++11 code.
	"""
	flags = get_flags_by_compiler(modern_cpp_flags, conf.env.COMPILER_CXX)

	if conf.check_cxx11():
		conf.env.HAVE_CXX11 = True
	elif len(flags) != 0 and conf.check_cxx11(msg='...trying with additional flags', cxxflags=flags):
		conf.env.HAVE_CXX11 = True
		conf.env.CXXFLAGS += flags
	else:
		conf.env.HAVE_CXX11 = False

		# BUGFIX: only abort when C++11 support is actually missing;
		# the flat control flow made this fatal fire unconditionally
		if conf.env.CXX11_MANDATORY:
			conf.fatal('C++11 support not available!')
@ -0,0 +1,35 @@
@@ -0,0 +1,35 @@
|
||||
# encoding: utf-8 |
||||
# enforce_pic.py -- enforcing PIC if requested |
||||
# Copyright (C) 2021 a1batross |
||||
# This program is free software: you can redistribute it and/or modify |
||||
# it under the terms of the GNU General Public License as published by |
||||
# the Free Software Foundation, either version 3 of the License, or |
||||
# (at your option) any later version. |
||||
# |
||||
# This program is distributed in the hope that it will be useful, |
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of |
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
||||
# GNU General Public License for more details. |
||||
|
||||
from waflib.Configure import conf |
||||
|
||||
@conf
def check_pic(conf, enable):
	"""Force or strip -fPIC across static/shared/bundle flag variables.

	:param enable: propagate -fPIC to static libs when true;
	               otherwise remove -fPIC everywhere
	"""
	if enable:
		# Every static library must have fPIC, but only when shared
		# libraries already carry it (never on win32)
		if conf.env.DEST_OS != 'win32' and '-fPIC' in conf.env.CFLAGS_cshlib:
			conf.env.append_unique('CFLAGS_cstlib', '-fPIC')
			conf.env.append_unique('CXXFLAGS_cxxstlib', '-fPIC')
		return

	# drop -fPIC from every flag list that may carry it
	for var in ('CFLAGS_cstlib', 'CFLAGS_cshlib', 'CXXFLAGS_cxxshlib',
			'CXXFLAGS_cxxstlib', 'CFLAGS_MACBUNDLE', 'CXXFLAGS_MACBUNDLE'):
		if '-fPIC' in conf.env[var]:
			conf.env[var].remove('-fPIC')
@ -0,0 +1,47 @@
@@ -0,0 +1,47 @@
|
||||
# encoding: utf-8 |
||||
# force_32bit.py -- force compiler to create 32-bit code |
||||
# Copyright (C) 2018 a1batross |
||||
# This program is free software: you can redistribute it and/or modify |
||||
# it under the terms of the GNU General Public License as published by |
||||
# the Free Software Foundation, either version 3 of the License, or |
||||
# (at your option) any later version. |
||||
# |
||||
# This program is distributed in the hope that it will be useful, |
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of |
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
||||
# GNU General Public License for more details. |
||||
|
||||
try: from fwgslib import get_flags_by_compiler |
||||
except: from waflib.extras.fwgslib import get_flags_by_compiler |
||||
from waflib import Configure |
||||
|
||||
# Input: |
||||
# BIT32_MANDATORY(optional) -- fail if 32bit mode not available |
||||
# Output: |
||||
# DEST_SIZEOF_VOID_P -- an integer, equals sizeof(void*) on target |
||||
|
||||
@Configure.conf
def check_32bit(ctx, *k, **kw):
	"""Try to build a fragment that only compiles when sizeof(void*) == 4."""
	kw.setdefault('msg', 'Checking if \'%s\' can target 32-bit' % ctx.env.COMPILER_CC)
	kw.setdefault('mandatory', False)

	# the negative array size makes the fragment fail on 64-bit targets
	return ctx.check_cc(fragment='int main(void){int check[sizeof(void*)==4?1:-1];return 0;}', *k, **kw)
|
||||
def configure(conf):
	"""Determine target pointer size, forcing 32-bit flags when possible.

	Sets DEST_SIZEOF_VOID_P to 4 or 8; aborts only when BIT32_MANDATORY
	is set and 32-bit code generation is unavailable.
	"""
	flags = ['-m32'] if not conf.env.DEST_OS == 'darwin' else ['-arch', 'i386']

	if conf.check_32bit():
		conf.env.DEST_SIZEOF_VOID_P = 4
	elif conf.env.BIT32_MANDATORY and conf.check_32bit(msg='...trying with additional flags', cflags=flags, linkflags=flags):
		conf.env.LINKFLAGS += flags
		conf.env.CXXFLAGS += flags
		conf.env.CFLAGS += flags
		conf.env.DEST_SIZEOF_VOID_P = 4
	else:
		conf.env.DEST_SIZEOF_VOID_P = 8

		# BUGFIX: only abort when 32-bit compilation actually failed;
		# the flat control flow made this fatal fire even on success
		if conf.env.BIT32_MANDATORY:
			conf.fatal('Compiler can\'t create 32-bit code!')
@ -0,0 +1,153 @@
@@ -0,0 +1,153 @@
|
||||
# encoding: utf-8 |
||||
# fwgslib.py -- utils for Waifu build system(Waf with extensions) by FWGS |
||||
# Copyright (C) 2018 a1batross, Michel Mooij (michel.mooij |
||||
# This program is free software: you can redistribute it and/or modify |
||||
# it under the terms of the GNU General Public License as published by |
||||
# the Free Software Foundation, either version 3 of the License, or |
||||
# (at your option) any later version. |
||||
# |
||||
# This program is distributed in the hope that it will be useful, |
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of |
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
||||
# GNU General Public License for more details. |
||||
|
||||
import os |
||||
from waflib import Utils, Errors, Configure, Build |
||||
|
||||
def get_flags_by_compiler(flags, compiler, major_version=None):
	'''Returns a list of compile flags, depending on compiler

	:param flags: compiler flags
	:type flags: dict
	:param compiler: compiler string(COMPILER_CC, for example)
	:type compiler: string
	:param major_version: optional compiler major version, used when the
		per-compiler entry is a version -> flags dict
	:returns: list of flags
	:raises TypeError: when a per-compiler entry is neither list nor dict
	'''
	out = []
	if compiler in flags:
		f = flags[compiler]
		if type(f) is list:
			out += f
		elif type(f) is dict:
			if major_version in f:
				out += f[major_version]
			# BUGFIX: look up 'default' in the per-compiler dict *f*,
			# not in the outer flags mapping (the value is read from f)
			elif 'default' in f:
				out += f['default']
		else:
			raise TypeError('unknown type, expected list or dict, got %s' % type(f))
	elif 'default' in flags:
		out += flags['default']
	return out
||||
|
||||
def get_flags_by_type(flags, type, compiler, major_version=None):
	'''Returns a list of compile flags, depending on compiler and build type

	:param flags: compiler flags
	:param type: build type
	:type type: string
	:param compiler: compiler string(COMPILER_CC, for example)
	:type compiler: string
	:returns: list of flags
	'''
	result = []
	# 'common' flags apply to every build type; type-specific ones follow
	for section in ('common', type):
		if section in flags:
			result += get_flags_by_compiler(flags[section], compiler, major_version)
	return result
||||
|
||||
@Configure.conf
def filter_flags(conf, flags, required_flags, features, checkarg, compiler):
	"""Return the subset of *flags* the given compiler accepts.

	Each candidate is test-compiled in parallel together with
	required_flags; -Werror is appended for non-MSVC compilers so
	unknown-flag warnings become hard errors.
	"""
	check_flags = required_flags + (['-Werror'] if compiler != 'msvc' else [])

	tests = [{
		'features': features,
		'compiler': compiler,
		'msg': '... ' + flag,
		'define_name': Utils.quote_define_name(flag),
		checkarg: [flag] + check_flags,
	} for flag in flags]

	# run all probes against a scratch env, then restore it
	conf.env.stash()
	conf.multicheck(*tests,
		msg='Checking supported flags for %s in parallel' % compiler,
		mandatory=False)
	supported = [flag for flag in flags if conf.env[Utils.quote_define_name(flag)]]
	conf.env.revert()
	return supported
||||
|
||||
@Configure.conf
def filter_cflags(conf, flags, required_flags=[]):
	"""Shorthand: filter_flags() for the configured C compiler."""
	return conf.filter_flags(flags, required_flags, 'c', 'cflags', conf.env.COMPILER_CC)
||||
|
||||
@Configure.conf
def filter_cxxflags(conf, flags, required_flags=[]):
	"""Shorthand: filter_flags() for the configured C++ compiler."""
	return conf.filter_flags(flags, required_flags, 'cxx', 'cxxflags', conf.env.COMPILER_CXX)
||||
|
||||
def conf_get_flags_by_compiler(unused, flags, compiler, major_version=None):
	"""Bound-method adapter: expose get_flags_by_compiler() on contexts."""
	return get_flags_by_compiler(flags, compiler, major_version)

def conf_get_flags_by_type(unused, flags, type, compiler, major_version=None):
	"""Bound-method adapter: expose get_flags_by_type() on contexts."""
	return get_flags_by_type(flags, type, compiler, major_version)

# make both helpers callable as methods on configure and build contexts
Configure.ConfigurationContext.get_flags_by_compiler = conf_get_flags_by_compiler
Build.BuildContext.get_flags_by_compiler = conf_get_flags_by_compiler
Configure.ConfigurationContext.get_flags_by_type = conf_get_flags_by_type
Build.BuildContext.get_flags_by_type = conf_get_flags_by_type
||||
|
||||
def get_deps(bld, target):
	'''Returns a list of (nested) targets on which this target depends.

	:param bld: a *waf* build instance from the top level *wscript*
	:type bld: waflib.Build.BuildContext
	:param target: task name for which the dependencies should be returned
	:type target: str
	:returns: a list of task names on which the given target depends
	'''
	try:
		tgen = bld.get_tgen_by_name(target)
	except Errors.WafError:
		# unknown target: no dependencies
		return []

	direct = Utils.to_list(getattr(tgen, 'use', []))
	collected = list(direct)
	for dep in direct:
		collected += get_deps(bld, dep)
	# deduplicate transitive results
	return list(set(collected))
||||
|
||||
|
||||
def get_tgens(bld, names):
	'''Returns a list of task generators based on the given list of task
	generator names.

	:param bld: a *waf* build instance from the top level *wscript*
	:type bld: waflib.Build.BuildContext
	:param names: list of task generator names
	:type names: list of str
	:returns: list of task generators
	'''
	found = []
	for name in names:
		try:
			found.append(bld.get_tgen_by_name(name))
		except Errors.WafError:
			# unknown names are silently skipped
			pass
	return list(set(found))
||||
|
||||
|
||||
def get_targets(bld):
	'''Returns a list of user specified build targets or None if no specific
	build targets has been selected using the *--targets=* command line option.

	:param bld: a *waf* build instance from the top level *wscript*.
	:type bld: waflib.Build.BuildContext
	:returns: a list of user specified target names (using --targets=x,y,z) or None
	'''
	if bld.targets == '':
		return None
	result = bld.targets.split(',')
	# NOTE: result grows while being iterated, so appended dependencies
	# are themselves visited; get_deps is already recursive, which makes
	# this a belt-and-braces traversal that may yield duplicates
	for name in result:
		result += get_deps(bld, name)
	return result
@ -0,0 +1,47 @@
@@ -0,0 +1,47 @@
|
||||
# encoding: utf-8 |
||||
# gitversion.py -- waf plugin to get git version |
||||
# Copyright (C) 2018 a1batross |
||||
# This program is free software: you can redistribute it and/or modify |
||||
# it under the terms of the GNU General Public License as published by |
||||
# the Free Software Foundation, either version 3 of the License, or |
||||
# (at your option) any later version. |
||||
# |
||||
# This program is distributed in the hope that it will be useful, |
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of |
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
||||
# GNU General Public License for more details. |
||||
|
||||
import subprocess |
||||
from waflib import Configure, Logs |
||||
|
||||
@Configure.conf
def get_git_version(conf):
	"""Return the `git describe --dirty --always` string, or None."""
	# only attempt when the project root actually is a git checkout
	node = conf.path.find_node('.git')
	if not node:
		return None

	try:
		stdout = conf.cmd_and_log([conf.env.GIT[0], 'describe', '--dirty', '--always'],
			cwd = node.parent)
		version = stdout.strip()
	except Exception as e:
		version = ''
		Logs.debug(str(e))

	# treat empty output the same as failure
	return version if len(version) else None
||||
|
||||
def configure(conf):
	"""Look up git and record the current checkout hash, if available."""
	if not conf.find_program('git', mandatory = False):
		return

	conf.start_msg('Checking git hash')
	ver = conf.get_git_version()

	if ver:
		conf.env.GIT_VERSION = ver
		conf.end_msg(conf.env.GIT_VERSION)
	else:
		conf.end_msg('no', color='YELLOW')
@ -0,0 +1,780 @@
@@ -0,0 +1,780 @@
|
||||
#!/usr/bin/env python |
||||
# -*- encoding: utf-8 -*- |
||||
# Michel Mooij, michel.mooij7@gmail.com |
||||
# modified: Alibek Omarov, a1ba.omarov@gmail.com |
||||
|
||||
''' |
||||
Summary |
||||
------- |
||||
Exports and converts *waf* project data, for C/C++ programs, static- and shared |
||||
libraries, into **Microsoft Visual Studio**, also known as **msdev**, |
||||
project files (.vcproj) and solution (.sln) files. |
||||
|
||||
**Microsoft Visual Studio** is a mature and stable integrated development |
||||
environment for, amongst others, the C and C++ programming language. A free |
||||
version of this IDE, known as the *express* version can be obtained from Microsoft |
||||
at http://www.visualstudio.com. |
||||
|
||||
Description |
||||
----------- |
||||
When exporting *waf* project data, a single **Visual Studio** solution will be |
||||
exported in the top level directory of your *WAF* build environment. This |
||||
solution file will contain references to all exported **Visual Studio** |
||||
projects and will include dependencies between those projects and will have the |
||||
same name as APPNAME variable from the top level *wscript* file. |
||||
|
||||
For each single task generator (*waflib.TaskGenerator*), for instance a |
||||
*bld.program(...)* which has been defined within a *wscript* file somewhere in |
||||
the build environment, a single **Visual Studio** project file will be generated |
||||
in the same directory as where the task generator has been defined. |
||||
The name of this task generator will be used as name for the exported **Visual |
||||
Studio** project file. If for instance the name of the task generator is |
||||
*hello*, then a **Visual Studio** project file named *hello.vcproj* will be |
||||
exported. |
||||
|
||||
Example below presents an overview of an environment in which **Visual Studio** |
||||
files already have been exported:: |
||||
|
||||
. |
||||
├── components |
||||
│ └── clib |
||||
│ ├── program |
||||
│ │ ├── cprogram.vcproj |
||||
│ │ └── wscript |
||||
│ ├── shared |
||||
│ │ ├── cshlib.vcproj |
||||
│ │ └── wscript |
||||
│ └── static |
||||
│ ├── cstlib.vcproj |
||||
│ └── wscript |
||||
│ |
||||
├── waf.vcproj |
||||
├── appname.sln |
||||
└── wscript |
||||
|
||||
|
||||
Projects will be exported such that they will use the same settings and |
||||
structure as has been defined for that build task within the *waf* build |
||||
environment as much as possible. Note that since cross compilation is not |
||||
really supported in this IDE, only the first environment encountered that |
||||
is targeted for **MS Windows** will be exported; i.e. an environment in |
||||
which:: |
||||
|
||||
bld.env.DEST_OS == 'win32' |
||||
|
||||
is true. |
||||
|
||||
|
||||
Please note that in contrast to a *normal* IDE setup the exported projects |
||||
will contain either a *debug* **or** a *release* build target but not both at |
||||
the same time. By doing so exported projects will always use the same settings |
||||
(e.g. compiler options, installation paths) as when building the same task in |
||||
the *waf* build environment from command line. |
||||
|
||||
|
||||
Usage |
||||
----- |
||||
**Visual Studio** project and workspace files can be exported using the *msdev* |
||||
command, as shown in the example below:: |
||||
|
||||
$ waf msdev |
||||
|
||||
When needed, exported **Visual Studio** project- and solution files can be |
||||
removed using the *clean* command, as shown in the example below:: |
||||
|
||||
$ waf msdev --clean |
||||
|
||||
Once exported simply open the *appname.sln* using **Visual Studio** |
||||
this will automatically open all exported projects as well. |
||||
|
||||
Task generators to be excluded can be marked with the *skipme* option |
||||
as shown below:: |
||||
|
||||
def build(bld): |
||||
bld.program(name='foo', src='foobar.c', msdev_skipme=True) |
||||
|
||||
''' |
||||
|
||||
import os |
||||
import sys |
||||
import copy |
||||
import uuid |
||||
import shutil |
||||
import xml.etree.ElementTree as ElementTree |
||||
from xml.dom import minidom |
||||
from waflib import Utils, Logs, Errors, Context |
||||
from waflib.Build import BuildContext |
||||
# import waftools |
||||
# from waftools import deps |
||||
try: from fwgslib import get_targets |
||||
except: from waflib.extras.fwgslib import get_targets |
||||
|
||||
try: from subproject import get_subproject_env |
||||
except: from waflib.extras.subproject import get_subproject_env |
||||
|
||||
|
||||
def options(opt):
	'''Adds command line options to the *waf* build environment

	:param opt: Options context from the *waf* build environment.
	:type opt: waflib.Options.OptionsContext
	'''
	opt.add_option('--msdev', dest='msdev', default=False, action='store_true',
		help='select msdev for export/import actions')
	opt.add_option('--clean', dest='clean', default=False, action='store_true',
		help='delete exported files')
||||
|
||||
def configure(conf):
	'''Method that will be invoked by *waf* when configuring the build
	environment.

	:param conf: Configuration context from the *waf* build environment.
	:type conf: waflib.Configure.ConfigurationContext
	'''
	# nothing to configure for the msdev exporter
	pass
||||
|
||||
class MsDevContext(BuildContext):
	'''export C/C++ tasks to MS Visual Studio projects and solutions.'''
	cmd = 'msdev'

	def execute(self):
		'''Will be invoked when issuing the *msdev* command.'''
		self.restore()
		if not self.all_envs:
			self.load_envs()
		self.recurse([self.run_dir])
		self.pre_build()

		# post all task generators so their tasks and names exist
		for group in self.groups:
			for tgen in group:
				post = getattr(tgen, 'post', None)
				if post is not None:
					post()
		try:
			# prime the task generator name cache
			self.get_tgen_by_name('')
		except Exception:
			pass

		self.msdev = True
		if self.options.clean:
			cleanup(self)
		else:
			export(self)
		self.timer = Utils.Timer()
||||
def export(bld):
	'''Exports all C and C++ task generators as **Visual Studio** projects
	and creates a **Visual Studio** solution containing references to
	those project.

	:param bld: a *waf* build instance from the top level *wscript*.
	:type bld: waflib.Build.BuildContext
	'''
	if not bld.options.msdev and not hasattr(bld, 'msdev'):
		return

	Logs.pprint('RED', '''This tool is intended only to ease development for Windows users!
Don't use it for release builds, as it doesn't enables WinXP compatibility for now!''')

	solution = MsDevSolution(bld)
	targets = get_targets(bld)

	rootenv = bld.env  # remember root env; restored per-tgen on lookup failure
	for tgen in bld.task_gen_cache_names.values():
		if targets and tgen.get_name() not in targets:
			continue
		if getattr(tgen, 'msdev_skipme', False):
			continue
		# switch to the subproject environment when one exists
		try:
			bld.env = get_subproject_env(bld, tgen.path, True)
		except IndexError:
			bld.env = rootenv
		if set(('c', 'cxx')) & set(getattr(tgen, 'features', [])):
			project = MsDevProject(bld, tgen)
			project.export()

			(name, fname, deps, pid) = project.get_metadata()
			solution.add_project(name, fname, deps, pid)

	solution.export()
||||
|
||||
def cleanup(bld):
	'''Removes all **Visual Studio** projects and workspaces from the
	*waf* build environment.

	:param bld: a *waf* build instance from the top level *wscript*.
	:type bld: waflib.Build.BuildContext
	'''
	if not bld.options.msdev and not hasattr(bld, 'msdev'):
		return

	targets = get_targets(bld)
	rootenv = bld.env

	for tgen in bld.task_gen_cache_names.values():
		if targets and tgen.get_name() not in targets:
			continue
		if getattr(tgen, 'msdev_skipme', False):
			continue
		# switch to the subproject environment when one exists
		try:
			bld.env = get_subproject_env(bld, tgen.path)
		except IndexError:
			bld.env = rootenv
		if set(('c', 'cxx')) & set(getattr(tgen, 'features', [])):
			MsDevProject(bld, tgen).cleanup()

	MsDevSolution(bld).cleanup()
||||
|
||||
class MsDev(object):
	'''Abstract base class used for exporting *waf* project data to
	**Visual Studio** projects and solutions.

	REMARK:
	bld.objects() task generators are treated as static libraries.

	:param bld: Build context as used in *wscript* files of your *waf* build
	environment.
	:type bld: waflib.Build.BuildContext
	'''

	PROGRAM = '1'
	'''Identifier for projects containing an executable'''

	SHLIB = '2'
	'''Identifier for projects containing a shared library'''

	STLIB = '4'
	'''Identifier for projects containing a static library'''

	C = 'c'
	'''Identifier for projects using C language'''

	CXX = 'cxx'
	'''Identifier for projects using C++ language'''

	def __init__(self, bld):
		self.bld = bld

	def export(self):
		'''Exports a **Visual Studio** solution or project.'''
		body = self.get_content()
		if not body:
			return
		if self.xml_clean:
			body = self.xml_clean(body)

		node = self.make_node()
		if not node:
			return
		node.write(body)
		Logs.pprint('YELLOW', 'exported: %s' % node.abspath())

	def cleanup(self):
		'''Deletes a **Visual Studio** solution or project file including
		associated files (e.g. *.ncb*).
		'''
		cwd = self.get_cwd()
		# wipe IDE-generated companion files next to the project
		for pattern in ('*.user', '*.ncb', '*.suo', '*.sln'):
			for node in cwd.ant_glob(pattern):
				node.delete()
				Logs.pprint('YELLOW', 'removed: %s' % node.abspath())
		node = self.find_node()
		if node:
			node.delete()
			Logs.pprint('YELLOW', 'removed: %s' % node.abspath())

	def get_cwd(self):
		# directory containing the exported file, relative to srcnode
		directory = os.path.dirname(self.get_fname())
		if directory == "":
			directory = "."
		return self.bld.srcnode.find_node(directory)

	def find_node(self):
		name = self.get_fname()
		if not name:
			return None
		return self.bld.srcnode.find_node(name)

	def make_node(self):
		name = self.get_fname()
		if not name:
			return None
		return self.bld.srcnode.make_node(name.lower())

	def get_fname(self):
		'''<abstract> Returns file name.'''
		return None

	def get_content(self):
		'''<abstract> Returns file content.'''
		return None

	def xml_clean(self, content):
		# pretty-print, drop blank lines and force an UTF-8 XML declaration
		pretty = minidom.parseString(content).toprettyxml(indent="\t")
		lines = [ln for ln in pretty.splitlines() if not ln.isspace() and len(ln)]
		lines[0] = '<?xml version="1.0" encoding="UTF-8"?>'
		return '\n'.join(lines)
||||
|
||||
class MsDevSolution(MsDev):
    '''Class used for exporting *waf* project data to a **Visual Studio**
    solution located in the top level directory of the *waf* build
    environment.

    :param bld: Build context as used in *wscript* files of your *waf* build
    environment.
    :type bld: waflib.Build.BuildContext
    '''

    def __init__(self, bld):
        super(MsDevSolution, self).__init__(bld)
        # name -> (fname, deps, pid), filled via add_project()
        self.projects = {}
        # .sln files are not XML: disable the base-class pretty printer
        self.xml_clean = None

    def get_fname(self):
        '''Returns the workspace's file name.'''
        return '%s.sln' % getattr(Context.g_module, Context.APPNAME)

    def export(self):
        '''Exports a **Visual Studio** solution.

        Writes the MSDEV_SOLUTION template, interleaving Project entries
        (with their dependency sections) and per-project configuration
        lines.  NOTE(review): the template slices (s[0:3], s[3:8], s[8:])
        place the 'Global' line before the Project entries — verify the
        generated .sln actually loads in Visual Studio.
        '''
        dst = self.get_fname()

        s = MSDEV_SOLUTION

        with open(dst, 'w') as f:
            for line in s[0:3]:
                f.write(line)
            for name, (fname, deps, pid) in self.projects.items():
                # each Project line gets a fresh GUID for its type id
                sid = str(uuid.uuid4()).upper()
                f.write('Project("{%s}") = "%s", "%s", "{%s}"\n' % (sid, name, fname, pid))
                if len(deps):
                    f.write('\tProjectSection(ProjectDependencies) = postProject\n')
                    for d in deps:
                        try:
                            (_, _, pid) = self.projects[d]
                        except KeyError:
                            # dependency is not an exported project (e.g. a
                            # system uselib): silently skip it
                            pass
                        else:
                            f.write('\t\t{%s} = {%s}\n' % (pid, pid))
                    f.write('\tEndProjectSection\n')
                f.write('EndProject\n')
            for line in s[3:8]:
                f.write(line)
            for _, (_, _, pid) in self.projects.items():
                # only a Debug|Win32 configuration is emitted
                f.write('\t\t{%s}.Debug|Win32.ActiveCfg = Debug|Win32\n' % (pid))
                f.write('\t\t{%s}.Debug|Win32.Build.0 = Debug|Win32\n' % (pid))
            for line in s[8:]:
                f.write(line)
        Logs.pprint('YELLOW', 'exported: %s' % os.path.abspath(dst))

    def add_project(self, name, fname, deps, pid):
        '''Adds a project to the workspace.

        :param name: Name of the project.
        :type name: str
        :param fname: Complete path to the project file
        :type fname: str
        :param deps: List of names on which this project depends
        :type deps: list of str
        :param pid: GUID of the project (without braces)
        :type pid: str
        '''
        self.projects[name] = (fname, deps, pid)
||||
|
||||
|
||||
class MsDevProject(MsDev):
    '''Class used for exporting *waf* project data to **Visual Studio**
    projects.

    :param bld: Build context as used in *wscript* files of your *waf* build
    environment.
    :type bld: waflib.Build.BuildContext

    :param gen: Task generator that contains all information of the task to be
    converted and exported to the **Visual Studio** project.
    :type gen: waflib.Task.TaskGen
    '''

    def __init__(self, bld, gen):
        super(MsDevProject, self).__init__(bld)
        self.gen = gen
        # every export gets a fresh project GUID
        self.id = str(uuid.uuid4()).upper()
        self.type = self.get_type(gen)
        self.language = self.get_language(gen)
        self.buildpath = self.get_buildpath(bld, gen)

    def get_type(self, gen):
        # map waf features to the VS ConfigurationType identifier;
        # anything that is neither a program nor a shared library
        # (including bld.objects()) is treated as a static library
        if set(('cprogram', 'cxxprogram')) & set(gen.features):
            return MsDev.PROGRAM
        elif set(('cshlib', 'cxxshlib')) & set(gen.features):
            return MsDev.SHLIB
        else:
            return MsDev.STLIB

    def get_language(self, gen):
        return MsDev.CXX if 'cxx' in gen.features else MsDev.C

    def get_buildpath(self, bld, gen):
        # build output location relative to the project directory,
        # with Windows-style separators
        pth = '%s/%s' % (bld.path.get_bld().path_from(gen.path), gen.path.relpath())
        return pth.replace('/', '\\')

    def get_fname(self):
        '''Returns the project's file name.'''
        return '%s/%s.vcproj' % (self.gen.path.relpath().replace('\\', '/'), self.gen.get_name())

    def get_root(self):
        '''Returns a document root, either from an existing file, or from template.'''
        fname = self.get_fname()
        if os.path.exists(fname):
            # preserve any manual edits by updating the existing file in place
            tree = ElementTree.parse(fname)
            root = tree.getroot()
        else:
            root = ElementTree.fromstring(MSDEV_PROJECT)
        return root

    def get_content(self):
        '''Returns the content of a project file.'''
        root = self.get_root()
        root.set('Name', self.gen.get_name())
        root.set('ProjectGUID', '{%s}' % self.id)
        configurations = root.find('Configurations')
        for configuration in configurations.iter('Configuration'):
            configuration.set('ConfigurationType', '%s' % self.type)
            configuration.set('OutputDirectory', '%s\\msdev' % self.buildpath)
            configuration.set('IntermediateDirectory', '%s\\msdev' % self.buildpath)
            for tool in configuration.iter('Tool'):
                name = tool.get('Name')
                if name == 'VCCLCompilerTool':
                    tool.set('PreprocessorDefinitions', '%s' % self.get_compiler_defines(self.gen))
                    includes = []
                    for include in self.get_compiler_includes(self.bld, self.gen):
                        includes.append('%s' % include)
                    tool.set('AdditionalIncludeDirectories', ';'.join(includes))
                if name == 'VCLinkerTool':
                    if self.type == MsDev.PROGRAM:
                        # Force Windows Subsystem
                        # TODO: this isn't enables Windows XP compatibility!
                        tool.set('SubSystem', '2')
                    self.update_link_deps(tool)
                    self.update_link_paths(tool)
        files = root.find('Files')
        self.update_includes(files)
        self.update_sources(files)
        return ElementTree.tostring(root)

    def update_includes(self, files):
        '''Add include files.'''
        # collect what the 'Header Files' filter already lists so we only
        # append new entries
        includes = []
        for filtr in files.iter('Filter'):
            if filtr.get('Name') == 'Header Files':
                for include in filtr.iter('File'):
                    includes.append(include.get('RelativePath'))
                break
        if len(includes) == 0:
            # no pre-existing header entries: create a fresh filter
            filtr = ElementTree.SubElement(files, 'Filter', attrib={'Name':'Header Files', 'Filter':'h;hpp;hxx;hm;inl;inc;xsd'})
            filtr.set('UniqueIdentifier', '{%s}' % str(uuid.uuid4()).upper())
        for include in self.get_include_files(self.bld, self.gen):
            if include not in includes:
                ElementTree.SubElement(filtr, 'File', attrib={'RelativePath':'%s' % include})

    def update_sources(self, files):
        '''Add source files.'''
        # same strategy as update_includes(), for the 'Source Files' filter
        sources = []
        for filtr in files.iter('Filter'):
            if filtr.get('Name') == 'Source Files':
                for source in filtr.iter('File'):
                    sources.append(source.get('RelativePath'))
                break
        if len(sources) == 0:
            filtr = ElementTree.SubElement(files, 'Filter', attrib={'Name':'Source Files', 'Filter':'cpp;c;cc;cxx;def;odl;idl;hpj;bat;asm;asmx'})
            filtr.set('UniqueIdentifier', '{%s}' % str(uuid.uuid4()).upper())
        for source in self.get_genlist(self.gen, 'source'):
            if source not in sources:
                ElementTree.SubElement(filtr, 'File', attrib={'RelativePath':'%s' % source})

    def update_link_deps(self, tool):
        '''Add libraries on which this project depends.'''
        deps = tool.get('AdditionalDependencies')

        deps = [] # clean out deps everytime

        libs = self.get_link_libs(self.bld, self.gen)
        for lib in libs:
            dep = '%s.lib' % lib
            if dep not in deps:
                deps.append(dep)
        if len(deps):
            add_deps = " ".join(deps) # work around when converting to vcxproj by inserting spaces
            tool.set('AdditionalDependencies', add_deps)

    def update_link_paths(self, tool):
        # same pattern as update_link_deps(): the existing attribute is
        # read but immediately discarded, i.e. paths are rebuilt every run
        deps = tool.get('AdditionalLibraryDirectories', '')
        deps = []
        dirs = self.get_link_paths(self.bld, self.gen)
        for dep in dirs:
            if dep not in deps:
                deps.append(dep)
        if len(deps):
            tool.set('AdditionalLibraryDirectories', ';'.join(deps))

    def get_metadata(self):
        '''Returns a tuple containing project information (name, file name,
        dependencies and project GUID).
        '''
        name = self.gen.get_name()
        fname = self.get_fname().replace('/', '\\')
        deps = Utils.to_list(getattr(self.gen, 'use', []))
        return (name, fname, deps, self.id)

    def get_genlist(self, gen, name):
        # task-generator attribute as a list of Windows-style relative paths
        lst = Utils.to_list(getattr(gen, name, []))
        lst = [str(l.path_from(gen.path)) if hasattr(l, 'path_from') else l for l in lst]
        return [l.replace('/', '\\') for l in lst]

    def get_compiler_options(self, bld, gen):
        # per-generator flags plus the global env flags, deduplicated
        # (NOTE: set() loses the original flag order)
        if self.language == MsDev.CXX:
            flags = getattr(gen, 'cxxflags', []) + bld.env.CXXFLAGS
        else:
            flags = getattr(gen, 'cflags', []) + bld.env.CFLAGS
        if self.type == MsDev.SHLIB:
            if self.language == MsDev.CXX:
                flags.extend(bld.env.CXXFLAGS_cxxshlib)
            else:
                flags.extend(bld.env.CFLAGS_cshlib)
        return list(set(flags))

    def get_compiler_includes(self, bld, gen):
        # generator-local includes, plus global and use-lib includes
        # converted to project-relative Windows paths where they live
        # under the source tree
        includes = self.get_genlist(gen, 'includes')
        for include in bld.env['INCLUDES']:
            root = bld.path.abspath().replace('\\', '/')
            pref = os.path.commonprefix([root, include])
            if pref == root:
                node = bld.root.find_dir(include)
                if node:
                    includes.append(node.path_from(gen.path).replace('/', '\\'))

        deps = Utils.to_list(getattr(gen, 'use', []))
        for dep in deps:
            uselib_incs = bld.env['INCLUDES_%s' % dep]
            for uselib_inc in uselib_incs:
                root = bld.root.abspath().replace('\\', '/')
                pref = os.path.commonprefix([root, uselib_inc])
                if pref == root:
                    node = bld.root.find_dir(uselib_inc)
                    if node:
                        inc = node.path_from(gen.path).replace('/', '\\')
                        includes.append(inc)
                        Logs.pprint('YELLOW', 'Added relative include: %s' % inc)
                # the absolute path is kept as well
                includes.append(uselib_inc)
        return includes

    def get_compiler_defines(self, gen):
        # ';'-joined preprocessor defines; quotes need escaping on win32
        defines = self.get_genlist(gen, 'defines') + gen.bld.env.DEFINES
        if 'win32' in sys.platform:
            defines = [d.replace('"', '\\"') for d in defines]
        defines = ';'.join(defines)
        return defines

    def get_link_options(self, bld, gen):
        # NOTE(review): nesting reconstructed — the original indentation was
        # lost; as written, C++ non-shlib targets get the cshlib link flags.
        # Confirm against upstream before relying on this method.
        flags = getattr(gen, 'linkflags', []) + bld.env.LINKFLAGS
        if self.language == MsDev.CXX:
            if self.type == MsDev.SHLIB:
                flags.extend(bld.env.LINKFLAGS_cxxshlib)
            else:
                flags.extend(bld.env.LINKFLAGS_cshlib)
        return list(set(flags))

    def get_link_libs(self, bld, gen):
        # explicit 'lib' entries, expanded use-lib LIB_* vars for external
        # deps, and sibling task generators linked as static libs
        libs = Utils.to_list(getattr(gen, 'lib', []))
        deps = Utils.to_list(getattr(gen, 'use', []))
        for dep in deps:
            try:
                tgen = bld.get_tgen_by_name(dep)
            except Errors.WafError:
                # not a task generator: treat as a uselib
                uselib_libs = bld.env['LIB_%s' % dep]
                for uselib_lib in uselib_libs:
                    libs.append(uselib_lib)
                pass
            else:
                if self.type == MsDev.STLIB:
                    libs.append(dep)
        return libs

    def get_link_paths(self, bld, gen):
        # library search paths: uselib LIBPATH_* entries (made relative when
        # inside the source tree) plus sibling generators' msdev output dirs
        dirs = []
        deps = Utils.to_list(getattr(gen, 'use', []))
        for dep in deps:
            try:
                tgen = bld.get_tgen_by_name(dep)
            except Errors.WafError:
                uselib_paths = bld.env['LIBPATH_%s' % dep]
                for uselib_path in uselib_paths:
                    root = bld.root.abspath().replace('\\', '/')
                    pref = os.path.commonprefix([root, uselib_path])
                    if pref == root:
                        node = bld.root.find_dir(uselib_path)
                        if node:
                            libpath = node.path_from(gen.path).replace('/', '\\')
                            dirs.append(libpath)
                            Logs.pprint('YELLOW', 'Added relative library path: %s' % libpath)
                    dirs.append(uselib_path)
                pass
            else:
                if self.type in (MsDev.STLIB, MsDev.SHLIB, MsDev.PROGRAM):
                    directory = '%s\\msdev' % tgen.path.get_bld().path_from(gen.path)
                    if directory not in dirs:
                        dirs.append(directory.replace('/', '\\'))
                # elif self.type in (MsDev.PROGRAM):
                #     try:
                #         for directory in tgen.lib_paths:
                #             if directory not in dirs:
                #                 dirs.append(directory.replace('/', '\\'))
                #     except AttributeError:
                #         pass
        return dirs

    def get_include_files(self, bld, gen):
        # header files to list in the project: everything matching *.h*
        # in the generator's include dirs and in global INCLUDES that are
        # inside the source tree
        includes = []
        for include in self.get_genlist(gen, 'includes'):
            node = gen.path.find_dir(include)
            if node:
                for header in node.ant_glob('*.h*'):
                    includes.append(header.path_from(gen.path).replace('/', '\\'))

        for include in bld.env['INCLUDES']:
            root = bld.path.abspath().replace('\\', '/')
            pref = os.path.commonprefix([root, include])
            if pref == root:
                node = bld.root.find_dir(include)
                if node:
                    for header in node.ant_glob('*.h*'):
                        includes.append(header.path_from(gen.path).replace('/', '\\'))

        return includes
||||
|
||||
|
||||
# Template for new .vcproj files; parsed with ElementTree.fromstring() by
# MsDevProject.get_root() when no project file exists yet, then filled in
# by get_content().  NOTE(review): leading indentation inside this literal
# may have been lost in extraction — XML parsing is whitespace-insensitive,
# so the template works either way.
MSDEV_PROJECT = \
'''<?xml version="1.0" encoding="UTF-8"?>
<VisualStudioProject
ProjectType="Visual C++"
Version="8,00"
Name=""
ProjectGUID=""
Keyword="Win32Proj"
TargetFrameworkVersion="0"
>
<Platforms>
<Platform
Name="Win32"
/>
</Platforms>
<ToolFiles>
</ToolFiles>
<Configurations>
<Configuration
Name="Debug|Win32"
OutputDirectory="Debug"
IntermediateDirectory="Debug"
ConfigurationType="1"
>
<Tool
Name="VCPreBuildEventTool"
/>
<Tool
Name="VCCustomBuildTool"
/>
<Tool
Name="VCXMLDataGeneratorTool"
/>
<Tool
Name="VCWebServiceProxyGeneratorTool"
/>
<Tool
Name="VCMIDLTool"
/>
<Tool
Name="VCCLCompilerTool"
Optimization="0"
PreprocessorDefinitions=""
MinimalRebuild="true"
BasicRuntimeChecks="3"
RuntimeLibrary="3"
UsePrecompiledHeader="0"
WarningLevel="3"
DebugInformationFormat="4"
/>
<Tool
Name="VCManagedResourceCompilerTool"
/>
<Tool
Name="VCResourceCompilerTool"
/>
<Tool
Name="VCPreLinkEventTool"
/>
<Tool
Name="VCLinkerTool"
LinkIncremental="2"
GenerateDebugInformation="true"
SubSystem="1"
TargetMachine="1"
/>
<Tool
Name="VCALinkTool"
/>
<Tool
Name="VCManifestTool"
/>
<Tool
Name="VCXDCMakeTool"
/>
<Tool
Name="VCBscMakeTool"
/>
<Tool
Name="VCFxCopTool"
/>
<Tool
Name="VCAppVerifierTool"
/>
<Tool
Name="VCPostBuildEventTool"
/>
</Configuration>
</Configurations>
<References>
</References>
<Files>
</Files>
<Globals>
</Globals>
</VisualStudioProject>
'''

# Line-by-line skeleton of the .sln file; MsDevSolution.export() writes
# slices of this list and interleaves Project and configuration entries.
MSDEV_SOLUTION = [
'Microsoft Visual Studio Solution File, Format Version 8.00\n',
'# Visual Studio 2005\n',
'Global\n',
'GlobalSection(SolutionConfigurationPlatforms) = preSolution\n',
'Debug|Win32 = Debug|Win32\n',
'EndGlobalSection\n',
'GlobalSection(ProjectConfigurationPlatforms) = postSolution\n',
'EndGlobalSection\n',
'GlobalSection(SolutionProperties) = preSolution\n',
'HideSolutionNode = FALSE\n',
'EndGlobalSection\n',
'EndGlobal\n',
'\n']
@ -1,124 +0,0 @@
@@ -1,124 +0,0 @@
|
||||
#! /usr/bin/env python |
||||
# encoding: utf-8 |
||||
|
||||
""" |
||||
Compiler definition for OpenWatcom's owcc |
||||
""" |
||||
|
||||
from waflib import Errors, Utils |
||||
from waflib.Tools import ccroot, ar |
||||
from waflib.Configure import conf |
||||
|
||||
@conf
def find_owcc(conf):
    '''Locates the OpenWatcom owcc compiler driver and the wlib archiver.

    Uses a preset ``conf.env.CC`` when available, otherwise searches for
    ``cc``; verifies the driver really is OpenWatcom by checking its
    ``-v`` output.  Fails the configuration when detection fails.
    '''
    v = conf.env
    if v.CC:
        cc = v.CC
    else:
        cc = conf.find_program('cc', var='CC')
    if not cc:
        conf.fatal('owcc was not found')

    try:
        out = conf.cmd_and_log(cc + ['-v'])
    except Errors.WafError:
        conf.fatal('%r -v could not be executed' % cc)
    if 'Open Watcom' not in out:
        conf.fatal('failed to detect owcc')

    v.CC = cc
    v.CC_NAME = 'owcc'
    # owcc compiles C++ as well, so reuse the same driver
    v.CXX = v.CC
    # BUGFIX: was `v.cc_NAME`, a nonexistent (empty) env key, which left
    # CXX_NAME blank; the intent is to mirror CC_NAME.
    v.CXX_NAME = v.CC_NAME
    if not v.AR:
        conf.find_program('wlib', var='AR')
    conf.add_os_flags('ARFLAGS')
    if not v.ARFLAGS:
        v.ARFLAGS = ['-fo']
||||
|
||||
@conf
def owcc_common_flags(conf):
    # Seed the env with the flag/command templates that waf's ccroot tool
    # expects, using owcc's gcc-like driver syntax.
    v = conf.env

    v.CC_SRC_F = ''
    v.CXX_SRC_F = ''
    v.CC_TGT_F = ['-c', '-o']
    v.CXX_TGT_F = ['-c', '-o']
    v.CPPPATH_ST = '-I%s'
    v.DEFINES_ST = '-D%s'

    # link through the compiler driver unless the user overrode the linker
    if not v.LINK_CC:
        v.LINK_CC = v.CC
    if not v.LINK_CXX:
        v.LINK_CXX = v.CXX

    v.CCLNK_SRC_F = ''
    v.CCLNK_TGT_F = ['-o']
    v.CXXLNK_SRC_F = ''
    v.CXXLNK_TGT_F = ['-o']

    v.LIB_ST = '-l%s' # template for adding libs
    v.LIBPATH_ST = '-L%s' # template for adding libpaths
    v.STLIB_ST = '-l%s'
    v.STLIBPATH_ST = '-L%s'

    # output-file naming patterns per target kind
    v.cprogram_PATTERN = '%s.exe'
    v.cxxprogram_PATTERN = '%s.exe'
    v.cshlib_PATTERN = 'lib%s.so'
    v.cxxshlib_PATTERN = 'lib%s.so'
    v.cstlib_PATTERN = '%s.a'
    v.cxxstlib_PATTERN = '%s.a'
||||
|
||||
def find_target(flags):
    '''Returns the argument following the OpenWatcom ``-b`` (target system)
    flag in *flags*, or None when no such flag is present.

    :param flags: list of command-line flags to scan
    :type flags: list of str
    '''
    if '-b' in flags:
        idx = flags.index('-b')
        # BUGFIX: guard against '-b' being the final element; the old code
        # raised IndexError in that case.
        if idx + 1 < len(flags):
            return flags[idx + 1]
    # explicit None instead of falling off the end
    return None
||||
|
||||
@conf
def owcc_detect_platform(conf):
    # Deduce DEST_OS / DEST_BINFMT / DEST_CPU from the '-b <target>' flag
    # found in the link flags, the CC command line or the C flags; fall
    # back to the host platform when no explicit target was given.
    v = conf.env
    target = find_target(v.LINKFLAGS)
    if not target:
        target = find_target(v.CC)
    if not target:
        target = find_target(v.CFLAGS)
    if not target:
        target = Utils.unversioned_sys_platform()
    if target in ['dos4g', 'dos4gnz', 'dos32a', 'stub32a', 'stub32ac']:
        v.DEST_BINFMT = 'le'
        v.DEST_OS = 'dos'
    elif target in ['dos32x', 'stub32x', 'stub32xc']:
        v.DEST_BINFMT = 'lx'
        v.DEST_OS = 'dos'
    elif target.startswith('win') or target.startswith('nt'):
        v.DEST_BINFMT = 'pe'
        v.DEST_OS = 'win32'
    elif target == 'qnx386':
        v.DEST_OS = 'qnx'
        v.DEST_BINFMT = 'qnx'
    elif target in ['linux', '386']:
        v.DEST_OS = 'linux'
        v.DEST_BINFMT = 'elf'
    else:
        # unknown target: record it verbatim, leave the format undetermined
        v.DEST_OS = target
        v.DEST_BINFMT = None

    # OpenWatcom targets 32-bit x86 unless -march= says otherwise
    v.DEST_CPU = 'i386'

    for f in v.LINKFLAGS + v.CC + v.CFLAGS:
        if f.startswith('-march'):
            v.DEST_CPU=f.split('=')[1]
            break
||||
|
||||
|
||||
def configure(conf):
    # Standard waf compiler-tool bootstrap for owcc: find the compiler,
    # seed the flag templates, then load the generic c/cxx support.
    conf.find_owcc()
    conf.owcc_common_flags()
    conf.cc_load_tools()
    conf.cc_add_flags()
    # -Wc,-xx forwards '-xx' to the underlying Watcom compiler
    conf.env.append_unique('CFLAGS','-Wc,-xx')
    conf.cxx_load_tools()
    conf.cxx_add_flags()
    conf.env.append_unique('CXXFLAGS','-Wc,-xx')
    conf.link_add_flags()
    conf.owcc_detect_platform()
@ -0,0 +1,45 @@
@@ -0,0 +1,45 @@
|
||||
#!/usr/bin/env python |
||||
# encoding: utf-8 |
||||
# Copyright (c) 2019 mittorn |
||||
|
||||
''' |
||||
Reconfigure |
||||
|
||||
Store/load configuration user input |
||||
|
||||
Usage: |
||||
def options(opt): |
||||
opt.load('reconfigure') |
||||
|
||||
def configure(conf): |
||||
conf.load('reconfigure') |
||||
|
||||
./waf configure --reconfigure |
||||
''' |
||||
|
||||
from waflib import Configure, Logs, Options, Utils, ConfigSet |
||||
import os |
||||
import optparse |
||||
|
||||
def options(opt):
    '''Registers the --rebuild-cache and --reconfigure command-line flags.

    :param opt: option context provided by waf
    '''
    specs = (
        ('--rebuild-cache', 'rebuild_cache', 'load previous configuration'),
        ('--reconfigure', 'reconfigure', 'load and update configuration'),
    )
    for flag, dest, text in specs:
        opt.add_option(flag, dest=dest, default=False, action='store_true', help=text)
||||
|
||||
def configure(conf):
    # Persist (or restore) the configure-time options and environment in
    # <build>/configuration.py so `waf configure --reconfigure` can replay
    # the previous run with only newly-passed options changed, and
    # --rebuild-cache can replay it unchanged.
    store_path = os.path.join(conf.bldnode.abspath(), 'configuration.py')
    store_data = ConfigSet.ConfigSet()
    options = vars(conf.options)
    environ = conf.environ
    if conf.options.reconfigure or conf.options.rebuild_cache:
        store_data.load(store_path)
        if conf.options.reconfigure:
            # merge: newly-passed (truthy) options override the stored ones
            for o in options:
                if options[o]: store_data['OPTIONS'][o] = options[o]
            store_data['ENVIRON'].update(environ)
            store_data.store(store_path)
        # replay the stored environment/options into this configure run
        conf.environ = store_data['ENVIRON']
        conf.options = optparse.Values(store_data['OPTIONS'])
    else:
        # first run: record everything as-is
        store_data['OPTIONS'] = vars(conf.options)
        store_data['ENVIRON'] = conf.environ
        store_data.store(store_path)
@ -0,0 +1,93 @@
@@ -0,0 +1,93 @@
|
||||
# encoding: utf-8 |
||||
# sdl2.py -- sdl2 waf plugin |
||||
# Copyright (C) 2018 a1batross |
||||
# This program is free software: you can redistribute it and/or modify |
||||
# it under the terms of the GNU General Public License as published by |
||||
# the Free Software Foundation, either version 3 of the License, or |
||||
# (at your option) any later version. |
||||
# |
||||
# This program is distributed in the hope that it will be useful, |
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of |
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
||||
# GNU General Public License for more details. |
||||
|
||||
import os |
||||
|
||||
def options(opt):
    '''Adds the SDL2 location and sanity-check command-line options.

    :param opt: option context provided by waf
    '''
    group = opt.add_option_group('SDL2 options')

    group.add_option(
        '-s', '--sdl2', dest='SDL2_PATH', default=None, action='store',
        help='path to precompiled SDL2 library(required for Windows)')

    group.add_option(
        '--skip-sdl2-sanity-check', dest='SDL2_SANITY_CHECK',
        default=True, action='store_false',
        help='skip checking SDL2 sanity')
||||
|
||||
def my_dirname(path):
    '''Returns the parent directory of *path*, ignoring trailing slashes.

    Unlike plain os.path.dirname, 'a/b/' yields 'a' rather than 'a/b'.

    :param path: directory path, possibly with trailing slash(es)
    :type path: str
    '''
    # BUGFIX: strip ALL trailing slashes — the old code removed only one,
    # so '/path/framework//' produced the wrong parent (a limitation its
    # own comment admitted); this also avoids an IndexError on ''.
    return os.path.dirname(path.rstrip('/'))
||||
|
||||
def sdl2_configure_path(conf, path):
    # Configure the SDL2 uselib variables from a user-supplied prebuilt
    # SDL2 directory: a framework bundle on macOS, an SDK layout elsewhere.
    conf.env.HAVE_SDL2 = 1
    if conf.env.DEST_OS == 'darwin':
        # path points at SDL2.framework: headers live inside the bundle,
        # and the framework search path is the bundle's parent directory
        conf.env.INCLUDES_SDL2 = [
            os.path.abspath(os.path.join(path, 'Headers'))
        ]
        conf.env.FRAMEWORKPATH_SDL2 = [my_dirname(path)]
        conf.env.FRAMEWORK_SDL2 = ['SDL2']
    else:
        # support both <sdk>/include and <sdk>/include/SDL2 header layouts
        conf.env.INCLUDES_SDL2 = [
            os.path.abspath(os.path.join(path, 'include')),
            os.path.abspath(os.path.join(path, 'include/SDL2'))
        ]
        libpath = 'lib'
        if conf.env.COMPILER_CC == 'msvc':
            # the official SDL2 MSVC SDK keeps libraries in lib/<arch>
            if conf.env.DEST_CPU == 'x86_64':
                libpath = 'lib/x64'
            else:
                libpath = 'lib/' + conf.env.DEST_CPU
        conf.env.LIBPATH_SDL2 = [os.path.abspath(os.path.join(path, libpath))]
        conf.env.LIB_SDL2 = ['SDL2']
||||
|
||||
def configure(conf):
    # Locate SDL2: an explicit --sdl2 path wins, otherwise try sdl2-config
    # from the system, then optionally fall back to a Conan package; finally
    # verify the result by compiling a minimal SDL_Init() program.
    if conf.options.SDL2_PATH:
        conf.start_msg('Configuring SDL2 by provided path')
        sdl2_configure_path(conf, conf.options.SDL2_PATH)
        conf.end_msg('yes: {0}, {1}, {2}'.format(conf.env.LIB_SDL2, conf.env.LIBPATH_SDL2, conf.env.INCLUDES_SDL2))
    else:
        try:
            conf.check_cfg(
                path='sdl2-config',
                args='--cflags --libs',
                package='',
                msg='Checking for library SDL2',
                uselib_store='SDL2')
        except conf.errors.ConfigurationError:
            # not fatal yet: the Conan fallback below may still provide SDL2
            conf.env.HAVE_SDL2 = 0

    if not conf.env.HAVE_SDL2 and conf.env.CONAN:
        if not conf.env.SDL2_VERSION:
            version = '2.0.10'
        else:
            version = conf.env.SDL2_VERSION

        conf.load('conan')
        conf.add_conan_remote('bincrafters', 'https://api.bintray.com/conan/bincrafters/public-conan')
        conf.add_dependency('sdl2/%s@bincrafters/stable' % version, options = { 'shared': 'True' } )

    if conf.env.HAVE_SDL2 and conf.options.SDL2_SANITY_CHECK:
        try:
            # compile-only check (execute=False) so cross builds also work
            conf.check_cc(
                fragment='''
#define SDL_MAIN_HANDLED
#include <SDL.h>
int main( void )
{
SDL_Init( SDL_INIT_EVERYTHING );
return 0;
}''',
                msg = 'Checking for library SDL2 sanity',
                use = 'SDL2',
                execute = False)
        except conf.errors.ConfigurationError:
            # sanity check failed: disable SDL2 rather than abort configure
            conf.env.HAVE_SDL2 = 0
@ -0,0 +1,78 @@
@@ -0,0 +1,78 @@
|
||||
#! /usr/bin/env python |
||||
# Modified: Alibek Omarov <a1ba.omarov@gmail.com> |
||||
# Originally taken from Thomas Nagy's blogpost |
||||
|
||||
""" |
||||
Strip executables upon installation |
||||
""" |
||||
|
||||
import shutil, os |
||||
from waflib import Build, Utils, Context, Errors, Logs |
||||
|
||||
def options(opt): |
||||
grp = opt.option_groups['install/uninstall options'] |
||||
grp.add_option('--strip', dest='strip', action='store_true', default=False, |
||||
help='strip binaries. You must pass this flag to install command [default: %default]') |
||||
|
||||
grp.add_option('--strip-to-file', dest='strip_to_file', action='store_true', default=False, |
||||
help='strip debug information to file *.debug. Implies --strip. You must pass this flag to install command [default: %default]') |
||||
|
||||
def configure(conf):
    '''Finds the strip and objcopy tools used at install time.

    objcopy is deduced from the cross-toolchain prefix of the strip binary
    (e.g. arm-linux-strip -> arm-linux-objcopy) with a fallback to plain
    "objcopy".  Only runs for binary formats we know how to strip.
    '''
    if conf.env.DEST_BINFMT in ['elf', 'mac-o']:
        conf.find_program('strip', var='STRIP')
        if not conf.env.STRIPFLAGS:
            conf.env.STRIPFLAGS = os.environ['STRIPFLAGS'] if 'STRIPFLAGS' in os.environ else []

        # a1ba: I am lazy to add `export OBJCOPY=` everywhere in my scripts
        # so just try to deduce which objcopy we have
        # BUGFIX: these handlers were bare `except:` clauses, which also
        # swallow SystemExit/KeyboardInterrupt; narrowed to Exception.
        try:
            k = conf.env.STRIP[0].rfind('-')
            if k >= 0:
                objcopy_name = conf.env.STRIP[0][:k] + '-objcopy'
                try:
                    conf.find_program(objcopy_name, var='OBJCOPY')
                except Exception:
                    conf.find_program('objcopy', var='OBJCOPY')
            else:
                conf.find_program('objcopy', var='OBJCOPY')
        except Exception:
            conf.find_program('objcopy', var='OBJCOPY')
||||
|
||||
def copy_fun(self, src, tgt):
    # Wrapper around waf's original install copy: after the file is copied,
    # optionally strip it, and with --strip-to-file also split the debug
    # info out into <tgt>.debug linked back via .gnu_debuglink.
    inst_copy_fun(self, src, tgt)

    if not self.generator.bld.options.strip and not self.generator.bld.options.strip_to_file:
        return

    if self.env.DEST_BINFMT not in ['elf', 'mac-o']: # don't strip unknown formats or PE
        return

    # only strip the primary link output, not data files installed alongside
    if getattr(self.generator, 'link_task', None) and self.generator.link_task.outputs[0] in self.inputs:
        tgt_debug = tgt + '.debug'
        strip_cmd = self.env.STRIP + self.env.STRIPFLAGS + [tgt]
        ocopy_cmd = self.env.OBJCOPY + ['--only-keep-debug', tgt, tgt_debug]
        ocopy_debuglink_cmd = self.env.OBJCOPY + ['--add-gnu-debuglink=%s' % tgt_debug, tgt]
        c1 = Logs.colors.NORMAL
        c2 = Logs.colors.CYAN
        c3 = Logs.colors.YELLOW
        c4 = Logs.colors.BLUE
        try:
            if self.generator.bld.options.strip_to_file:
                # save the debug info to a side file BEFORE stripping
                self.generator.bld.cmd_and_log(ocopy_cmd, output=Context.BOTH, quiet=Context.BOTH)
                if not self.generator.bld.progress_bar:
                    Logs.info('%s+ objcopy --only-keep-debug %s%s%s %s%s%s', c1, c4, tgt, c1, c3, tgt_debug, c1)

            self.generator.bld.cmd_and_log(strip_cmd, output=Context.BOTH, quiet=Context.BOTH)
            if not self.generator.bld.progress_bar:
                # report the size change (stripped target vs unstripped source)
                f1 = os.path.getsize(src)
                f2 = os.path.getsize(tgt)
                Logs.info('%s+ strip %s%s%s (%d bytes change)', c1, c2, tgt, c1, f2 - f1)

            if self.generator.bld.options.strip_to_file:
                # point the stripped binary at its detached debug info
                self.generator.bld.cmd_and_log(ocopy_debuglink_cmd, output=Context.BOTH, quiet=Context.BOTH)
                if not self.generator.bld.progress_bar:
                    Logs.info('%s+ objcopy --add-gnu-debuglink=%s%s%s %s%s%s', c1, c3, tgt_debug, c1, c2, tgt, c1)
        except Errors.WafError as e:
            # stripping is best-effort: show the tool output but do not
            # fail the installation
            print(e.stdout, e.stderr)
||||
|
||||
# Monkey-patch waf's installation task: keep a reference to the original
# copy routine and replace it with our stripping wrapper above.
inst_copy_fun = Build.inst.copy_fun
Build.inst.copy_fun = copy_fun
||||
|
@ -0,0 +1,174 @@
@@ -0,0 +1,174 @@
|
||||
#!/usr/bin/env python |
||||
# encoding: utf-8 |
||||
# Copyright (c) 2019 a1batross |
||||
|
||||
''' |
||||
Subproject tool |
||||
|
||||
Helps you have standalone environment for each subproject(subdirectory) |
||||
|
||||
Usage: |
||||
def options(opt): |
||||
opt.load('subproject') |
||||
|
||||
def configure(conf): |
||||
conf.add_subproject('folder1 folder2') |
||||
|
||||
def build(bld): |
||||
bld.add_subproject('folder1 folder2') |
||||
''' |
||||
|
||||
from waflib import Configure, Logs, Options, Utils |
||||
import os, sys |
||||
|
||||
def opt(f):
    """
    Decorator that installs *f* as a method on
    :py:class:`waflib.Options.OptionsContext`, making it callable from
    wscript ``options()`` hooks.

    :param f: method to bind
    :type f: function
    """
    name = f.__name__
    setattr(Options.OptionsContext, name, f)
    return f
||||
|
||||
def get_waifulib_by_path(path):
    '''Returns the absolute path of a project's bundled ``scripts/waifulib``
    directory, or None when no such directory exists.

    :param path: project directory (absolute or relative)
    :type path: str
    '''
    base = path if os.path.isabs(path) else os.path.abspath(path)
    candidate = os.path.join(base, 'scripts', 'waifulib')
    return candidate if os.path.isdir(candidate) else None
||||
|
||||
def check_and_add_waifulib(path):
    '''Makes a subproject's bundled waifulib importable by prepending its
    directory (if present) to sys.path.'''
    found = get_waifulib_by_path(path)
    if found is not None:
        sys.path.insert(0, found)
||||
|
||||
def remove_waifulib(path):
    '''Undoes check_and_add_waifulib() by dropping the subproject's
    waifulib directory from sys.path.'''
    found = get_waifulib_by_path(path)
    if found is not None:
        sys.path.remove(found)
||||
|
||||
@opt
def add_subproject(ctx, names):
    # Option-context variant of add_subproject: recurse into each
    # subproject's wscript so its options are registered, temporarily
    # exposing the subproject's bundled waifulib on sys.path.
    names_lst = Utils.to_list(names)

    for name in names_lst:
        if not os.path.isabs(name):
            # absolute paths only
            wscript_dir = os.path.join(ctx.path.abspath(), name)
        else: wscript_dir = name

        wscript_path = os.path.join(wscript_dir, 'wscript')

        if not os.path.isfile(wscript_path):
            # HACKHACK: this way we get warning message right in the help
            # so this just becomes more noticeable
            ctx.add_option_group('Cannot find wscript in ' + wscript_path + '. You probably missed submodule update')
        else:
            check_and_add_waifulib(wscript_dir)
            ctx.recurse(name)
            remove_waifulib(wscript_dir)
||||
|
||||
def options(opt):
    '''Registers the subproject-related command-line options.

    :param opt: option context provided by waf
    '''
    group = opt.add_option_group('Subproject options')

    group.add_option(
        '-S', '--skip-subprojects',
        dest='SKIP_SUBDIRS', default=None, action='store',
        help='don\'t recurse into specified subprojects. Use only directory name.')
||||
|
||||
def get_subproject_env(ctx, path, log=False):
    '''Returns the environment stored for the subproject that *path*
    belongs to.

    Strips the build's top directory from *path*, then walks the remaining
    path components from the innermost folder outwards and returns the
    first env registered in ``ctx.all_envs`` under such a folder name.

    :param ctx: build context (uses ctx.top_dir and ctx.all_envs)
    :param path: path (or node) inside the subproject
    :param log: when True, print which env was selected
    :raises IndexError: when no subproject env matches (callers catch this
        and fall back to the top-level env)
    '''
    # remove top dir path
    path = str(path)
    if path.startswith(ctx.top_dir):
        # BUGFIX: was `os.pathsep` (the ':'/';' PATH-*list* separator),
        # which a directory path never ends with, so the trailing-slash
        # branch could never trigger; `os.sep` is the intended check.
        if ctx.top_dir[-1] != os.sep:
            path = path[len(ctx.top_dir) + 1:]
        else:
            path = path[len(ctx.top_dir):]

    # iterate through possible subproject names, innermost folder first
    folders = os.path.normpath(path).split(os.sep)
    for i in range(1, len(folders) + 1):
        name = folders[-i]
        if name in ctx.all_envs:
            if log: Logs.pprint('YELLOW', 'env: changed to %s' % name)
            return ctx.all_envs[name]
    if log: Logs.pprint('YELLOW', 'env: changed to default env')
    raise IndexError('top env')
||||
|
||||
@Configure.conf
def add_subproject(ctx, dirs, prepend = None):
	'''
	Recurse into subproject directory

	:param dirs: Directories we recurse into
	:type dirs: array or string
	:param prepend: Prepend virtual path, useful when managing projects with different environments
	:type prepend: string

	'''
	if isinstance(ctx, Configure.ConfigurationContext):
		# configure stage: create one derived environment per subproject
		if not ctx.env.IGNORED_SUBDIRS and ctx.options.SKIP_SUBDIRS:
			ctx.env.IGNORED_SUBDIRS = ctx.options.SKIP_SUBDIRS.split(',')

		for prj in Utils.to_list(dirs):
			if ctx.env.SUBPROJECT_PATH:
				subprj_path = list(ctx.env.SUBPROJECT_PATH)
			else:
				subprj_path = []

			if prj in ctx.env.IGNORED_SUBDIRS:
				ctx.msg(msg='--X %s' % '/'.join(subprj_path), result='ignored', color='YELLOW')
				continue

			if prepend:
				subprj_path.append(prepend)

			subprj_path.append(prj)

			saveenv = ctx.env

			ctx.setenv('_'.join(subprj_path), ctx.env) # derive new env from previous

			ctx.env.ENVNAME = prj
			ctx.env.SUBPROJECT_PATH = list(subprj_path)

			ctx.msg(msg='--> %s' % '/'.join(subprj_path), result='in progress', color='BLUE')
			check_and_add_waifulib(os.path.join(ctx.path.abspath(), prj))
			ctx.recurse(prj)
			remove_waifulib(os.path.join(ctx.path.abspath(), prj))
			ctx.msg(msg='<-- %s' % '/'.join(subprj_path), result='done', color='BLUE')

			ctx.setenv('') # save env changes

			ctx.env = saveenv # but use previous
	else:
		# build/other stages: reuse environments stored at configure time
		if not ctx.all_envs:
			ctx.load_envs()

		for prj in Utils.to_list(dirs):
			if prj in ctx.env.IGNORED_SUBDIRS:
				continue

			if ctx.env.SUBPROJECT_PATH:
				subprj_path = list(ctx.env.SUBPROJECT_PATH)
			else:
				subprj_path = []

			if prepend:
				subprj_path.append(prepend)

			subprj_path.append(prj)
			saveenv = ctx.env
			try:
				ctx.env = ctx.all_envs['_'.join(subprj_path)]
			except KeyError:
				# BUGFIX: narrowed from a bare "except:"; only a missing env
				# cache is expected here, and a bare except would also swallow
				# KeyboardInterrupt and genuine programming errors.
				ctx.fatal('Can\'t find env cache %s' % '_'.join(subprj_path))

			check_and_add_waifulib(os.path.join(ctx.path.abspath(), prj))
			ctx.recurse(prj)
			remove_waifulib(os.path.join(ctx.path.abspath(), prj))
			ctx.env = saveenv
@ -1,115 +0,0 @@
@@ -1,115 +0,0 @@
|
||||
# encoding: utf-8 |
||||
# xshlib.py -- advanced linking utils |
||||
# Copyright (C) 2019 mittorn |
||||
# This program is free software: you can redistribute it and/or modify |
||||
# it under the terms of the GNU General Public License as published by |
||||
# the Free Software Foundation, either version 3 of the License, or |
||||
# (at your option) any later version. |
||||
# |
||||
# This program is distributed in the hope that it will be useful, |
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of |
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
||||
# GNU General Public License for more details. |
||||
|
||||
from waflib import Logs, Utils, TaskGen, Task |
||||
from waflib.Tools import ccroot, c, cxx |
||||
|
||||
# Name of the main binary target that collects all relocatable objects
# (used by build() and by add_deps in apply_static()).
MAIN_BINARY = 'xash'
||||
|
||||
def options(opt):
	"""Register the --static-linking command line switch."""
	opt.add_option(
		'--static-linking',
		action='store',
		dest='STATIC_LINKING',
		default=None,
	)
||||
|
||||
def configure(conf):
	"""Locate the tools needed for static linking when --static-linking is given."""
	static_spec = conf.options.STATIC_LINKING
	if not static_spec:
		return

	for tool in ('ld', 'objcopy'):
		conf.find_program(tool)

	conf.env.STATIC_LINKING = static_spec
	conf.add_os_flags('LD_RELOCATABLE_FLAGS')
||||
|
||||
def build(bld):
	"""Wire up static-link task generators when static linking was configured."""
	spec = bld.env.STATIC_LINKING
	if spec:
		apply_static(MAIN_BINARY, *spec.split(','))
||||
|
||||
class objcopy_relocatable_lib(Task.Task):
	"remove all exports except of lib_${NAME}_exports"
	# do not let waf treat a pre-existing output as an error
	no_errcheck_out = True
	# ${NAME} is filled in per task (see xshlib.add_target, which sets
	# env['NAME'] on the created objcopy task)
	run_str = '${OBJCOPY} -G lib_${NAME}_exports ${SRC[0].abspath()} ${TGT[0].abspath()}'
	def keyword(self):
		# label shown in waf's build progress output for this task
		return 'ObjCopy'
||||
|
||||
class xshlib(ccroot.link_task):
	"make relocatable library"
	no_errcheck_out = True
	# 'ld -r' merges the objects into a single relocatable object file
	# instead of producing a shared library
	run_str = '${LD} -r -o ${TGT[0].abspath()} ${LD_RELOCATABLE_FLAGS} ${CCLNK_SRC_F}${SRC}'

	def add_target(self, target):
		"create objcopy task for target"
		# NOTE(review): nesting reconstructed from a mangled source; assumes
		# the -m32 check only applies when LD_RELOCATABLE_FLAGS was not
		# supplied externally (conf.add_os_flags) -- confirm against upstream
		if not self.env.LD_RELOCATABLE_FLAGS:
			self.env.LD_RELOCATABLE_FLAGS = []
			if '-m32' in self.env.LINKFLAGS:
				self.env.LD_RELOCATABLE_FLAGS.append('-melf_i386')

		base = self.generator.path
		# two nodes: the raw relocatable object, and the one with exports stripped
		target_unstripped = base.find_or_declare('%s.unstripped.o'% target)
		target_stripped = base.find_or_declare('%s.o'% target)

		self.set_outputs(target_unstripped)
		# chain an objcopy task that keeps only lib_${NAME}_exports
		self.generator.objcopy_task= self.generator.create_task('objcopy_relocatable_lib', target_unstripped, target_stripped)
		self.generator.objcopy_task.env['NAME'] = target
||||
|
||||
class cprogram_static(c.cprogram):
	"build static c program"
	# same as c.cprogram but with '-static' forced on the link line
	run_str = '${LINK_CC} -static ${LINKFLAGS} ${CCLNK_SRC_F}${SRC} ${CCLNK_TGT_F}${TGT[0].abspath()} ${RPATH_ST:RPATH} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${FRAMEWORK_ST:FRAMEWORK} ${ARCH_ST:ARCH} ${STLIB_MARKER} ${STLIBPATH_ST:STLIBPATH} ${STLIB_ST:STLIB} ${STLIB_MARKER} ${LIBPATH_ST:LIBPATH} ${LIB_ST:LIB} ${LDFLAGS}'
||||
|
||||
class cxxprogram_static(cxx.cxxprogram):
	"build static cxx program"
	# same as cxx.cxxprogram but with '-static' forced on the link line
	run_str = '${LINK_CXX} -static ${LINKFLAGS} ${CXXLNK_SRC_F}${SRC} ${CXXLNK_TGT_F}${TGT[0].abspath()} ${RPATH_ST:RPATH} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${FRAMEWORK_ST:FRAMEWORK} ${ARCH_ST:ARCH} ${STLIB_MARKER} ${STLIBPATH_ST:STLIBPATH} ${STLIB_ST:STLIB} ${STLIB_MARKER} ${LIBPATH_ST:LIBPATH} ${LIB_ST:LIB} ${LDFLAGS}'
||||
|
||||
# usevars are same: the static program link tasks consume the same USELIB
# variables (LIB, LIBPATH, STLIB, LINKFLAGS, ...) as the regular cxxprogram task
ccroot.USELIB_VARS['cprogram_static'] = ccroot.USELIB_VARS['cxxprogram_static'] = ccroot.USELIB_VARS['cxxprogram']
||||
|
||||
def apply_static(main, *reloc):
	"""apply xshlib tasks and generate files

	:param main: name of the main program target that links everything in
	:param reloc: names of library targets to build as relocatable objects
	"""

	def write_libraries_list(out_node):
		"generate library list"
		libraries = reloc
		externs = '\n'.join(['extern table_t lib_%s_exports[];' % e for e in libraries])
		table = '\n'.join(['{ "%s", &lib_%s_exports },' % (e, e) for e in libraries])
		out_node.write('%s\nstruct {const char *name;void *func;} libs[] = {\n%s\n{0,0}\n};\n' % (externs, table ))


	def write_export_list(name, in_node, out_node):
		"generate exports list for library"
		exports = in_node.read().splitlines()
		externs = '\n'.join(['extern void %s(void);' % e for e in exports])
		table = '\n'.join(['{ "%s", &%s },' % (e, e) for e in exports])
		out_node.write('%s\nstruct {const char *name;void *func;} lib_%s_exports[] = {\n%s\n{0,0}\n};\n' % (externs, name, table ))

	@TaskGen.feature('cshlib', 'cxxshlib')
	# BUGFIX: waf's method is spelled 'propagate_uselib_vars'; the old
	# 'propogate_uselib_vars' named a nonexistent method, so that ordering
	# constraint was silently ignored.
	@TaskGen.before('process_source', 'propagate_uselib_vars')
	def apply_xshlib(self):
		"apply xshlib feature and inject link_helper.c to sources"
		if self.name in reloc:
			# swap the shared-library features for our relocatable-object one
			for k in ('cshlib', 'cxxshlib'):
				if k in self.features:
					self.features.remove(k)
			self.features.append('xshlib')
			# exports.txt lists the symbols this library must keep exported
			in_node = self.path.get_src().make_node('exports.txt')
			bldnode = self.path.get_bld()
			bldnode.mkdir()
			out_node = bldnode.make_node('link_helper.c')
			write_export_list(self.name,in_node, out_node)
			self.source = Utils.to_list(self.source) + [out_node]

	@TaskGen.feature('cshlib', 'cxxshlib', 'cprogram', 'cxxprogram', 'cprogram_static', 'cxxprogram_static')
	@TaskGen.before('process_source')
	def add_deps(self):
		"add all relocatable objects to main binary source list"
		if self.name == main:
			write_libraries_list(self.path.get_bld().make_node('generated_library_tables.h'))

			for t in reloc:
				self.source += [self.bld.get_tgen_by_name(t).objcopy_task.outputs[0]]
File diff suppressed because one or more lines are too long
Loading…
Reference in new issue