Compare commits

...

31 Commits
main ... dev

Author SHA1 Message Date
201eef96df [DEV] update header only library 2022-02-15 23:20:53 +01:00
c74c1b1805 [DEBUG] correct heritage 2022-02-15 23:47:11 +01:00
2c5642d5c8 [DEV] update model 2022-02-11 22:14:44 +01:00
6a6961fb3d [DEBUG] error in heriage 2022-02-08 23:02:38 +01:00
c31aad7e01 [DEV] cahnge config file 2022-02-06 23:20:50 +01:00
88834cc4d0 [DEV] upgrade to support multiple run test and chach at the end only (add suport of gnuc++ on GLB (bad hook) 2022-02-06 22:30:17 +01:00
1604b08af7 [DEV] correct selection of building mode 2022-01-16 23:48:21 +01:00
b4687319a2 [DEV] add capability to test multiple nodes 2022-01-16 23:40:03 +01:00
98a474d49c [DEV] remove dead idea 2022-01-16 22:35:16 +01:00
28cd4ef882 [DEBUG] add some correction 2022-01-15 00:33:58 +01:00
76b9b883ee [DEV] add some element 2022-01-14 22:24:58 +01:00
4db61cf9f2 [DEV] add a proto of nasm 2022-01-12 22:53:32 +01:00
64d016880e [DEV] update lutiun to support corectly multiple build model 2022-01-12 00:04:08 +01:00
9dc5218775 [DEV] update GLD parsing 2021-12-21 09:27:01 +01:00
08e50c35b3 [DEV] add generic flags for code quality 2021-11-17 21:53:36 +01:00
fced469b14 [DEV] update to GLD stupid repo compatibility 2021-11-15 22:47:04 +01:00
5555a74aa4 [DEV] update gld interpreatation 2021-11-10 23:50:26 +01:00
21eb62613d [DEV] continue interation of GLD ==> for cmake compatibility 2021-11-07 23:16:09 +01:00
e98d901fa4 [DEV] update the model 2021-10-28 23:56:18 +02:00
cdb88347ed [DEV] add GLD compatibility 2021-10-18 00:09:51 +02:00
3458fd1536 [VERSION] update dev tag version 2019-08-28 23:18:49 +02:00
608f14af02 [RELEASE] Release v2.7.1 2019-08-28 23:18:49 +02:00
2caa0eb792 [DEBUG] wrong include file 2019-08-28 23:18:30 +02:00
981eff73ea [VERSION] update dev tag version 2019-08-28 23:14:41 +02:00
f5ab931d42 [RELEASE] Release v2.7.0 2019-08-28 23:14:41 +02:00
3e5a35fa74 [DEBUG] correct the readme rst to md 2019-08-28 23:11:26 +02:00
58602345b5 [DEV] correct the wrong version number when install 2019-08-28 22:54:41 +02:00
843c1296a6 [VERSION] update dev tag version 2019-08-28 01:09:03 +02:00
f164407750 [RELEASE] Release v2.6.0 2019-08-28 01:09:03 +02:00
bc9ecbf0df [DEV] add version file 2019-08-28 00:31:29 +02:00
9ca3f693a2 [DEV] update version model 2019-08-28 00:23:32 +02:00
61 changed files with 4609 additions and 936 deletions

9
.gitignore vendored
View File

@@ -1,9 +0,0 @@
# Compiled python modules.
*.pyc
# Setuptools distribution folder.
/dist/
/build/
# Python egg metadata, regenerated from source files by setuptools.
/*.egg-info

17
.project Normal file
View File

@@ -0,0 +1,17 @@
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
<name>lutin</name>
<comment></comment>
<projects>
</projects>
<buildSpec>
<buildCommand>
<name>org.python.pydev.PyDevBuilder</name>
<arguments>
</arguments>
</buildCommand>
</buildSpec>
<natures>
<nature>org.python.pydev.pythonNature</nature>
</natures>
</projectDescription>

5
.pydevproject Normal file
View File

@@ -0,0 +1,5 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<?eclipse-pydev version="1.0"?><pydev_project>
<pydev_property name="org.python.pydev.PYTHON_PROJECT_INTERPRETER">Default</pydev_property>
<pydev_property name="org.python.pydev.PYTHON_PROJECT_VERSION">python interpreter</pydev_property>
</pydev_project>

View File

@@ -0,0 +1,62 @@
eclipse.preferences.version=1
encoding//bin/lutin=utf-8
encoding//lutin/__init__.py=utf-8
encoding//lutin/builder.py=utf-8
encoding//lutin/depend.py=utf-8
encoding//lutin/env.py=utf-8
encoding//lutin/heritage.py=utf-8
encoding//lutin/macro.py=utf-8
encoding//lutin/module.py=utf-8
encoding//lutin/moduleGLD.py=utf-8
encoding//lutin/multiprocess.py=utf-8
encoding//lutin/system.py=utf-8
encoding//lutin/target.py=utf-8
encoding//lutin/tools.py=utf-8
encoding//lutin/z_builder/__init__.py=utf-8
encoding//lutin/z_builder/lutinBuilder_binary.py=utf-8
encoding//lutin/z_builder/lutinBuilder_c++.py=utf-8
encoding//lutin/z_builder/lutinBuilder_c.py=utf-8
encoding//lutin/z_builder/lutinBuilder_jar.py=utf-8
encoding//lutin/z_builder/lutinBuilder_java.py=utf-8
encoding//lutin/z_builder/lutinBuilder_javah.py=utf-8
encoding//lutin/z_builder/lutinBuilder_libraryDynamic.py=utf-8
encoding//lutin/z_builder/lutinBuilder_libraryStatic.py=utf-8
encoding//lutin/z_builder/lutinBuilder_m.py=utf-8
encoding//lutin/z_builder/lutinBuilder_mm.py=utf-8
encoding//lutin/z_builder/lutinBuilder_nasm.py=utf-8
encoding//lutin/z_builder/lutinBuilder_s.py=utf-8
encoding//lutin/z_system/lutinSystem_Android_c.py=utf-8
encoding//lutin/z_system/lutinSystem_Android_cxx.py=utf-8
encoding//lutin/z_system/lutinSystem_Android_m.py=utf-8
encoding//lutin/z_system/lutinSystem_IOs_c.py=utf-8
encoding//lutin/z_system/lutinSystem_IOs_cxx.py=utf-8
encoding//lutin/z_system/lutinSystem_Linux_alsa.py=utf-8
encoding//lutin/z_system/lutinSystem_Linux_bsd.py=utf-8
encoding//lutin/z_system/lutinSystem_Linux_bz2.py=utf-8
encoding//lutin/z_system/lutinSystem_Linux_c.py=utf-8
encoding//lutin/z_system/lutinSystem_Linux_cxx.py=utf-8
encoding//lutin/z_system/lutinSystem_Linux_egl.py=utf-8
encoding//lutin/z_system/lutinSystem_Linux_gnutls.py=utf-8
encoding//lutin/z_system/lutinSystem_Linux_khr.py=utf-8
encoding//lutin/z_system/lutinSystem_Linux_m.py=utf-8
encoding//lutin/z_system/lutinSystem_Linux_mysql.py=utf-8
encoding//lutin/z_system/lutinSystem_Linux_opengl.py=utf-8
encoding//lutin/z_system/lutinSystem_Linux_pthread.py=utf-8
encoding//lutin/z_system/lutinSystem_Linux_python3-numpy.py=utf-8
encoding//lutin/z_system/lutinSystem_Linux_python3.py=utf-8
encoding//lutin/z_system/lutinSystem_Linux_rt.py=utf-8
encoding//lutin/z_system/lutinSystem_Linux_sodium.py=utf-8
encoding//lutin/z_system/lutinSystem_Linux_z.py=utf-8
encoding//lutin/z_system/lutinSystem_MacOs_Cocoa.py=utf-8
encoding//lutin/z_system/lutinSystem_Windows_ole.py=utf-8
encoding//lutin/z_system/lutinSystem_Windows_oleaut.py=utf-8
encoding//lutin/z_system/lutinSystem_Windows_psapi.py=utf-8
encoding//lutin/z_system/lutinSystem_Windows_shell.py=utf-8
encoding//lutin/z_system/lutinSystem_Windows_start-mode-gui.py=utf-8
encoding//lutin/z_target/lutinTarget_Android.py=utf-8
encoding//lutin/z_target/lutinTarget_Debian.py=utf-8
encoding//lutin/z_target/lutinTarget_IOs.py=utf-8
encoding//lutin/z_target/lutinTarget_Linux.py=utf-8
encoding//lutin/z_target/lutinTarget_MacOs.py=utf-8
encoding//lutin/z_target/lutinTarget_Windows.py=utf-8
encoding//lutin/zip.py=utf-8

View File

@@ -1,2 +1,3 @@
include README.rst
include README.md
include bash-autocompletion/lutin
include version.txt

View File

@@ -4,23 +4,7 @@ Lutin
`lutin` is a generic builder and package maker; it is a FREE software tool.
.. image:: https://badge.fury.io/py/lutin.png
:target: https://pypi.python.org/pypi/lutin
Release (master)
----------------
.. image:: https://travis-ci.org/HeeroYui/lutin.svg?branch=master
:target: https://travis-ci.org/HeeroYui/lutin
Developement (dev)
------------------
.. image:: https://travis-ci.org/HeeroYui/lutin.svg?branch=dev
:target: https://travis-ci.org/HeeroYui/lutin
[![Badge](https://badge.fury.io/py/lutin.png)](https://pypi.python.org/pypi/lutin)
Instructions
------------
@@ -51,21 +35,24 @@ Installation
Requirements: ``Python >= 2.7`` and ``pip``
Just run:
pip install lutin
```
pip install lutin
```
Install pip on debian/ubuntu:
sudo apt-get install pip
```
sudo apt-get install pip
```
Install pip on ARCH-linux:
sudo pacman -S pip
```
sudo pacman -S pip
```
Install pip on MacOs:
sudo easy_install pip
```
sudo easy_install pip
```
License (MPL v2.0)
---------------------

255
bin/lutin
View File

@@ -1,4 +1,4 @@
#!/usr/bin/python
#!/usr/bin/python3
# -*- coding: utf-8 -*-
##
## @author Edouard DUPIN
@@ -12,6 +12,7 @@
import sys
import os
import copy
import json
from realog import debug as debug
import lutin
import death.Arguments as arguments
@@ -28,6 +29,7 @@ import lutin.tools as lutinTools
myArgs = arguments.Arguments()
myArgs.add("h", "help", desc="Display this help")
myArgs.add("H", "HELP", desc="Display this help (with all compleate information)")
myArgs.add("", "version", desc="Display the application version")
myArgs.add_section("option", "Can be set one time in all case")
myArgs.add("v", "verbose", list=[["0","None"],["1","error"],["2","warning"],["3","info"],["4","debug"],["5","verbose"],["6","extreme_verbose"]], desc="display makefile debug level (verbose) default =2")
myArgs.add("C", "color", desc="Display makefile output in color")
@@ -40,6 +42,7 @@ myArgs.add("o", "force-optimisation", desc="Force optimisation of the build")
myArgs.add("w", "warning", desc="Store warning in a file build file")
myArgs.add("i", "isolate-system", desc="Isolate system build (copy header of c and c++ system lib to not include unneeded external libs) EXPERIMENTAL (archlinux)")
myArgs.add("K", "ccache", desc="Enable the ccache interface")
myArgs.add("A", "async-fail", desc="Asynchronous fail of all the run execution, this permit to execute all test and report all fails")
myArgs.add_section("properties", "keep in the sequency of the cible")
myArgs.add("t", "target", haveParam=True, desc="Select a target (by default the platform is the computer that compile this) To know list : 'lutin.py --list-target'")
@@ -61,27 +64,29 @@ localArgument = myArgs.parse()
display the help of this makefile
"""
def usage(full=False):
color = debug.get_color_set()
color = debug.get_color_set();
# generic argument displayed :
myArgs.display()
print(" All target can finish with '?clean' '?dump' '?gcov' ... ?action (@ can replace ?)" )
print(" " + color['green'] + "all" + color['default'])
print(" build all (only for the current selected board) (bynary and packages)")
print(" " + color['green'] + "clean" + color['default'])
print(" clean all (same as previous)")
print(" " + color['green'] + "dump" + color['default'])
print(" Dump all the module dependency and properties")
print(" " + color['green'] + "dependency" + color['default'])
print(" generate a file dependency.dot that represent all the dependency link")
print(" Select what in included: 'dependency:LPBDK'")
print(" L: Library")
print(" P: Pre-build")
print(" D: Data")
print(" B: Binary")
print(" K: Package")
print(" eg: lutin dependency:LD ; dot -Tsvg dependency.dot -o dependency.svg ; firefox dependency.svg")
print(" " + color['green'] + "gcov" + color['default'])
print(" Parse all the code of the library with the gcov resolution")
myArgs.display();
print(" All target can finish with '?clean' '?dump' '?gcov' ... ?action (@ can replace ?)" );
print(" " + color['green'] + "all" + color['default']);
print(" build all (only for the current selected board) (bynary and packages)");
print(" " + color['green'] + "clean" + color['default']);
print(" clean all (same as previous)");
print(" " + color['green'] + "dump" + color['default']);
print(" Dump all the module dependency and properties");
print(" " + color['green'] + "dependency" + color['default']);
print(" generate a file dependency.dot that represent all the dependency link");
print(" Select what in included: 'dependency:LPBDK'");
print(" L: Library");
print(" P: Pre-build");
print(" D: Data");
print(" B: Binary");
print(" K: Package");
print(" eg: lutin dependency:LD ; dot -Tsvg dependency.dot -o dependency.svg ; firefox dependency.svg");
print(" " + color['green'] + "gcov" + color['default']);
print(" Parse all the code of the library with the gcov resolution");
print(" " + color['green'] + "run" + color['default']);
print(" Execute the application in the module. add ':' to add parameter to set in the program parameters");
listOfAllModule = module.list_all_module_with_desc()
for mod in listOfAllModule:
data_print = " "
@@ -128,7 +133,8 @@ def usage(full=False):
"""
print(data_print)
if mod["description"] != "":
if mod["description"] != None \
and mod["description"] != "":
print(" " + mod["description"])
if full == True:
if mod["type"] != None \
@@ -170,8 +176,22 @@ def usage(full=False):
print(" ex complex arguments : " + sys.argv[0] + " -cclang -mdebug zeus-package-base?build?run%zeus-launcher:--srv=user:--elog-level=5")
print(" ex gcov: " + sys.argv[0] + " -cgcc --gcov -mdebug etk-test?build?run etk?gcov")
print(" ex gcov with output: " + sys.argv[0] + " -cgcc --gcov -mdebug etk-test?build?run etk?gcov:output")
print(" ex multiple test execution with end resume: " + sys.argv[0] + " -cgcc -mdebug *-test?build?run")
exit(0)
##
## @brief Display the version of this package.
##
def version():
color = debug.get_color_set()
import pkg_resources
print("version: " + str(pkg_resources.get_distribution('lutin').version))
foldername = os.path.dirname(__file__)
print("source folder is: " + foldername)
exit(0)
def check_boolean(value):
if value == "" \
or value == "1" \
@@ -188,6 +208,10 @@ def parseGenericArg(argument, active):
if active == False:
usage()
return True
if argument.get_option_name() == "version":
if active == False:
version()
return True
if argument.get_option_name() == "HELP":
if active == False:
usage(True)
@@ -205,22 +229,29 @@ def parseGenericArg(argument, active):
return True
if argument.get_option_name() == "list-target":
if active == False:
list_of_target = target.list_all_target()
retValue = ""
list_of_target = target.list_all_target();
retValue = "";
for targetName in list_of_target:
if retValue != "":
retValue += " "
retValue += targetName
print(retValue)
exit(0)
retValue += " ";
retValue += targetName;
print(retValue);
exit(0);
return True
elif argument.get_option_name()=="jobs":
if active == True:
multiprocess.set_core_number(int(argument.get_arg()))
multiprocess.set_core_number(int(argument.get_arg()));
return True
elif argument.get_option_name()=="depth":
if active == True:
env.set_parse_depth(int(argument.get_arg()))
env.set_parse_depth(int(argument.get_arg()));
return True
elif argument.get_option_name()=="async-fail":
if active == True:
if check_boolean(argument.get_arg()) == True:
env.set_async_fail(True);
else:
env.set_async_fail(False);
return True
elif argument.get_option_name()=="ccache":
if active == True:
@@ -284,58 +315,74 @@ def parseGenericArg(argument, active):
return True
return False
"""
simple configuration in file ".lutin/config.json"
{
"exclude-path":[
"archive",
"sdk"
],
"parsing-depth": 3,
"jobs": 12,
"color: true,
"debug-level": 3,
"print-pretty": true,
"isolate-system": false,
"force-optimization": false
}
"""
# open configuration of lutin:
config_file_name = "lutinConfig.py"
config_file_name = ".lutin/config.json"
config_file = os.path.join(tools.get_run_path(), config_file_name)
if os.path.isfile(config_file) == True:
sys.path.append(os.path.dirname(config_file))
debug.debug("Find basic configuration file: '" + config_file + "'")
sys.path.append(os.path.dirname(config_file));
debug.debug("Find basic configuration file: '" + config_file + "'");
config_data_file = tools.file_read_data(config_file);
# the file exist, we can open it and get the initial configuration:
configuration_file = __import__(config_file_name[:-3])
config_data = json.loads(config_data_file);
if "get_exclude_path" in dir(configuration_file):
data = configuration_file.get_exclude_path()
if "exclude-path" in config_data.keys():
data = config_data["exclude-path"];
debug.debug(" get default config 'get_exclude_path' val='" + str(data) + "'")
env.set_exclude_search_path(data)
if "get_parsing_depth" in dir(configuration_file):
data = configuration_file.get_parsing_depth()
if "parsing-depth" in config_data.keys():
data = config_data["parsing-depth"];
debug.debug(" get default config 'get_parsing_depth' val='" + str(data) + "'")
parseGenericArg(arg_element.ArgElement("depth", str(data)), True)
if "get_ccache" in dir(configuration_file):
data = configuration_file.get_ccache()
if "ccache" in config_data.keys():
data = config_data["ccache"];
debug.debug(" get default config 'get_ccache' val='" + str(data) + "'")
parseGenericArg(arg_element.ArgElement("ccache", str(data)), True)
if "get_default_jobs" in dir(configuration_file):
data = configuration_file.get_default_jobs()
if "jobs" in config_data.keys():
data = config_data["jobs"];
debug.debug(" get default config 'get_default_jobs' val='" + str(data) + "'")
parseGenericArg(arg_element.ArgElement("jobs", str(data)), True)
if "get_default_color" in dir(configuration_file):
data = configuration_file.get_default_color()
if "color" in config_data.keys():
data = config_data["color"];
debug.debug(" get default config 'get_default_color' val='" + str(data) + "'")
parseGenericArg(arg_element.ArgElement("color", str(data)), True)
if "get_default_debug_level" in dir(configuration_file):
data = configuration_file.get_default_debug_level()
if "debug-level" in config_data.keys():
data = config_data["debug-level"];
debug.debug(" get default config 'get_default_debug_level' val='" + str(data) + "'")
parseGenericArg(arg_element.ArgElement("verbose", str(data)), True)
if "get_default_print_pretty" in dir(configuration_file):
data = configuration_file.get_default_print_pretty()
if "print-pretty" in config_data.keys():
data = config_data["print-pretty"];
debug.debug(" get default config 'get_default_print_pretty' val='" + str(data) + "'")
parseGenericArg(arg_element.ArgElement("pretty", str(data)), True)
if "get_default_force_optimisation" in dir(configuration_file):
data = configuration_file.get_default_force_optimisation()
if "force-optimization" in config_data.keys():
data = config_data["force-optimization"];
debug.debug(" get default config 'get_default_force_optimisation' val='" + str(data) + "'")
parseGenericArg(arg_element.ArgElement("force-optimisation", str(data)), True)
if "get_default_isolate_system" in dir(configuration_file):
data = configuration_file.get_default_isolate_system()
if "isolate-system" in config_data.keys():
data = config_data["isolate-system"];
debug.debug(" get default config 'get_default_isolate_system' val='" + str(data) + "'")
parseGenericArg(arg_element.ArgElement("isolate-system", str(data)), True)
@@ -360,36 +407,37 @@ config = {
"gcov":False,
"compilator-version":""
}
elementErrors = [];
# load the default target :
my_target = None
actionDone=False
my_target = None;
actionDone=False;
# parse all argument
for argument in localArgument:
if parseGenericArg(argument, False) == True:
continue
continue;
elif argument.get_option_name() == "compilator-version":
config["compilator-version"] = argument.get_arg()
config["compilator-version"] = argument.get_arg();
elif argument.get_option_name() == "package":
config["generate-package"]=False
config["generate-package"]=False;
elif argument.get_option_name() == "simulation":
config["simulation"]=True
config["simulation"]=True;
elif argument.get_option_name() == "gcov":
config["gcov"]=True
config["gcov"]=True;
elif argument.get_option_name() == "bus":
config["bus-size"]=argument.get_arg()
config["bus-size"]=argument.get_arg();
elif argument.get_option_name() == "arch":
config["arch"]=argument.get_arg()
config["arch"]=argument.get_arg();
elif argument.get_option_name() == "compilator":
if config["compilator"] != argument.get_arg():
debug.debug("change compilator ==> " + argument.get_arg())
config["compilator"] = argument.get_arg()
debug.debug("change compilator ==> " + argument.get_arg());
config["compilator"] = argument.get_arg();
#remove previous target
my_target = None
my_target = None;
elif argument.get_option_name() == "target":
# No check on the input ==> this will be verified automatically when the target is loaded
if targetName != argument.get_arg():
targetName = argument.get_arg()
debug.debug("change target ==> '" + targetName + "' & reset mode : gcc&release")
targetName = argument.get_arg();
debug.debug("change target ==> '" + targetName + "' & reset mode : gcc&release");
#reset properties by default:
config = {
"compilator":lutinHost.HOST_DEFAULT_COMPILATOR,
@@ -400,53 +448,76 @@ for argument in localArgument:
"simulation":False,
"gcov":False,
"compilator-version":""
}
};
#remove previous target
my_target = None
my_target = None;
elif argument.get_option_name() == "mode":
if config["mode"] != argument.get_arg():
config["mode"] = argument.get_arg()
debug.debug("change mode ==> " + config["mode"])
config["mode"] = argument.get_arg();
debug.debug("change mode ==> " + config["mode"]);
#remove previous target
my_target = None
my_target = None;
else:
argument_value = argument.get_arg()
debug.debug("something request : '" + argument_value + "'")
argument_value = argument.get_arg();
debug.debug("something request : '" + argument_value + "'");
if argument.get_option_name() != "":
debug.warning("Can not understand argument : '" + argument.get_option_name() + "'")
usage()
debug.warning("Can not understand argument : '" + argument.get_option_name() + "'");
usage();
break;
name2 = argument_value.replace("@", "?")
gettedElement = name2.split("?")
module_name = gettedElement[0]
action_list = gettedElement[1:]
name2 = argument_value.replace("@", "?");
gettedElement = name2.split("?");
module_name = gettedElement[0];
action_list = gettedElement[1:];
if len(action_list) == 0:
action_list = "build"
debug.debug("requested: '" + module_name + "' ? actions:'" + str(action_list) + "'")
multiple_module_list = []
action_list = "build";
debug.debug("requested: '" + module_name + "' ? actions:'" + str(action_list) + "'");
multiple_module_list = [];
if module_name[-1] == "*":
base_name = module_name[:-1]
base_name = module_name[:-1];
for mod in module.list_all_module():
if mod[:len(base_name)] == base_name:
debug.verbose("need do it for: " + mod);
multiple_module_list.append(mod)
multiple_module_list.append(mod);
else:
multiple_module_list.append(module_name)
debug.debug("Will do: '" + str(multiple_module_list) + "' ? actions:'" + str(action_list) + "'")
multiple_module_list.append(module_name);
debug.debug("Will do: '" + str(multiple_module_list) + "' ? actions:'" + str(action_list) + "'");
for module_name in multiple_module_list:
#load the target if needed :
if my_target == None:
my_target = target.load_target(targetName, copy.deepcopy(config))
my_target.build(module_name, actions=action_list)
actionDone=True
my_target = target.load_target(targetName, copy.deepcopy(config));
heritage, is_build, error_nodes = my_target.build(module_name, actions=action_list);
if error_nodes != None:
for err in error_nodes:
elementErrors.append(err);
actionDone=True;
# if no action done : we do "all" ...
if actionDone==False:
#load the target if needed :
if my_target == None:
my_target = target.load_target(targetName, config)
my_target.build("all")
my_target = target.load_target(targetName, config);
heritage, is_build, error_nodes = my_target.build("all");
if error_nodes != None:
for err in error_nodes:
elementErrors.append(err);
if len(elementErrors) != 0:
have_error = False;
for elret in elementErrors:
out = " '" + str(elret["module"]) + "'";
if elret["bin"] != None:
out += " ==> bin name='" + str(elret["bin"]) + "'";
if len(elret["options"]) != 0:
out += " with option: " + str(elret["options"]);
if elret["return"] != 0:
debug.warning("[ FAIL ] " + out + " RETURN value: " + str(elret["return"]));
have_error = True;
else:
debug.info("[ OK ] " + out);
if have_error:
debug.error("Execution fail...");
# stop all started threads;
multiprocess.un_init()
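
The main behavioural change in `bin/lutin` above is the switch from the old `lutinConfig.py` hook to a plain `.lutin/config.json` file. Below is a minimal sketch of that lookup using only the standard library; the key names come from the docstring in the diff, while the `load_local_config` helper and the default values are illustrative, not part of lutin.

```python
import json
import os

# Defaults for the options the config file may override.
# Key names follow the docstring shown in the diff above;
# the values here are purely illustrative.
DEFAULTS = {
    "exclude-path": [],
    "parsing-depth": 3,
    "jobs": 1,
    "color": True,
    "debug-level": 2,
    "print-pretty": True,
    "isolate-system": False,
    "force-optimization": False,
}

def load_local_config(run_path="."):
    """Overlay .lutin/config.json (if present) on top of the defaults."""
    config = dict(DEFAULTS)
    config_file = os.path.join(run_path, ".lutin", "config.json")
    if os.path.isfile(config_file):
        with open(config_file, "r") as handle:
            data = json.load(handle)
        # Keep only the keys that are actually recognised.
        for key in DEFAULTS:
            if key in data:
                config[key] = data[key]
    return config

if __name__ == "__main__":
    print(load_local_config())
```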

20
cmake/CMakeLists.txt Normal file
View File

@@ -0,0 +1,20 @@
cmake_minimum_required(VERSION 3.20)
project(GlobalSearch)
message("List of modules: ${CMAKE_MODULE_PATH}")
#LIST(APPEND CMAKE_MODULE_PATH "cmake")
#find_package(GLDBuilder REQUIRED)
include("cmake/GLDBuilder.cmake")
#GLD_import("./" "etk-core")
get_filename_component(LOCAL_FILE_PATH "." ABSOLUTE)
GLD_auto_prebuild_load_all("${LOCAL_FILE_PATH}")
GLD_auto_load_all("${LOCAL_FILE_PATH}")
GLD_instanciate()

944
cmake/GLDBuilder.cmake Normal file
View File

@@ -0,0 +1,944 @@
cmake_minimum_required(VERSION 3.20)
include("cmake/GLDJson.cmake")
include("cmake/GLDTargetConfig.cmake")
include("cmake/GLDTools.cmake")
function(GLD_import_full_group NAME_GLD_MODULE MY_JSON_STRING ELEMENT_TO_CHECK TYPE_VARIABLE)
json_get_type(TYPE ${MY_JSON_STRING} ${ELEMENT_TO_CHECK})
#message("target type = ${TYPE}")
if (${TYPE} STREQUAL "OBJECT")
json_object_keys(LIST_KEY ${MY_JSON_STRING} ${ELEMENT_TO_CHECK})
foreach (III ${LIST_KEY})
# check the target, no need to add unknown targets ...
if (${III} STREQUAL "*")
json_object_values(DATA_TARGET ${MY_JSON_STRING} ${ELEMENT_TO_CHECK} "*")
#message("target(*) data: ${DATA_TARGET}")
GLD_import_full(NAME_GLD_MODULE DATA_TARGET)
elseif (${III} STREQUAL ${TYPE_VARIABLE})
json_object_values(DATA_TARGET ${MY_JSON_STRING} ${ELEMENT_TO_CHECK} "${III}")
GLD_import_full(NAME_GLD_MODULE DATA_TARGET)
#message("target(${III}) data: ${DATA_TARGET}")
else()
message("TODO: get dependency manage '${ELEMENT_TO_CHECK}' : ${III}")
endif()
endforeach()
elseif(${TYPE} STREQUAL "NOTFOUND" OR ${TYPE} STREQUAL "NULL")
# nothing to do ..
else()
message("ERROR : '${ELEMENT_TO_CHECK}' can not be other than an json object : ${TYPE}")
endif()
endfunction()
function(GLD_import_element_dependency NAME_GLD_MODULE MY_JSON_STRING)
GLD_get_module_name(LOCAL_MODULE_NAME ${NAME_GLD_MODULE})
##########################################################
## DEPENDENCY:
##########################################################
json_get_type(TYPE ${MY_JSON_STRING} "dependency")
set(LIST_VALUE "")
set(LIST_OPTIONAL_VALUE "")
#message("Dependency type = ${TYPE}")
if (${TYPE} STREQUAL "ARRAY")
json_size(SIZE ${MY_JSON_STRING} "dependency")
#message("Dependency SIZE = ${SIZE}")
if (SIZE GREATER 0)
json_get_data(OBJECT_DATA ${MY_JSON_STRING} "dependency")
MATH(EXPR SIZE "${SIZE}-1")
set(VAR_OUT_TMP "")
foreach(IDX RANGE ${SIZE})
json_get_data(ELEMENT ${OBJECT_DATA} ${IDX})
json_get_type(TYPE ${OBJECT_DATA} ${IDX})
if (${TYPE} STREQUAL "STRING")
message(" - <dep> : ${ELEMENT}")
list(APPEND VAR_OUT_TMP ${ELEMENT})
elseif (${TYPE} STREQUAL "OBJECT")
json_get_type(TYPE ${ELEMENT} "name")
if (${TYPE} STREQUAL "STRING")
json_get_data(DEPENDENCY_NAME ${ELEMENT} "name")
json_get_type(TYPE ${ELEMENT} "optional")
#message("optional type = ${TYPE} <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< ${DEPENDENCY_NAME}")
if (${TYPE} STREQUAL "BOOLEAN")
json_get_data(DEPENDENCY_OPTIONAL ${ELEMENT} "optional")
if (${DEPENDENCY_OPTIONAL})
message(" - <dep> : ${DEPENDENCY_NAME} (optional) ==> not managed now ...")
#message("optional value ==========================> '${DEPENDENCY_OPTIONAL}' ==> MAYBE")
list(APPEND LIST_OPTIONAL_VALUE ${DEPENDENCY_NAME})
else()
message(" - <dep> : ${DEPENDENCY_NAME}")
#message("optional value ==========================> '${DEPENDENCY_OPTIONAL}' ==> MUST")
list(APPEND VAR_OUT_TMP ${DEPENDENCY_NAME})
endif()
else()
message(" - <dep> : ${DEPENDENCY_NAME}")
list(APPEND VAR_OUT_TMP ${DEPENDENCY_NAME})
endif()
#message("optional type = ${TYPE} <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< ${DEPENDENCY_NAME}")
else()
message("Dependency 'name' is not a string or is missing type: ${TYPE}")
endif()
else()
message("dependency element not manage data : ${ELEMENT}")
## TODO add in dependency if optional : check if the element exit in the current module list ...
endif()
endforeach()
list(APPEND LIST_VALUE ${VAR_OUT_TMP})
endif()
elseif(${TYPE} STREQUAL "NOTFOUND")
return()
endif()
set(MODULE_MAP_${LOCAL_MODULE_NAME}_DEPENDENCY ${MODULE_MAP_${LOCAL_MODULE_NAME}_DEPENDENCY} ${LIST_VALUE} CACHE INTERNAL "")
endfunction()
function(GLD_import_element_source NAME_GLD_MODULE MY_JSON_STRING)
GLD_get_module_name(LOCAL_MODULE_NAME ${NAME_GLD_MODULE})
##########################################################
## SOURCE:
##########################################################
set(LIST_VALUE "")
json_get_type(TYPE ${MY_JSON_STRING} "source")
if (${TYPE} STREQUAL "STRING")
json_get_data(OBJECT_DATA ${MY_JSON_STRING} "source")
message(" - <src> : ${OBJECT_DATA}")
list(APPEND LIST_VALUE ${OBJECT_DATA})
elseif (${TYPE} STREQUAL "ARRAY")
json_get_data(OBJECT_DATA ${MY_JSON_STRING} "source")
json_size(SIZE ${MY_JSON_STRING} "source")
#message("Dependency SIZE = ${SIZE}")
if (SIZE GREATER 0)
MATH(EXPR SIZE "${SIZE}-1")
set(VAR_OUT_TMP "")
foreach(IDX RANGE ${SIZE})
json_get_data(ELEMENT ${OBJECT_DATA} ${IDX})
json_get_type(TYPE ${OBJECT_DATA} ${IDX})
if (${TYPE} STREQUAL "STRING")
message(" - <src> : ${ELEMENT}")
list(APPEND LIST_VALUE ${ELEMENT})
elseif (${TYPE} STREQUAL "OBJECT")
message(" - <src2> : ${ELEMENT}")
json_get_type(TYPE ${ELEMENT} "source")
json_get_data(ELEMENT_SOURCE ${ELEMENT} "source")
if (${TYPE} STREQUAL "STRING")
message(" - <src> : ${ELEMENT_SOURCE}")
list(APPEND LIST_VALUE ${ELEMENT_SOURCE})
#message("optional type = ${TYPE} <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< ${DEPENDENCY_NAME}")
elseif (${TYPE} STREQUAL "ARRAY")
message(" - <src> : ${ELEMENT_SOURCE}")
list(APPEND LIST_VALUE ${ELEMENT_SOURCE})
else()
message("Dependency 'name' is not a string or is missing type: ${TYPE}")
endif()
# TODO: add the source specific flags or other things ...
else()
message("'source' element not manage data : ${ELEMENT}")
## TODO add in dependency if optional : check if the element exit in the current module list ...
endif()
endforeach()
endif()
elseif (${TYPE} STREQUAL "OBJECT")
# todo: manage object with source like { "c++":[...]...}
elseif(${TYPE} STREQUAL "NOTFOUND")
return()
else()
message(WARNING "Unmanaged type='${TYPE}' for 'source' node")
endif()
set(MODULE_MAP_${LOCAL_MODULE_NAME}_SOURCE ${MODULE_MAP_${LOCAL_MODULE_NAME}_SOURCE} ${LIST_VALUE} CACHE INTERNAL "")
endfunction()
function(GLD_import_element_header NAME_GLD_MODULE MY_JSON_STRING)
GLD_get_module_name(LOCAL_MODULE_NAME ${NAME_GLD_MODULE})
##########################################################
## HEADER:
##########################################################
set(LIST_VALUE "")
json_get_type(TYPE ${MY_JSON_STRING} "header")
if (${TYPE} STREQUAL "STRING")
json_get_data(OBJECT_DATA ${MY_JSON_STRING} "header")
message(" - <header> : ${OBJECT_DATA}")
list(APPEND LIST_VALUE ${OBJECT_DATA})
elseif (${TYPE} STREQUAL "ARRAY")
json_get_data(OBJECT_DATA ${MY_JSON_STRING} "header")
json_size(SIZE ${MY_JSON_STRING} "header")
#message("Dependency SIZE = ${SIZE}")
if (SIZE GREATER 0)
MATH(EXPR SIZE "${SIZE}-1")
set(VAR_OUT_TMP "")
foreach(IDX RANGE ${SIZE})
json_get_data(ELEMENT ${OBJECT_DATA} ${IDX})
json_get_type(TYPE ${OBJECT_DATA} ${IDX})
if (${TYPE} STREQUAL "STRING")
message(" - <header> : ${ELEMENT}")
list(APPEND LIST_VALUE ${ELEMENT})
elseif (${TYPE} STREQUAL "OBJECT")
json_get_type(TYPE ${ELEMENT} "source")
if (${TYPE} STREQUAL "NOTFOUND")
json_get_type(TYPE ${ELEMENT} "path")
if (${TYPE} STREQUAL "STRING")
json_get_data(ELEMENT_PATH ${ELEMENT} "path")
json_get_data_or_default(ELEMENT_FILTER ${ELEMENT} "filter" "*")
json_get_data_or_default(ELEMENT_RECURSIVE ${ELEMENT} "path" OFF)
json_get_data_or_default(ELEMENT_TO ${ELEMENT} "to" "")
find_all_files(ALL_HEADER_FILES "${MODULE_MAP_${LOCAL_MODULE_NAME}_FOLDER}/${ELEMENT_PATH}" "${ELEMENT_FILTER}" 50)
#message("***********************************************************************")
#foreach(III ${ALL_HEADER_FILES})
# message(" ==> ${III}")
#endforeach()
#message("STATIC_PART = ${MODULE_MAP_${LOCAL_MODULE_NAME}_FOLDER}/${STATIC_PART}")
replace_base_path(ALL_HEADER_FILES_2 "${MODULE_MAP_${LOCAL_MODULE_NAME}_FOLDER}/${ELEMENT_PATH}" "${ALL_HEADER_FILES}")
#message("***********************************************************************")
set(ALL_HEADER_FILES "")
foreach(III ${ALL_HEADER_FILES_2})
#message(" ==> ${III}!${ELEMENT_PATH}:${ELEMENT_TO}")
list(APPEND ALL_HEADER_FILES "${III}!${ELEMENT_PATH}:${ELEMENT_TO}")
endforeach()
list(APPEND LIST_VALUE ${ALL_HEADER_FILES})
else()
message("Dependency 'path' is not a string or is missing type: ${TYPE} : STRING ...")
endif()
else()
if (${TYPE} STREQUAL "STRING")
json_get_data(ELEMENT_SOURCE ${ELEMENT} "source")
message(" - <header> : ${ELEMENT_SOURCE}")
list(APPEND LIST_VALUE ${ELEMENT_SOURCE})
#message("optional type = ${TYPE} <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< ${DEPENDENCY_NAME}")
elseif (${TYPE} STREQUAL "ARRAY")
json_get_data(ELEMENT_SOURCE ${ELEMENT} "source")
message(" - <header> : ${ELEMENT_SOURCE}")
list(APPEND LIST_VALUE ${ELEMENT_SOURCE})
else()
message("Dependency 'source' is not a string or is missing type: ${TYPE} : STRING or ARRAY ...")
endif()
endif()
# TODO: add the source specific flags or other things ...
else()
message("'header' element not manage data : ${ELEMENT}")
## TODO add in dependency if optional : check if the element exit in the current module list ...
endif()
endforeach()
endif()
elseif (${TYPE} STREQUAL "OBJECT")
json_get_data(OBJECT_DATA ${MY_JSON_STRING} "header")
# todo: manage object with source like { "c++":[...]...}
elseif(${TYPE} STREQUAL "NOTFOUND")
return()
else()
message(WARNING "Unmanaged type='${TYPE}' for 'header' node")
endif()
set(MODULE_MAP_${LOCAL_MODULE_NAME}_HEADER ${MODULE_MAP_${LOCAL_MODULE_NAME}_HEADER} ${LIST_VALUE} CACHE INTERNAL "")
endfunction()
function(GLD_import_element_path NAME_GLD_MODULE MY_JSON_STRING)
GLD_get_module_name(LOCAL_MODULE_NAME ${NAME_GLD_MODULE})
##########################################################
## PATH:
##########################################################
set(LIST_VALUE "")
json_get_type(TYPE ${MY_JSON_STRING} "path")
if (${TYPE} STREQUAL "STRING")
json_get_data(OBJECT_DATA ${MY_JSON_STRING} "path")
message(" - <header> : ${OBJECT_DATA}")
list(APPEND LIST_VALUE ${OBJECT_DATA})
elseif (${TYPE} STREQUAL "ARRAY")
json_get_data(OBJECT_DATA ${MY_JSON_STRING} "path")
json_size(SIZE ${MY_JSON_STRING} "path")
#message("Dependency SIZE = ${SIZE}")
MATH(EXPR SIZE "${SIZE}-1")
set(VAR_OUT_TMP "")
foreach(IDX RANGE ${SIZE})
json_get_data(ELEMENT ${OBJECT_DATA} ${IDX})
json_get_type(TYPE ${OBJECT_DATA} ${IDX})
if (${TYPE} STREQUAL "STRING")
message(" - <header> : ${ELEMENT}")
list(APPEND LIST_VALUE ${ELEMENT})
else()
message("'path' element not manage data : ${ELEMENT}")
## TODO add in dependency if optional : check if the element exit in the current module list ...
endif()
endforeach()
elseif (${TYPE} STREQUAL "OBJECT")
json_get_data(OBJECT_DATA ${MY_JSON_STRING} "path")
# todo: manage object with source like { "c++":[...]...}
elseif(${TYPE} STREQUAL "NOTFOUND")
return()
else()
message(WARNING "Unmanaged type='${TYPE}' for 'path' node")
endif()
set(MODULE_MAP_${LOCAL_MODULE_NAME}_INCLUDE_LOCAL ${MODULE_MAP_${LOCAL_MODULE_NAME}_INCLUDE_LOCAL} ${LIST_VALUE} CACHE INTERNAL "")
endfunction()
function(GLD_import_element_compilation_version NAME_GLD_MODULE MY_JSON_STRING)
GLD_get_module_name(LOCAL_MODULE_NAME ${NAME_GLD_MODULE})
##########################################################
## COMPILATION-VERSION:
##########################################################
endfunction()
function(GLD_import_element_copy NAME_GLD_MODULE MY_JSON_STRING)
GLD_get_module_name(LOCAL_MODULE_NAME ${NAME_GLD_MODULE})
##########################################################
## COPY:
##########################################################
endfunction()
##
## @brief Get the list of all dependencies, even the optional ones.
## @param[out] VAR_OUT List of dependency libraries
## @param[out] VAR_OPTIONAL_OUT List of optional dependency libraries
## @param[in] MY_JSON_STRING JSON string
## @note This function depends on the selected target
##
function(GLD_import_full NAME_GLD_MODULE MY_JSON_STRING)
GLD_import_element_dependency(${NAME_GLD_MODULE} ${MY_JSON_STRING})
GLD_import_element_source(${NAME_GLD_MODULE} ${MY_JSON_STRING})
GLD_import_element_header(${NAME_GLD_MODULE} ${MY_JSON_STRING})
GLD_import_element_path(${NAME_GLD_MODULE} ${MY_JSON_STRING})
GLD_import_element_compilation_version(${NAME_GLD_MODULE} ${MY_JSON_STRING})
GLD_import_element_copy(${NAME_GLD_MODULE} ${MY_JSON_STRING})
GLD_import_full_group(${NAME_GLD_MODULE} ${MY_JSON_STRING} "target" ${GLD_TARGET})
GLD_import_full_group(${NAME_GLD_MODULE} ${MY_JSON_STRING} "mode" ${GLD_MODE})
GLD_import_full_group(${NAME_GLD_MODULE} ${MY_JSON_STRING} "arch" ${GLD_ARCH})
GLD_import_full_group(${NAME_GLD_MODULE} ${MY_JSON_STRING} "bus-size" ${GLD_BUS_SIZE})
GLD_import_full_group(${NAME_GLD_MODULE} ${MY_JSON_STRING} "compiler" ${GLD_COMPILER})
GLD_import_full_group(${NAME_GLD_MODULE} ${MY_JSON_STRING} "sanity-compilation" ${GLD_SANITY_MODE})
endfunction()
function(GLD_load_from_file_if_needed VAR_OUT LIBRARY_PATH ELEMENT)
#message("Check element: ${ELEMENT}")
if("${ELEMENT}" MATCHES "file://*")
#message("match file://")
string(REPLACE "file://" "" FILENAME ${ELEMENT})
#message(" ==> ${FILENAME}")
file (STRINGS "${LIBRARY_PATH}/${FILENAME}" DATA_READ)
set(${VAR_OUT} "${DATA_READ}" PARENT_SCOPE)
else()
set(${VAR_OUT} "${ELEMENT}" PARENT_SCOPE)
endif()
endfunction()
function(GLD_get_import_folder VAR_OUT NAME_GLD_MODULE)
set(LIST_OUT "")
set(${VAR_OUT} "${LIST_OUT}" PARENT_SCOPE)
endfunction()
function(GLD_get_import_folder VAR_OUT NAME_GLD_MODULE)
set(LIST_OUT "")
set(${VAR_OUT} "${LIST_OUT}" PARENT_SCOPE)
endfunction()
function(GLD_get_project_dependency VAR_OUT NAME_GLD_MODULE DEPENDENCY)
GLD_get_module_name(LOCAL_MODULE_NAME ${NAME_GLD_MODULE})
set(LIST_OUT "")
if (DEPENDENCY)
foreach(III "${DEPENDENCY}")
GLD_get_module_name(TMP_MODULE_NAME ${III})
list(APPEND LIST_OUT ${III})
endforeach()
endif()
set(${VAR_OUT} "${LIST_OUT}" PARENT_SCOPE)
endfunction()
function(GLD_import NAME_GLD_MODULE)
GLD_get_module_name(LOCAL_MODULE_NAME ${NAME_GLD_MODULE})
# set(MODULE_MAP_${LOCAL_MODULE_NAME}_FOLDER ${FOLDER} CACHE INTERNAL "")
# set(MODULE_MAP_${LOCAL_MODULE_NAME}_FILENAME ${FILENAME} CACHE INTERNAL "")
# set(MODULE_MAP_${LOCAL_MODULE_NAME}_NAME ${NAME_GLD_MODULE} CACHE INTERNAL "")
# set(MODULE_MAP_LIST ${MODULE_MAP_LIST} ${NAME_GLD_MODULE} CACHE INTERNAL "")
# Read the JSON file.
set(MY_JSON_STRING ${MODULE_MAP_${LOCAL_MODULE_NAME}_JSON})
# Loop through each element of the JSON array (indices 0 though 1).
json_get_element(LIBRARY_TYPE ${MY_JSON_STRING} "type")
json_get_element(LIBRARY_SUB_TYPE ${MY_JSON_STRING} "sub-type")
json_get_element(LIBRARY_GROUP_ID ${MY_JSON_STRING} "group-id")
json_get_element(LIBRARY_DECRIPTION ${MY_JSON_STRING} "description")
json_get_element(LIBRARY_LICENCE ${MY_JSON_STRING} "license")
json_get_element(LIBRARY_LICENCE_FILE ${MY_JSON_STRING} "license-file")
json_get_element(LIBRARY_MAINTAINER ${MY_JSON_STRING} "maintainer")
json_get_element(LIBRARY_AUTHORS ${MY_JSON_STRING} "author")
json_get_element(LIBRARY_VERSION ${MY_JSON_STRING} "version")
json_get_element(CODE_QUALITY ${MY_JSON_STRING} "code-quality")
message("LIBRARY : ${LIBRARY_GROUP_ID}:${NAME_GLD_MODULE}")
message("LIBRARY_TYPE : ${LIBRARY_TYPE} / ${LIBRARY_SUB_TYPE}")
message("LIBRARY_DECRIPTION : ${LIBRARY_DECRIPTION}")
message("LIBRARY_LICENCE : ${LIBRARY_LICENCE}")
if (LIBRARY_LICENCE_FILE)
message("LIBRARY_LICENCE_FILE : ${LIBRARY_LICENCE_FILE}")
#GLD_load_from_file_if_needed(LIBRARY_LICENCE_FILE "${MODULE_MAP_${LOCAL_MODULE_NAME}_FOLDER}" "${LIBRARY_LICENCE_FILE}")
#message(" ==> : ${LIBRARY_LICENCE_FILE}")
endif()
if (LIBRARY_MAINTAINER)
message("LIBRARY_MAINTAINER : ${LIBRARY_MAINTAINER}")
GLD_load_from_file_if_needed(LIBRARY_MAINTAINER "${MODULE_MAP_${LOCAL_MODULE_NAME}_FOLDER}" "${LIBRARY_MAINTAINER}")
message(" ==> : ${LIBRARY_MAINTAINER}")
endif()
if (LIBRARY_AUTHORS)
message("LIBRARY_AUTHORS : ${LIBRARY_AUTHORS}")
GLD_load_from_file_if_needed(LIBRARY_AUTHORS "${MODULE_MAP_${LOCAL_MODULE_NAME}_FOLDER}" "${LIBRARY_AUTHORS}")
message(" ==> : ${LIBRARY_AUTHORS}")
endif()
if (LIBRARY_VERSION)
message("LIBRARY_VERSION : ${LIBRARY_VERSION}")
GLD_load_from_file_if_needed(LIBRARY_VERSION "${MODULE_MAP_${LOCAL_MODULE_NAME}_FOLDER}" "${LIBRARY_VERSION}")
message(" ==> : ${LIBRARY_VERSION}")
# NOTE: CMake does not support DEV_MODEL, so we use the last ELEMENT for the dev version (666)
string(REPLACE "-dev" ".666" LIBRARY_VERSION ${LIBRARY_VERSION})
endif()
#string(REPLACE "-" "_" LIBRARY_NAME222 ${NAME_GLD_MODULE})
set(LIBRARY_NAME222 ${NAME_GLD_MODULE})
if (LIBRARY_VERSION)
project(${LIBRARY_NAME222} VERSION ${LIBRARY_VERSION})
set(${LIBRARY_NAME222} PROPERTIES CPACK_PACKAGE_VERSION ${LIBRARY_VERSION})
else()
project(${LIBRARY_NAME222})
endif()
set(MODULE_MAP_${LOCAL_MODULE_NAME}_INCLUDE_LOCAL "" CACHE INTERNAL "")
# TODO : Remove if no element in header...
if (MODULE_MAP_${LOCAL_MODULE_NAME}_HEADER)
set(MODULE_MAP_${LOCAL_MODULE_NAME}_INCLUDE_PUBLIC "${GLD_GLOBAL_BUILD_FOLDER}${NAME_GLD_MODULE}/include/" CACHE INTERNAL "")
endif()
# remove if no library generated
set(MODULE_MAP_${LOCAL_MODULE_NAME}_LIB_PATH "${GLD_GLOBAL_STAGING_FOLDER}${NAME_GLD_MODULE}/lib/" CACHE INTERNAL "")
# remove if no doc ...
set(MODULE_MAP_${LOCAL_MODULE_NAME}_DOC_PATH "${GLD_GLOBAL_STAGING_FOLDER}${NAME_GLD_MODULE}/doc/" CACHE INTERNAL "")
set(MODULE_MAP_${LOCAL_MODULE_NAME}_DEPENDENCY "" CACHE INTERNAL "")
set(MODULE_MAP_${LOCAL_MODULE_NAME}_SOURCE "" CACHE INTERNAL "")
set(MODULE_MAP_${LOCAL_MODULE_NAME}_HEADER "" CACHE INTERNAL "")
GLD_import_full(${NAME_GLD_MODULE} ${MY_JSON_STRING})
set(TMP_LIST "")
foreach(III ${MODULE_MAP_${LOCAL_MODULE_NAME}_INCLUDE_LOCAL})
get_filename_component(BASE_FOLDER ${MODULE_MAP_${LOCAL_MODULE_NAME}_FOLDER}/${III} ABSOLUTE)
list(APPEND TMP_LIST "${BASE_FOLDER}")
endforeach()
set(MODULE_MAP_${LOCAL_MODULE_NAME}_INCLUDE_LOCAL "${TMP_LIST}" CACHE INTERNAL "")
message(" _INCLUDE_LOCAL : ${MODULE_MAP_${LOCAL_MODULE_NAME}_INCLUDE_LOCAL}")
message(" _INCLUDE_PUBLIC: ${MODULE_MAP_${LOCAL_MODULE_NAME}_INCLUDE_PUBLIC}")
message(" _LIB_PATH : ${MODULE_MAP_${LOCAL_MODULE_NAME}_LIB_PATH}")
message(" _DOC_PATH : ${MODULE_MAP_${LOCAL_MODULE_NAME}_DOC_PATH}")
message(" _DEPENDENCY : ${MODULE_MAP_${LOCAL_MODULE_NAME}_DEPENDENCY}")
message(" _SOURCE : ${MODULE_MAP_${LOCAL_MODULE_NAME}_SOURCE}")
message(" _HEADER : ${MODULE_MAP_${LOCAL_MODULE_NAME}_HEADER}")
GLD_get_project_dependency(LIST_PROJECT_DEPENDENCY ${NAME_GLD_MODULE} "${MODULE_MAP_${LOCAL_MODULE_NAME}_DEPENDENCY}")
message("===> dep = ${LIST_PROJECT_DEPENDENCY}")
set(TMP_LIST "")
foreach(III ${MODULE_MAP_${LOCAL_MODULE_NAME}_SOURCE})
get_filename_component(BASE_FOLDER ${MODULE_MAP_${LOCAL_MODULE_NAME}_FOLDER}/${III} ABSOLUTE)
list(APPEND TMP_LIST "${BASE_FOLDER}")
endforeach()
set(HAS_DATA_TO_BUILD OFF)
if(TMP_LIST)
set(HAS_DATA_TO_BUILD ON)
add_library(${LIBRARY_NAME222}_OBJ OBJECT ${TMP_LIST})
# allow relocation code for shared library:
set_property(TARGET ${LIBRARY_NAME222}_OBJ PROPERTY POSITION_INDEPENDENT_CODE 1)
endif()
foreach(III ${MODULE_MAP_${LOCAL_MODULE_NAME}_HEADER})
copy_file_with_reference(${MODULE_MAP_${LOCAL_MODULE_NAME}_FOLDER} ${III} ${MODULE_MAP_${LOCAL_MODULE_NAME}_INCLUDE_PUBLIC})
endforeach()
if(HAS_DATA_TO_BUILD)
set(TMP_LIST ${MODULE_MAP_${LOCAL_MODULE_NAME}_INCLUDE_LOCAL})
list(APPEND TMP_LIST ${MODULE_MAP_${LOCAL_MODULE_NAME}_INCLUDE_PUBLIC})
if(TMP_LIST)
target_include_directories(${LIBRARY_NAME222}_OBJ PUBLIC "${TMP_LIST}")
endif()
add_library(${LIBRARY_NAME222}_dynamic SHARED $<TARGET_OBJECTS:${LIBRARY_NAME222}_OBJ>)
add_library(${LIBRARY_NAME222}_static STATIC $<TARGET_OBJECTS:${LIBRARY_NAME222}_OBJ>)
if (LIST_PROJECT_DEPENDENCY)
foreach(III ${LIST_PROJECT_DEPENDENCY})
message(">>>>>>>> ${III}")
add_dependencies(${LIBRARY_NAME222}_dynamic "${III}_dynamic")
add_dependencies(${LIBRARY_NAME222}_static "${III}_static")
endforeach()
endif()
if ("${GLD_TARGET}" STREQUAL "Windows")
set_target_properties(${LIBRARY_NAME222}_dynamic PROPERTIES OUTPUT_NAME ${LIBRARY_NAME222})
# static will keep the element static at the end (the Windows architecture for shared objects needs a static library to access the DLL ==> this creates a conflict!!!)
else()
set_target_properties(${LIBRARY_NAME222}_dynamic PROPERTIES OUTPUT_NAME ${LIBRARY_NAME222})
set_target_properties(${LIBRARY_NAME222}_static PROPERTIES OUTPUT_NAME ${LIBRARY_NAME222})
endif()
if (LIBRARY_VERSION)
set_target_properties(${LIBRARY_NAME222}_dynamic PROPERTIES VERSION ${LIBRARY_VERSION})
set_target_properties(${LIBRARY_NAME222}_dynamic PROPERTIES SOVERSION ${LIBRARY_VERSION})
endif()
if (LIBRARY_DECRIPTION)
set_target_properties(${LIBRARY_NAME222}_dynamic PROPERTIES DESCRIPTION ${LIBRARY_DECRIPTION})
endif()
# install dynamic & static library
install(TARGETS ${LIBRARY_NAME222}_dynamic EXPORT ${LIBRARY_NAME222}Targets
RUNTIME DESTINATION ${MODULE_MAP_${LOCAL_MODULE_NAME}_LIB_PATH}
)
install(TARGETS ${LIBRARY_NAME222}_static
RUNTIME DESTINATION ${MODULE_MAP_${LOCAL_MODULE_NAME}_LIB_PATH})
#install(TARGETS ${LIBRARY_NAME222} EXPORT ${LIBRARY_NAME222}Targets
# LIBRARY DESTINATION lib
# ARCHIVE DESTINATION lib
# RUNTIME DESTINATION bin
# INCLUDES DESTINATION include
#)
# install exported headers
# this copy all the headers in a single folder:
#install(FILES ${EXPORT_HEADER_LIST} DESTINATION include)
# this keep the basic path for each folders:
set(BASE "${PROJECT_SOURCE_DIR}/install")
foreach(ITEM ${EXPORT_HEADER_LIST})
get_filename_component(ITEM_PATH ${ITEM} PATH)
string(REPLACE ${BASE} "" ITEM_PATH ${ITEM_PATH})
install(FILES ${ITEM}
DESTINATION "include/${ITEM_PATH}"
COMPONENT Devel)
endforeach()
include(CMakePackageConfigHelpers)
#write_basic_package_version_file(
# "${CMAKE_CURRENT_BINARY_DIR}/${LIBRARY_NAME222}/${LIBRARY_NAME222}ConfigVersion.cmake"
# VERSION ${LIBRARY_VERSION}
# COMPATIBILITY AnyNewerVersion
#)
#
#export(EXPORT ${LIBRARY_NAME222}Targets
# FILE "${CMAKE_CURRENT_BINARY_DIR}/${LIBRARY_NAME222}/${LIBRARY_NAME222}Targets.cmake"
# NAMESPACE Upstream::
#)
##configure_file(cmake/${LIBRARY_NAME222}Config.cmake
## "${CMAKE_CURRENT_BINARY_DIR}/${LIBRARY_NAME222}/${LIBRARY_NAME222}Config.cmake"
## COPYONLY
##)
set(CONFIG_PACKAGE_LOCATION cmake/${LIBRARY_NAME222})
install(EXPORT ${LIBRARY_NAME222}Targets
FILE
${LIBRARY_NAME222}Targets.cmake
NAMESPACE
${LIBRARY_NAME222}::
DESTINATION
${CONFIG_PACKAGE_LOCATION}
)
endif()
#install(
# FILES
# cmake/${LIBRARY_NAME222}Config.cmake
# "${CMAKE_CURRENT_BINARY_DIR}/${LIBRARY_NAME222}/${LIBRARY_NAME222}ConfigVersion.cmake"
# DESTINATION
# ${CONFIG_PACKAGE_LOCATION}
# COMPONENT
# Devel
#)
message("CMAKE_INSTALL_LIBDIR===${CMAKE_INSTALL_LIBDIR}")
endfunction()
function(sdfsqdfqsdfqrezesrdtygfhsg LIST_OF_MODULE_AVAILLABLE)
include(CMakePackageConfigHelpers)
configure_package_config_file(cmake/${LIBRARY_NAME222}Config.cmake.in
"${PROJECT_BINARY_DIR}/${LIBRARY_NAME222}Config.cmake"
INSTALL_DESTINATION ${CONFIG_PACKAGE_LOCATION}
NO_SET_AND_CHECK_MACRO
NO_CHECK_REQUIRED_COMPONENTS_MACRO)
write_basic_package_version_file(
"${PROJECT_BINARY_DIR}/${LIBRARY_NAME222}ConfigVersion.cmake"
VERSION ${LIBRARY_VERSION}
COMPATIBILITY SameMajorVersion)
install(
FILES
"${PROJECT_BINARY_DIR}/${LIBRARY_NAME222}Config.cmake"
"${PROJECT_BINARY_DIR}/${LIBRARY_NAME222}ConfigVersion.cmake"
DESTINATION ${CONFIG_PACKAGE_LOCATION}
COMPONENT Devel)
endfunction()
function(GLD_generate_cmake_wrapping LIST_OF_MODULE_AVAILLABLE)
message("Generate cmake wrapping")
foreach(NAME_GLD_MODULE ${LIST_OF_MODULE_AVAILLABLE})
GLD_get_module_name(LOCAL_MODULE_NAME ${NAME_GLD_MODULE})
message("Import: ${NAME_GLD_MODULE}")
#MODULE_MAP_LIST_DEPENDENCE_RESOLVED
GLD_import(${NAME_GLD_MODULE})
endforeach()
message("Generate cmake wrapping (DONE)")
endfunction()
function(GLD_get_full_dependency_group VAR_OUT VAR_OPTIONAL_OUT MY_JSON_STRING ELEMENT_TO_CHECK TYPE_VARIABLE)
set(LIST_VALUE "")
set(LIST_OPTIONAL_VALUE "")
json_get_type(TYPE ${MY_JSON_STRING} ${ELEMENT_TO_CHECK})
#message("target type = ${TYPE}")
if (${TYPE} STREQUAL "OBJECT")
json_object_keys(LIST_KEY ${MY_JSON_STRING} ${ELEMENT_TO_CHECK})
foreach (III ${LIST_KEY})
# check the target, no need to add unknown targets ...
if (${III} STREQUAL "*")
json_object_values(DATA_TARGET ${MY_JSON_STRING} ${ELEMENT_TO_CHECK} "*")
#message("target(*) data: ${DATA_TARGET}")
GLD_get_full_dependency(VAR_OUT_TMP LIST_OPTIONAL_VALUE_TMP DATA_TARGET)
list(APPEND LIST_VALUE ${DATA_TARGET})
list(APPEND LIST_OPTIONAL_VALUE ${LIST_OPTIONAL_VALUE_TMP})
elseif (${III} STREQUAL ${TYPE_VARIABLE})
json_object_values(DATA_TARGET ${MY_JSON_STRING} ${ELEMENT_TO_CHECK} "${III}")
GLD_get_full_dependency(VAR_OUT_TMP LIST_OPTIONAL_VALUE_TMP DATA_TARGET)
#message("target(${III}) data: ${DATA_TARGET}")
list(APPEND LIST_VALUE ${VAR_OUT_TMP})
list(APPEND LIST_OPTIONAL_VALUE ${LIST_OPTIONAL_VALUE_TMP})
else()
message("TODO: get dependency manage '${ELEMENT_TO_CHECK}' : ${III}")
endif()
endforeach()
elseif(${TYPE} STREQUAL "NOTFOUND" OR ${TYPE} STREQUAL "NULL")
# nothing to do ..
else()
message("ERROR : '${ELEMENT_TO_CHECK}' can not be other than an json object : ${TYPE}")
endif()
set(${VAR_OUT} ${LIST_VALUE} PARENT_SCOPE)
set(${VAR_OPTIONAL_OUT} ${LIST_OPTIONAL_VALUE} PARENT_SCOPE)
endfunction()
##
## @brief Get the list of all dependencies, even the optional ones.
## @param[out] VAR_OUT List of dependency libraries
## @param[out] VAR_OPTIONAL_OUT List of optional dependency libraries
## @param[in] MY_JSON_STRING JSON string
## @note This function depends on the selected target
##
function(GLD_get_full_dependency VAR_OUT VAR_OPTIONAL_OUT MY_JSON_STRING)
json_get_type(TYPE ${MY_JSON_STRING} "dependency")
set(LIST_VALUE "")
set(LIST_OPTIONAL_VALUE "")
#message("Dependency type = ${TYPE}")
if (${TYPE} STREQUAL "ARRAY")
json_size(SIZE ${MY_JSON_STRING} "dependency")
#message("Dependency SIZE = ${SIZE}")
json_get_data(OBJECT_DATA ${MY_JSON_STRING} "dependency")
MATH(EXPR SIZE "${SIZE}-1")
set(VAR_OUT_TMP "")
foreach(IDX RANGE ${SIZE})
json_get_data(ELEMENT ${OBJECT_DATA} ${IDX})
json_get_type(TYPE ${OBJECT_DATA} ${IDX})
if (${TYPE} STREQUAL "STRING")
message(" - : ${ELEMENT}")
list(APPEND VAR_OUT_TMP ${ELEMENT})
elseif (${TYPE} STREQUAL "OBJECT")
json_get_type(TYPE ${ELEMENT} "name")
if (${TYPE} STREQUAL "STRING")
json_get_data(DEPENDENCY_NAME ${ELEMENT} "name")
json_get_type(TYPE ${ELEMENT} "optional")
#message("optional type = ${TYPE} <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< ${DEPENDENCY_NAME}")
if (${TYPE} STREQUAL "BOOLEAN")
json_get_data(DEPENDENCY_OPTIONAL ${ELEMENT} "optional")
if (${DEPENDENCY_OPTIONAL})
message(" - : ${DEPENDENCY_NAME} (optional)")
#message("optional value ==========================> '${DEPENDENCY_OPTIONAL}' ==> MAYBE")
list(APPEND LIST_OPTIONAL_VALUE ${DEPENDENCY_NAME})
else()
message(" - : ${DEPENDENCY_NAME}")
#message("optional value ==========================> '${DEPENDENCY_OPTIONAL}' ==> MUST")
list(APPEND VAR_OUT_TMP ${DEPENDENCY_NAME})
endif()
else()
message(" - : ${DEPENDENCY_NAME}")
list(APPEND VAR_OUT_TMP ${DEPENDENCY_NAME})
endif()
#message("optional type = ${TYPE} <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< ${DEPENDENCY_NAME}")
else()
message("Dependency 'name' is not a string or is missing type: ${TYPE}")
endif()
else()
message("dependency element not manage data : ${ELEMENT}")
## TODO add in dependency if optional : check if the element exit in the current module list ...
endif()
endforeach()
list(APPEND LIST_VALUE ${VAR_OUT_TMP})
endif()
GLD_get_full_dependency_group(VAR_OUT_TMP LIST_OPTIONAL_VALUE_TMP MY_JSON_STRING "target" ${GLD_TARGET})
list(APPEND LIST_VALUE ${VAR_OUT_TMP})
list(APPEND LIST_OPTIONAL_VALUE ${LIST_OPTIONAL_VALUE_TMP})
GLD_get_full_dependency_group(VAR_OUT_TMP LIST_OPTIONAL_VALUE_TMP MY_JSON_STRING "mode" ${GLD_MODE})
list(APPEND LIST_VALUE ${VAR_OUT_TMP})
list(APPEND LIST_OPTIONAL_VALUE ${LIST_OPTIONAL_VALUE_TMP})
GLD_get_full_dependency_group(VAR_OUT_TMP LIST_OPTIONAL_VALUE_TMP MY_JSON_STRING "arch" ${GLD_ARCH})
list(APPEND LIST_VALUE ${VAR_OUT_TMP})
list(APPEND LIST_OPTIONAL_VALUE ${LIST_OPTIONAL_VALUE_TMP})
GLD_get_full_dependency_group(VAR_OUT_TMP LIST_OPTIONAL_VALUE_TMP MY_JSON_STRING "bus-size" ${GLD_BUS_SIZE})
list(APPEND LIST_VALUE ${VAR_OUT_TMP})
list(APPEND LIST_OPTIONAL_VALUE ${LIST_OPTIONAL_VALUE_TMP})
GLD_get_full_dependency_group(VAR_OUT_TMP LIST_OPTIONAL_VALUE_TMP MY_JSON_STRING "compiler" ${GLD_COMPILER})
list(APPEND LIST_VALUE ${VAR_OUT_TMP})
list(APPEND LIST_OPTIONAL_VALUE ${LIST_OPTIONAL_VALUE_TMP})
GLD_get_full_dependency_group(VAR_OUT_TMP LIST_OPTIONAL_VALUE_TMP MY_JSON_STRING "sanity-compilation" ${GLD_SANITY_MODE})
list(APPEND LIST_VALUE ${VAR_OUT_TMP})
list(APPEND LIST_OPTIONAL_VALUE ${LIST_OPTIONAL_VALUE_TMP})
set(${VAR_OUT} ${LIST_VALUE} PARENT_SCOPE)
set(${VAR_OPTIONAL_OUT} ${LIST_OPTIONAL_VALUE} PARENT_SCOPE)
endfunction()
function(GLD_read_json_file VAR_OUT JSON_FILE)
file(READ ${JSON_FILE} MY_JSON_STRING)
if("${MY_JSON_STRING}" STREQUAL "")
message(WARNING "Empty json file : '${JSON_FILE}'")
else()
string(REPLACE " " "" MY_JSON_STRING ${MY_JSON_STRING})
string(REPLACE "\t" "" MY_JSON_STRING ${MY_JSON_STRING})
string(REPLACE "\n" "" MY_JSON_STRING ${MY_JSON_STRING})
endif()
set(${VAR_OUT} ${MY_JSON_STRING} PARENT_SCOPE)
endfunction()
set(MODULE_MAP_LIST "" CACHE INTERNAL "")
set(MODULE_MAP_LIST_DEPENDENCE_RESOLVED "" CACHE INTERNAL "")
function(GLD_get_module_name VAR_OUT BASE_NAME)
string(REPLACE "_" "_U_" TMP ${BASE_NAME})
string(REPLACE "." "_D_" TMP ${BASE_NAME})
string(REPLACE "-" "_S_" TMP ${TMP})
set(${VAR_OUT} ${TMP} PARENT_SCOPE)
endfunction()
function(GLD_add_module NAME_GLD_MODULE FOLDER FILENAME)
GLD_get_module_name(LOCAL_MODULE_NAME ${NAME_GLD_MODULE})
message("Add module: ${LOCAL_MODULE_NAME} ==> ${NAME_GLD_MODULE} in ${FILENAME}")
# load all the json data:
GLD_read_json_file(JSON_DATA "${FOLDER}/${FILENAME}.json")
if("${JSON_DATA}" STREQUAL "")
message(WARNING "SKIP library: ${NAME_GLD_MODULE}")
else()
set(MODULE_MAP_${LOCAL_MODULE_NAME}_FOLDER ${FOLDER} CACHE INTERNAL "")
set(MODULE_MAP_${LOCAL_MODULE_NAME}_FILENAME ${FILENAME} CACHE INTERNAL "")
set(MODULE_MAP_${LOCAL_MODULE_NAME}_NAME ${NAME_GLD_MODULE} CACHE INTERNAL "")
set(MODULE_MAP_LIST ${MODULE_MAP_LIST} ${NAME_GLD_MODULE} CACHE INTERNAL "")
set(MODULE_MAP_${LOCAL_MODULE_NAME}_JSON "${JSON_DATA}" CACHE INTERNAL "")
endif()
endfunction()
function(GLD_generate_module_without_optionnal_inexistant NAME_GLD_MODULE)
GLD_get_module_name(LOCAL_MODULE_NAME ${NAME_GLD_MODULE})
#message("Call : GLD_get_full_dependency(outA, outB, ${MODULE_MAP_${LOCAL_MODULE_NAME}_JSON}) ${NAME_GLD_MODULE} ==> ${LOCAL_MODULE_NAME}")
GLD_get_full_dependency(DEPENDENCY DEPENDENCY_OPTIONAL ${MODULE_MAP_${LOCAL_MODULE_NAME}_JSON})
set(MODULE_MAP_${LOCAL_MODULE_NAME}_DEPENDENCY ${DEPENDENCY} CACHE INTERNAL "")
set(MODULE_MAP_${LOCAL_MODULE_NAME}_DEPENDENCY_OPTIONAL ${DEPENDENCY_OPTIONAL} CACHE INTERNAL "")
endfunction()
function(GLD_is_in_list VAR_OUT ELEM_TO_CHECK LIST_TO_CHECK)
set(ELEMENT_FIND "true")
#message(" verify '${ELEM_TO_CHECK}' in '${LIST_TO_CHECK}'")
foreach(ELEM ${LIST_TO_CHECK})
if ("${ELEM_TO_CHECK}" STREQUAL "${ELEM}")
set(${VAR_OUT} "true" PARENT_SCOPE)
#message(" ==> true")
return()
endif()
endforeach()
set(${VAR_OUT} "false" PARENT_SCOPE)
#message(" ==> false")
endfunction()
function(GLD_are_in_list VAR_OUT LIST_VALUES LIST_TO_CHECK)
set(ELEMENT_FIND "true")
#message(" verify '${LIST_VALUES}' are in '${LIST_TO_CHECK}'")
foreach(ELEM ${LIST_VALUES})
GLD_is_in_list(EXIST "${ELEM}" "${LIST_TO_CHECK}")
if (${EXIST} STREQUAL "false")
set(${VAR_OUT} "false" PARENT_SCOPE)
#message(" =>> false")
return()
endif()
endforeach()
set(${VAR_OUT} "true" PARENT_SCOPE)
#message(" =>> true")
endfunction()
## todo: REMOVE OPTIONNAL DEPENDENCY THAT DOES NOT EXIST IN THE LIST
## TODO: display and does not include element that dependency are not resolved and indicate which dependency is not found ...
function(GLD_order_dependency_list VAR_OUT DEPENDENCY_FAILED)
set(TMP_ORDERED "")####### crypto;edtaa3;luaWrapper;freetype;")
set(TMP_UN_ADDED "")
message("===========================================")
message("== STEP 1 : Add all module without dependency:")
message("===========================================")
# step 1 Add all module without dependency:
foreach(MODULE_ELEM ${MODULE_MAP_LIST})
message("check add element : ${MODULE_ELEM}")
message(" dependency = '${MODULE_ELEM}'")
GLD_get_module_name(LOCAL_MODULE_NAME ${MODULE_ELEM})
message(" dependency = '${MODULE_MAP_${LOCAL_MODULE_NAME}_DEPENDENCY}'")
# todo check dependency here ... ${MODULE_MAP_${LOCAL_MODULE_NAME}_DEPENDENCY}
list(LENGTH MODULE_MAP_${LOCAL_MODULE_NAME}_DEPENDENCY LIST_LENGTH)
message(" ==> length'${LIST_LENGTH}'")
if (LIST_LENGTH EQUAL 0)
message("Add element ${III} (0 dependency) ******** ")
list(APPEND TMP_ORDERED ${MODULE_ELEM})
continue()
endif()
list(APPEND TMP_UN_ADDED ${MODULE_ELEM})
endforeach()
message("result: ${TMP_ORDERED}")
message("===========================================")
message("== STEP 2 : Add all when the dependency are available in the list:")
message("===========================================")
# step 2 Add all when the dependency are available in the list:
list(LENGTH TMP_UN_ADDED LIST_TOTAL_LENGTH)
message("unadded : ${LIST_TOTAL_LENGTH}")
# must be resolved in the number of cycle in the list (maximum)
foreach(III RANGE ${LIST_TOTAL_LENGTH})
message("cycle : ${III}")
set(TMP_UN_ADDED_TMP ${TMP_UN_ADDED})
set(TMP_UN_ADDED "")
foreach(ELEM_TO_ADD ${TMP_UN_ADDED_TMP})
message(" check to add : ${ELEM_TO_ADD}")
GLD_get_module_name(LOCAL_MODULE_NAME ${ELEM_TO_ADD})
message(" dependency : ${MODULE_MAP_${LOCAL_MODULE_NAME}_DEPENDENCY} in ? ${TMP_ORDERED}")
GLD_are_in_list(IS_ALL_PRESENT "${MODULE_MAP_${LOCAL_MODULE_NAME}_DEPENDENCY}" "${TMP_ORDERED}")
if (${IS_ALL_PRESENT} STREQUAL "true")
message("Add element ${ELEM_TO_ADD} (depend: ${MODULE_MAP_${LOCAL_MODULE_NAME}_DEPENDENCY})")
list(APPEND TMP_ORDERED ${ELEM_TO_ADD})
else()
list(APPEND TMP_UN_ADDED ${ELEM_TO_ADD})
endif()
endforeach()
endforeach()
message("result:")
foreach(ELEM ${TMP_ORDERED})
message(" - ${ELEM}")
endforeach()
message("===========================================")
message("== STEP 3 : All must be added before...")
message("===========================================")
# step 3 All must be added before...
list(LENGTH TMP_UN_ADDED LIST_TOTAL_LENGTH)
if (${LIST_TOTAL_LENGTH} GREATER 0)
message(WARNING "Some elements were not added: (${LIST_TOTAL_LENGTH})")
foreach(ELEM ${TMP_UN_ADDED})
message(" - ${ELEM}")
GLD_get_module_name(LOCAL_MODULE_NAME ${ELEM})
message(" dep : ${MODULE_MAP_${LOCAL_MODULE_NAME}_DEPENDENCY}")
message(" dep(optional): ${MODULE_MAP_${LOCAL_MODULE_NAME}_DEPENDENCY_OPTIONAL}")
endforeach()
endif()
set(MODULE_MAP_LIST_DEPENDENCE_RESOLVED ${TMP_ORDERED} CACHE INTERNAL "")
set(${VAR_OUT} ${TMP_ORDERED} PARENT_SCOPE)
set(${DEPENDENCY_FAILED} ${TMP_UN_ADDED} PARENT_SCOPE)
endfunction()
function(GLD_load_all ROOT_FOLDER BASE_NAME COMMENT_ACTION)
message("Parse all files ${BASE_NAME}*.json: base: ${ROOT_FOLDER}")
#file(GLOB_RECURSE GLD_FILES "${ROOT_FOLDER}/GLD_*.json")
find_all_files_exeption(GLD_FILES "${ROOT_FOLDER}" "${BASE_NAME}*.json" 5)
message("List of GLD files:")
foreach(III ${GLD_FILES})
GET_FILENAME_COMPONENT(FILENAME ${III} NAME_WE)
set(FULL_FILENAME ${FILENAME})
string(REPLACE "${BASE_NAME}" "" FILENAME ${FILENAME})
GET_FILENAME_COMPONENT(FOLDER ${III} DIRECTORY)
message(" - ${COMMENT_ACTION} ${FOLDER} ==> ${FILENAME}")
GLD_add_module(${FILENAME} ${FOLDER} ${FULL_FILENAME})
endforeach()
#GLD_import("./" "etk-core")
endfunction()
function(GLD_auto_prebuild_load_all ROOT_FOLDER)
GLD_load_all(${ROOT_FOLDER} "GLDPrebuild_${GLD_TARGET}_" "(prebuild)")
endfunction()
function(GLD_auto_load_all ROOT_FOLDER)
GLD_load_all(${ROOT_FOLDER} "GLD_" "")
endfunction()
function(GLD_instanciate)
message("List of modules:")
foreach(III ${MODULE_MAP_LIST})
GLD_get_module_name(LOCAL_MODULE_NAME ${III})
message(" - ${III}")
endforeach()
foreach(III ${MODULE_MAP_LIST})
GLD_get_module_name(LOCAL_MODULE_NAME ${III})
GLD_generate_module_without_optionnal_inexistant(${III})
message(" - ${III}")
message(" FOLDER=${MODULE_MAP_${LOCAL_MODULE_NAME}_FOLDER}")
#message(" JSON=${MODULE_MAP_${LOCAL_MODULE_NAME}_JSON}")
message(" DEPENDENCY=${MODULE_MAP_${LOCAL_MODULE_NAME}_DEPENDENCY}")
message(" DEPENDENCY_OPTIONAL=${MODULE_MAP_${LOCAL_MODULE_NAME}_DEPENDENCY_OPTIONAL}")
endforeach()
GLD_order_dependency_list(DEPENDENCY_ORDER DEPENDENCY_FAILED)
GLD_generate_cmake_wrapping("${DEPENDENCY_ORDER}")
#message("dependency resolver & ordered:")
#foreach(III ${DEPENDENCY_ORDER})
# message(" - ${III}")
#endforeach()
#message("dependency fail:")
#foreach(III ${DEPENDENCY_FAILED})
# message(" - ${III}")
#endforeach()
endfunction()
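The ordering performed by GLD_order_dependency_list above is a plain repeated relaxation: modules with no dependencies are placed first, then each pass adds any module whose dependencies are already placed, for at most as many passes as there are remaining modules. A minimal Python sketch of the same idea, with a hypothetical module map (not the real module set):

# Sketch of the GLD ordering pass; `modules` maps a module name to the
# list of modules it depends on (hypothetical data for illustration).
def order_dependency_list(modules):
    ordered = [name for name, deps in modules.items() if not deps]
    pending = [name for name, deps in modules.items() if deps]
    for _ in range(len(pending)):          # enough passes for an acyclic graph
        still_pending = []
        for name in pending:
            if all(dep in ordered for dep in modules[name]):
                ordered.append(name)       # every dependency already placed
            else:
                still_pending.append(name) # retry on the next pass
        pending = still_pending
    return ordered, pending                # non-empty pending => unresolved/cyclic

print(order_dependency_list({"c": [], "m": ["c"], "etk-core": ["m", "c"]}))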

106
cmake/GLDJson.cmake Normal file
View File

@ -0,0 +1,106 @@
cmake_minimum_required(VERSION 3.20)
##
## @brief get the type of a variable
## @param[in] INPUT_JSON Json data.
## @param[in] VARIABLE Name of the variable.
## @param[out] OUT_VAR Return type of the node: NULL, NUMBER, STRING, BOOLEAN, ARRAY, OBJECT or NOTFOUND (if it does not exist)
##
function(json_get_type OUT_VAR INPUT_JSON VARIABLE)
string(JSON VALUE ERROR_VARIABLE ${VARIABLE} TYPE ${INPUT_JSON} ${VARIABLE})
if (${VALUE} STREQUAL ${VARIABLE}-NOTFOUND)
set(${OUT_VAR} "NOTFOUND" PARENT_SCOPE)
else()
set(${OUT_VAR} "${VALUE}" PARENT_SCOPE)
endif()
endfunction()
function(json_size OUT_VAR INPUT_JSON VARIABLE)
string(JSON VALUE ERROR_VARIABLE ${VARIABLE} LENGTH ${INPUT_JSON} ${VARIABLE})
if (${VALUE} STREQUAL ${VARIABLE}-NOTFOUND)
set("${OUT_VAR}" 0 PARENT_SCOPE)
else ()
set("${OUT_VAR}" ${VALUE} PARENT_SCOPE)
endif()
endfunction()
function(json_get_data OUT_VAR INPUT_JSON VARIABLE)
string(JSON VALUE GET ${INPUT_JSON} ${VARIABLE})
set("${OUT_VAR}" ${VALUE} PARENT_SCOPE)
endfunction()
function(json_get_data_or_default OUT_VAR INPUT_JSON VARIABLE DEFAULT_VALUE)
json_get_type(TYPE ${INPUT_JSON} ${VARIABLE})
if (${TYPE} STREQUAL "NOTFOUND")
set("${OUT_VAR}" ${DEFAULT_VALUE} PARENT_SCOPE)
else()
string(JSON VALUE GET ${INPUT_JSON} ${VARIABLE})
set("${OUT_VAR}" ${VALUE} PARENT_SCOPE)
endif()
endfunction()
function(json_object_key OUT_VAR INPUT_JSON IDX)
string(JSON VALUE MEMBER ${INPUT_JSON} ${IDX})
set("${OUT_VAR}" ${VALUE} PARENT_SCOPE)
endfunction()
function(json_object_keys OUT_VAR MY_JSON_STRING VARIABLE)
json_size(SIZE ${MY_JSON_STRING} ${VARIABLE})
#message("target SIZE = ${SIZE}")
json_get_data(OBJECT_DATA ${MY_JSON_STRING} ${VARIABLE})
if (SIZE EQUAL 0)
return()
endif()
MATH(EXPR SIZE "${SIZE}-1")
set(OUT "")
foreach(IDX RANGE ${SIZE})
json_object_key(ELEMENT ${OBJECT_DATA} ${IDX})
#message(" - : ${ELEMENT}")
list(APPEND OUT ${ELEMENT})
endforeach()
set("${OUT_VAR}" ${OUT} PARENT_SCOPE)
endfunction()
function(json_object_values OUT_VAR INPUT_JSON VARIABLE KEYS)
string(JSON VALUE GET ${INPUT_JSON} ${VARIABLE} ${KEYS})
set("${OUT_VAR}" ${VALUE} PARENT_SCOPE)
endfunction()
function(json_get_list OUT_VAR INPUT_JSON VARIABLE)
string(JSON LIST_JSON_ELEMENTS ERROR_VARIABLE ${VARIABLE} GET ${INPUT_JSON} ${VARIABLE})
if (${LIST_JSON_ELEMENTS} STREQUAL ${VARIABLE}-NOTFOUND)
set("${OUT_VAR}" PARENT_SCOPE)
return()
endif()
#message("LIST_JSON_ELEMENTS : ${LIST_JSON_ELEMENTS}")
string(JSON LENGTH_VALUE LENGTH ${LIST_JSON_ELEMENTS})
#message("LENGTH_VALUE : ${LENGTH_VALUE}")
if (${LENGTH_VALUE} EQUAL 0)
set("${OUT_VAR}" PARENT_SCOPE)
return()
endif()
set(OUT_LIST)
MATH(EXPR LENGTH_VALUE "${LENGTH_VALUE}-1")
foreach(IDX RANGE ${LENGTH_VALUE})
string(JSON ELEM GET ${LIST_JSON_ELEMENTS} ${IDX})
#message(" - : ${ELEM}")
list(APPEND OUT_LIST ${ELEM})
endforeach()
#message("OUT_LIST : ${OUT_LIST}")
set("${OUT_VAR}" ${OUT_LIST} PARENT_SCOPE)
endfunction()
function(json_get_element OUT_VAR INPUT_JSON VARIABLE)
string(JSON ELEMENT ERROR_VARIABLE ${VARIABLE} GET ${INPUT_JSON} ${VARIABLE})
if ("${ELEMENT}" STREQUAL "${VARIABLE}-NOTFOUND")
set("${OUT_VAR}" "" PARENT_SCOPE)
elseif ("${ELEMENT}" STREQUAL "NOTFOUND")
set("${OUT_VAR}" "" PARENT_SCOPE)
else()
set("${OUT_VAR}" ${ELEMENT} PARENT_SCOPE)
endif()
endfunction()
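These helpers wrap string(JSON ...) so that a missing key degrades to a default or an empty list instead of a fatal error. For comparison, the equivalent behaviour on the Python side of the tool can be sketched with the standard json module (the key names below are illustrative):

import json

# Sketch of the json_get_data_or_default / json_get_list behaviour in Python.
def get_or_default(data, key, default):
    return data.get(key, default)

def get_list(data, key):
    value = data.get(key, [])
    return value if isinstance(value, list) else [value]

doc = json.loads('{"type": "LIBRARY", "dependency": ["c", "m"]}')
print(get_or_default(doc, "license", "UNKNOWN"))  # -> UNKNOWN
print(get_list(doc, "dependency"))                # -> ['c', 'm']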

View File

@ -0,0 +1,19 @@
{
"type":"LIBRARY",
"sub-type":"PREBUILD",
"group-id":"gnu",
"description":"ALSA : Advanced Linux Sound Architecture",
"sanity-compilation": {
"intricate": {
"flag":{
"c-link": "-lasound"
},
"dependency":[
"c"
]
},
"!intricate":{
"error":true
}
}
}

View File

@ -0,0 +1,17 @@
{
"type":"LIBRARY",
"sub-type":"PREBUILD",
"group-id":"gnu",
"description":"C: Generic gnu C library",
"license":"LGPL",
"sanity-compilation": {
"intricate": {
"flag":[
"c-remove": "-nostdinc"
]
},
"!intricate":{
"error":true
}
}
}

View File

@ -0,0 +1,23 @@
{
"type":"LIBRARY",
"sub-type":"PREBUILD",
"group-id":"gnu",
"description":"CXX: Generic gnu C++ library",
"license":"LGPL",
"sanity-compilation": {
"intricate": {
"flag":{
"c++-remove": "-nostdlib",
"c++": "-D__STDCPP_GNU__"
},
"dependency":[
"c",
"m",
"pthread"
]
},
"!intricate":{
"error":true
}
}
}

View File

@ -0,0 +1,20 @@
{
"type":"LIBRARY",
"sub-type":"PREBUILD",
"group-id":"gnu",
"description":"M: Generic math library",
"license":"LGPL",
"sanity-compilation": {
"intricate": {
"flag":{
"c-link": "-lm"
},
"dependency":[
"c"
]
},
"!intricate":{
"error":true
}
}
}

View File

@ -0,0 +1,19 @@
{
"type":"LIBRARY",
"sub-type":"PREBUILD",
"group-id":"org.opengl",
"description":"OpenGL: Generic graphic library",
"sanity-compilation": {
"intricate": {
"flag":{
"c-link": "-lGL"
},
"dependency":[
"c"
]
},
"!intricate":{
"error":true
}
}
}

View File

@ -0,0 +1,20 @@
{
"type":"LIBRARY",
"sub-type":"PREBUILD",
"group-id":"gnu",
"description":"pthread: Generic multithreading system",
"license":"LGPL",
"sanity-compilation": {
"intricate": {
"flag":{
"c-link": "-lpthread"
},
"dependency":[
"c"
]
},
"!intricate":{
"error":true
}
}
}

View File

@ -0,0 +1,19 @@
{
"type":"LIBRARY",
"sub-type":"PREBUILD",
"group-id":"gnu",
"description":"Z : z library ",
"sanity-compilation": {
"intricate": {
"flag":{
"c-link": "-lz"
},
"dependency":[
"c"
]
},
"!intricate":{
"error":true
}
}
}
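These prebuild descriptors only declare link flags and dependencies for libraries that already exist on the system; a consumer just merges the "intricate" block into the module that uses them. A hedged Python sketch of that merge (the descriptor path and the module dictionary are hypothetical, not part of the real implementation):

import json

# Sketch: fold the "intricate" block of a prebuild descriptor into a module
# description (descriptor_path and the module dict are hypothetical).
def apply_prebuild(module, descriptor_path):
    data = json.load(open(descriptor_path))
    intricate = data.get("sanity-compilation", {}).get("intricate", {})
    for language, flag in intricate.get("flag", {}).items():
        module.setdefault("flag", {}).setdefault(language, []).append(flag)
    module.setdefault("dependency", []).extend(intricate.get("dependency", []))
    return module

# e.g. applying the descriptor above would add "-lz" under "c-link" and "c" as a dependency.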

113
cmake/GLDTargetConfig.cmake Normal file
View File

@ -0,0 +1,113 @@
cmake_minimum_required(VERSION 3.20)
if (WIN32)
set(CPACK_GENERATOR "ZIP")
else()
set(CPACK_GENERATOR "TGZ")
endif()
set(CPACK_VERBATIM_VARIABLES YES)
include(CPack)
## first step is to determine the target:
if (WIN32)
set(GLD_TARGET "Windows" CACHE INTERNAL "")
elseif(APPLE)
set(GLD_TARGET "MacOs" CACHE INTERNAL "")
elseif(LINUX)
set(GLD_TARGET "Linux" CACHE INTERNAL "")
elseif(UNIX AND NOT APPLE)
set(GLD_TARGET "Linux" CACHE INTERNAL "")
else()
message("GLD Can not determine the target !!!")
exit(-1)
endif()
if (CMAKE_CXX_COMPILER_ID STREQUAL "Clang")
set(GLD_COMPILER "clang" CACHE INTERNAL "")
elseif (CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
set(GLD_COMPILER "gcc" CACHE INTERNAL "")
elseif (CMAKE_CXX_COMPILER_ID STREQUAL "Intel")
set(GLD_COMPILER "intel" CACHE INTERNAL "")
elseif (CMAKE_CXX_COMPILER_ID STREQUAL "MSVC")
set(GLD_COMPILER "msvc" CACHE INTERNAL "")
else()
message("GLD Can not determine the compilator !!!")
exit(-1)
endif()
if(CMAKE_BUILD_TYPE STREQUAL "Debug")
set(GLD_MODE "debug" CACHE INTERNAL "")
elseif(CMAKE_BUILD_TYPE STREQUAL "debug")
set(GLD_MODE "debug" CACHE INTERNAL "")
else()
set(GLD_MODE "release" CACHE INTERNAL "")
endif()
if(CMAKE_SYSTEM_PROCESSOR STREQUAL "x86_64"
OR CMAKE_SYSTEM_PROCESSOR STREQUAL amd64)
set(GLD_ARCH "x86" CACHE INTERNAL "")
set(GLD_BUS_SIZE "64" CACHE INTERNAL "")
elseif(CMAKE_SYSTEM_PROCESSOR STREQUAL "x86"
OR CMAKE_SYSTEM_PROCESSOR STREQUAL "i686")
set(GLD_ARCH "x86" CACHE INTERNAL "")
set(GLD_BUS_SIZE "32" CACHE INTERNAL "")
elseif(CMAKE_SYSTEM_PROCESSOR STREQUAL "ppc64")
set(GLD_ARCH "ppc" CACHE INTERNAL "")
set(GLD_BUS_SIZE "64" CACHE INTERNAL "")
elseif(CMAKE_SYSTEM_PROCESSOR STREQUAL "ppc")
set(GLD_ARCH "ppc" CACHE INTERNAL "")
set(GLD_BUS_SIZE "32" CACHE INTERNAL "")
elseif(CMAKE_SYSTEM_PROCESSOR STREQUAL "arm64"
OR CMAKE_SYSTEM_PROCESSOR STREQUAL "aarch64")
set(GLD_ARCH "arm" CACHE INTERNAL "")
set(GLD_BUS_SIZE "64" CACHE INTERNAL "")
elseif(CMAKE_SYSTEM_PROCESSOR STREQUAL "arm"
OR CMAKE_SYSTEM_PROCESSOR STREQUAL "armv7l"
OR CMAKE_SYSTEM_PROCESSOR STREQUAL "armv9")
set(GLD_ARCH "arm" CACHE INTERNAL "")
set(GLD_BUS_SIZE "32" CACHE INTERNAL "")
else()
message("GLD Can not determine the architecture and bus-size !!!")
exit(-1)
endif()
# cmake only supports the "intricate" mode; the "isolate" mode would be too complicated to implement.
set(GLD_SANITY_MODE "intricate" CACHE INTERNAL "")
# list of current supported language:
# - 'c': C language
# - 'c++': C++ language
# - 'asm': assembler language
# - 'm': Objective-C language
# - 'mm': Objective-C++ language
# - 'java': Java language
# - 'javah': generated c header with Java description (for JNI)
# TODO: maybe permit user to add some other... like "in", "masm", or other pre-step generation code???
set(GLD_SUPPORT_LANGUAGE "c;asm;c++;m;mm;java;javah" CACHE INTERNAL "")
set(GLD_SUPPORT_LANGUAGE_VARIABLE "C;ASM;CXX;M;MM;JAVA;JAVAH" CACHE INTERNAL "")
set(GLD_LANGUAGE_EXTENTION_C "c;C" CACHE INTERNAL "")
set(GLD_LANGUAGE_EXTENTION_CXX "cpp;CPP;cxx;CXX" CACHE INTERNAL "")
set(GLD_LANGUAGE_EXTENTION_ASM "s;S" CACHE INTERNAL "")
set(GLD_LANGUAGE_EXTENTION_M "m;M" CACHE INTERNAL "")
set(GLD_LANGUAGE_EXTENTION_MM "mm;MM" CACHE INTERNAL "")
set(GLD_LANGUAGE_EXTENTION_JAVA "java" CACHE INTERNAL "")
set(GLD_LANGUAGE_EXTENTION_JAVAH "javah" CACHE INTERNAL "")
# where the module is built
set(GLD_GLOBAL_BUILD_FOLDER "${CMAKE_CURRENT_BINARY_DIR}/${GLD_TARGET}_${GLD_ARCH}_${GLD_BUS_SIZE}/${GLD_MODE}/build/${GLD_COMPILER}/" CACHE INTERNAL "")
# where the package is prepared
set(GLD_GLOBAL_STAGING_FOLDER "${CMAKE_CURRENT_BINARY_DIR}/${GLD_TARGET}_${GLD_ARCH}_${GLD_BUS_SIZE}/${GLD_MODE}/staging/${GLD_COMPILER}/" CACHE INTERNAL "")
# where the bundle (tar, jar ...) is placed
set(GLD_GLOBAL_FINAL_FOLDER "${CMAKE_CURRENT_BINARY_DIR}/${GLD_TARGET}_${GLD_ARCH}_${GLD_BUS_SIZE}/${GLD_MODE}/final/${GLD_COMPILER}/" CACHE INTERNAL "")
message("Global GLD properties:")
message(" GLD_MODE : ${GLD_MODE}")
message(" GLD_COMPILER : ${GLD_COMPILER}")
message(" GLD_TARGET : ${GLD_TARGET}")
message(" GLD_ARCH : ${GLD_ARCH}")
message(" GLD_BUS_SIZE : ${GLD_BUS_SIZE}")
message(" GLD_SANITY_MODE : ${GLD_SANITY_MODE}")
message(" GLD_GLOBAL_BUILD_FOLDER : ${GLD_GLOBAL_BUILD_FOLDER}")
message(" GLD_GLOBAL_STAGING_FOLDER : ${GLD_GLOBAL_STAGING_FOLDER}")
message(" GLD_GLOBAL_FINAL_FOLDER : ${GLD_GLOBAL_FINAL_FOLDER}")

131
cmake/GLDTools.cmake Normal file
View File

@ -0,0 +1,131 @@
cmake_minimum_required(VERSION 3.20)
##
## @brief List all folders in a specific input folder.
## @param[out] VAR_OUT Result list of folders.
## @param[in] BASE_FOLDER Basic folder to parse.
##
function(sub_dir_list VAR_OUT BASE_FOLDER)
#message(" Search: ${BASE_FOLDER}" )
file(GLOB CHILDREN "${BASE_FOLDER}/*")
#message(" ==>>> ${children}" )
set(OUT_DATA "")
foreach(CHILD ${CHILDREN})
#message(" - ${child}" )
if(IS_DIRECTORY ${CHILD})
#message(" ==> is directory" )
list(APPEND OUT_DATA ${CHILD})
endif()
endforeach()
set(${VAR_OUT} ${OUT_DATA} PARENT_SCOPE)
endfunction()
function(replace_base_path VAR_OUT BASE LIST_ELEMENT)
set(OUT_DATA "")
foreach(ELEM ${LIST_ELEMENT})
string(REPLACE "${BASE}" "" TMP_VARIABLE ${ELEM})
list(APPEND OUT_DATA ${TMP_VARIABLE})
endforeach()
set(${VAR_OUT} ${OUT_DATA} PARENT_SCOPE)
endfunction()
function(copy_file_with_reference BASE_PATH SRC DST)
# check if the input uses the folder remapping syntax: FILE_NAME!FOLDER_SRC:FOLDER_DST
string(FIND "${SRC}" "!" POSITION_IN_FILE REVERSE)
#message("POSITION_IN_FILE=${POSITION_IN_FILE}")
if(${POSITION_IN_FILE} GREATER 0)
string(SUBSTRING "${SRC}" 0 ${POSITION_IN_FILE} FILE_NAME)
MATH(EXPR POSITION_IN_FILE "${POSITION_IN_FILE}+1")
string(SUBSTRING "${SRC}" ${POSITION_IN_FILE} -1 PART_2)
string(FIND "${PART_2}" ":" POSITION_IN_FILE REVERSE)
string(SUBSTRING "${PART_2}" 0 ${POSITION_IN_FILE} FOLDER_SRC)
MATH(EXPR POSITION_IN_FILE "${POSITION_IN_FILE}+1")
string(SUBSTRING "${PART_2}" ${POSITION_IN_FILE} -1 FOLDER_DST)
#message("***********************************************************************")
#message("FILE_NAME=${FILE_NAME}")
#message("FOLDER_SRC=${FOLDER_SRC}")
#message("FOLDER_DST=${FOLDER_DST}")
configure_file(${BASE_PATH}/${FOLDER_SRC}/${FILE_NAME} ${DST}/${FOLDER_DST}/${FILE_NAME} COPYONLY)
else()
configure_file(${BASE_PATH}/${SRC} ${DST}/${SRC} COPYONLY)
endif()
endfunction()
##
## @brief Recursive search of a specific pattern (stops when the pattern is found in a folder).
## (has a parsing limit ==> optimises the search in a worktree).
## @param[out] VAR_OUT List of all files that correspond to the reg-exp.
## @param[in] BASE_FOLDER Base folder to parse.
## @param[in] REG_EXP Regular expression used to search the data.
## @param[in] LIMIT Limit of folders to recursively parse.
##
function(find_all_files VAR_OUT BASE_FOLDER REG_EXP LIMIT)
if (${LIMIT} LESS_EQUAL 0)
set(${VAR_OUT} "" PARENT_SCOPE)
return()
endif()
MATH(EXPR LIMIT "${LIMIT}-1")
get_filename_component(BASE_FOLDER ${BASE_FOLDER} ABSOLUTE)
#message("KK Search in subDiratory: ${BASE_FOLDER}/${REG_EXP}" )
file(GLOB TMP_FILES "${BASE_FOLDER}/${REG_EXP}")
#message("-end-" )
set(GLD_FILES "")
#message("Find file: '${GLD_FILES}'" )
foreach(ELEM ${TMP_FILES})
if(IS_DIRECTORY "${ELEM}")
get_filename_component(DIR_NAME_RELATIVE ${ELEM} NAME)
#message(" =!=> DIR_NAME_RELATIVE = ${DIR_NAME_RELATIVE}" )
if("${ELEM}" STREQUAL "${BASE_FOLDER}" OR "${ELEM}" STREQUAL "${BASE_FOLDER}/" OR "${ELEM}" STREQUAL ".")
continue()
endif()
#message(" element: ${ELEM}" )
#message(" BASE_FOLDER: ${BASE_FOLDER}" )
find_all_files(OUT_SUB_LIST "${ELEM}" "${REG_EXP}" ${LIMIT})
#message(" start Add 1" )
list(APPEND GLD_FILES ${OUT_SUB_LIST})
#message(" -end-" )
else()
#message(" start Add 2" )
list(APPEND GLD_FILES ${ELEM})
#message(" -end-" )
endif()
endforeach()
set(${VAR_OUT} ${GLD_FILES} PARENT_SCOPE)
endfunction()
function(find_all_files_exeption VAR_OUT BASE_FOLDER REG_EXP LIMIT)
if (${LIMIT} LESS_EQUAL 0)
set(${VAR_OUT} "" PARENT_SCOPE)
return()
endif()
MATH(EXPR LIMIT "${LIMIT}-1")
get_filename_component(BASE_FOLDER ${BASE_FOLDER} ABSOLUTE)
#message("KK Search in subDiratory: ${BASE_FOLDER}/${REG_EXP}" )
file(GLOB GLD_FILES "${BASE_FOLDER}/${REG_EXP}")
#message("Find file: '${GLD_FILES}'" )
if("${GLD_FILES}" STREQUAL "")
#message("Search in subDiratory: ${BASE_FOLDER}" )
# no element continue search...
set(SUBDIRS "")
sub_dir_list(SUBDIRS "${BASE_FOLDER}")
#message(" =!=> : ${SUBDIRS}" )
foreach(ELEM ${SUBDIRS})
get_filename_component(DIR_NAME_RELATIVE ${ELEM} NAME)
#message(" =!=> DIR_NAME_RELATIVE = ${DIR_NAME_RELATIVE}" )
if("${DIR_NAME_RELATIVE}" STREQUAL ".git" OR "${DIR_NAME_RELATIVE}" STREQUAL ".island" OR "${DIR_NAME_RELATIVE}" STREQUAL "archive" OR "${DIR_NAME_RELATIVE}" STREQUAL "out" OR "${DIR_NAME_RELATIVE}" STREQUAL "target" OR "${DIR_NAME_RELATIVE}" STREQUAL "lutin")
continue()
endif()
if("${ELEM}" STREQUAL "${BASE_FOLDER}" OR "${ELEM}" STREQUAL "${BASE_FOLDER}/")
continue()
endif()
#message(" element: ${ELEM}" )
#message(" BASE_FOLDER: ${BASE_FOLDER}" )
find_all_files_exeption(OUT_SUB_LIST "${ELEM}" "${REG_EXP}" ${LIMIT})
list(APPEND GLD_FILES ${OUT_SUB_LIST})
endforeach()
endif()
set(${VAR_OUT} ${GLD_FILES} PARENT_SCOPE)
endfunction()
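find_all_files_exeption is a bounded-depth walk that stops descending as soon as the pattern matches in a folder and skips the usual work folders. A Python sketch of the same strategy (the excluded folder names mirror the list above):

import fnmatch
import os

# Sketch of find_all_files_exeption: stop at the first folder level where the
# pattern matches, skip well-known work folders, never go deeper than `limit`.
EXCLUDED = {".git", ".island", "archive", "out", "target", "lutin"}

def find_files_exception(base, pattern, limit):
    if limit <= 0:
        return []
    matches = [os.path.join(base, name) for name in os.listdir(base)
               if fnmatch.fnmatch(name, pattern)]
    if matches:
        return matches                     # pattern found here: do not recurse further
    found = []
    for name in os.listdir(base):
        path = os.path.join(base, name)
        if os.path.isdir(path) and name not in EXCLUDED:
            found.extend(find_files_exception(path, pattern, limit - 1))
    return found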

351
cmake/old.cmake Normal file
View File

@ -0,0 +1,351 @@
function(GLD_import_old FOLDER LIBRARY_NAME)
# add the executable
# add_executable(Tutorial tutorial.cxx)
set(LIBRARY_NAME "etk-core")
set_property(
DIRECTORY
APPEND
PROPERTY CMAKE_CONFIGURE_DEPENDS ${CMAKE_CURRENT_SOURCE_DIR}/GLD_${LIBRARY_NAME}.json
)
# Read the JSON file.
file(READ ${CMAKE_CURRENT_SOURCE_DIR}/GLD_${LIBRARY_NAME}.json MY_JSON_STRING)
# Loop through each element of the JSON array (indices 0 through 1).
json_get_element(LIBRARY_TYPE ${MY_JSON_STRING} "type")
json_get_element(LIBRARY_GROUP_ID ${MY_JSON_STRING} "group-id")
json_get_element(LIBRARY_DECRIPTION ${MY_JSON_STRING} "description")
json_get_element(LIBRARY_LICENCE ${MY_JSON_STRING} "licence")
json_get_element(LIBRARY_LICENCE_FILE ${MY_JSON_STRING} "licence-file")
json_get_element(LIBRARY_MAINTAINER ${MY_JSON_STRING} "maintainer")
json_get_element(LIBRARY_AUTHORS ${MY_JSON_STRING} "authors")
json_get_element(LIBRARY_VERSION ${MY_JSON_STRING} "version")
json_get_element(LIBRARY_SOURCES ${MY_JSON_STRING} "sources")
json_get_element(LIBRARY_HEADERS ${MY_JSON_STRING} "headers")
json_get_list(LIBRARY_PATH ${MY_JSON_STRING} "path")
json_get_element(LIBRARY_COMPILATION_VERSION ${MY_JSON_STRING} "compilation-version")
json_get_list(LIBRARY_DEPENDENCY ${MY_JSON_STRING} "dependency")
json_get_element(LIBRARY_TARGET ${MY_JSON_STRING} "target")
json_get_element(LIBRARY_COMPILATION_VERSION_LANGUAGE ${MY_JSON_STRING} "language")
json_get_element(LIBRARY_COMPILATION_VERSION_VALUE ${MY_JSON_STRING} "version")
message("LIBRARY_NAME : ${LIBRARY_NAME}")
message("LIBRARY_TYPE : ${LIBRARY_TYPE}")
message("LIBRARY_GROUP_ID : ${LIBRARY_GROUP_ID}")
message("LIBRARY_DECRIPTION : ${LIBRARY_DECRIPTION}")
message("LIBRARY_LICENCE : ${LIBRARY_LICENCE}")
message("LIBRARY_LICENCE_FILE : ${LIBRARY_LICENCE_FILE}")
message("LIBRARY_MAINTAINER : ${LIBRARY_MAINTAINER}")
message("LIBRARY_AUTHORS : ${LIBRARY_AUTHORS}")
message("LIBRARY_VERSION : ${LIBRARY_VERSION}")
message("LIBRARY_COMPILATION_VERSION : ${LIBRARY_COMPILATION_VERSION_LANGUAGE} : ${LIBRARY_COMPILATION_VERSION_VALUE}")
#message("LIBRARY_SOURCES: ${LIBRARY_SOURCES}")
#message("LIBRARY_HEADERS: ${LIBRARY_HEADERS}")
message("LIBRARY_PATH: ${LIBRARY_PATH}")
#message("LIBRARY_COMPILATION_VERSION: ${LIBRARY_COMPILATION_VERSION}")
message("LIBRARY_DEPENDENCY: ${LIBRARY_DEPENDENCY}")
#message("LIBRARY_TARGET: ${LIBRARY_TARGET}")
json_get_list(SOURCES_LIST ${LIBRARY_SOURCES} "list")
message("SOURCES_LIST: ${SOURCES_LIST}")
json_get_list(EXPORT_HEADER_LIST ${LIBRARY_HEADERS} "list")
json_get_element(EXPORT_HEADER_LIST_PATH ${LIBRARY_HEADERS} "destination-path")
message("EXPORT_HEADER_LIST: ${EXPORT_HEADER_LIST}")
message("EXPORT_HEADER_LIST_PATH: ${EXPORT_HEADER_LIST_PATH}")
string(JSON LIBRARY_PLOP ERROR_VARIABLE "qsdfqsdfqsdf" GET ${MY_JSON_STRING} "qsdfqsdfqsdf")
#string(JSON LIBRARY_MEMBERS MEMBER ${MY_JSON_STRING} )
#message("LIBRARY_MEMBERS : ${LIBRARY_MEMBERS}")
message("LIBRARY_PLOP : ${LIBRARY_PLOP}")
string(REPLACE "-" "_" LIBRARY_NAME222 ${LIBRARY_NAME})
set(LIBRARY_NAME222 ${LIBRARY_NAME})
project(${LIBRARY_NAME222} VERSION ${LIBRARY_VERSION})
set(${LIBRARY_NAME222} PROPERTIES CPACK_PACKAGE_VERSION ${LIBRARY_VERSION})
add_library(${LIBRARY_NAME222}_OBJ OBJECT ${SOURCES_LIST})
# shared libraries need PIC
set_property(TARGET ${LIBRARY_NAME222}_OBJ PROPERTY POSITION_INDEPENDENT_CODE 1)
#set_target_properties(${LIBRARY_NAME222} PROPERTIES PUBLIC_HEADER ${EXPORT_HEADER_LIST})
target_include_directories(${LIBRARY_NAME222}_OBJ PUBLIC
${LIBRARY_PATH}
)
add_library(${LIBRARY_NAME222}_dynamic SHARED $<TARGET_OBJECTS:${LIBRARY_NAME222}_OBJ>)
add_library(${LIBRARY_NAME222}_static STATIC $<TARGET_OBJECTS:${LIBRARY_NAME222}_OBJ>)
set_target_properties(${LIBRARY_NAME222}_dynamic PROPERTIES OUTPUT_NAME ${LIBRARY_NAME222})
if (NOT WIN32)
set_target_properties(${LIBRARY_NAME222}_static PROPERTIES OUTPUT_NAME ${LIBRARY_NAME222})
endif()
set_target_properties(${LIBRARY_NAME222}_dynamic PROPERTIES VERSION ${LIBRARY_VERSION})
set_target_properties(${LIBRARY_NAME222}_dynamic PROPERTIES SOVERSION ${LIBRARY_VERSION})
set_target_properties(${LIBRARY_NAME222}_dynamic PROPERTIES DESCRIPTION ${LIBRARY_DECRIPTION})
# install dynamic & static library
install(TARGETS ${LIBRARY_NAME222}_dynamic EXPORT ${LIBRARY_NAME222}Targets
RUNTIME DESTINATION lib
)
install(TARGETS ${LIBRARY_NAME222}_static
RUNTIME DESTINATION lib)
#install(TARGETS ${LIBRARY_NAME222} EXPORT ${LIBRARY_NAME222}Targets
# LIBRARY DESTINATION lib
# ARCHIVE DESTINATION lib
# RUNTIME DESTINATION bin
# INCLUDES DESTINATION include
#)
# install exported headers
# this copy all the headers in a single folder:
#install(FILES ${EXPORT_HEADER_LIST} DESTINATION include)
# this keep the basic path for each folders:
set(BASE "${PROJECT_SOURCE_DIR}/install")
foreach(ITEM ${EXPORT_HEADER_LIST})
get_filename_component(ITEM_PATH ${ITEM} PATH)
string(REPLACE ${BASE} "" ITEM_PATH ${ITEM_PATH})
install(FILES ${ITEM}
DESTINATION "include/${ITEM_PATH}"
COMPONENT Devel)
endforeach()
include(CMakePackageConfigHelpers)
#write_basic_package_version_file(
# "${CMAKE_CURRENT_BINARY_DIR}/${LIBRARY_NAME222}/${LIBRARY_NAME222}ConfigVersion.cmake"
# VERSION ${LIBRARY_VERSION}
# COMPATIBILITY AnyNewerVersion
#)
#
#export(EXPORT ${LIBRARY_NAME222}Targets
# FILE "${CMAKE_CURRENT_BINARY_DIR}/${LIBRARY_NAME222}/${LIBRARY_NAME222}Targets.cmake"
# NAMESPACE Upstream::
#)
##configure_file(cmake/${LIBRARY_NAME222}Config.cmake
## "${CMAKE_CURRENT_BINARY_DIR}/${LIBRARY_NAME222}/${LIBRARY_NAME222}Config.cmake"
## COPYONLY
##)
set(CONFIG_PACKAGE_LOCATION cmake/${LIBRARY_NAME222})
install(EXPORT ${LIBRARY_NAME222}Targets
FILE
${LIBRARY_NAME222}Targets.cmake
NAMESPACE
${LIBRARY_NAME222}::
DESTINATION
${CONFIG_PACKAGE_LOCATION}
)
#install(
# FILES
# cmake/${LIBRARY_NAME222}Config.cmake
# "${CMAKE_CURRENT_BINARY_DIR}/${LIBRARY_NAME222}/${LIBRARY_NAME222}ConfigVersion.cmake"
# DESTINATION
# ${CONFIG_PACKAGE_LOCATION}
# COMPONENT
# Devel
#)
message("CMAKE_INSTALL_LIBDIR===${CMAKE_INSTALL_LIBDIR}")
include(CMakePackageConfigHelpers)
configure_package_config_file(cmake/${LIBRARY_NAME222}Config.cmake.in
"${PROJECT_BINARY_DIR}/${LIBRARY_NAME222}Config.cmake"
INSTALL_DESTINATION ${CONFIG_PACKAGE_LOCATION}
NO_SET_AND_CHECK_MACRO
NO_CHECK_REQUIRED_COMPONENTS_MACRO)
write_basic_package_version_file(
"${PROJECT_BINARY_DIR}/${LIBRARY_NAME222}ConfigVersion.cmake"
VERSION ${LIBRARY_VERSION}
COMPATIBILITY SameMajorVersion)
install(
FILES
"${PROJECT_BINARY_DIR}/${LIBRARY_NAME222}Config.cmake"
"${PROJECT_BINARY_DIR}/${LIBRARY_NAME222}ConfigVersion.cmake"
DESTINATION ${CONFIG_PACKAGE_LOCATION}
COMPONENT Devel)
endfunction()
function(kjhghkjgkhjgkjhgkjhgkj LIBRARY_NAME)
json_get_element(LIBRARY_SOURCES ${MY_JSON_STRING} "source")
json_get_element(LIBRARY_HEADERS ${MY_JSON_STRING} "header")
json_get_list(LIBRARY_PATH ${MY_JSON_STRING} "path")
json_get_element(LIBRARY_COMPILATION_VERSION ${MY_JSON_STRING} "compilation-version")
json_get_list(LIBRARY_DEPENDENCY ${MY_JSON_STRING} "dependency")
json_get_element(LIBRARY_TARGET ${MY_JSON_STRING} "target")
json_get_element(LIBRARY_COMPILATION_VERSION_LANGUAGE ${MY_JSON_STRING} "language")
json_get_element(LIBRARY_COMPILATION_VERSION_VALUE ${MY_JSON_STRING} "version")
message("LIBRARY_COMPILATION_VERSION : ${LIBRARY_COMPILATION_VERSION_LANGUAGE} : ${LIBRARY_COMPILATION_VERSION_VALUE}")
#message("LIBRARY_SOURCES: ${LIBRARY_SOURCES}")
#message("LIBRARY_HEADERS: ${LIBRARY_HEADERS}")
message("LIBRARY_PATH: ${LIBRARY_PATH}")
#message("LIBRARY_COMPILATION_VERSION: ${LIBRARY_COMPILATION_VERSION}")
message("LIBRARY_DEPENDENCY: ${LIBRARY_DEPENDENCY}")
#message("LIBRARY_TARGET: ${LIBRARY_TARGET}")
json_get_list(SOURCES_LIST ${LIBRARY_SOURCES} "list")
message("SOURCES_LIST: ${SOURCES_LIST}")
json_get_list(EXPORT_HEADER_LIST ${LIBRARY_HEADERS} "list")
json_get_element(EXPORT_HEADER_LIST_PATH ${LIBRARY_HEADERS} "destination-path")
message("EXPORT_HEADER_LIST: ${EXPORT_HEADER_LIST}")
message("EXPORT_HEADER_LIST_PATH: ${EXPORT_HEADER_LIST_PATH}")
string(JSON LIBRARY_PLOP ERROR_VARIABLE "qsdfqsdfqsdf" GET ${MY_JSON_STRING} "qsdfqsdfqsdf")
#string(JSON LIBRARY_MEMBERS MEMBER ${MY_JSON_STRING} )
#message("LIBRARY_MEMBERS : ${LIBRARY_MEMBERS}")
message("LIBRARY_PLOP : ${LIBRARY_PLOP}")
string(REPLACE "-" "_" LIBRARY_NAME222 ${LIBRARY_NAME})
set(LIBRARY_NAME222 ${LIBRARY_NAME})
project(${LIBRARY_NAME222} VERSION ${LIBRARY_VERSION})
set(${LIBRARY_NAME222} PROPERTIES CPACK_PACKAGE_VERSION ${LIBRARY_VERSION})
add_library(${LIBRARY_NAME222}_OBJ OBJECT ${SOURCES_LIST})
# shared libraries need PIC
set_property(TARGET ${LIBRARY_NAME222}_OBJ PROPERTY POSITION_INDEPENDENT_CODE 1)
#set_target_properties(${LIBRARY_NAME222} PROPERTIES PUBLIC_HEADER ${EXPORT_HEADER_LIST})
target_include_directories(${LIBRARY_NAME222}_OBJ PUBLIC
${LIBRARY_PATH}
)
add_library(${LIBRARY_NAME222}_dynamic SHARED $<TARGET_OBJECTS:${LIBRARY_NAME222}_OBJ>)
add_library(${LIBRARY_NAME222}_static STATIC $<TARGET_OBJECTS:${LIBRARY_NAME222}_OBJ>)
set_target_properties(${LIBRARY_NAME222}_dynamic PROPERTIES OUTPUT_NAME ${LIBRARY_NAME222})
if (NOT WIN32)
set_target_properties(${LIBRARY_NAME222}_static PROPERTIES OUTPUT_NAME ${LIBRARY_NAME222})
endif()
set_target_properties(${LIBRARY_NAME222}_dynamic PROPERTIES VERSION ${LIBRARY_VERSION})
set_target_properties(${LIBRARY_NAME222}_dynamic PROPERTIES SOVERSION ${LIBRARY_VERSION})
set_target_properties(${LIBRARY_NAME222}_dynamic PROPERTIES DESCRIPTION ${LIBRARY_DECRIPTION})
# install dynamic & static library
install(TARGETS ${LIBRARY_NAME222}_dynamic EXPORT ${LIBRARY_NAME222}Targets
RUNTIME DESTINATION lib
)
install(TARGETS ${LIBRARY_NAME222}_static
RUNTIME DESTINATION lib)
#install(TARGETS ${LIBRARY_NAME222} EXPORT ${LIBRARY_NAME222}Targets
# LIBRARY DESTINATION lib
# ARCHIVE DESTINATION lib
# RUNTIME DESTINATION bin
# INCLUDES DESTINATION include
#)
# install exported headers
# this copy all the headers in a single folder:
#install(FILES ${EXPORT_HEADER_LIST} DESTINATION include)
# this keep the basic path for each folders:
set(BASE "${PROJECT_SOURCE_DIR}/install")
foreach(ITEM ${EXPORT_HEADER_LIST})
get_filename_component(ITEM_PATH ${ITEM} PATH)
string(REPLACE ${BASE} "" ITEM_PATH ${ITEM_PATH})
install(FILES ${ITEM}
DESTINATION "include/${ITEM_PATH}"
COMPONENT Devel)
endforeach()
include(CMakePackageConfigHelpers)
#write_basic_package_version_file(
# "${CMAKE_CURRENT_BINARY_DIR}/${LIBRARY_NAME222}/${LIBRARY_NAME222}ConfigVersion.cmake"
# VERSION ${LIBRARY_VERSION}
# COMPATIBILITY AnyNewerVersion
#)
#
#export(EXPORT ${LIBRARY_NAME222}Targets
# FILE "${CMAKE_CURRENT_BINARY_DIR}/${LIBRARY_NAME222}/${LIBRARY_NAME222}Targets.cmake"
# NAMESPACE Upstream::
#)
##configure_file(cmake/${LIBRARY_NAME222}Config.cmake
## "${CMAKE_CURRENT_BINARY_DIR}/${LIBRARY_NAME222}/${LIBRARY_NAME222}Config.cmake"
## COPYONLY
##)
set(CONFIG_PACKAGE_LOCATION cmake/${LIBRARY_NAME222})
install(EXPORT ${LIBRARY_NAME222}Targets
FILE
${LIBRARY_NAME222}Targets.cmake
NAMESPACE
${LIBRARY_NAME222}::
DESTINATION
${CONFIG_PACKAGE_LOCATION}
)
#install(
# FILES
# cmake/${LIBRARY_NAME222}Config.cmake
# "${CMAKE_CURRENT_BINARY_DIR}/${LIBRARY_NAME222}/${LIBRARY_NAME222}ConfigVersion.cmake"
# DESTINATION
# ${CONFIG_PACKAGE_LOCATION}
# COMPONENT
# Devel
#)
message("CMAKE_INSTALL_LIBDIR===${CMAKE_INSTALL_LIBDIR}")
include(CMakePackageConfigHelpers)
configure_package_config_file(cmake/${LIBRARY_NAME222}Config.cmake.in
"${PROJECT_BINARY_DIR}/${LIBRARY_NAME222}Config.cmake"
INSTALL_DESTINATION ${CONFIG_PACKAGE_LOCATION}
NO_SET_AND_CHECK_MACRO
NO_CHECK_REQUIRED_COMPONENTS_MACRO)
write_basic_package_version_file(
"${PROJECT_BINARY_DIR}/${LIBRARY_NAME222}ConfigVersion.cmake"
VERSION ${LIBRARY_VERSION}
COMPATIBILITY SameMajorVersion)
install(
FILES
"${PROJECT_BINARY_DIR}/${LIBRARY_NAME222}Config.cmake"
"${PROJECT_BINARY_DIR}/${LIBRARY_NAME222}ConfigVersion.cmake"
DESTINATION ${CONFIG_PACKAGE_LOCATION}
COMPONENT Devel)
endfunction()

View File

@ -57,13 +57,21 @@ def import_path_local(path, limit_sub_folder, exclude_path = [], base_name = "")
if path in exclude_path:
debug.debug("find '" + str(path) + "' in exclude_path=" + str(exclude_path))
return []
# filter elements:
# filter elements for lutin:
tmp_list_lutin_file = filter_name_and_file(path, list_files, base_name + "*.py")
debug.verbose("lutin files: " + str(path) + " : " + str(tmp_list_lutin_file))
# Import the module:
for filename in tmp_list_lutin_file:
out.append(os.path.join(path, filename))
debug.extreme_verbose(" Find a file : '" + str(out[-1]) + "'")
# filter elements for GLD:
tmp_list_gld_file = filter_name_and_file(path, list_files, "GLD_*.json")
debug.verbose("GLD files: " + str(path) + " : " + str(tmp_list_lutin_file))
# Import the module:
for filename in tmp_list_gld_file:
out.append(os.path.join(path, filename))
debug.extreme_verbose(" Find a file : '" + str(out[-1]) + "'")
need_parse_sub_folder = True
rm_value = -1
# check if we need to parse sub_folder
@ -104,12 +112,10 @@ def import_path_local(path, limit_sub_folder, exclude_path = [], base_name = "")
out.append(elem)
return out
def init():
global is_init;
if is_init == True:
return
debug.verbose("Use Make as a make stadard")
sys.path.append(tools.get_run_path())
# create the list of basic folder:
basic_folder_list = []

View File

@ -33,6 +33,7 @@ __start_builder_name="Builder_"
##
def import_path(path_list):
global builder_list
gld_base = env.get_gld_build_system_base_name()
global_base = env.get_build_system_base_name()
debug.debug("BUILDER: Init with Files list:")
for elem in path_list:
@ -43,19 +44,28 @@ def import_path(path_list):
filename = filename[:-3]
base_file_name = filename
# Remove global base name:
filename = filename[len(global_base):]
# Check if it starts with the local pattern:
if filename[:len(__start_builder_name)] != __start_builder_name:
debug.extreme_verbose("BUILDER: Integrate: '" + filename + "' from '" + elem + "' ==> rejected")
if filename[:len(gld_base)] == gld_base:
filename = filename[len(gld_base):]
# Check if it starts with the local pattern:
if filename[:len(__start_builder_name)] != __start_builder_name:
debug.extreme_verbose("BUILDER: NOT-Integrate: '" + filename + "' from '" + elem + "' ==> rejected")
continue
continue
# Remove the local pattern
builder_name = filename[len(__start_builder_name):]
debug.verbose("BUILDER: Integrate: '" + builder_name + "' from '" + elem + "'")
the_builder = __import__(base_file_name)
builder_list.append({"name":builder_name,
"element":the_builder
})
debug.debug('BUILDER: type=' + the_builder.get_type() + " in=" + str(the_builder.get_input_type()) + " out=" + str(the_builder.get_output_type()))
elif filename[:len(global_base)] == global_base:
filename = filename[len(global_base):]
# Check if it starts with the local pattern:
if filename[:len(__start_builder_name)] != __start_builder_name:
debug.extreme_verbose("BUILDER: NOT-Integrate: '" + filename + "' from '" + elem + "' ==> rejected")
continue
# Remove the local pattern
builder_name = filename[len(__start_builder_name):]
debug.verbose("BUILDER: Integrate: '" + builder_name + "' from '" + elem + "'")
the_builder = __import__(base_file_name)
builder_list.append({"name":builder_name,
"order":the_builder.get_order(),
"element":the_builder
})
debug.debug('BUILDER: type=' + the_builder.get_type() + " order=" + str(the_builder.get_order()) + " in=" + str(the_builder.get_input_type()) + " out=" + str(the_builder.get_output_type()))
debug.verbose("List of BUILDER: ")
for elem in builder_list:
debug.verbose(" " + str(elem["name"]))
@ -72,8 +82,8 @@ def init():
element["element"].init()
##
## @brief Get a builder tool with specifiying the input type (like cpp, S ...)
## @param[in] input_type (string) extention file that can be compile
## @brief Get a builder tool with specifying the input type (like cpp, S ...)
## @param[in] input_type (string) extension file that can be compile
##
def get_builder(input_type):
global builder_list
@ -84,10 +94,59 @@ def get_builder(input_type):
# we can not find the builder ...
debug.error("Can not find builder for type : '" + str(input_type) + "'")
raise ValueError('type error :' + str(input_type))
##
## @brief Get a builder tool with his name
## @param[in] name (string) name of the builder
##
def get_builder_named(name):
global builder_list
for element in builder_list:
if element["name"] == name:
return element["element"]
# we can not find the builder ...
debug.error("Can not find builder for type : '" + str(input_type) + "'")
raise ValueError('type error :' + str(input_type))
##
## @brief Get a builder tool with specifiying the output type (like .exe, .jar ...)
## @param[in] input_type (string) extention file that can be generated
## @brief get all the builders with their extensions, to automatically detect the compile mode
## @return a map with the key name of the builder, and a table of extension files
##
def get_full_builder_extention():
global builder_list
out = {};
for element in builder_list:
if element["element"] != None:
out[element["name"]] = element["element"].get_input_type();
return out;
##
## @brief get all the builder in the common order build
## @return a list with the ordered builder names
##
def get_ordered_builder_list():
global builder_list
table = {};
for element in builder_list:
table[element["order"]] = element["name"];
out = []
for key in sorted(table.keys()):
out.append(table[key]);
debug.extreme_verbose("builder ordered=" + str(table));
debug.extreme_verbose(" ==> " + str(out));
return out;
def find_builder_with_input_extention(extension):
extention_map = get_full_builder_extention();
for builder_name in get_ordered_builder_list():
debug.extreme_verbose("builder_name: " + str(extension) + " in " + str(extention_map[builder_name]));
if extension in extention_map[builder_name]:
return builder_name;
debug.warning("does not find the builder: for extension: " + str(extension))
return "?";
##
## @brief Get a builder tool with specifying the output type (like .exe, .jar ...)
## @param[in] input_type (string) extension file that can be generated
##
def get_builder_with_output(output_type):
global builder_list
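With get_full_builder_extention() and get_ordered_builder_list(), the extension lookup in find_builder_with_input_extention becomes a first-match over the ordered builder names. A hedged usage sketch (the builder names and extensions below are examples, not the real registered set):

# Sketch of the extension -> builder resolution: walk the builders in build
# order and return the first one whose input extensions contain it.
extension_map = {"c": ["c", "C"], "c++": ["cpp", "CPP", "cxx"]}
ordered_builders = ["c", "c++"]

def find_builder(extension):
    for name in ordered_builders:
        if extension in extension_map[name]:
            return name
    return "?"

print(find_builder("cpp"))   # -> c++
print(find_builder("java"))  # -> ?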

View File

@ -72,14 +72,14 @@ def need_re_build(dst, src, depend_file=None, file_cmd="", cmd_line="", force_id
debug.extreme_verbose(" force_identical='" + str(force_identical) + "'")
# if force mode selected ==> just force rebuild ...
if env.get_force_mode():
debug.extreme_verbose(" ==> must rebuild (force mode)")
debug.verbose(" ==> must rebuild (force mode)")
return True
# check if the destination existed:
if dst != "" \
and dst != None \
and os.path.exists(dst) == False:
debug.extreme_verbose(" ==> must rebuild (dst does not exist)")
debug.verbose(" ==> must rebuild (dst does not exist)")
return True
if src != "" \
and src != None \
@ -92,25 +92,25 @@ def need_re_build(dst, src, depend_file=None, file_cmd="", cmd_line="", force_id
and src != "" \
and src != None \
and os.path.getmtime(src) > os.path.getmtime(dst):
debug.extreme_verbose(" ==> must rebuild (source time greater)")
debug.verbose(" ==> must rebuild (source time greater)")
return True
if depend_file != "" \
and depend_file != None \
and os.path.exists(depend_file) == False:
debug.extreme_verbose(" ==> must rebuild (no depending file)")
debug.verbose(" ==> must rebuild (no depending file)")
return True
if file_cmd != "" \
and file_cmd != None:
if os.path.exists(file_cmd) == False:
debug.extreme_verbose(" ==> must rebuild (no commandLine file)")
debug.verbose(" ==> must rebuild (no commandLine file)")
return True
# check if the 2 cmd_line are similar :
file2 = open(file_cmd, "r")
first_and_unique_line = file2.read()
if first_and_unique_line != cmd_line:
debug.extreme_verbose(" ==> must rebuild (cmd_lines are not identical)")
debug.verbose(" ==> must rebuild (cmd_lines are not identical)")
debug.extreme_verbose(" ==> '" + cmd_line + "'")
debug.extreme_verbose(" ==> '" + first_and_unique_line + "'")
file2.close()
@ -124,35 +124,37 @@ def need_re_build(dst, src, depend_file=None, file_cmd="", cmd_line="", force_id
file = open(depend_file, "r")
for cur_line in file.readlines():
# normal file : end with : ": \\n"
cur_line = cur_line[:len(cur_line)-1]
cur_line_data = cur_line[:len(cur_line)-1]
# removing last \ ...
if cur_line[len(cur_line)-1:] == '\\' :
cur_line = cur_line[:len(cur_line)-1]
if cur_line_data[len(cur_line_data)-1:] == '\\' :
cur_line_data = cur_line_data[:len(cur_line_data)-1]
# remove white space :
#debug.verbose(" Line (read) : '" + cur_line + "'");
cur_line = cur_line.strip()
cur_line_data = cur_line_data.strip()
#debug.verbose(" Line (strip) : '" + cur_line + "'");
test_file=""
if cur_line[len(cur_line)-1:] == ':':
debug.extreme_verbose(" Line (no check (already done) : '" + cur_line + "'");
elif len(cur_line) == 0 \
or cur_line == '\\':
debug.extreme_verbose(" Line (Not parsed) : '" + cur_line + "'");
else:
test_file = cur_line
debug.extreme_verbose(" Line (might check) : '" + test_file + "'");
# really check files:
if test_file != "":
debug.extreme_verbose(" ==> test");
if False==os.path.exists(test_file):
debug.extreme_verbose(" ==> must rebuild (a dependency file does not exist)")
file.close()
return True
if os.path.getmtime(test_file) > os.path.getmtime(dst):
debug.extreme_verbose(" ==> must rebuild (a dependency file time is newer)")
file.close()
return True
for cur_line in cur_line_data.split(" "):
test_file=""
if cur_line[len(cur_line)-1:] == ':':
debug.extreme_verbose(" Line (no check (already done) : '" + cur_line + "'");
elif len(cur_line) == 0 \
or cur_line == '\\':
debug.extreme_verbose(" Line (Not parsed) : '" + cur_line + "'");
else:
test_file = cur_line
debug.extreme_verbose(" Line (might check) : '" + test_file + "'");
# really check files:
if test_file != "":
debug.extreme_verbose(" ==> test");
if False==os.path.exists(test_file):
debug.verbose(" ==> must rebuild (a dependency file does not exist) " + str(test_file))
file.close()
return True
if os.path.getmtime(test_file) > os.path.getmtime(dst):
debug.verbose(" ==> must rebuild (a dependency file time is newer)" + str(test_file))
file.close()
return True
# close the current file :
file.close()
# check the 2 files are identical:
@ -161,12 +163,12 @@ def need_re_build(dst, src, depend_file=None, file_cmd="", cmd_line="", force_id
size_src = _file_size(src)
size_dst = _file_size(dst)
if size_src != size_dst:
debug.extreme_verbose(" Force Rewrite not the same size size_src=" + str(size_src) + " != size_dest=" + str(size_dst))
debug.info(" Force Rewrite not the same size size_src=" + str(size_src) + " != size_dest=" + str(size_dst))
return True
data_src = _file_read_data(src, binary=True)
data_dst = _file_read_data(dst, binary=True)
if data_src != data_dst:
debug.extreme_verbose(" Force Rewrite not the same data")
debug.info(" Force Rewrite not the same data")
return True
debug.extreme_verbose(" ==> Not rebuild (all dependency is OK)")

View File

@ -86,6 +86,12 @@ def get_build_system_base_name():
global build_system_base_name
return build_system_base_name
gld_build_system_base_name = "GLD"
def get_gld_build_system_base_name():
global gld_build_system_base_name
return gld_build_system_base_name
print_pretty_mode=False
@ -112,17 +118,29 @@ def get_warning_mode():
global store_warning
return store_warning
ccache=False
ccache=False;
def set_ccache(val):
global ccache
global ccache;
if val == True:
ccache = True
ccache = True;
else:
ccache = False
ccache = False;
def get_ccache():
global ccache
return ccache
global ccache;
return ccache;
async_fail=False
def set_async_fail(val):
global async_fail;
if val == True:
async_fail = True;
else:
async_fail = False;
def get_async_fail():
global async_fail;
return async_fail;
def end_with(name, list):
for appl in list:

View File

@ -93,6 +93,7 @@ class HeritageList:
debug.extreme_verbose(" add: " + str(herit.name))
listHeritage.remove(herit)
self.list_heritage.append(copy.deepcopy(herit))
# check if nothing was included in the heritage...
if currentHeritageSize == len(listHeritage):
debug.warning("Not resolve dependency between the library ==> can be a cyclic dependency !!!")
for herit in listHeritage:
@ -263,4 +264,3 @@ class heritage:
def __repr__(self):
return "{Heritage:" + str(self.name) + " depend on: " + str(reversed(self.depends)) + " ... }"

View File

@ -28,6 +28,7 @@ __start_macro_name="Macro_"
##
def import_path(path_list):
global __macro_list
gld_base = env.get_gld_build_system_base_name()
global_base = env.get_build_system_base_name()
debug.debug("TARGET: Init with Files list:")
for elem in path_list:
@ -37,15 +38,23 @@ def import_path(path_list):
# Remove .py at the end:
filename = filename[:-3]
# Remove global base name:
filename = filename[len(global_base):]
# Check if it starts with the local pattern:
if filename[:len(__start_macro_name)] != __start_macro_name:
debug.extreme_verbose("MACRO: NOT-Integrate: '" + filename + "' from '" + elem + "' ==> rejected")
if filename[:len(gld_base)] == gld_base:
filename = filename[len(gld_base):]
# Check if it starts with the local pattern:
if filename[:len(__start_macro_name)] != __start_macro_name:
debug.extreme_verbose("MACRO: NOT-Integrate: '" + filename + "' from '" + elem + "' ==> rejected")
continue
continue
# Remove the local pattern
target_name = filename[len(__start_macro_name):]
debug.verbose("MACRO: Integrate: '" + target_name + "' from '" + elem + "'")
__macro_list.append([target_name, elem])
elif filename[:len(global_base)] == global_base:
filename = filename[len(global_base):]
# Check if it starts with the local pattern:
if filename[:len(__start_macro_name)] != __start_macro_name:
debug.extreme_verbose("MACRO: NOT-Integrate: '" + filename + "' from '" + elem + "' ==> rejected")
continue
# Remove the local pattern
target_name = filename[len(__start_macro_name):]
debug.verbose("MACRO: Integrate: '" + target_name + "' from '" + elem + "'")
__macro_list.append([target_name, elem])
debug.verbose("New list MACRO: ")
for elem in __macro_list:
debug.verbose(" " + str(elem[0]))

File diff suppressed because it is too large

770
lutin/moduleGLD.py Normal file
View File

@ -0,0 +1,770 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
##
## @author Edouard DUPIN
##
## @copyright 2012, Edouard DUPIN, all right reserved
##
## @license MPL v2.0 (see license file)
##
import sys
import os
import copy
import inspect
import fnmatch
import json
# Local import
from . import host
from . import tools
from realog import debug
from . import heritage
from . import builder
from . import multiprocess
from . import image
from . import module
from . import license
from . import env
from warnings import catch_warnings
from xmlrpc.client import boolean
def get_module_type_availlable():
return [
'BINARY',
'BINARY_DYNAMIC',
'BINARY_STAND_ALONE',
'LIBRARY',
'LIBRARY_HEADER_ONLY',
'LIBRARY_DYNAMIC',
'LIBRARY_STATIC',
'PACKAGE',
'PREBUILD',
'DATA'
]
list_of_property_module=[
"type",
"sub-type",
"group-id",
"description",
"license",
"license-file",
"author",
"maintainer",
"version",
"version-id",
"code-quality",
"visibility-map",
"header-install-mode",
"package" # package is for specifie some right in LUTIN
];
list_of_element_ignored=[
"comment", # just to post a comment in the configuration files
"todo", # just to post a todo in the configuration files
];
list_of_element_availlable=[
"source",
"header",
"path",
"compilation-version",
"dependency",
"copy",
"flag",
"flag-export",
"compiler",
"mode",
"target",
"arch",
"bus-size", # todo
"sanity-compilation", # todo "isolate", "intricate", "*" permit to specify element to copy for the isolation mode. intricate is for current mode where everything is mixed together ...
"compilator"
];
"""
{
"type":"LIBRARY",
"group-id":"com.atria-soft",
"description":"Ewol tool kit (base: container)",
"license":"MPL-2",
"license-file":"file://license.txt",
"maintainer":"Edouard DUPIN <yui.heero@gmail.com>",
"author":"file://../authors.txt",
"version":"1.5.3",
"__version":"file://../version.txt",
"code-quality":"MEDIUM",
"mode": {
"*": {
"target": {
"*": {
"arch": {
"*": {},
}
},
},
"arch": {}
},
"release": {
},
"debug": {
}
},
"source": [
{
"source":"xxx/plop.cpp",
"flag":[
...
]
},
"xxx/Yyy.cpp",
"xxx/YuyTer.cpp"
"xxx/plouf.java"
],
"source": { # this is the canocical mode ==> mermit to separate the language, otherwise this is auto-detection mode ...
"*": [
...
],
"c": [
...
],
"c++": [
...
],
"nasm": [
...
],
"java": [
...
],
"javah": [
...
] ...
},
"visibility-map": "libbsd/src/libbsd.map", # for application that need to control the visibility of the exports: -Wl,--version-script=/src/libbsd.map"
"header-install-mode": "AFTER", # or "BEFORE"<< default is before ==> better to isolate the include folder...
"header": [
"xxx/Yyy.hpp",
"xxx/YuyTer.hpp"
],
"header": { # this is the canocical mode ==> mermit to separate the language, otherwise this is auto-detection mode ...
"c": [
"xxx/Yyy.hpp",
"xxx/YuyTer.hpp"
]
},
"path":[
"."
],
"compilation-version": {
"c++": 2017, # -2017 for gnu17
"java": 16
},
"dependency": [
"c",
"m",
"pthread"
],
"copy":[
...
];
"mode": {
"*": {
},
"debug": {
},
"release": {
},
"coverage": {
}
},
"target": {
"*": {
},
"Android": {
"dependency": [
"SDK"
]
},
"MacOs": {
"dependency": [
"cxx"
]
},
"Windows": {
},
"Linux": {
"dependency": [
{
"name": "X11",
"optional": true,
"export": false,
"source": [
"gale/context/X11/Context.cpp"
],
"flag": {
"c++": "-DGALE_BUILD_X11"
},
"missing-flag": {
"c++": "-DGALE_DOES_NOT_BUILD_X11"
}
},
},
"Debian": { ## Debian/Ubuntu/Suze/RedHat/ArchLinux/Gento ... heritate from linux ...
},
"IOs": {
},
"Web": {
},
"MacOs|IOs": {
},
"comment": "For all element expect IOS and Android",
"!Android&!IOs": {
},
}
"flag": {
"c++": "-DGALE_BUILD_X11",
"c": [
"-DAPPL_VERSION={{{project.version}}}",
"-DAPPL_NAME={{{project.name}}}",
"-DAPPL_TYPE={{{project.type}}}",
"-DAPPL_PATH={{{project.path}}}"
]
},
"arch": {
"x86": {
},
"arm": {
},
"ppc": {
}
"misc": {
}
},
"bus-size": {
"*": {
},
"8": {
},
"16": {
},
"32": {
},
"64": {
},
"128": {
}
},
"compilator": {
"*": {
},
"gcc": {
},
"clang": {
},
"mingw": {
},
"msvc": {
},
"intel": {
}
},
"sanity-compilation": {
"*": {
},
"isolate": {
},
"intricate": {
}
},
"instruction-set":{
not present right now... : the available instruction sets ... :
}
#### TODO: later
"import": [
"GDSFGH.json" ## import an other file to have generic definitions ...
]
}
get_compilator
get_mode
get_arch
"""
def check_compatible(mode, value, list_to_check, json_path):
if value == "":
debug.debug("the <" + mode + ">: condition '" + str(value) + "' empty element >> " + json_path);
return False;
if value == "*":
return True;
find_a_valid_key = False;
debug.verbose("Depact: " + value);
# first step ==> split on the '|' separator ==> multiple check cases
for elemOR in value.split("|"):
debug.verbose(" |: " + elemOR);
# check the condition is True:
condition = True;
if elemOR == "" or elemOR == " " or elemOR == "\t":
debug.warning("the <" + mode + ">: condition '" + str(value) + "' is not supported must not have ' ' or '\\t' or empty element >> " + json_path);
return False;
for elemAND in elemOR.split("&"):
debug.verbose(" &: " + elemAND);
if elemAND == "" or elemAND == " " or elemAND == "\t":
debug.warning("the <" + mode + ">: condition '" + str(value) + "' is not supported must not have ' ' or '\\t' or empty element >> " + json_path);
return False;
invert_condition = False;
if elemAND[0] == "!":
debug.verbose(" ==> invert condition");
invert_condition = True;
elemAND = elemAND[1:]
if elemAND in list_to_check:
debug.verbose(" ==> detect condition OK");
if invert_condition:
condition = False;
debug.verbose(" FALSE");
break;
else:
debug.verbose(" TRUE");
continue;
if invert_condition:
debug.verbose(" TRUE");
continue;
else:
condition = False;
debug.verbose(" FALSE");
break;
if condition:
debug.verbose(" Detect OR condition at TRUE !!!!");
find_a_valid_key = True;
break;
if find_a_valid_key:
return True;
"""
if "|" in value:
debug.warning("in <" + mode + ">: '" + str(value) + " not managed '|' >> " + json_path);
return False;
if "&" in value:
debug.warning("in <" + mode + ">: '" + str(value) + " not managed '&' >> " + json_path);
return False;
if "!" in value:
debug.warning("in <" + mode + ">: '" + str(value) + " not managed '!' >> " + json_path);
return False;
if value in list_to_check or value == "*":
return True;
"""
debug.debug("the <" + mode + ">: '" + str(value) + "' is not compatible with '" + str(list_to_check) + "' >> " + json_path);
return False;
def replace_dynamic_tags(my_module, data):
out = data;
out = out.replace("{{{project.version}}}", tools.version_to_string(my_module.get_version()));
out = out.replace("{{{project.name}}}", my_module.get_name());
out = out.replace("{{{project.type}}}", my_module.get_type());
out = out.replace("{{{project.path}}}", my_module.get_origin_path());
out = out.replace("{{{quote}}}", "\\'");
out = out.replace("{{{quote2}}}", "\\\""); # "
return out;
def parse_node_arch(target, path, json_path, my_module, data):
for elem in data.keys():
if check_compatible("arch", elem, target.get_arch(), json_path):
parse_node_generic(target, path, json_path, my_module, data[elem]);
def parse_node_mode(target, path, json_path, my_module, data):
for elem in data.keys():
if check_compatible("mode", elem, target.get_mode(), json_path):
parse_node_generic(target, path, json_path, my_module, data[elem]);
def parse_node_platform(target, path, json_path, my_module, data):
for elem in data.keys():
if check_compatible("target", elem, target.get_type(), json_path):
parse_node_generic(target, path, json_path, my_module, data[elem]);
def parse_node_flag(target, path, json_path, my_module, data, export = False):
if type(data) != dict:
debug.error("Can not parseflag other than dictionnary in: " + str(json_path));
for elem in data.keys():
if type(data[elem]) == list:
tmp = []
for elenFlag in data[elem]:
tmp.append(replace_dynamic_tags(my_module, elenFlag));
my_module.add_flag(elem, tmp, export);
elif type(data[elem]) == str:
my_module.add_flag(elem, replace_dynamic_tags(my_module, data[elem]), export);
else:
debug.error("not manage list of flag other than string and list of string, but it is " + str(type(data[elem])) + " in: '" + str(json_path) + "' for: " + str(data));
def parse_node_header_dict(target, path, json_path, my_module, data, builder_name = None):
if "path" in data.keys() or "to" in data.keys() or "recursive" in data.keys() or "filter" in data.keys():
#{'path': 'thirdparty/src/', 'filter': '*.h', 'to': 'g3log'}
elem_path = "";
elem_to = "";
elem_recursive = True;
elem_filter = "*"
if "path" in data:
elem_path = data["path"];
if "to" in data:
elem_to = data["to"];
if "recursive" in data:
elem_recursive = data["recursive"];
if "filter" in data:
elem_filter = data["filter"];
if elem_path == "":
debug.error("header does not support type of dict: " + str(data) + " ==> missing 'path'")
my_module.add_header_path(elem_path, regex=elem_filter, clip_path=None, recursive=elem_recursive, destination_path=elem_to, builder_name=builder_name);
else:
for builder_key in data.keys():
my_module.add_header_file(data[builder_key], builder_name=builder_key);
def parse_node_header_list(target, path, json_path, my_module, data, builder_name = None):
for elem in data:
if type(elem) == list or type(elem) == str:
my_module.add_header_file(elem, builder_name = builder_name);
elif type(elem) == dict:
parse_node_header_dict(target, path, json_path, my_module, elem, builder_name);
else:
debug.error("headers does not manage other than string, list and object");
def parse_node_header(target, path, json_path, my_module, data, builder_name = None):
if type(data) == str:
my_module.add_header_file(data, builder_name = builder_name);
elif type(data) == list:
parse_node_header_list(target, path, json_path, my_module, data, builder_name);
elif type(data) == dict:
parse_node_header_dict(target, path, json_path, my_module, data, builder_name);
else:
debug.error("Wrong type for node 'headers' [] or {}");
def parse_node_generic(target, path, json_path, my_module, data, first = False ):
for elem in data.keys():
if elem in list_of_property_module:
if first == True:
continue;
else:
debug.error("key: '" + elem + "' is NOT allowed at expect in the root node: " + json_path);
continue;
if elem in list_of_element_ignored:
continue;
if elem not in list_of_element_availlable:
debug.warning("key: '" + elem + "' is unknown: " + json_path);
debug.warning("Available List: " + str(list_of_element_ignored) + " or: " + str(list_of_element_availlable));
if "source" in data.keys():
if type(data["source"]) == str:
my_module.add_src_file(data["source"]);
elif type(data["source"]) == list:
my_module.add_src_file(data["source"]);
elif type(data["source"]) == dict:
for builder_key in data["source"].keys():
my_module.add_src_file(data["source"][builder_key], builder_name=builder_key);
else:
debug.error("'" + json_path + "'Wrong type for node 'source' [] or {} or string");
if "header" in data.keys():
parse_node_header(target, path, json_path, my_module, data["header"]);
if "path" in data.keys():
if type(data["path"]) == list:
my_module.add_path(data["path"]);
elif type(data["path"]) == dict:
for key in data["path"]:
my_module.add_path(data["path"][key], type = key);
else:
debug.error("Wrong type for node 'path' [] or {}");
if "dependency" in data.keys():
if type(data["dependency"]) == list:
for elem in data["dependency"]:
GLD_add_depend(my_module, elem);
elif type(data["dependency"]) == str:
GLD_add_depend(my_module, data["dependency"]);
elif type(data["dependency"]) == dict:
GLD_add_depend(my_module, data["dependency"]);
else:
debug.error("Wrong type for node 'dependency' [] or {} or \"\"");
if "compilation-version" in data.keys():
if type(data["compilation-version"]) == dict:
GLD_compile_version(my_module, data["compilation-version"]);
else:
debug.error("Wrong type for node 'compilation-version' {'??lang??':1234}");
if "copy" in data.keys():
if type(data["copy"]) == list:
for elem in data["copy"]:
GLD_copy(my_module, elem);
elif type(data["copy"]) == dict:
GLD_copy(my_module, data["copy"]);
else:
debug.error("Wrong type for node 'dependency' []");
if "arch" in data.keys():
parse_node_arch(target, path, json_path, my_module, data["arch"]);
if "target" in data.keys():
parse_node_platform(target, path, json_path, my_module, data["target"]);
if "mode" in data.keys():
parse_node_mode(target, path, json_path, my_module, data["mode"]);
if "flag" in data.keys():
parse_node_flag(target, path, json_path, my_module, data["flag"], False);
if "flag-export" in data.keys():
parse_node_flag(target, path, json_path, my_module, data["flag-export"], True);
def load_module_from_GLD(target, name, path, json_path):
debug.debug("Parse file: "+ json_path + "'");
try:
data = json.load(open(json_path,))
except json.decoder.JSONDecodeError as ex:
debug.error("Can not parse the file : "+ json_path + " Detect error as : " + str(ex));
property = get_module_option_GLD(path, data, name)
# create the module:
my_module = module.Module(json_path, name, property["type"])
# debug.warning("plopppp " + json.dumps(property, sort_keys=True, indent=4))
# overwrite some package default property (if not set by user)
if property["compagny-type"] != None:
my_module._pkg_set_if_default("COMPAGNY_TYPE", property["compagny-type"])
if property["compagny-name"] != None:
my_module._pkg_set_if_default("COMPAGNY_NAME", property["compagny-name"])
if property["maintainer"] != None:
my_module._pkg_set_if_default("MAINTAINER", property["maintainer"])
if property["name"] != None:
my_module._pkg_set_if_default("NAME", property["name"])
if property["description"] != None:
my_module._pkg_set_if_default("DESCRIPTION", property["description"])
if property["license"] != None:
my_module._pkg_set_if_default("LICENSE", property["license"])
if property["version"] != None:
my_module._pkg_set_if_default("VERSION", property["version"])
if "visibility-map" in data.keys():
if type(data["visibility-map"]) == str:
my_module.set_visibility_map(data["visibility-map"]);
else:
debug.warning("can not support for element: 'visibility-map' must be a string (representing a file)");
if "header-install-mode" in data.keys():
if data["header-install-mode"] == "AFTER":
my_module.set_include_header_after(True);
elif data["header-install-mode"] == "BEFORE":
my_module.set_include_header_after(False);
else:
debug.warning("can not support for element: 'header-install-mode' other value than [BEFORE,AFTER]");
if "code-quality" in data.keys():
if data["code-quality"] in ["LOW","MEDIUM","HARD","PROFESSIONAL"]:
my_module.set_code_quality(data["code-quality"]);
else:
debug.warning("Does not support other level than [LOW, MEDIUM, HARD, PROFESSIONAL]");
# parsing all the file to configure:
parse_node_generic(target, path, json_path, my_module, data, True);
return my_module
def GLD_add_depend(my_module, data):
if type(data) == str:
my_module.add_depend(data);
elif type(data) == dict:
if "name" in data.keys():
name = data["name"];
else:
debug.error("Can not have dependency without name ...");
optional = False;
if "optional" in data.keys():
if type(data["optional"]) == boolean:
optional = data["optional"];
else:
debug.error("Can not have dependency 'optional' in an other type than boolean ...");
export = False;
if "export" in data.keys():
if type(data["export"]) == boolean:
optional = data["export"];
else:
debug.error("Can not have dependency 'export' in an other type than boolean ...");
flags_data = None;
if "flag" in data.keys():
for elem in data["flag"].keys():
flags_data = [elem, data["flag"][elem]]
missing_flags_data = None;
if "missing-flag" in data.keys():
if "language" in data["missing-flag"].keys() and "value" in data["missing-flag"].keys():
missing_flags_data = [data["missing-flag"]["language"], data["missing-flag"]["value"]]
else:
debug.error("Can not have dependency 'missing-flag' without value 'language' and 'value' ...");
src_file=[]
if "source" in data.keys():
if type(data["source"]) == list:
src_file = data["source"];
elif type(data["source"]) == str:
src_file = [ data["source"] ];
else:
debug.error("Can not have dependency 'source' in an other type than [] or string: '" + str(data["source"]) + "'");
header_file=[]
if "header" in data.keys():
if type(data["header"]) == list:
header_file = data["header"];
elif type(data["header"]) == str:
header_file = [ data["header"] ];
else:
debug.error("Can not have dependency 'header' in an other type than [] or string: '" + str(data["header"]) + "'");
compiler={}
if "compiler" in data.keys():
if type(data["compiler"]) == dict:
compiler = data["compiler"];
else:
debug.error("Can not have dependency 'compiler' in an other type than {}: '" + str(data["compiler"]) + "'");
if optional == False:
my_module.add_depend(name);
my_module.add_header_file(header_file);
my_module.add_src_file(src_file);
# TODO: add flags
else:
my_module.add_optionnal_depend(name, flags_data, export=export, compilation_flags_not_found = missing_flags_data, src_file=src_file, header_file=header_file)
else:
debug.error("dependency only support [ {} or string ]");
def GLD_compile_version(my_module, data):
for elem in data.keys():
if data[elem] < 0:
my_module.compile_version(elem, -data[elem], gnu=True)
else:
my_module.compile_version(elem, data[elem])
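# Example: with the convention above, a negative version requests the gnu dialect.
GLD_compile_version(my_module, { "c++": 2017 })      # -> my_module.compile_version("c++", 2017)
GLD_compile_version(my_module, { "c++": -2017 })     # -> my_module.compile_version("c++", 2017, gnu=True)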
def GLD_copy(my_module, data):
try:
if type(data) == dict:
path_src = None;
file_src = None;
path_to = "";
group_folder = "in-shared";
recursive = False;
if "path" in data.keys():
path_src = data["path"];
if "group" in data.keys():
group_folder = data["group"];
if "file" in data.keys():
file_src = data["file"];
if "to" in data.keys():
path_to = data["to"];
if "recursive" in data.keys():
if type(data["recursive"]) == bool:
recursive = data["recursive"];
else:
debug.error("recursive is a boolean !!!");
if path_src == None and file_src == None:
debug.error("copy must at least have 'path' or 'file' !!!");
if path_src != None:
my_module.copy_path(path_src, path_to, group_folder=group_folder);
if file_src != None:
my_module.copy_file(file_src, path_to, group_folder=group_folder);
elif type(data) == str:
my_module.copy_file(data, "", group_folder=group_folder);
else:
debug.error("in module : " + my_module.get_name() + " not supported type for copy: " + type(data) + " string or object data=" + str(data));
except Exception as e:
debug.warning("in module : " + my_module.get_name());
raise e;
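# Example (illustrative file and directory names): the 'copy' node accepts a plain
# file name or an object selecting a source, a destination and a data group.
GLD_copy(my_module, "data/icon.png")                 # single file, copied at the root of the shared data
GLD_copy(my_module, {
    "path": "data/themes",                           # copy a directory ...
    "to": "themes",                                  # ... into this sub-directory of the data area
    "group": "in-bin",                               # or "in-shared" (the default)
})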
def get_module_option_GLD(path, data, name):
type = None;
if "type" in data.keys():
type = data["type"];
# TODO: check it is in a list ...
else:
debug.error(" the node 'type' must be provided in the module: " + name);
sub_type = None
if "sub-type" in data.keys():
sub_type = data["sub-type"];
compagny_type = None;
compagny_name = None;
group_id = None;
if "group-id" in data.keys():
compagny_name = data["group-id"];
group_id = data["group-id"];
description = None;
if "description" in data.keys():
description = data["description"];
license = None;
if "license" in data.keys():
license = data["license"];
license_file = None;
if "license-file" in data.keys():
license_file = data["license-file"];
maintainer = None;
if "author" in data.keys():
maintainer = tools.get_maintainer_from_file_or_direct(path, data["author"]);
version = None;
if "version" in data.keys():
version = tools.get_version_from_file_or_direct(path, data["version"]);
version_id = None;
if "version-id" in data.keys():
version_id = data["version-id"];
# check type property:
if type not in get_module_type_availlable():
debug.error("Does not support module type: '" + str(type) + "' not in " + str(get_module_type_availlable()) + " path: " + str(path));
list_sub_type = ["TEST", "SAMPLE", "TOOL", None];
if sub_type not in list_sub_type:
debug.error("Does not support module sub-type: '" + str(sub_type) + "' not in " + str(list_sub_type) + " path: " + str(path));
return {
"name":name,
"description":description,
"type":type,
"sub-type":sub_type,
"license":license,
"license-file":license_file,
"compagny-type":compagny_type,
"compagny-name":compagny_name,
"group-id":group_id,
"maintainer":maintainer,
"version":version,
"version-id":version_id,
}


@ -61,6 +61,30 @@ def run_command_no_lock_out(cmd_line):
# launch the subprocess:
p.communicate()
def run_command_pwd(cmd_line, cwd):
# prepare command line:
args = shlex.split(cmd_line)
debug.verbose("cmd = " + str(args))
try:
# create the subprocess
"""
if cwd != None:
debug.info("path = " + cwd)
"""
p = subprocess.Popen(args, cwd=cwd);
except subprocess.CalledProcessError as e:
debug.error("subprocess.CalledProcessError : " + str(args))
except Exception as eee:
debug.warning("On : " + str(args))
debug.error("Exception: " + str(eee))
except:
print("Unexpected error:", sys.exc_info()[0])
raise
p.communicate();
# Check error :
return p.returncode
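# Example usage (command and working directory are arbitrary):
ret = run_command_pwd("ls -l", "/tmp")
if ret != 0:
    debug.warning("command ended with code " + str(ret))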
##
## @brief Execute the command and return generated data
##


@ -203,38 +203,45 @@ __start_system_name="System_"
##
def import_path(path_list):
global __system_list
gld_base = env.get_gld_build_system_base_name()
global_base = env.get_build_system_base_name()
debug.debug("SYSTEM: Init with Files list:")
for elem in path_list:
sys.path.append(os.path.dirname(elem))
# Get file name:
filename = os.path.basename(elem)
# Remove .py at the end:
filename = filename[:-3]
# Remove global base name:
filename = filename[len(global_base):]
# Check if it start with the local patern:
if filename[:len(__start_system_name)] != __start_system_name:
debug.extreme_verbose("SYSTEM: NOT-Integrate: '" + filename + "' from '" + elem + "' ==> rejected")
if filename[:len(gld_base)] == gld_base:
filename = filename[len(gld_base):-5]
# Check if it start with the local patern:
if filename[:len(__start_system_name)] != __start_system_name:
debug.extreme_verbose("SYSTEM: NOT-Integrate: '" + filename + "' from '" + elem + "' ==> rejected")
continue
continue
# Remove local patern
system_name = filename[len(__start_system_name):]
system_type, system_name = system_name.split('_')
debug.verbose("SYSTEM: Integrate: '" + system_type + "':'" + system_name + "' from '" + elem + "'")
if system_type in __system_list:
__system_list[system_type].append({"name":system_name,
"path":elem,
"system":None,
"loaded":False,
"exist":False,
"module":None})
else:
__system_list[system_type] = [{"name":system_name,
"path":elem,
"system":None,
"loaded":False,
"exist":False,
"module":None}]
elif filename[:len(global_base)] == global_base:
filename = filename[len(global_base):-3]
# Check if it start with the local patern:
if filename[:len(__start_system_name)] != __start_system_name:
debug.extreme_verbose("SYSTEM: NOT-Integrate: '" + filename + "' from '" + elem + "' ==> rejected")
continue
# Remove local patern
system_name = filename[len(__start_system_name):]
system_type, system_name = system_name.split('_')
debug.verbose("SYSTEM: Integrate: '" + system_type + "':'" + system_name + "' from '" + elem + "'")
if system_type in __system_list:
__system_list[system_type].append({"name":system_name,
"path":elem,
"system":None,
"loaded":False,
"exist":False,
"module":None})
else:
__system_list[system_type] = [{"name":system_name,
"path":elem,
"system":None,
"loaded":False,
"exist":False,
"module":None}]
debug.verbose("New list system: ")
for elem in __system_list:
debug.verbose(" " + str(elem))


@ -109,7 +109,7 @@ class Target:
if "debug" == self.config["mode"]:
self.add_flag("c", [
"-g",
"-DDEBUG"
#"-DDEBUG"
])
if env.get_force_optimisation() == False:
self.add_flag("c", "-O0")
@ -117,7 +117,7 @@ class Target:
self.add_flag("c", "-O3")
else:
self.add_flag("c", [
"-DNDEBUG",
#"-DNDEBUG",
"-O3"
])
@ -144,6 +144,8 @@ class Target:
self.path_bin="bin"
self.path_lib="lib"
self.path_data="share"
self.path_data_in_shared="data_share"
self.path_data_in_bin="data_bin"
self.path_doc="doc"
self.path_include="include"
self.path_temporary_generate="generate"
@ -444,10 +446,15 @@ class Target:
## @brief Get the data path where pre-write the install "data" files
## @param[in] self (handle) Class handle
## @param[in] name (string) Name of the module
## @param[in] group (enum [in-bin, in-shared]) Group of the destination
## @return (string) The path
##
def get_build_path_data(self, name):
return os.path.join(self.get_build_path(name), self.path_data, name)
def get_build_path_data(self, name, group):
if group == "in-shared":
return os.path.join(self.get_build_path(name), self.path_data_in_shared, name)
elif group == "in-bin":
return os.path.join(self.get_build_path(name), self.path_data_in_bin, name)
debug.error("wrong type (impossible case...)")
##
## @brief Get the include path where pre-install "include" headers files
@ -519,10 +526,15 @@ class Target:
## @brief Get the data path for staging step
## @param[in] self (handle) Class handle
## @param[in] name (string) Name of the package
## @param[in] group (enum [in-bin, in-shared]) Group of the destination
## @return (string) The path
##
def get_staging_path_data(self, name, tmp=False):
return os.path.join(self.get_staging_path(name, tmp), self.path_data, name)
def get_staging_path_data(self, name, tmp=False, group = "in-shared"):
if group == "in-shared":
return os.path.join(self.get_staging_path(name, tmp), self.path_data, name)
elif group == "in-bin":
return os.path.join(self.get_staging_path(name, tmp), self.path_bin, name)
debug.error("wrong type (impossible case...)")
##
## @brief Get the include path for staging step
@ -645,6 +657,7 @@ class Target:
## @return (None|Module handle| ...) complicated return ...
##
def build(self, name, optionnal=False, actions=[], package_name=None):
debug.extreme_verbose("call build ... " + str(name));
if len(name.split("?")) != 1\
or len(name.split("@")) != 1:
debug.error("need update")
@ -711,120 +724,145 @@ class Target:
for mod in self.module_list:
mod.clean(self)
else:
module_name = name
action_list = actions
for action_name in action_list:
debug.verbose("requested : " + module_name + "?" + action_name + " [START]")
ret = None;
if action_name == "install":
try:
self.install_package(module_name)
except AttributeError:
debug.error("target have no 'install_package' instruction")
elif action_name == "uninstall":
try:
self.un_install_package(module_name)
except AttributeError:
debug.error("target have no 'un_install_package' instruction")
elif action_name[:3] == "run":
"""
if mod.get_type() != "BINARY" \
and mod.get_type() != "PACKAGE":
debug.error("Can not run other than 'BINARY' ... pakage='" + mod.get_type() + "' for module='" + module_name + "'")
"""
bin_name = None
if len(action_name) > 3:
if action_name[3] == '%':
bin_name = ""
for elem in action_name[4:]:
if elem == ":":
break;
bin_name += elem
# we have option:
action_name2 = action_name.replace("\:", "1234COLUMN4321")
option_list = action_name2.split(":")
if len(option_list) == 0:
if bin_name != None:
debug.warning("action 'run' wrong options options ... : '" + action_name + "' might be separate with ':'")
option_list = []
if name.find("*") != -1:
list_of_all_element = module.list_filtered_module(name);
else:
list_of_all_element = [name]
global_run_error = [];
for module_name in list_of_all_element:
action_list = actions
for action_name in action_list:
debug.verbose("requested : " + module_name + "?" + action_name + " [START]")
ret = None;
if action_name == "install":
try:
self.install_package(module_name)
except AttributeError:
debug.error("target have no 'install_package' instruction")
elif action_name == "uninstall":
try:
self.un_install_package(module_name)
except AttributeError:
debug.error("target have no 'un_install_package' instruction")
elif action_name[:3] == "run":
sub_action_name = action_name[3:];
"""
if mod.get_type() != "BINARY" \
and mod.get_type() != "PACKAGE":
debug.error("Can not run other than 'BINARY' ... pakage='" + mod.get_type() + "' for module='" + module_name + "'")
"""
bin_name = None
if len(sub_action_name) > 0:
if sub_action_name[0] == '%':
bin_name = ""
for elem in sub_action_name[1:]:
if elem == ":":
break;
bin_name += elem
# we have option:
action_name2 = action_name.replace("\:", "1234COLUMN4321")
option_list = action_name2.split(":")
if len(option_list) == 0:
if bin_name != None:
debug.warning("action 'run' wrong options options ... : '" + action_name + "' might be separate with ':'")
option_list = []
else:
option_list = []
ret_value = self.run(module_name, option_list, bin_name);
else:
option_list_tmp = option_list[1:]
option_list = []
for elem in option_list_tmp:
option_list.append(elem.replace("1234COLUMN4321", ":"))
#try:
ret_value = self.run(module_name, option_list, bin_name)
if not env.get_async_fail():
if ret_value != 0:
debug.error("FAIL in execute process : '" + str(module_name) + "' ==> bin name='" + str(bin_name) + "' with option: " + str(option_list) + " RETURN value: " + str(ret));
else:
if ret_value != 0:
debug.warning("FAIL execute process : '" + str(module_name) + "' ==> bin name='" + str(bin_name) + "' with option: " + str(option_list) + " RETURN value: " + str(ret));
global_run_error.append({
"module": module_name,
"bin": bin_name,
"options": option_list,
"return": ret_value,
"type": "Execution Fail ..."
})
else:
global_run_error.append({
"module": module_name,
"bin": bin_name,
"options": option_list,
"return": ret_value,
"type": "Execution OK ..."
})
#except AttributeError:
# debug.error("target have no 'run' instruction")
elif action_name == "log":
try:
self.show_log(module_name)
except AttributeError:
debug.error("target have no 'show_log' instruction")
else:
option_list = []
#try:
self.run(module_name, option_list, bin_name)
#except AttributeError:
# debug.error("target have no 'run' instruction")
elif action_name == "log":
try:
self.show_log(module_name)
except AttributeError:
debug.error("target have no 'show_log' instruction")
else:
present = self.load_if_needed(module_name, optionnal=optionnal)
if present == False \
and optionnal == True:
ret = [heritage.HeritageList(), False]
present = self.load_if_needed(module_name, optionnal=optionnal)
if present == False \
and optionnal == True:
ret = [heritage.HeritageList(), False, []]
else:
for mod in self.module_list:
debug.verbose("compare " + mod.get_name() + " == " + module_name)
if mod.get_name() == module_name:
if action_name[:4] == "dump":
debug.info("dump module '" + module_name + "'")
if len(action_name) > 4:
debug.warning("action 'dump' does not support options ... : '" + action_name + "'")
ret = mod.display()
break
elif action_name[:5] == "clean":
debug.info("clean module '" + module_name + "'")
if len(action_name) > 5:
debug.warning("action 'clean' does not support options ... : '" + action_name + "'")
ret = mod.clean(self)
break
elif action_name[:4] == "gcov":
debug.debug("gcov on module '" + module_name + "'")
if len(action_name) > 4:
# we have option:
option_list = action_name.split(":")
if len(option_list) == 0:
debug.warning("action 'gcov' wrong options options ... : '" + action_name + "' might be separate with ':'")
option_list = []
if "output" in option_list:
ret = mod.gcov(self, generate_output=True)
else:
ret = mod.gcov(self, generate_output=False)
break
elif action_name[:5] == "build":
if len(action_name) > 5:
debug.warning("action 'build' does not support options ... : '" + action_name + "'")
debug.debug("build module '" + module_name + "'")
ret = [mod.build(self, package_name), True, []]
break
# at the end of the build selected...
if optionnal == True \
and ret == None:
ret = [heritage.HeritageList(), False, []]
break
if ret == None:
debug.error("not know module name : '" + module_name + "' to '" + action_name + "' it")
debug.verbose("requested : " + module_name + "?" + action_name + " [STOP]")
#display errors
if len(global_run_error) != 0:
pass;
else:
for mod in self.module_list:
debug.verbose("compare " + mod.get_name() + " == " + module_name)
if mod.get_name() == module_name:
if action_name[:4] == "dump":
debug.info("dump module '" + module_name + "'")
if len(action_name) > 4:
debug.warning("action 'dump' does not support options ... : '" + action_name + "'")
ret = mod.display()
break
elif action_name[:5] == "clean":
debug.info("clean module '" + module_name + "'")
if len(action_name) > 5:
debug.warning("action 'clean' does not support options ... : '" + action_name + "'")
ret = mod.clean(self)
break
elif action_name[:4] == "gcov":
debug.debug("gcov on module '" + module_name + "'")
if len(action_name) > 4:
# we have option:
option_list = action_name.split(":")
if len(option_list) == 0:
debug.warning("action 'gcov' wrong options options ... : '" + action_name + "' might be separate with ':'")
option_list = []
else:
option_list = option_list[1:]
else:
option_list = []
if "output" in option_list:
ret = mod.gcov(self, generate_output=True)
else:
ret = mod.gcov(self, generate_output=False)
break
elif action_name[:5] == "build":
if len(action_name) > 5:
debug.warning("action 'build' does not support options ... : '" + action_name + "'")
debug.debug("build module '" + module_name + "'")
if optionnal == True:
ret = [mod.build(self, package_name), True]
else:
ret = mod.build(self, package_name)
break
if optionnal == True \
and ret == None:
ret = [heritage.HeritageList(), False]
break
if ret == None:
debug.error("not know module name : '" + module_name + "' to '" + action_name + "' it")
debug.verbose("requested : " + module_name + "?" + action_name + " [STOP]")
if len(action_list) == 1:
return ret
if len(action_list) == 1 and len(list_of_all_element) == 1:
return ret;
# end of all element processing...
return [None, False, global_run_error];
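# Compact restatement of the 'run' action syntax handled above (the helper and the
# example strings are illustrative only): "run[%binary][:opt1[:opt2...]]", where
# "\:" protects a literal ':' inside an option.
def parse_run_action(action_name):
    sub = action_name[3:]                      # text after the leading "run"
    bin_name = None
    if sub.startswith('%'):
        bin_name = sub[1:].split(':')[0]       # binary name runs up to the first ':'
    protected = action_name.replace("\\:", "1234COLUMN4321")
    options = [elem.replace("1234COLUMN4321", ":") for elem in protected.split(":")[1:]]
    return bin_name, options
# parse_run_action("run%my-bin:--help")     -> ("my-bin", ["--help"])
# parse_run_action("run:--verbose:--foo")   -> (None, ["--verbose", "--foo"])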
##
## @brief Add action to do for package specific part when build upper element
## @param[in] name_of_state (string) a state to call action
## - BINARY
## - BINARY_SHARED
## - BINARY_DYNAMIC
## - BINARY_STAND_ALONE
## - LIBRARY
## - LIBRARY_DYNAMIC
@ -870,7 +908,7 @@ class Target:
elif module.get_type() == 'BINARY' \
or module.get_type() == 'BINARY_STAND_ALONE':
self.make_package_binary(pkg_name, pkg_properties, base_pkg_path, heritage_list, static = True)
elif module.get_type() == 'BINARY_SHARED':
elif module.get_type() == 'BINARY_DYNAMIC':
self.make_package_binary(pkg_name, pkg_properties, base_pkg_path, heritage_list, static = False)
elif module.get_type() == 'PACKAGE':
self.make_package_binary(pkg_name, pkg_properties, base_pkg_path, heritage_list, static = False)
@ -899,7 +937,7 @@ class Target:
debug.debug("heritage for " + str(pkg_name) + ":")
for heritage in heritage_list.list_heritage:
debug.debug("sub elements: " + str(heritage.name))
path_src = self.get_build_path_data(heritage.name)
path_src = self.get_build_path_data(heritage.name, group="in-shared")
debug.verbose(" has directory: " + path_src)
if os.path.isdir(path_src):
if static == True:
@ -991,6 +1029,29 @@ class Target:
except:
debug.extreme_verbose("can not find : " + path_src)
pass
# copy data for the data inside bin (stupid, but needed)
debug.debug("heritage for " + str(pkg_name) + ":")
for heritage in heritage_list.list_heritage:
debug.debug("sub elements: " + str(heritage.name))
path_src = self.get_build_path_data(heritage.name, group="in-bin")
if os.path.isdir(path_src):
if static == True:
debug.debug(" need copy: " + path_src + " to " + path_package_bin)
#copy all data:
tools.copy_anything(path_src,
path_package_bin,
recursive=True,
force_identical=True,
in_list=copy_list)
else:
debug.debug(" need copy: " + os.path.dirname(path_src) + " to " + path_package_bin)
#copy all data:
tools.copy_anything(os.path.dirname(path_src),
path_package_bin,
recursive=True,
force_identical=True,
in_list=copy_list)
#real copy files
ret_copy = tools.copy_list(copy_list)
ret_remove = False
@ -1109,28 +1170,29 @@ class Target:
or ret_changelog
def install_package(self, pkg_name):
debug.debug("------------------------------------------------------------------------")
debug.info("-- Install package '" + pkg_name + "'")
debug.debug("------------------------------------------------------------------------")
debug.error("action not implemented ...")
debug.debug("------------------------------------------------------------------------");
debug.info("-- Install package '" + pkg_name + "'");
debug.debug("------------------------------------------------------------------------");
debug.error("action not implemented ...");
def un_install_package(self, pkg_name):
debug.debug("------------------------------------------------------------------------")
debug.info("-- Un-Install package '" + pkg_name + "'")
debug.debug("------------------------------------------------------------------------")
debug.error("action not implemented ...")
debug.debug("------------------------------------------------------------------------");
debug.info("-- Un-Install package '" + pkg_name + "'");
debug.debug("------------------------------------------------------------------------");
debug.error("action not implemented ...");
def run(self, pkg_name, option_list, binary_name = None):
debug.debug("------------------------------------------------------------------------")
debug.info("-- Run package '" + pkg_name + "' + option: " + str(option_list))
debug.debug("------------------------------------------------------------------------")
debug.error("action not implemented ...")
debug.debug("------------------------------------------------------------------------");
debug.info("-- Run package '" + pkg_name + "' + option: " + str(option_list));
debug.debug("------------------------------------------------------------------------");
debug.warning("action not implemented ...");
return -1;
def show_log(self, pkg_name):
debug.debug("------------------------------------------------------------------------")
debug.info("-- Show log logcat '" + pkg_name + "'")
debug.debug("------------------------------------------------------------------------")
debug.error("action not implemented ...")
debug.debug("------------------------------------------------------------------------");
debug.info("-- Show log logcat '" + pkg_name + "'");
debug.debug("------------------------------------------------------------------------");
debug.error("action not implemented ...");
##
## @brief convert a s list of string in a string separated by a ","
@ -1158,6 +1220,7 @@ __start_target_name="Target_"
##
def import_path(path_list):
global __target_list
gld_base = env.get_gld_build_system_base_name()
global_base = env.get_build_system_base_name()
debug.debug("TARGET: Init with Files list:")
for elem in path_list:
@ -1167,15 +1230,23 @@ def import_path(path_list):
# Remove .py at the end:
filename = filename[:-3]
# Remove global base name:
filename = filename[len(global_base):]
# Check if it start with the local patern:
if filename[:len(__start_target_name)] != __start_target_name:
debug.extreme_verbose("TARGET: NOT-Integrate: '" + filename + "' from '" + elem + "' ==> rejected")
if filename[:len(gld_base)] == gld_base:
filename = filename[len(gld_base):]
# Check if it start with the local patern:
if filename[:len(__start_target_name)] != __start_target_name:
debug.extreme_verbose("TARGET: NOT-Integrate: '" + filename + "' from '" + elem + "' ==> rejected")
continue
continue
# Remove local patern
target_name = filename[len(__start_target_name):]
debug.verbose("TARGET: Integrate: '" + target_name + "' from '" + elem + "'")
__target_list.append([target_name, elem])
elif filename[:len(global_base)] == global_base:
filename = filename[len(global_base):]
# Check if it start with the local patern:
if filename[:len(__start_target_name)] != __start_target_name:
debug.extreme_verbose("TARGET: NOT-Integrate: '" + filename + "' from '" + elem + "' ==> rejected")
continue
# Remove local patern
target_name = filename[len(__start_target_name):]
debug.verbose("TARGET: Integrate: '" + target_name + "' from '" + elem + "'")
__target_list.append([target_name, elem])
debug.verbose("New list TARGET: ")
for elem in __target_list:
debug.verbose(" " + str(elem[0]))
@ -1187,7 +1258,7 @@ def load_target(name, config):
global __target_list
debug.debug("load target: " + name)
if len(__target_list) == 0:
debug.error("No target to compile !!!")
debug.error("No target to execute !!!")
debug.debug("list target: " + str(__target_list))
for mod in __target_list:
if mod[0] == name:


@ -310,6 +310,17 @@ def filter_extention(list_files, extentions, invert=False):
out.append(file)
return out
def filter_map(input_data, extentions, invert=False):
out = {}
for key in input_data.keys():
if invert:
if key not in extentions:
out[key] = input_data[key];
else:
if key in extentions:
out[key] = input_data[key];
return out
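# Example usage of filter_map (values are arbitrary):
flags = { "c": ["-O2"], "c++": ["-std=c++17"], "nasm": ["-felf64"] }
filter_map(flags, ["c", "c++"])                # -> {"c": ["-O2"], "c++": ["-std=c++17"]}
filter_map(flags, ["c", "c++"], invert=True)   # -> {"nasm": ["-felf64"]}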
def move_if_needed(src, dst):
if not os.path.isfile(src):
@ -417,6 +428,9 @@ def get_version_from_file_or_direct(path_module, filename_or_version):
if type(filename_or_version) == list:
return filename_or_version
# this use a version file
if filename_or_version[:7] == "file://":
filename_or_version = filename_or_version[7:];
# this use a version file
file_data = file_read_data(os.path.join(path_module, filename_or_version))
if len(file_data) == 0:
debug.warning("not enought data in the file version size=0 " + path_module + " / " + filename_or_version)
@ -455,6 +469,8 @@ def get_maintainer_from_file_or_direct(path_module, filename_or_author):
if type(filename_or_author) == list:
return filename_or_author
# this use a version file
if filename_or_author[:7] == "file://":
filename_or_author = filename_or_author[7:];
file_data = file_read_data(os.path.join(path_module, filename_or_author))
if len(file_data) == 0:
debug.warning("not enought data in the file author size=0 " + path_module + " / " + filename_or_author)


@ -31,6 +31,12 @@ def init():
def get_type():
return "linker"
##
## @brief Get the order of the current builder
## @return the integer that defines the build order
##
def get_order():
return 1100
##
## @brief Get builder input file type


@ -40,6 +40,13 @@ def get_type():
def get_input_type():
return ["cpp", "CPP", "cxx", "CXX", "xx", "XX", "CC", "cc"]
##
## @brief Get the order of the current builder
## @return the integer that defines the build order
##
def get_order():
return 600
##
## @brief Get builder output file type
## @return List of extention supported
@ -152,11 +159,16 @@ def get_version_compilation_flags(flags, dependency_flags):
is_gnu = default_version_gnu
version = max(version_local, dependency_version)
if version == 2020:
if version == 2023:
if is_gnu == True:
out = ["-std=gnu++2a", "-D__CPP_VERSION__=2020"]
out = ["-std=gnu++23", "-D__CPP_VERSION__=2023"]
else:
out = ["-std=c++2a", "-D__CPP_VERSION__=2020"]
out = ["-std=c++23", "-D__CPP_VERSION__=2023"]
elif version == 2020:
if is_gnu == True:
out = ["-std=gnu++20", "-D__CPP_VERSION__=2020"]
else:
out = ["-std=c++20", "-D__CPP_VERSION__=2020"]
elif version == 2017:
if is_gnu == True:
out = ["-std=gnu++17", "-D__CPP_VERSION__=2017"]


@ -34,6 +34,13 @@ def init():
def get_type():
return "compiler"
##
## @brief Get the order of the current builder
## @return the integer that defines the build order
##
def get_order():
return 400
##
## @brief Get builder input file type
## @return List of extention supported


@ -31,6 +31,13 @@ def init():
def get_type():
return "linker"
##
## @brief Get the order of the current builder
## @return the integer that defines the build order
##
def get_order():
return 1200
##
## @brief Get builder input file type
## @return List of extention supported


@ -29,6 +29,13 @@ def init():
def get_type():
return "compiler"
##
## @brief Get the order of the current builder
## @return the integer that defines the build order
##
def get_order():
return 800
##
## @brief Get builder input file type
## @return List of extention supported


@ -29,6 +29,13 @@ def init():
def get_type():
return "compiler"
##
## @brief Get the order of the current builder
## @return the integer that defines the build order
##
def get_order():
return 100
##
## @brief Get builder input file type
## @return List of extention supported


@ -31,6 +31,13 @@ def init():
def get_type():
return "linker"
##
## @brief Get the order of the current builder
## @return the integer that defines the build order
##
def get_order():
return 900
##
## @brief Get builder input file type
## @return List of extention supported
@ -56,7 +63,7 @@ def get_support_multithreading():
##
## @brief Commands for running gcc to link a shared library.
##
def link(file, binary, target, depancy, flags, name, basic_path, static=False):
def link(file, binary, target, depancy, flags, name, basic_path, static=False, visibility_file = None):
file_src = file
file_dst = target.get_build_file_dynamic(name)
file_depend = file_dst + target.suffix_dependence
@ -143,6 +150,9 @@ def link(file, binary, target, depancy, flags, name, basic_path, static=False):
cmd.append("-Wl,-R$ORIGIN/../lib/")
except:
pass
if visibility_file != None:
cmd.append("-Wl,--version-script=" + visibility_file);
for view in ["local", "export"]:
if view not in flags:
continue
@ -161,6 +171,8 @@ def link(file, binary, target, depancy, flags, name, basic_path, static=False):
and depend.need_re_package(file_dst, depancy.src, False, file_cmd, cmdLine) == False:
return file_dst
tools.create_directory_of_file(file_dst)
debug.print_element("SharedLib", name, "==>", os.path.relpath(file_dst))
multiprocess.run_command(cmdLine, store_output_file=file_warning)
# strip the output file:


@ -31,6 +31,13 @@ def init():
def get_type():
return "linker"
##
## @brief Get the order of the current builder
## @return the integer that defines the build order
##
def get_order():
return 1000
##
## @brief Get builder input file type
## @return List of extention supported


@ -35,6 +35,13 @@ def init():
def get_type():
return "compiler"
##
## @brief Get the order of the current builder
## @return the integer that defines the build order
##
def get_order():
return 500
##
## @brief Get builder input file type
## @return List of extention supported


@ -35,6 +35,13 @@ def init():
def get_type():
return "compiler"
##
## @brief Get the order of the current builder
## @return the integer that defines the build order
##
def get_order():
return 700
##
## @brief Get builder input file type
## @return List of extention supported


@ -0,0 +1,134 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
##
## @author Edouard DUPIN
##
## @copyright 2012, Edouard DUPIN, all right reserved
##
## @license MPL v2.0 (see license file)
##
##
## C++ builder
##
from lutin import multiprocess
from lutin import tools
from realog import debug
from lutin import depend
from lutin import env
##
## Initialize the builder, if needed ... to get dependency between builder (for example)
##
def init():
pass
##
## Get the current builder type.
## Return the type of builder
##
def get_type():
return "compiler"
##
## @brief Get builder input file type
## @return List of extention supported
##
def get_input_type():
return [];#["s"]
##
## @brief Get the order of the current builder
## @return the integer that defines the build order
##
def get_order():
return 200
##
## @brief Get builder output file type
## @return List of extention supported
##
def get_output_type():
return ["o"]
##
## @brief Get builder support multi-threading or not
## @return True Multithreading supported
## @return False Multithreading NOT supported
##
def get_support_multithreading():
return True
##
## @brief Commands for running gcc to compile a C++ file in object file.
##
def compile(file, binary, target, depancy, flags, path, name, basic_path, module_src):
file_src = target.get_full_name_source(basic_path, file)
file_cmd = target.get_full_name_cmd(name, basic_path, file)
file_dst = target.get_full_name_destination(name, basic_path, file, get_output_type())
file_depend = target.get_full_dependency(name, basic_path, file)
file_warning = target.get_full_name_warning(name, basic_path, file)
# set ccache interface:
compilator_ccache = ""
if env.get_ccache() == True:
compilator_ccache = "ccache"
# create the command line before requesting start:
cmd = [
compilator_ccache,
"nasm",
"-o", file_dst,
"-f", "elf64",
target.sysroot
]
for view in ["export", "local"]:
for type in ["nasm"]:
try:
cmd.append(tools.add_prefix("-I",path[view][type]))
except:
pass
for type in ["nasm"]:
try:
cmd.append(tools.add_prefix("-I",depancy.path[type]))
except:
pass
cmd.append(target.global_include_cc)
list_flags = [];
if "nasm" in target.global_flags:
list_flags.append(target.global_flags["nasm"])
for view in ["local", "export"]:
if view in flags:
for type in ["nasm"]:
if type in flags[view]:
list_flags.append(flags[view][type])
# get blacklist of flags
list_flags_blacklist = [];
if "nasm-remove" in target.global_flags:
list_flags_blacklist.append(target.global_flags["nasm-remove"])
for type in ["nasm-remove"]:
if type in depancy.flags:
list_flags_blacklist.append(depancy.flags[type])
for view in ["local", "export"]:
if view in flags:
for type in ["c-nasm"]:
if type in flags[view]:
list_flags_blacklist.append(flags[view][type])
# apply blacklisting of data and add it on the cmdLine
clean_flags = tools.remove_element(list_flags, list_flags_blacklist)
#debug.warning("plop " + str(list_flags_blacklist) + " " + str(list_flags) + " --> " + str(clean_flags) )
cmd.append(clean_flags);
cmd.append(["-DPIC"])
cmd.append(["-MP"])
cmd.append(file_src)
# Create cmd line
cmdLine = tools.list_to_str(cmd)
# check the dependency for this file :
if depend.need_re_build(file_dst, file_src, file_depend, file_cmd, cmdLine) == False:
return {"action":"add", "file":file_dst}
tools.create_directory_of_file(file_dst)
comment = ["nasm", name, "<==", file]
#process element
multiprocess.run_in_pool(cmdLine, comment, file_cmd, store_output_file=file_warning)
return {"action":"add", "file":file_dst}


@ -29,24 +29,31 @@ def init():
def get_type():
return "compiler"
##
## @brief Get the order of the current builder
## @return the integer that defines the build order
##
def get_order():
return 300
##
## @brief Get builder input file type
## @return List of extention supported
## @return List of extension supported
##
def get_input_type():
return ["s", "S"]
##
## @brief Get builder output file type
## @return List of extention supported
## @return List of extension supported
##
def get_output_type():
return ["o"]
##
## @brief Get builder support multi-threading or not
## @return True Multithreading supported
## @return False Multithreading NOT supported
## @return True Multi-threading supported
## @return False Multi-threading NOT supported
##
def get_support_multithreading():
return True


@ -22,8 +22,8 @@ class System(system.System):
self.set_valid(True)
# no check needed ==> just add this:
self.add_depend(['c'])
self.add_flag('link-lib', 'Xv')
self.add_flag('link-lib', 'Xt')
#self.add_flag('link-lib', 'Xv')
#self.add_flag('link-lib', 'Xt')
self.add_flag('link-lib', 'X11')
if env.get_isolate_system() == True:
self.add_header_file([


@ -0,0 +1,40 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
##
## @author Edouard DUPIN
##
## @copyright 2012, Edouard DUPIN, all right reserved
##
## @license MPL v2.0 (see license file)
##
from realog import debug
from lutin import system
from lutin import tools
from lutin import env
import os
class System(system.System):
def __init__(self, target):
system.System.__init__(self)
# create some HELP:
self.set_help("bz2 : ???")
# No check ==> on the basic std libs:
self.set_valid(True)
# todo : create a searcher of the presence of the library:
self.add_flag("link-lib", "bsd")
self.add_depend([
'c'
])
"""
if env.get_isolate_system() == True:
self.add_header_file([
"/usr/include/sys/mman.h",
"/usr/include/sys/stat.h"
],
clip_path="/usr/include",
recursive=False)
"""


@ -0,0 +1,40 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
##
## @author Edouard DUPIN
##
## @copyright 2012, Edouard DUPIN, all right reserved
##
## @license MPL v2.0 (see license file)
##
from realog import debug
from lutin import system
from lutin import tools
from lutin import env
import os
class System(system.System):
def __init__(self, target):
system.System.__init__(self)
# create some HELP:
self.set_help("bz2 : ???")
# No check ==> on the basic std libs:
self.set_valid(True)
# todo : create a searcher of the presence of the library:
self.add_flag("link-lib", "bz2")
self.add_depend([
'c'
])
"""
if env.get_isolate_system() == True:
self.add_header_file([
"/usr/include/sys/mman.h",
"/usr/include/sys/stat.h"
],
clip_path="/usr/include",
recursive=False)
"""


@ -30,7 +30,7 @@ class System(system.System):
self.add_flag("c++", "-D__STDCPP_GNU__")
if env.get_isolate_system() == False:
self.add_flag("c++-remove", "-nostdlib")
self.add_flag("need-libstdc++", True)
self.add_flag("need-libstdc++", True) # regarder a quoi ca sert !!!!
else:
self.add_flag("link-lib", "stdc++")
compilator_gcc = "g++"


@ -22,7 +22,7 @@ class System(system.System):
self.set_valid(True)
# no check needed ==> just add this:
self.add_depend([
'khr',
# TODO: needed for wayland : 'khr',
])
self.add_flag('link-lib', 'EGL')
if env.get_isolate_system() == True:


@ -0,0 +1,40 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
##
## @author Edouard DUPIN
##
## @copyright 2012, Edouard DUPIN, all right reserved
##
## @license MPL v2.0 (see license file)
##
from realog import debug
from lutin import system
from lutin import tools
from lutin import env
import os
class System(system.System):
def __init__(self, target):
system.System.__init__(self)
# create some HELP:
self.set_help("bz2 : ???")
# No check ==> on the basic std libs:
self.set_valid(True)
# todo : create a searcher of the presence of the library:
self.add_flag("link-lib", "gnutls")
self.add_depend([
'c'
])
"""
if env.get_isolate_system() == True:
self.add_header_file([
"/usr/include/sys/mman.h",
"/usr/include/sys/stat.h"
],
clip_path="/usr/include",
recursive=False)
"""


@ -0,0 +1,40 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
##
## @author Edouard DUPIN
##
## @copyright 2012, Edouard DUPIN, all right reserved
##
## @license MPL v2.0 (see license file)
##
from realog import debug
from lutin import system
from lutin import tools
from lutin import env
import os
class System(system.System):
def __init__(self, target):
system.System.__init__(self)
# create some HELP:
self.set_help("ICU : ICU is a generic interface to manage multiple language model")
# No check ==> on the basic std libs:
self.set_valid(True)
# todo : create a searcher of the presence of the library:
self.add_flag("link-lib", "icuuc")
self.add_flag("link-lib", "icui18n")
self.add_flag("link-lib", "icudata")
self.add_depend([
'c'
])
if env.get_isolate_system() == True:
self.add_header_file([
"/usr/include/unicode/icuplug.h",
"/usr/include/unicode/icudataver.h"
],
clip_path="/usr/include",
recursive=False)


@ -0,0 +1,35 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
##
## @author Edouard DUPIN
##
## @copyright 2012, Edouard DUPIN, all right reserved
##
## @license MPL v2.0 (see license file)
##
from realog import debug
from lutin import system
from lutin import tools
from lutin import env
import os
class System(system.System):
def __init__(self, target):
system.System.__init__(self)
# create some HELP:
self.set_help("lzma : ???")
# No check ==> on the basic std libs:
self.set_valid(True)
# todo : create a searcher of the presence of the library:
self.add_flag("link-lib", "lzma")
if env.get_isolate_system() == True:
self.add_header_file([
"/usr/include/lzma/lzma12.h",
"/usr/include/lzma.h"
],
clip_path="/usr/include",
recursive=False)


@ -0,0 +1,35 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
##
## @author Edouard DUPIN
##
## @copyright 2012, Edouard DUPIN, all right reserved
##
## @license MPL v2.0 (see license file)
##
from realog import debug
from lutin import system
from lutin import tools
from lutin import env
import os
class System(system.System):
def __init__(self, target):
system.System.__init__(self)
# create some HELP:
self.set_help("python numpy library")
# check if the library exist:
for version in ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"]:
if os.path.isdir("/usr/lib/python" + version + "/site-packages/numpy/core/include"):
self.set_valid(True)
# todo : create a searcher of the presence of the library:
self.add_flag("link-lib", "python" + version);
if env.get_isolate_system() == True:
self.add_header_file(self, "/usr/lib/python" + version + "/site-packages/numpy/core/include/*", clip_path="/usr/lib/python" + version + "/site-packages/numpy/core/include/", recursive=True);
else:
self.add_path("/usr/lib/python" + version + "/site-packages/numpy/core/include/");
return;


@ -0,0 +1,35 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
##
## @author Edouard DUPIN
##
## @copyright 2012, Edouard DUPIN, all right reserved
##
## @license MPL v2.0 (see license file)
##
from realog import debug
from lutin import system
from lutin import tools
from lutin import env
import os
class System(system.System):
def __init__(self, target):
system.System.__init__(self)
# create some HELP:
self.set_help("Python3 library \n Can be install with the package:\n - zlib1g-dev")
# check if the library exist:
for version in ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"]:
if os.path.isdir("/usr/include/python" + version):
self.set_valid(True)
# todo : create a searcher of the presence of the library:
self.add_flag("link-lib", "python" + version);
if env.get_isolate_system() == True:
self.add_header_file(self, "/usr/include/python" + version + "/*", clip_path="/usr/include/", recursive=True);
else:
self.add_path("/usr/include/python" + version + "/");
return;


@ -0,0 +1,38 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
##
## @author Edouard DUPIN
##
## @copyright 2012, Edouard DUPIN, all right reserved
##
## @license MPL v2.0 (see license file)
##
from realog import debug
from lutin import system
from lutin import tools
from lutin import env
import os
class System(system.System):
def __init__(self, target):
system.System.__init__(self)
# create some HELP:
self.set_help("RT : rt access to the shared momory interface")
# No check ==> on the basic std libs:
self.set_valid(True)
# todo : create a searcher of the presence of the library:
self.add_flag("link-lib", "rt")
self.add_depend([
'c'
])
if env.get_isolate_system() == True:
self.add_header_file([
"/usr/include/sys/mman.h",
"/usr/include/sys/stat.h"
],
clip_path="/usr/include",
recursive=False)


@ -0,0 +1,31 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
##
## @author Edouard DUPIN
##
## @copyright 2012, Edouard DUPIN, all right reserved
##
## @license MPL v2.0 (see license file)
##
from realog import debug
from lutin import system
from lutin import tools
from lutin import env
import os
class System(system.System):
def __init__(self, target):
system.System.__init__(self)
# create some HELP:
self.set_help("sodium : ???")
# No check ==> on the basic std libs:
self.set_valid(True)
# todo : create a searcher of the presence of the library:
self.add_flag("link-lib", "sodium")
self.add_depend([
'c'
])


@ -22,7 +22,7 @@ class System(system.System):
self.set_valid(True)
# no check needed ==> just add this:
self.add_depend(['c'])
self.add_flag('link-lib', 'xkbcommon')
#self.add_flag('link-lib', 'xkbcommon')
if env.get_isolate_system() == True:
self.add_header_file([
"/usr/include/xkbcommon/*"


@ -0,0 +1,35 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
##
## @author Edouard DUPIN
##
## @copyright 2012, Edouard DUPIN, all right reserved
##
## @license MPL v2.0 (see license file)
##
from realog import debug
from lutin import system
from lutin import tools
from lutin import env
import os
class System(system.System):
def __init__(self, target):
system.System.__init__(self)
# create some HELP:
self.set_help("zstd : ???")
# No check ==> on the basic std libs:
self.set_valid(True)
# todo : create a searcher of the presence of the library:
self.add_flag("link-lib", "zstd")
if env.get_isolate_system() == True:
self.add_header_file([
"/usr/include/zstd_error.h",
"/usr/include/zstd.h"
],
clip_path="/usr/include",
recursive=False)


@ -165,16 +165,18 @@ class Target(target.Target):
def run(self, pkg_name, option_list, binary_name = None):
if binary_name == None:
binary_name = pkg_name;
appl_path = os.path.join(self.get_staging_path(pkg_name), pkg_name + ".app", "bin")
debug.debug("------------------------------------------------------------------------")
debug.info("-- Run package '" + pkg_name + "' executable: '" + binary_name + "' + option: " + str(option_list))
debug.info("-- Run path (PWD) '" + str(appl_path))
debug.debug("------------------------------------------------------------------------")
appl_path = os.path.join(self.get_staging_path(pkg_name), pkg_name + ".app", "bin", binary_name)
cmd = appl_path + " "
cmd = os.path.join(appl_path, binary_name) + " ";
for elem in option_list:
cmd += elem + " "
multiprocess.run_command_no_lock_out(cmd)
cmd += elem + " ";
ret = multiprocess.run_command_pwd(cmd, appl_path);
debug.debug("------------------------------------------------------------------------")
debug.info("-- Run package '" + pkg_name + "' Finished")
debug.info("-- Run package '" + pkg_name + "' Finished ret=" + str(ret))
debug.debug("------------------------------------------------------------------------")
return ret;


@ -1,4 +1,4 @@
#!/usr/bin/python
#!/usr/bin/python3
# -*- coding: utf-8 -*-
##
## @author Edouard DUPIN
@ -9,14 +9,25 @@
##
from setuptools import setup
import os
def readme():
with open('README.rst') as f:
with open('README.md') as f:
return f.read()
def read_version_file():
if not os.path.isfile("version.txt"):
return ""
file = open("version.txt", "r")
data_file = file.read()
file.close()
if len(data_file) > 4 and data_file[-4:] == "-dev":
data_file = data_file[:-4]
return data_file
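# Example: with version.txt containing exactly "2.7.1-dev" (the file added at the
# end of this changeset), the "-dev" marker is stripped:
#     read_version_file()  ->  "2.7.1"
# A plain release string such as "2.7.1" is returned unchanged.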
# https://pypi.python.org/pypi?%3Aaction=list_classifiers
setup(name='lutin',
version='2.5.0',
version=read_version_file(),
description='Lutin generic builder (might replace makefile, CMake ...)',
long_description=readme(),
url='http://github.com/HeeroYui/lutin',
@ -26,13 +37,15 @@ setup(name='lutin',
packages=['lutin',
'lutin/z_builder',
'lutin/z_system',
'lutin/z_target'],
'lutin/z_target',
],
classifiers=[
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)',
'Programming Language :: Python',
'Topic :: Software Development :: Build Tools',
],
long_description_content_type="text/markdown",
keywords='builder c++ c android ios macos makefile cmake',
scripts=['bin/lutin'],
# Does not work on MacOs

1
version.txt Normal file

@ -0,0 +1 @@
2.7.1-dev