diff -Nru lilv-0.24.4~dfsg0/AUTHORS lilv-0.24.6/AUTHORS
--- lilv-0.24.4~dfsg0/AUTHORS 2015-11-14 19:49:15.000000000 +0000
+++ lilv-0.24.6/AUTHORS 2019-11-09 19:17:05.000000000 +0000
@@ -8,4 +8,7 @@
 Stefano D'Angelo
 
 Plugin execution via Python bindings:
- Kaspar Emanuel
\ No newline at end of file
+ Kaspar Emanuel
+
+Python preset script:
+ Christopher Arndt
diff -Nru lilv-0.24.4~dfsg0/bindings/lilv.i lilv-0.24.6/bindings/lilv.i
--- lilv-0.24.4~dfsg0/bindings/lilv.i 2017-01-04 17:29:52.000000000 +0000
+++ lilv-0.24.6/bindings/lilv.i 1970-01-01 00:00:00.000000000 +0000
@@ -1,66 +0,0 @@
-%module lilv
-%typedef unsigned uint32_t;
-%{
-#define SWIG_FILE_WITH_INIT
-#include "lilv/lilv.h"
-#include "lilv/lilvmm.hpp"
-%}
-
-%include "numpy.i"
-%init %{
-    import_array();
-%}
-%apply (float* INPLACE_ARRAY1) {(void* data_location)}
-
-%feature("compactdefaultargs") %{
-    lilv_plugin_get_num_ports_of_class;
-    get_num_ports_of_class;
-%}
-%varargs(3, LilvNode* node = NULL) lilv_plugin_get_num_ports_of_class;
-%varargs(3, LilvNode* node = NULL) get_num_ports_of_class;
-%typemap(in, numinputs=0) LilvNode *node3 ""; // Make sure it's NULL terminated
-
-%include "lilv/lilv.h"
-%include "lilv/lilvmm.hpp"
-
-namespace Lilv {
-
-%extend Plugins {
-%pythoncode %{
-    def __iter__(self):
-        class Iterator(object):
-            def __init__(self, plugins):
-                self.plugins = plugins
-                self.iter = plugins.begin()
-
-            def __next__(self):
-                if self.plugins.is_end(self.iter):
-                    raise StopIteration
-                plugin = self.plugins.get(self.iter)
-                self.iter = self.plugins.next(self.iter)
-                return plugin
-
-            def next(self):
-                "Python 2 iterator protocol"
-                return Iterator.__next__(self)
-
-        return Iterator(self)
-
-    def get_by_uri(self, *args):
-        """get_by_uri(self, LilvNode uri) -> PluginClass"""
-        ret = _lilv.Plugins_get_by_uri(self, *args)
-        if ret.me is None:
-            return None
-        else:
-            return ret
-%}
-};
-
-%extend Node {
-%pythoncode %{
-    def __str__(self):
-        return self.get_turtle_token()
-%}
-};
-
-} /* namespace Lilv */
diff -Nru lilv-0.24.4~dfsg0/bindings/numpy.i lilv-0.24.6/bindings/numpy.i
--- lilv-0.24.4~dfsg0/bindings/numpy.i 2015-11-14 19:49:15.000000000 +0000
+++ lilv-0.24.6/bindings/numpy.i 1970-01-01 00:00:00.000000000 +0000
@@ -1,1746 +0,0 @@
-/* -*- C -*- (not really, but good for syntax highlighting) */
-#ifdef SWIGPYTHON
-
-%{
-#ifndef SWIG_FILE_WITH_INIT
-# define NO_IMPORT_ARRAY
-#endif
-#include "stdio.h"
-#include <numpy/arrayobject.h>
-%}
-
-/**********************************************************************/
-
-%fragment("NumPy_Backward_Compatibility", "header")
-{
-/* Support older NumPy data type names
-*/
-%#if NDARRAY_VERSION < 0x01000000
-%#define NPY_BOOL PyArray_BOOL
-%#define NPY_BYTE PyArray_BYTE
-%#define NPY_UBYTE PyArray_UBYTE
-%#define NPY_SHORT PyArray_SHORT
-%#define NPY_USHORT PyArray_USHORT
-%#define NPY_INT PyArray_INT
-%#define NPY_UINT PyArray_UINT
-%#define NPY_LONG PyArray_LONG
-%#define NPY_ULONG PyArray_ULONG
-%#define NPY_LONGLONG PyArray_LONGLONG
-%#define NPY_ULONGLONG PyArray_ULONGLONG
-%#define NPY_FLOAT PyArray_FLOAT
-%#define NPY_DOUBLE PyArray_DOUBLE
-%#define NPY_LONGDOUBLE PyArray_LONGDOUBLE
-%#define NPY_CFLOAT PyArray_CFLOAT
-%#define NPY_CDOUBLE PyArray_CDOUBLE
-%#define NPY_CLONGDOUBLE PyArray_CLONGDOUBLE
-%#define NPY_OBJECT PyArray_OBJECT
-%#define NPY_STRING PyArray_STRING
-%#define NPY_UNICODE PyArray_UNICODE
-%#define NPY_VOID PyArray_VOID
-%#define NPY_NTYPES PyArray_NTYPES
-%#define NPY_NOTYPE PyArray_NOTYPE
-%#define NPY_CHAR PyArray_CHAR
-%#define NPY_USERDEF PyArray_USERDEF
-%#define npy_intp intp
-
-%#define NPY_MAX_BYTE MAX_BYTE
-%#define NPY_MIN_BYTE MIN_BYTE
-%#define NPY_MAX_UBYTE MAX_UBYTE
-%#define NPY_MAX_SHORT MAX_SHORT
-%#define NPY_MIN_SHORT MIN_SHORT
-%#define NPY_MAX_USHORT MAX_USHORT
-%#define NPY_MAX_INT MAX_INT
-%#define NPY_MIN_INT MIN_INT
-%#define NPY_MAX_UINT MAX_UINT
-%#define NPY_MAX_LONG MAX_LONG
-%#define NPY_MIN_LONG MIN_LONG
-%#define NPY_MAX_ULONG MAX_ULONG
-%#define NPY_MAX_LONGLONG MAX_LONGLONG
-%#define NPY_MIN_LONGLONG MIN_LONGLONG
-%#define NPY_MAX_ULONGLONG MAX_ULONGLONG
-%#define NPY_MAX_INTP MAX_INTP
-%#define NPY_MIN_INTP MIN_INTP
-
-%#define NPY_FARRAY FARRAY
-%#define NPY_F_CONTIGUOUS F_CONTIGUOUS
-%#endif
-}
-
-/**********************************************************************/
-
-/* The following code originally appeared in
- * enthought/kiva/agg/src/numeric.i written by Eric Jones. It was
- * translated from C++ to C by John Hunter. Bill Spotz has modified
- * it to fix some minor bugs, upgrade from Numeric to numpy (all
- * versions), add some comments and functionality, and convert from
- * direct code insertion to SWIG fragments.
- */
-
-%fragment("NumPy_Macros", "header")
-{
-/* Macros to extract array attributes.
- */
-%#define is_array(a) ((a) && PyArray_Check((PyArrayObject *)a))
-%#define array_type(a) (int)(PyArray_TYPE(a))
-%#define array_numdims(a) (((PyArrayObject *)a)->nd)
-%#define array_dimensions(a) (((PyArrayObject *)a)->dimensions)
-%#define array_size(a,i) (((PyArrayObject *)a)->dimensions[i])
-%#define array_data(a) (((PyArrayObject *)a)->data)
-%#define array_is_contiguous(a) (PyArray_ISCONTIGUOUS(a))
-%#define array_is_native(a) (PyArray_ISNOTSWAPPED(a))
-%#define array_is_fortran(a) (PyArray_ISFORTRAN(a))
-}
-
-/**********************************************************************/
-
-%fragment("NumPy_Utilities", "header")
-{
-  /* Given a PyObject, return a string describing its type.
-   */
-  const char* pytype_string(PyObject* py_obj) {
-    if (py_obj == NULL ) return "C NULL value";
-    if (py_obj == Py_None ) return "Python None" ;
-    if (PyCallable_Check(py_obj)) return "callable" ;
-    if (PyString_Check( py_obj)) return "string" ;
-    if (PyInt_Check( py_obj)) return "int" ;
-    if (PyFloat_Check( py_obj)) return "float" ;
-    if (PyDict_Check( py_obj)) return "dict" ;
-    if (PyList_Check( py_obj)) return "list" ;
-    if (PyTuple_Check( py_obj)) return "tuple" ;
-    if (PyModule_Check( py_obj)) return "module" ;
-%#if PY_MAJOR_VERSION < 3
-    if (PyFile_Check( py_obj)) return "file" ;
-    if (PyInstance_Check(py_obj)) return "instance" ;
-%#endif
-
-    return "unkown type";
-  }
-
-  /* Given a NumPy typecode, return a string describing the type.
-   */
-  const char* typecode_string(int typecode) {
-    static const char* type_names[25] = {"bool", "byte", "unsigned byte",
-                                         "short", "unsigned short", "int",
-                                         "unsigned int", "long", "unsigned long",
-                                         "long long", "unsigned long long",
-                                         "float", "double", "long double",
-                                         "complex float", "complex double",
-                                         "complex long double", "object",
-                                         "string", "unicode", "void", "ntypes",
-                                         "notype", "char", "unknown"};
-    return typecode < 24 ? type_names[typecode] : type_names[24];
-  }
-
-  /* Make sure input has correct numpy type. Allow character and byte
-   * to match. Also allow int and long to match. This is deprecated.
-   * You should use PyArray_EquivTypenums() instead.
- */ - int type_match(int actual_type, int desired_type) { - return PyArray_EquivTypenums(actual_type, desired_type); - } -} - -/**********************************************************************/ - -%fragment("NumPy_Object_to_Array", "header", - fragment="NumPy_Backward_Compatibility", - fragment="NumPy_Macros", - fragment="NumPy_Utilities") -{ - /* Given a PyObject pointer, cast it to a PyArrayObject pointer if - * legal. If not, set the python error string appropriately and - * return NULL. - */ - PyArrayObject* obj_to_array_no_conversion(PyObject* input, int typecode) - { - PyArrayObject* ary = NULL; - if (is_array(input) && (typecode == NPY_NOTYPE || - PyArray_EquivTypenums(array_type(input), typecode))) - { - ary = (PyArrayObject*) input; - } - else if is_array(input) - { - const char* desired_type = typecode_string(typecode); - const char* actual_type = typecode_string(array_type(input)); - PyErr_Format(PyExc_TypeError, - "Array of type '%s' required. Array of type '%s' given", - desired_type, actual_type); - ary = NULL; - } - else - { - const char * desired_type = typecode_string(typecode); - const char * actual_type = pytype_string(input); - PyErr_Format(PyExc_TypeError, - "Array of type '%s' required. A '%s' was given", - desired_type, actual_type); - ary = NULL; - } - return ary; - } - - /* Convert the given PyObject to a NumPy array with the given - * typecode. On success, return a valid PyArrayObject* with the - * correct type. On failure, the python error string will be set and - * the routine returns NULL. - */ - PyArrayObject* obj_to_array_allow_conversion(PyObject* input, int typecode, - int* is_new_object) - { - PyArrayObject* ary = NULL; - PyObject* py_obj; - if (is_array(input) && (typecode == NPY_NOTYPE || - PyArray_EquivTypenums(array_type(input),typecode))) - { - ary = (PyArrayObject*) input; - *is_new_object = 0; - } - else - { - py_obj = PyArray_FROMANY(input, typecode, 0, 0, NPY_DEFAULT); - /* If NULL, PyArray_FromObject will have set python error value.*/ - ary = (PyArrayObject*) py_obj; - *is_new_object = 1; - } - return ary; - } - - /* Given a PyArrayObject, check to see if it is contiguous. If so, - * return the input pointer and flag it as not a new object. If it is - * not contiguous, create a new PyArrayObject using the original data, - * flag it as a new object and return the pointer. - */ - PyArrayObject* make_contiguous(PyArrayObject* ary, int* is_new_object, - int min_dims, int max_dims) - { - PyArrayObject* result; - if (array_is_contiguous(ary)) - { - result = ary; - *is_new_object = 0; - } - else - { - result = (PyArrayObject*) PyArray_ContiguousFromObject((PyObject*)ary, - array_type(ary), - min_dims, - max_dims); - *is_new_object = 1; - } - return result; - } - - /* Given a PyArrayObject, check to see if it is Fortran-contiguous. - * If so, return the input pointer, but do not flag it as not a new - * object. If it is not Fortran-contiguous, create a new - * PyArrayObject using the original data, flag it as a new object - * and return the pointer. - */ - PyArrayObject* make_fortran(PyArrayObject* ary, int* is_new_object, - int min_dims, int max_dims) - { - PyArrayObject* result; - if (array_is_fortran(ary)) - { - result = ary; - *is_new_object = 0; - } - else - { - Py_INCREF(ary->descr); - result = (PyArrayObject*) PyArray_FromArray(ary, ary->descr, NPY_FORTRAN); - *is_new_object = 1; - } - return result; - } - - /* Convert a given PyObject to a contiguous PyArrayObject of the - * specified type. 
If the input object is not a contiguous - * PyArrayObject, a new one will be created and the new object flag - * will be set. - */ - PyArrayObject* obj_to_array_contiguous_allow_conversion(PyObject* input, - int typecode, - int* is_new_object) - { - int is_new1 = 0; - int is_new2 = 0; - PyArrayObject* ary2; - PyArrayObject* ary1 = obj_to_array_allow_conversion(input, typecode, - &is_new1); - if (ary1) - { - ary2 = make_contiguous(ary1, &is_new2, 0, 0); - if ( is_new1 && is_new2) - { - Py_DECREF(ary1); - } - ary1 = ary2; - } - *is_new_object = is_new1 || is_new2; - return ary1; - } - - /* Convert a given PyObject to a Fortran-ordered PyArrayObject of the - * specified type. If the input object is not a Fortran-ordered - * PyArrayObject, a new one will be created and the new object flag - * will be set. - */ - PyArrayObject* obj_to_array_fortran_allow_conversion(PyObject* input, - int typecode, - int* is_new_object) - { - int is_new1 = 0; - int is_new2 = 0; - PyArrayObject* ary2; - PyArrayObject* ary1 = obj_to_array_allow_conversion(input, typecode, - &is_new1); - if (ary1) - { - ary2 = make_fortran(ary1, &is_new2, 0, 0); - if (is_new1 && is_new2) - { - Py_DECREF(ary1); - } - ary1 = ary2; - } - *is_new_object = is_new1 || is_new2; - return ary1; - } - - /* The following code was added by Ilmar M. Wilbers for forcing a copy of the - * object even when it is a NumPy array. This is meant for use with the - * IN_ARRAY typemaps, and allows the user to perform changes on an array - * without these chenges being reflected in the calling code. - */ - - /* Convert the given PyObject to a NumPy array with the given - * typecode as a copy. On success, return a valid PyArrayObject* with the - * correct type. On failure, the python error string will be set and - * the routine returns NULL. - */ - PyArrayObject* obj_to_array_force_conversion(PyObject* input, int typecode, - int* is_new_object) - { - PyArrayObject* ary = NULL; - PyObject* py_obj; - if (is_array(input) && (typecode == NPY_NOTYPE || - PyArray_EquivTypenums(array_type(input),typecode))) - { - py_obj = PyArray_Copy((PyArrayObject*) input); - ary = (PyArrayObject*) py_obj; - *is_new_object = 1; - } - else - { - py_obj = PyArray_FROMANY(input, typecode, 0, 0, NPY_DEFAULT); - /* If NULL, PyArray_FromObject will have set python error value.*/ - ary = (PyArrayObject*) py_obj; - *is_new_object = 1; - } - return ary; - } - - /* Convert a given PyObject to a contiguous PyArrayObject of the - * specified type. If the input object is not a contiguous - * PyArrayObject, a new one will be created and the new object flag - * will be set. - */ - PyArrayObject* obj_to_array_contiguous_force_conversion(PyObject* input, - int typecode, - int* is_new_object) - { - int is_new1 = 0; - int is_new2 = 0; - PyArrayObject* ary2; - PyArrayObject* ary1 = obj_to_array_force_conversion(input, typecode, - &is_new1); - if (ary1) - { - ary2 = make_contiguous(ary1, &is_new2, 0, 0); - if ( is_new1 && is_new2) - { - Py_DECREF(ary1); - } - ary1 = ary2; - } - *is_new_object = is_new1 || is_new2; - return ary1; - } - - /* Convert a given PyObject to a Fortran-ordered PyArrayObject of the - * specified type. If the input object is not a Fortran-ordered - * PyArrayObject, a new one will be created and the new object flag - * will be set. 
- */ - PyArrayObject* obj_to_array_fortran_force_conversion(PyObject* input, - int typecode, - int* is_new_object) - { - int is_new1 = 0; - int is_new2 = 0; - PyArrayObject* ary2; - PyArrayObject* ary1 = obj_to_array_force_conversion(input, typecode, - &is_new1); - if (ary1) - { - ary2 = make_fortran(ary1, &is_new2, 0, 0); - if (is_new1 && is_new2) - { - Py_DECREF(ary1); - } - ary1 = ary2; - } - *is_new_object = is_new1 || is_new2; - return ary1; - } - /* End modifications by Ilmar M. Wilbers - */ - -} /* end fragment */ - - -/**********************************************************************/ - -%fragment("NumPy_Array_Requirements", "header", - fragment="NumPy_Backward_Compatibility", - fragment="NumPy_Macros") -{ - /* Test whether a python object is contiguous. If array is - * contiguous, return 1. Otherwise, set the python error string and - * return 0. - */ - int require_contiguous(PyArrayObject* ary) - { - int contiguous = 1; - if (!array_is_contiguous(ary)) - { - PyErr_SetString(PyExc_TypeError, - "Array must be contiguous. A non-contiguous array was given"); - contiguous = 0; - } - return contiguous; - } - - /* Require that a numpy array is not byte-swapped. If the array is - * not byte-swapped, return 1. Otherwise, set the python error string - * and return 0. - */ - int require_native(PyArrayObject* ary) - { - int native = 1; - if (!array_is_native(ary)) - { - PyErr_SetString(PyExc_TypeError, - "Array must have native byteorder. " - "A byte-swapped array was given"); - native = 0; - } - return native; - } - - /* Require the given PyArrayObject to have a specified number of - * dimensions. If the array has the specified number of dimensions, - * return 1. Otherwise, set the python error string and return 0. - */ - int require_dimensions(PyArrayObject* ary, int exact_dimensions) - { - int success = 1; - if (array_numdims(ary) != exact_dimensions) - { - PyErr_Format(PyExc_TypeError, - "Array must have %d dimensions. Given array has %d dimensions", - exact_dimensions, array_numdims(ary)); - success = 0; - } - return success; - } - - /* Require the given PyArrayObject to have one of a list of specified - * number of dimensions. If the array has one of the specified number - * of dimensions, return 1. Otherwise, set the python error string - * and return 0. - */ - int require_dimensions_n(PyArrayObject* ary, int* exact_dimensions, int n) - { - int success = 0; - int i; - char dims_str[255] = ""; - char s[255]; - for (i = 0; i < n && !success; i++) - { - if (array_numdims(ary) == exact_dimensions[i]) - { - success = 1; - } - } - if (!success) - { - for (i = 0; i < n-1; i++) - { - sprintf(s, "%d, ", exact_dimensions[i]); - strcat(dims_str,s); - } - sprintf(s, " or %d", exact_dimensions[n-1]); - strcat(dims_str,s); - PyErr_Format(PyExc_TypeError, - "Array must have %s dimensions. Given array has %d dimensions", - dims_str, array_numdims(ary)); - } - return success; - } - - /* Require the given PyArrayObject to have a specified shape. If the - * array has the specified shape, return 1. Otherwise, set the python - * error string and return 0. 
- */ - int require_size(PyArrayObject* ary, npy_intp* size, int n) - { - int i; - int success = 1; - int len; - char desired_dims[255] = "["; - char s[255]; - char actual_dims[255] = "["; - for(i=0; i < n;i++) - { - if (size[i] != -1 && size[i] != array_size(ary,i)) - { - success = 0; - } - } - if (!success) - { - for (i = 0; i < n; i++) - { - if (size[i] == -1) - { - sprintf(s, "*,"); - } - else - { - sprintf(s, "%ld,", (long int)size[i]); - } - strcat(desired_dims,s); - } - len = strlen(desired_dims); - desired_dims[len-1] = ']'; - for (i = 0; i < n; i++) - { - sprintf(s, "%ld,", (long int)array_size(ary,i)); - strcat(actual_dims,s); - } - len = strlen(actual_dims); - actual_dims[len-1] = ']'; - PyErr_Format(PyExc_TypeError, - "Array must have shape of %s. Given array has shape of %s", - desired_dims, actual_dims); - } - return success; - } - - /* Require the given PyArrayObject to to be FORTRAN ordered. If the - * the PyArrayObject is already FORTRAN ordered, do nothing. Else, - * set the FORTRAN ordering flag and recompute the strides. - */ - int require_fortran(PyArrayObject* ary) - { - int success = 1; - int nd = array_numdims(ary); - int i; - if (array_is_fortran(ary)) return success; - /* Set the FORTRAN ordered flag */ - ary->flags = NPY_FARRAY; - /* Recompute the strides */ - ary->strides[0] = ary->strides[nd-1]; - for (i=1; i < nd; ++i) - ary->strides[i] = ary->strides[i-1] * array_size(ary,i-1); - return success; - } -} - -/* Combine all NumPy fragments into one for convenience */ -%fragment("NumPy_Fragments", "header", - fragment="NumPy_Backward_Compatibility", - fragment="NumPy_Macros", - fragment="NumPy_Utilities", - fragment="NumPy_Object_to_Array", - fragment="NumPy_Array_Requirements") { } - -/* End John Hunter translation (with modifications by Bill Spotz) - */ - -/* %numpy_typemaps() macro - * - * This macro defines a family of 42 typemaps that allow C arguments - * of the form - * - * (DATA_TYPE IN_ARRAY1[ANY]) - * (DATA_TYPE* IN_ARRAY1, DIM_TYPE DIM1) - * (DIM_TYPE DIM1, DATA_TYPE* IN_ARRAY1) - * - * (DATA_TYPE IN_ARRAY2[ANY][ANY]) - * (DATA_TYPE* IN_ARRAY2, DIM_TYPE DIM1, DIM_TYPE DIM2) - * (DIM_TYPE DIM1, DIM_TYPE DIM2, DATA_TYPE* IN_ARRAY2) - * (DATA_TYPE* IN_FARRAY2, DIM_TYPE DIM1, DIM_TYPE DIM2) - * (DIM_TYPE DIM1, DIM_TYPE DIM2, DATA_TYPE* IN_FARRAY2) - * - * (DATA_TYPE IN_ARRAY3[ANY][ANY][ANY]) - * (DATA_TYPE* IN_ARRAY3, DIM_TYPE DIM1, DIM_TYPE DIM2, DIM_TYPE DIM3) - * (DIM_TYPE DIM1, DIM_TYPE DIM2, DIM_TYPE DIM3, DATA_TYPE* IN_ARRAY3) - * (DATA_TYPE* IN_FARRAY3, DIM_TYPE DIM1, DIM_TYPE DIM2, DIM_TYPE DIM3) - * (DIM_TYPE DIM1, DIM_TYPE DIM2, DIM_TYPE DIM3, DATA_TYPE* IN_FARRAY3) - * - * (DATA_TYPE INPLACE_ARRAY1[ANY]) - * (DATA_TYPE* INPLACE_ARRAY1) - * (DATA_TYPE* INPLACE_ARRAY1, DIM_TYPE DIM1) - * (DIM_TYPE DIM1, DATA_TYPE* INPLACE_ARRAY1) - * - * (DATA_TYPE INPLACE_ARRAY2[ANY][ANY]) - * (DATA_TYPE* INPLACE_ARRAY2, DIM_TYPE DIM1, DIM_TYPE DIM2) - * (DIM_TYPE DIM1, DIM_TYPE DIM2, DATA_TYPE* INPLACE_ARRAY2) - * (DATA_TYPE* INPLACE_FARRAY2, DIM_TYPE DIM1, DIM_TYPE DIM2) - * (DIM_TYPE DIM1, DIM_TYPE DIM2, DATA_TYPE* INPLACE_FARRAY2) - * - * (DATA_TYPE INPLACE_ARRAY3[ANY][ANY][ANY]) - * (DATA_TYPE* INPLACE_ARRAY3, DIM_TYPE DIM1, DIM_TYPE DIM2, DIM_TYPE DIM3) - * (DIM_TYPE DIM1, DIM_TYPE DIM2, DIM_TYPE DIM3, DATA_TYPE* INPLACE_ARRAY3) - * (DATA_TYPE* INPLACE_FARRAY3, DIM_TYPE DIM1, DIM_TYPE DIM2, DIM_TYPE DIM3) - * (DIM_TYPE DIM1, DIM_TYPE DIM2, DIM_TYPE DIM3, DATA_TYPE* INPLACE_FARRAY3) - * - * (DATA_TYPE ARGOUT_ARRAY1[ANY]) - * (DATA_TYPE* ARGOUT_ARRAY1, DIM_TYPE 
DIM1) - * (DIM_TYPE DIM1, DATA_TYPE* ARGOUT_ARRAY1) - * - * (DATA_TYPE ARGOUT_ARRAY2[ANY][ANY]) - * - * (DATA_TYPE ARGOUT_ARRAY3[ANY][ANY][ANY]) - * - * (DATA_TYPE** ARGOUTVIEW_ARRAY1, DIM_TYPE* DIM1) - * (DIM_TYPE* DIM1, DATA_TYPE** ARGOUTVIEW_ARRAY1) - * - * (DATA_TYPE** ARGOUTVIEW_ARRAY2, DIM_TYPE* DIM1, DIM_TYPE* DIM2) - * (DIM_TYPE* DIM1, DIM_TYPE* DIM2, DATA_TYPE** ARGOUTVIEW_ARRAY2) - * (DATA_TYPE** ARGOUTVIEW_FARRAY2, DIM_TYPE* DIM1, DIM_TYPE* DIM2) - * (DIM_TYPE* DIM1, DIM_TYPE* DIM2, DATA_TYPE** ARGOUTVIEW_FARRAY2) - * - * (DATA_TYPE** ARGOUTVIEW_ARRAY3, DIM_TYPE* DIM1, DIM_TYPE* DIM2, DIM_TYPE* DIM3) - * (DIM_TYPE* DIM1, DIM_TYPE* DIM2, DIM_TYPE* DIM3, DATA_TYPE** ARGOUTVIEW_ARRAY3) - * (DATA_TYPE** ARGOUTVIEW_FARRAY3, DIM_TYPE* DIM1, DIM_TYPE* DIM2, DIM_TYPE* DIM3) - * (DIM_TYPE* DIM1, DIM_TYPE* DIM2, DIM_TYPE* DIM3, DATA_TYPE** ARGOUTVIEW_FARRAY3) - * - * where "DATA_TYPE" is any type supported by the NumPy module, and - * "DIM_TYPE" is any int-like type suitable for specifying dimensions. - * The difference between "ARRAY" typemaps and "FARRAY" typemaps is - * that the "FARRAY" typemaps expect FORTRAN ordering of - * multidimensional arrays. In python, the dimensions will not need - * to be specified (except for the "DATA_TYPE* ARGOUT_ARRAY1" - * typemaps). The IN_ARRAYs can be a numpy array or any sequence that - * can be converted to a numpy array of the specified type. The - * INPLACE_ARRAYs must be numpy arrays of the appropriate type. The - * ARGOUT_ARRAYs will be returned as new numpy arrays of the - * appropriate type. - * - * These typemaps can be applied to existing functions using the - * %apply directive. For example: - * - * %apply (double* IN_ARRAY1, int DIM1) {(double* series, int length)}; - * double prod(double* series, int length); - * - * %apply (int DIM1, int DIM2, double* INPLACE_ARRAY2) - * {(int rows, int cols, double* matrix )}; - * void floor(int rows, int cols, double* matrix, double f); - * - * %apply (double IN_ARRAY3[ANY][ANY][ANY]) - * {(double tensor[2][2][2] )}; - * %apply (double ARGOUT_ARRAY3[ANY][ANY][ANY]) - * {(double low[2][2][2] )}; - * %apply (double ARGOUT_ARRAY3[ANY][ANY][ANY]) - * {(double upp[2][2][2] )}; - * void luSplit(double tensor[2][2][2], - * double low[2][2][2], - * double upp[2][2][2] ); - * - * or directly with - * - * double prod(double* IN_ARRAY1, int DIM1); - * - * void floor(int DIM1, int DIM2, double* INPLACE_ARRAY2, double f); - * - * void luSplit(double IN_ARRAY3[ANY][ANY][ANY], - * double ARGOUT_ARRAY3[ANY][ANY][ANY], - * double ARGOUT_ARRAY3[ANY][ANY][ANY]); - */ - -%define %numpy_typemaps(DATA_TYPE, DATA_TYPECODE, DIM_TYPE) - -/************************/ -/* Input Array Typemaps */ -/************************/ - -/* Typemap suite for (DATA_TYPE IN_ARRAY1[ANY]) - */ -%typecheck(SWIG_TYPECHECK_DOUBLE_ARRAY, - fragment="NumPy_Macros") - (DATA_TYPE IN_ARRAY1[ANY]) -{ - $1 = is_array($input) || PySequence_Check($input); -} -%typemap(in, - fragment="NumPy_Fragments") - (DATA_TYPE IN_ARRAY1[ANY]) - (PyArrayObject* array=NULL, int is_new_object=0) -{ - npy_intp size[1] = { $1_dim0 }; - array = obj_to_array_contiguous_force_conversion($input, DATA_TYPECODE, - &is_new_object); - if (!array || !require_dimensions(array, 1) || - !require_size(array, size, 1)) SWIG_fail; - $1 = ($1_ltype) array_data(array); -} -%typemap(freearg) - (DATA_TYPE IN_ARRAY1[ANY]) -{ - if (is_new_object$argnum && array$argnum) - { Py_DECREF(array$argnum); } -} - -/* Typemap suite for (DATA_TYPE* IN_ARRAY1, DIM_TYPE DIM1) - */ 
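
The %apply examples above are exactly how the deleted lilv.i used this file: it applied (float* INPLACE_ARRAY1) to the (void* data_location) argument of Instance::connect_port() so that a NumPy array could be handed to a plugin port without copying. A minimal sketch of what that enabled on the Python side under the old SWIG bindings, assuming an already-instantiated plugin (`instance` and the port index here are hypothetical):

    import numpy
    import lilv  # the old SWIG module removed by this patch

    # INPLACE_ARRAY1 requires a real numpy array of the exact dtype
    # (float32 here); the plugin then reads and writes it in place.
    buf = numpy.zeros(512, dtype=numpy.float32)
    instance.connect_port(0, buf)
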
-%typecheck(SWIG_TYPECHECK_DOUBLE_ARRAY, - fragment="NumPy_Macros") - (DATA_TYPE* IN_ARRAY1, DIM_TYPE DIM1) -{ - $1 = is_array($input) || PySequence_Check($input); -} -%typemap(in, - fragment="NumPy_Fragments") - (DATA_TYPE* IN_ARRAY1, DIM_TYPE DIM1) - (PyArrayObject* array=NULL, int is_new_object=0) -{ - npy_intp size[1] = { -1 }; - array = obj_to_array_contiguous_force_conversion($input, DATA_TYPECODE, - &is_new_object); - if (!array || !require_dimensions(array, 1) || - !require_size(array, size, 1)) SWIG_fail; - $1 = (DATA_TYPE*) array_data(array); - $2 = (DIM_TYPE) array_size(array,0); -} -%typemap(freearg) - (DATA_TYPE* IN_ARRAY1, DIM_TYPE DIM1) -{ - if (is_new_object$argnum && array$argnum) - { Py_DECREF(array$argnum); } -} - -/* Typemap suite for (DIM_TYPE DIM1, DATA_TYPE* IN_ARRAY1) - */ -%typecheck(SWIG_TYPECHECK_DOUBLE_ARRAY, - fragment="NumPy_Macros") - (DIM_TYPE DIM1, DATA_TYPE* IN_ARRAY1) -{ - $1 = is_array($input) || PySequence_Check($input); -} -%typemap(in, - fragment="NumPy_Fragments") - (DIM_TYPE DIM1, DATA_TYPE* IN_ARRAY1) - (PyArrayObject* array=NULL, int is_new_object=0) -{ - npy_intp size[1] = {-1}; - array = obj_to_array_contiguous_force_conversion($input, DATA_TYPECODE, - &is_new_object); - if (!array || !require_dimensions(array, 1) || - !require_size(array, size, 1)) SWIG_fail; - $1 = (DIM_TYPE) array_size(array,0); - $2 = (DATA_TYPE*) array_data(array); -} -%typemap(freearg) - (DIM_TYPE DIM1, DATA_TYPE* IN_ARRAY1) -{ - if (is_new_object$argnum && array$argnum) - { Py_DECREF(array$argnum); } -} - -/* Typemap suite for (DATA_TYPE IN_ARRAY2[ANY][ANY]) - */ -%typecheck(SWIG_TYPECHECK_DOUBLE_ARRAY, - fragment="NumPy_Macros") - (DATA_TYPE IN_ARRAY2[ANY][ANY]) -{ - $1 = is_array($input) || PySequence_Check($input); -} -%typemap(in, - fragment="NumPy_Fragments") - (DATA_TYPE IN_ARRAY2[ANY][ANY]) - (PyArrayObject* array=NULL, int is_new_object=0) -{ - npy_intp size[2] = { $1_dim0, $1_dim1 }; - array = obj_to_array_contiguous_force_conversion($input, DATA_TYPECODE, - &is_new_object); - if (!array || !require_dimensions(array, 2) || - !require_size(array, size, 2)) SWIG_fail; - $1 = ($1_ltype) array_data(array); -} -%typemap(freearg) - (DATA_TYPE IN_ARRAY2[ANY][ANY]) -{ - if (is_new_object$argnum && array$argnum) - { Py_DECREF(array$argnum); } -} - -/* Typemap suite for (DATA_TYPE* IN_ARRAY2, DIM_TYPE DIM1, DIM_TYPE DIM2) - */ -%typecheck(SWIG_TYPECHECK_DOUBLE_ARRAY, - fragment="NumPy_Macros") - (DATA_TYPE* IN_ARRAY2, DIM_TYPE DIM1, DIM_TYPE DIM2) -{ - $1 = is_array($input) || PySequence_Check($input); -} -%typemap(in, - fragment="NumPy_Fragments") - (DATA_TYPE* IN_ARRAY2, DIM_TYPE DIM1, DIM_TYPE DIM2) - (PyArrayObject* array=NULL, int is_new_object=0) -{ - npy_intp size[2] = { -1, -1 }; - array = obj_to_array_contiguous_force_conversion($input, DATA_TYPECODE, - &is_new_object); - if (!array || !require_dimensions(array, 2) || - !require_size(array, size, 2)) SWIG_fail; - $1 = (DATA_TYPE*) array_data(array); - $2 = (DIM_TYPE) array_size(array,0); - $3 = (DIM_TYPE) array_size(array,1); -} -%typemap(freearg) - (DATA_TYPE* IN_ARRAY2, DIM_TYPE DIM1, DIM_TYPE DIM2) -{ - if (is_new_object$argnum && array$argnum) - { Py_DECREF(array$argnum); } -} - -/* Typemap suite for (DIM_TYPE DIM1, DIM_TYPE DIM2, DATA_TYPE* IN_ARRAY2) - */ -%typecheck(SWIG_TYPECHECK_DOUBLE_ARRAY, - fragment="NumPy_Macros") - (DIM_TYPE DIM1, DIM_TYPE DIM2, DATA_TYPE* IN_ARRAY2) -{ - $1 = is_array($input) || PySequence_Check($input); -} -%typemap(in, - fragment="NumPy_Fragments") - (DIM_TYPE DIM1, DIM_TYPE 
DIM2, DATA_TYPE* IN_ARRAY2) - (PyArrayObject* array=NULL, int is_new_object=0) -{ - npy_intp size[2] = { -1, -1 }; - array = obj_to_array_contiguous_force_conversion($input, DATA_TYPECODE, - &is_new_object); - if (!array || !require_dimensions(array, 2) || - !require_size(array, size, 2)) SWIG_fail; - $1 = (DIM_TYPE) array_size(array,0); - $2 = (DIM_TYPE) array_size(array,1); - $3 = (DATA_TYPE*) array_data(array); -} -%typemap(freearg) - (DIM_TYPE DIM1, DIM_TYPE DIM2, DATA_TYPE* IN_ARRAY2) -{ - if (is_new_object$argnum && array$argnum) - { Py_DECREF(array$argnum); } -} - -/* Typemap suite for (DATA_TYPE* IN_FARRAY2, DIM_TYPE DIM1, DIM_TYPE DIM2) - */ -%typecheck(SWIG_TYPECHECK_DOUBLE_ARRAY, - fragment="NumPy_Macros") - (DATA_TYPE* IN_FARRAY2, DIM_TYPE DIM1, DIM_TYPE DIM2) -{ - $1 = is_array($input) || PySequence_Check($input); -} -%typemap(in, - fragment="NumPy_Fragments") - (DATA_TYPE* IN_FARRAY2, DIM_TYPE DIM1, DIM_TYPE DIM2) - (PyArrayObject* array=NULL, int is_new_object=0) -{ - npy_intp size[2] = { -1, -1 }; - array = obj_to_array_fortran_force_conversion($input, DATA_TYPECODE, - &is_new_object); - if (!array || !require_dimensions(array, 2) || - !require_size(array, size, 2) || !require_fortran(array)) SWIG_fail; - $1 = (DATA_TYPE*) array_data(array); - $2 = (DIM_TYPE) array_size(array,0); - $3 = (DIM_TYPE) array_size(array,1); -} -%typemap(freearg) - (DATA_TYPE* IN_FARRAY2, DIM_TYPE DIM1, DIM_TYPE DIM2) -{ - if (is_new_object$argnum && array$argnum) - { Py_DECREF(array$argnum); } -} - -/* Typemap suite for (DIM_TYPE DIM1, DIM_TYPE DIM2, DATA_TYPE* IN_FARRAY2) - */ -%typecheck(SWIG_TYPECHECK_DOUBLE_ARRAY, - fragment="NumPy_Macros") - (DIM_TYPE DIM1, DIM_TYPE DIM2, DATA_TYPE* IN_FARRAY2) -{ - $1 = is_array($input) || PySequence_Check($input); -} -%typemap(in, - fragment="NumPy_Fragments") - (DIM_TYPE DIM1, DIM_TYPE DIM2, DATA_TYPE* IN_FARRAY2) - (PyArrayObject* array=NULL, int is_new_object=0) -{ - npy_intp size[2] = { -1, -1 }; - array = obj_to_array_contiguous_force_conversion($input, DATA_TYPECODE, - &is_new_object); - if (!array || !require_dimensions(array, 2) || - !require_size(array, size, 2) || !require_fortran(array)) SWIG_fail; - $1 = (DIM_TYPE) array_size(array,0); - $2 = (DIM_TYPE) array_size(array,1); - $3 = (DATA_TYPE*) array_data(array); -} -%typemap(freearg) - (DIM_TYPE DIM1, DIM_TYPE DIM2, DATA_TYPE* IN_FARRAY2) -{ - if (is_new_object$argnum && array$argnum) - { Py_DECREF(array$argnum); } -} - -/* Typemap suite for (DATA_TYPE IN_ARRAY3[ANY][ANY][ANY]) - */ -%typecheck(SWIG_TYPECHECK_DOUBLE_ARRAY, - fragment="NumPy_Macros") - (DATA_TYPE IN_ARRAY3[ANY][ANY][ANY]) -{ - $1 = is_array($input) || PySequence_Check($input); -} -%typemap(in, - fragment="NumPy_Fragments") - (DATA_TYPE IN_ARRAY3[ANY][ANY][ANY]) - (PyArrayObject* array=NULL, int is_new_object=0) -{ - npy_intp size[3] = { $1_dim0, $1_dim1, $1_dim2 }; - array = obj_to_array_contiguous_force_conversion($input, DATA_TYPECODE, - &is_new_object); - if (!array || !require_dimensions(array, 3) || - !require_size(array, size, 3)) SWIG_fail; - $1 = ($1_ltype) array_data(array); -} -%typemap(freearg) - (DATA_TYPE IN_ARRAY3[ANY][ANY][ANY]) -{ - if (is_new_object$argnum && array$argnum) - { Py_DECREF(array$argnum); } -} - -/* Typemap suite for (DATA_TYPE* IN_ARRAY3, DIM_TYPE DIM1, DIM_TYPE DIM2, - * DIM_TYPE DIM3) - */ -%typecheck(SWIG_TYPECHECK_DOUBLE_ARRAY, - fragment="NumPy_Macros") - (DATA_TYPE* IN_ARRAY3, DIM_TYPE DIM1, DIM_TYPE DIM2, DIM_TYPE DIM3) -{ - $1 = is_array($input) || PySequence_Check($input); -} -%typemap(in, 
- fragment="NumPy_Fragments") - (DATA_TYPE* IN_ARRAY3, DIM_TYPE DIM1, DIM_TYPE DIM2, DIM_TYPE DIM3) - (PyArrayObject* array=NULL, int is_new_object=0) -{ - npy_intp size[3] = { -1, -1, -1 }; - array = obj_to_array_contiguous_force_conversion($input, DATA_TYPECODE, - &is_new_object); - if (!array || !require_dimensions(array, 3) || - !require_size(array, size, 3)) SWIG_fail; - $1 = (DATA_TYPE*) array_data(array); - $2 = (DIM_TYPE) array_size(array,0); - $3 = (DIM_TYPE) array_size(array,1); - $4 = (DIM_TYPE) array_size(array,2); -} -%typemap(freearg) - (DATA_TYPE* IN_ARRAY3, DIM_TYPE DIM1, DIM_TYPE DIM2, DIM_TYPE DIM3) -{ - if (is_new_object$argnum && array$argnum) - { Py_DECREF(array$argnum); } -} - -/* Typemap suite for (DIM_TYPE DIM1, DIM_TYPE DIM2, DIM_TYPE DIM3, - * DATA_TYPE* IN_ARRAY3) - */ -%typecheck(SWIG_TYPECHECK_DOUBLE_ARRAY, - fragment="NumPy_Macros") - (DIM_TYPE DIM1, DIM_TYPE DIM2, DIM_TYPE DIM3, DATA_TYPE* IN_ARRAY3) -{ - $1 = is_array($input) || PySequence_Check($input); -} -%typemap(in, - fragment="NumPy_Fragments") - (DIM_TYPE DIM1, DIM_TYPE DIM2, DIM_TYPE DIM3, DATA_TYPE* IN_ARRAY3) - (PyArrayObject* array=NULL, int is_new_object=0) -{ - npy_intp size[3] = { -1, -1, -1 }; - array = obj_to_array_contiguous_force_conversion($input, DATA_TYPECODE, - &is_new_object); - if (!array || !require_dimensions(array, 3) || - !require_size(array, size, 3)) SWIG_fail; - $1 = (DIM_TYPE) array_size(array,0); - $2 = (DIM_TYPE) array_size(array,1); - $3 = (DIM_TYPE) array_size(array,2); - $4 = (DATA_TYPE*) array_data(array); -} -%typemap(freearg) - (DIM_TYPE DIM1, DIM_TYPE DIM2, DIM_TYPE DIM3, DATA_TYPE* IN_ARRAY3) -{ - if (is_new_object$argnum && array$argnum) - { Py_DECREF(array$argnum); } -} - -/* Typemap suite for (DATA_TYPE* IN_FARRAY3, DIM_TYPE DIM1, DIM_TYPE DIM2, - * DIM_TYPE DIM3) - */ -%typecheck(SWIG_TYPECHECK_DOUBLE_ARRAY, - fragment="NumPy_Macros") - (DATA_TYPE* IN_FARRAY3, DIM_TYPE DIM1, DIM_TYPE DIM2, DIM_TYPE DIM3) -{ - $1 = is_array($input) || PySequence_Check($input); -} -%typemap(in, - fragment="NumPy_Fragments") - (DATA_TYPE* IN_FARRAY3, DIM_TYPE DIM1, DIM_TYPE DIM2, DIM_TYPE DIM3) - (PyArrayObject* array=NULL, int is_new_object=0) -{ - npy_intp size[3] = { -1, -1, -1 }; - array = obj_to_array_fortran_force_conversion($input, DATA_TYPECODE, - &is_new_object); - if (!array || !require_dimensions(array, 3) || - !require_size(array, size, 3) | !require_fortran(array)) SWIG_fail; - $1 = (DATA_TYPE*) array_data(array); - $2 = (DIM_TYPE) array_size(array,0); - $3 = (DIM_TYPE) array_size(array,1); - $4 = (DIM_TYPE) array_size(array,2); -} -%typemap(freearg) - (DATA_TYPE* IN_FARRAY3, DIM_TYPE DIM1, DIM_TYPE DIM2, DIM_TYPE DIM3) -{ - if (is_new_object$argnum && array$argnum) - { Py_DECREF(array$argnum); } -} - -/* Typemap suite for (DIM_TYPE DIM1, DIM_TYPE DIM2, DIM_TYPE DIM3, - * DATA_TYPE* IN_FARRAY3) - */ -%typecheck(SWIG_TYPECHECK_DOUBLE_ARRAY, - fragment="NumPy_Macros") - (DIM_TYPE DIM1, DIM_TYPE DIM2, DIM_TYPE DIM3, DATA_TYPE* IN_FARRAY3) -{ - $1 = is_array($input) || PySequence_Check($input); -} -%typemap(in, - fragment="NumPy_Fragments") - (DIM_TYPE DIM1, DIM_TYPE DIM2, DIM_TYPE DIM3, DATA_TYPE* IN_FARRAY3) - (PyArrayObject* array=NULL, int is_new_object=0) -{ - npy_intp size[3] = { -1, -1, -1 }; - array = obj_to_array_contiguous_force_conversion($input, DATA_TYPECODE, - &is_new_object); - if (!array || !require_dimensions(array, 3) || - !require_size(array, size, 3) || !require_fortran(array)) SWIG_fail; - $1 = (DIM_TYPE) array_size(array,0); - $2 = (DIM_TYPE) 
array_size(array,1); - $3 = (DIM_TYPE) array_size(array,2); - $4 = (DATA_TYPE*) array_data(array); -} -%typemap(freearg) - (DIM_TYPE DIM1, DIM_TYPE DIM2, DIM_TYPE DIM3, DATA_TYPE* IN_FARRAY3) -{ - if (is_new_object$argnum && array$argnum) - { Py_DECREF(array$argnum); } -} - -/***************************/ -/* In-Place Array Typemaps */ -/***************************/ - -/* Typemap suite for (DATA_TYPE INPLACE_ARRAY1[ANY]) - */ -%typecheck(SWIG_TYPECHECK_DOUBLE_ARRAY, - fragment="NumPy_Macros") - (DATA_TYPE INPLACE_ARRAY1[ANY]) -{ - $1 = is_array($input) && PyArray_EquivTypenums(array_type($input), - DATA_TYPECODE); -} -%typemap(in, - fragment="NumPy_Fragments") - (DATA_TYPE INPLACE_ARRAY1[ANY]) - (PyArrayObject* array=NULL) -{ - npy_intp size[1] = { $1_dim0 }; - array = obj_to_array_no_conversion($input, DATA_TYPECODE); - if (!array || !require_dimensions(array,1) || !require_size(array, size, 1) || - !require_contiguous(array) || !require_native(array)) SWIG_fail; - $1 = ($1_ltype) array_data(array); -} - -/* Typemap suite for (DATA_TYPE* INPLACE_ARRAY1) - */ -%typecheck(SWIG_TYPECHECK_DOUBLE_ARRAY, - fragment="NumPy_Macros") - (DATA_TYPE* INPLACE_ARRAY1) -{ - $1 = is_array($input) && PyArray_EquivTypenums(array_type($input), - DATA_TYPECODE); -} -%typemap(in, - fragment="NumPy_Fragments") - (DATA_TYPE* INPLACE_ARRAY1) - (PyArrayObject* array=NULL) -{ - array = obj_to_array_no_conversion($input, DATA_TYPECODE); - if (!array || !require_dimensions(array,1) || !require_contiguous(array) - || !require_native(array)) SWIG_fail; - $1 = (DATA_TYPE*) array_data(array); -} - -/* Typemap suite for (DATA_TYPE* INPLACE_ARRAY1, DIM_TYPE DIM1) - */ -%typecheck(SWIG_TYPECHECK_DOUBLE_ARRAY, - fragment="NumPy_Macros") - (DATA_TYPE* INPLACE_ARRAY1, DIM_TYPE DIM1) -{ - $1 = is_array($input) && PyArray_EquivTypenums(array_type($input), - DATA_TYPECODE); -} -%typemap(in, - fragment="NumPy_Fragments") - (DATA_TYPE* INPLACE_ARRAY1, DIM_TYPE DIM1) - (PyArrayObject* array=NULL, int i=1) -{ - array = obj_to_array_no_conversion($input, DATA_TYPECODE); - if (!array || !require_dimensions(array,1) || !require_contiguous(array) - || !require_native(array)) SWIG_fail; - $1 = (DATA_TYPE*) array_data(array); - $2 = 1; - for (i=0; i < array_numdims(array); ++i) $2 *= array_size(array,i); -} - -/* Typemap suite for (DIM_TYPE DIM1, DATA_TYPE* INPLACE_ARRAY1) - */ -%typecheck(SWIG_TYPECHECK_DOUBLE_ARRAY, - fragment="NumPy_Macros") - (DIM_TYPE DIM1, DATA_TYPE* INPLACE_ARRAY1) -{ - $1 = is_array($input) && PyArray_EquivTypenums(array_type($input), - DATA_TYPECODE); -} -%typemap(in, - fragment="NumPy_Fragments") - (DIM_TYPE DIM1, DATA_TYPE* INPLACE_ARRAY1) - (PyArrayObject* array=NULL, int i=0) -{ - array = obj_to_array_no_conversion($input, DATA_TYPECODE); - if (!array || !require_dimensions(array,1) || !require_contiguous(array) - || !require_native(array)) SWIG_fail; - $1 = 1; - for (i=0; i < array_numdims(array); ++i) $1 *= array_size(array,i); - $2 = (DATA_TYPE*) array_data(array); -} - -/* Typemap suite for (DATA_TYPE INPLACE_ARRAY2[ANY][ANY]) - */ -%typecheck(SWIG_TYPECHECK_DOUBLE_ARRAY, - fragment="NumPy_Macros") - (DATA_TYPE INPLACE_ARRAY2[ANY][ANY]) -{ - $1 = is_array($input) && PyArray_EquivTypenums(array_type($input), - DATA_TYPECODE); -} -%typemap(in, - fragment="NumPy_Fragments") - (DATA_TYPE INPLACE_ARRAY2[ANY][ANY]) - (PyArrayObject* array=NULL) -{ - npy_intp size[2] = { $1_dim0, $1_dim1 }; - array = obj_to_array_no_conversion($input, DATA_TYPECODE); - if (!array || !require_dimensions(array,2) || 
!require_size(array, size, 2) || - !require_contiguous(array) || !require_native(array)) SWIG_fail; - $1 = ($1_ltype) array_data(array); -} - -/* Typemap suite for (DATA_TYPE* INPLACE_ARRAY2, DIM_TYPE DIM1, DIM_TYPE DIM2) - */ -%typecheck(SWIG_TYPECHECK_DOUBLE_ARRAY, - fragment="NumPy_Macros") - (DATA_TYPE* INPLACE_ARRAY2, DIM_TYPE DIM1, DIM_TYPE DIM2) -{ - $1 = is_array($input) && PyArray_EquivTypenums(array_type($input), - DATA_TYPECODE); -} -%typemap(in, - fragment="NumPy_Fragments") - (DATA_TYPE* INPLACE_ARRAY2, DIM_TYPE DIM1, DIM_TYPE DIM2) - (PyArrayObject* array=NULL) -{ - array = obj_to_array_no_conversion($input, DATA_TYPECODE); - if (!array || !require_dimensions(array,2) || !require_contiguous(array) - || !require_native(array)) SWIG_fail; - $1 = (DATA_TYPE*) array_data(array); - $2 = (DIM_TYPE) array_size(array,0); - $3 = (DIM_TYPE) array_size(array,1); -} - -/* Typemap suite for (DIM_TYPE DIM1, DIM_TYPE DIM2, DATA_TYPE* INPLACE_ARRAY2) - */ -%typecheck(SWIG_TYPECHECK_DOUBLE_ARRAY, - fragment="NumPy_Macros") - (DIM_TYPE DIM1, DIM_TYPE DIM2, DATA_TYPE* INPLACE_ARRAY2) -{ - $1 = is_array($input) && PyArray_EquivTypenums(array_type($input), - DATA_TYPECODE); -} -%typemap(in, - fragment="NumPy_Fragments") - (DIM_TYPE DIM1, DIM_TYPE DIM2, DATA_TYPE* INPLACE_ARRAY2) - (PyArrayObject* array=NULL) -{ - array = obj_to_array_no_conversion($input, DATA_TYPECODE); - if (!array || !require_dimensions(array,2) || !require_contiguous(array) || - !require_native(array)) SWIG_fail; - $1 = (DIM_TYPE) array_size(array,0); - $2 = (DIM_TYPE) array_size(array,1); - $3 = (DATA_TYPE*) array_data(array); -} - -/* Typemap suite for (DATA_TYPE* INPLACE_FARRAY2, DIM_TYPE DIM1, DIM_TYPE DIM2) - */ -%typecheck(SWIG_TYPECHECK_DOUBLE_ARRAY, - fragment="NumPy_Macros") - (DATA_TYPE* INPLACE_FARRAY2, DIM_TYPE DIM1, DIM_TYPE DIM2) -{ - $1 = is_array($input) && PyArray_EquivTypenums(array_type($input), - DATA_TYPECODE); -} -%typemap(in, - fragment="NumPy_Fragments") - (DATA_TYPE* INPLACE_FARRAY2, DIM_TYPE DIM1, DIM_TYPE DIM2) - (PyArrayObject* array=NULL) -{ - array = obj_to_array_no_conversion($input, DATA_TYPECODE); - if (!array || !require_dimensions(array,2) || !require_contiguous(array) - || !require_native(array) || !require_fortran(array)) SWIG_fail; - $1 = (DATA_TYPE*) array_data(array); - $2 = (DIM_TYPE) array_size(array,0); - $3 = (DIM_TYPE) array_size(array,1); -} - -/* Typemap suite for (DIM_TYPE DIM1, DIM_TYPE DIM2, DATA_TYPE* INPLACE_FARRAY2) - */ -%typecheck(SWIG_TYPECHECK_DOUBLE_ARRAY, - fragment="NumPy_Macros") - (DIM_TYPE DIM1, DIM_TYPE DIM2, DATA_TYPE* INPLACE_FARRAY2) -{ - $1 = is_array($input) && PyArray_EquivTypenums(array_type($input), - DATA_TYPECODE); -} -%typemap(in, - fragment="NumPy_Fragments") - (DIM_TYPE DIM1, DIM_TYPE DIM2, DATA_TYPE* INPLACE_FARRAY2) - (PyArrayObject* array=NULL) -{ - array = obj_to_array_no_conversion($input, DATA_TYPECODE); - if (!array || !require_dimensions(array,2) || !require_contiguous(array) || - !require_native(array) || !require_fortran(array)) SWIG_fail; - $1 = (DIM_TYPE) array_size(array,0); - $2 = (DIM_TYPE) array_size(array,1); - $3 = (DATA_TYPE*) array_data(array); -} - -/* Typemap suite for (DATA_TYPE INPLACE_ARRAY3[ANY][ANY][ANY]) - */ -%typecheck(SWIG_TYPECHECK_DOUBLE_ARRAY, - fragment="NumPy_Macros") - (DATA_TYPE INPLACE_ARRAY3[ANY][ANY][ANY]) -{ - $1 = is_array($input) && PyArray_EquivTypenums(array_type($input), - DATA_TYPECODE); -} -%typemap(in, - fragment="NumPy_Fragments") - (DATA_TYPE INPLACE_ARRAY3[ANY][ANY][ANY]) - (PyArrayObject* 
array=NULL) -{ - npy_intp size[3] = { $1_dim0, $1_dim1, $1_dim2 }; - array = obj_to_array_no_conversion($input, DATA_TYPECODE); - if (!array || !require_dimensions(array,3) || !require_size(array, size, 3) || - !require_contiguous(array) || !require_native(array)) SWIG_fail; - $1 = ($1_ltype) array_data(array); -} - -/* Typemap suite for (DATA_TYPE* INPLACE_ARRAY3, DIM_TYPE DIM1, DIM_TYPE DIM2, - * DIM_TYPE DIM3) - */ -%typecheck(SWIG_TYPECHECK_DOUBLE_ARRAY, - fragment="NumPy_Macros") - (DATA_TYPE* INPLACE_ARRAY3, DIM_TYPE DIM1, DIM_TYPE DIM2, DIM_TYPE DIM3) -{ - $1 = is_array($input) && PyArray_EquivTypenums(array_type($input), - DATA_TYPECODE); -} -%typemap(in, - fragment="NumPy_Fragments") - (DATA_TYPE* INPLACE_ARRAY3, DIM_TYPE DIM1, DIM_TYPE DIM2, DIM_TYPE DIM3) - (PyArrayObject* array=NULL) -{ - array = obj_to_array_no_conversion($input, DATA_TYPECODE); - if (!array || !require_dimensions(array,3) || !require_contiguous(array) || - !require_native(array)) SWIG_fail; - $1 = (DATA_TYPE*) array_data(array); - $2 = (DIM_TYPE) array_size(array,0); - $3 = (DIM_TYPE) array_size(array,1); - $4 = (DIM_TYPE) array_size(array,2); -} - -/* Typemap suite for (DIM_TYPE DIM1, DIM_TYPE DIM2, DIM_TYPE DIM3, - * DATA_TYPE* INPLACE_ARRAY3) - */ -%typecheck(SWIG_TYPECHECK_DOUBLE_ARRAY, - fragment="NumPy_Macros") - (DIM_TYPE DIM1, DIM_TYPE DIM2, DIM_TYPE DIM3, DATA_TYPE* INPLACE_ARRAY3) -{ - $1 = is_array($input) && PyArray_EquivTypenums(array_type($input), - DATA_TYPECODE); -} -%typemap(in, - fragment="NumPy_Fragments") - (DIM_TYPE DIM1, DIM_TYPE DIM2, DIM_TYPE DIM3, DATA_TYPE* INPLACE_ARRAY3) - (PyArrayObject* array=NULL) -{ - array = obj_to_array_no_conversion($input, DATA_TYPECODE); - if (!array || !require_dimensions(array,3) || !require_contiguous(array) - || !require_native(array)) SWIG_fail; - $1 = (DIM_TYPE) array_size(array,0); - $2 = (DIM_TYPE) array_size(array,1); - $3 = (DIM_TYPE) array_size(array,2); - $4 = (DATA_TYPE*) array_data(array); -} - -/* Typemap suite for (DATA_TYPE* INPLACE_FARRAY3, DIM_TYPE DIM1, DIM_TYPE DIM2, - * DIM_TYPE DIM3) - */ -%typecheck(SWIG_TYPECHECK_DOUBLE_ARRAY, - fragment="NumPy_Macros") - (DATA_TYPE* INPLACE_FARRAY3, DIM_TYPE DIM1, DIM_TYPE DIM2, DIM_TYPE DIM3) -{ - $1 = is_array($input) && PyArray_EquivTypenums(array_type($input), - DATA_TYPECODE); -} -%typemap(in, - fragment="NumPy_Fragments") - (DATA_TYPE* INPLACE_FARRAY3, DIM_TYPE DIM1, DIM_TYPE DIM2, DIM_TYPE DIM3) - (PyArrayObject* array=NULL) -{ - array = obj_to_array_no_conversion($input, DATA_TYPECODE); - if (!array || !require_dimensions(array,3) || !require_contiguous(array) || - !require_native(array) || !require_fortran(array)) SWIG_fail; - $1 = (DATA_TYPE*) array_data(array); - $2 = (DIM_TYPE) array_size(array,0); - $3 = (DIM_TYPE) array_size(array,1); - $4 = (DIM_TYPE) array_size(array,2); -} - -/* Typemap suite for (DIM_TYPE DIM1, DIM_TYPE DIM2, DIM_TYPE DIM3, - * DATA_TYPE* INPLACE_FARRAY3) - */ -%typecheck(SWIG_TYPECHECK_DOUBLE_ARRAY, - fragment="NumPy_Macros") - (DIM_TYPE DIM1, DIM_TYPE DIM2, DIM_TYPE DIM3, DATA_TYPE* INPLACE_FARRAY3) -{ - $1 = is_array($input) && PyArray_EquivTypenums(array_type($input), - DATA_TYPECODE); -} -%typemap(in, - fragment="NumPy_Fragments") - (DIM_TYPE DIM1, DIM_TYPE DIM2, DIM_TYPE DIM3, DATA_TYPE* INPLACE_FARRAY3) - (PyArrayObject* array=NULL) -{ - array = obj_to_array_no_conversion($input, DATA_TYPECODE); - if (!array || !require_dimensions(array,3) || !require_contiguous(array) - || !require_native(array) || !require_fortran(array)) SWIG_fail; - $1 = (DIM_TYPE) 
array_size(array,0); - $2 = (DIM_TYPE) array_size(array,1); - $3 = (DIM_TYPE) array_size(array,2); - $4 = (DATA_TYPE*) array_data(array); -} - -/*************************/ -/* Argout Array Typemaps */ -/*************************/ - -/* Typemap suite for (DATA_TYPE ARGOUT_ARRAY1[ANY]) - */ -%typemap(in,numinputs=0, - fragment="NumPy_Backward_Compatibility,NumPy_Macros") - (DATA_TYPE ARGOUT_ARRAY1[ANY]) - (PyObject * array = NULL) -{ - npy_intp dims[1] = { $1_dim0 }; - array = PyArray_SimpleNew(1, dims, DATA_TYPECODE); - if (!array) SWIG_fail; - $1 = ($1_ltype) array_data(array); -} -%typemap(argout) - (DATA_TYPE ARGOUT_ARRAY1[ANY]) -{ - $result = SWIG_Python_AppendOutput($result,array$argnum); -} - -/* Typemap suite for (DATA_TYPE* ARGOUT_ARRAY1, DIM_TYPE DIM1) - */ -%typemap(in,numinputs=1, - fragment="NumPy_Fragments") - (DATA_TYPE* ARGOUT_ARRAY1, DIM_TYPE DIM1) - (PyObject * array = NULL) -{ - npy_intp dims[1]; - if (!PyInt_Check($input)) - { - const char* typestring = pytype_string($input); - PyErr_Format(PyExc_TypeError, - "Int dimension expected. '%s' given.", - typestring); - SWIG_fail; - } - $2 = (DIM_TYPE) PyInt_AsLong($input); - dims[0] = (npy_intp) $2; - array = PyArray_SimpleNew(1, dims, DATA_TYPECODE); - if (!array) SWIG_fail; - $1 = (DATA_TYPE*) array_data(array); -} -%typemap(argout) - (DATA_TYPE* ARGOUT_ARRAY1, DIM_TYPE DIM1) -{ - $result = SWIG_Python_AppendOutput($result,array$argnum); -} - -/* Typemap suite for (DIM_TYPE DIM1, DATA_TYPE* ARGOUT_ARRAY1) - */ -%typemap(in,numinputs=1, - fragment="NumPy_Fragments") - (DIM_TYPE DIM1, DATA_TYPE* ARGOUT_ARRAY1) - (PyObject * array = NULL) -{ - npy_intp dims[1]; - if (!PyInt_Check($input)) - { - const char* typestring = pytype_string($input); - PyErr_Format(PyExc_TypeError, - "Int dimension expected. 
'%s' given.", - typestring); - SWIG_fail; - } - $1 = (DIM_TYPE) PyInt_AsLong($input); - dims[0] = (npy_intp) $1; - array = PyArray_SimpleNew(1, dims, DATA_TYPECODE); - if (!array) SWIG_fail; - $2 = (DATA_TYPE*) array_data(array); -} -%typemap(argout) - (DIM_TYPE DIM1, DATA_TYPE* ARGOUT_ARRAY1) -{ - $result = SWIG_Python_AppendOutput($result,array$argnum); -} - -/* Typemap suite for (DATA_TYPE ARGOUT_ARRAY2[ANY][ANY]) - */ -%typemap(in,numinputs=0, - fragment="NumPy_Backward_Compatibility,NumPy_Macros") - (DATA_TYPE ARGOUT_ARRAY2[ANY][ANY]) - (PyObject * array = NULL) -{ - npy_intp dims[2] = { $1_dim0, $1_dim1 }; - array = PyArray_SimpleNew(2, dims, DATA_TYPECODE); - if (!array) SWIG_fail; - $1 = ($1_ltype) array_data(array); -} -%typemap(argout) - (DATA_TYPE ARGOUT_ARRAY2[ANY][ANY]) -{ - $result = SWIG_Python_AppendOutput($result,array$argnum); -} - -/* Typemap suite for (DATA_TYPE ARGOUT_ARRAY3[ANY][ANY][ANY]) - */ -%typemap(in,numinputs=0, - fragment="NumPy_Backward_Compatibility,NumPy_Macros") - (DATA_TYPE ARGOUT_ARRAY3[ANY][ANY][ANY]) - (PyObject * array = NULL) -{ - npy_intp dims[3] = { $1_dim0, $1_dim1, $1_dim2 }; - array = PyArray_SimpleNew(3, dims, DATA_TYPECODE); - if (!array) SWIG_fail; - $1 = ($1_ltype) array_data(array); -} -%typemap(argout) - (DATA_TYPE ARGOUT_ARRAY3[ANY][ANY][ANY]) -{ - $result = SWIG_Python_AppendOutput($result,array$argnum); -} - -/*****************************/ -/* Argoutview Array Typemaps */ -/*****************************/ - -/* Typemap suite for (DATA_TYPE** ARGOUTVIEW_ARRAY1, DIM_TYPE* DIM1) - */ -%typemap(in,numinputs=0) - (DATA_TYPE** ARGOUTVIEW_ARRAY1, DIM_TYPE* DIM1 ) - (DATA_TYPE* data_temp , DIM_TYPE dim_temp) -{ - $1 = &data_temp; - $2 = &dim_temp; -} -%typemap(argout, - fragment="NumPy_Backward_Compatibility") - (DATA_TYPE** ARGOUTVIEW_ARRAY1, DIM_TYPE* DIM1) -{ - npy_intp dims[1] = { *$2 }; - PyObject * array = PyArray_SimpleNewFromData(1, dims, DATA_TYPECODE, (void*)(*$1)); - if (!array) SWIG_fail; - $result = SWIG_Python_AppendOutput($result,array); -} - -/* Typemap suite for (DIM_TYPE* DIM1, DATA_TYPE** ARGOUTVIEW_ARRAY1) - */ -%typemap(in,numinputs=0) - (DIM_TYPE* DIM1 , DATA_TYPE** ARGOUTVIEW_ARRAY1) - (DIM_TYPE dim_temp, DATA_TYPE* data_temp ) -{ - $1 = &dim_temp; - $2 = &data_temp; -} -%typemap(argout, - fragment="NumPy_Backward_Compatibility") - (DIM_TYPE* DIM1, DATA_TYPE** ARGOUTVIEW_ARRAY1) -{ - npy_intp dims[1] = { *$1 }; - PyObject * array = PyArray_SimpleNewFromData(1, dims, DATA_TYPECODE, (void*)(*$2)); - if (!array) SWIG_fail; - $result = SWIG_Python_AppendOutput($result,array); -} - -/* Typemap suite for (DATA_TYPE** ARGOUTVIEW_ARRAY2, DIM_TYPE* DIM1, DIM_TYPE* DIM2) - */ -%typemap(in,numinputs=0) - (DATA_TYPE** ARGOUTVIEW_ARRAY2, DIM_TYPE* DIM1 , DIM_TYPE* DIM2 ) - (DATA_TYPE* data_temp , DIM_TYPE dim1_temp, DIM_TYPE dim2_temp) -{ - $1 = &data_temp; - $2 = &dim1_temp; - $3 = &dim2_temp; -} -%typemap(argout, - fragment="NumPy_Backward_Compatibility") - (DATA_TYPE** ARGOUTVIEW_ARRAY2, DIM_TYPE* DIM1, DIM_TYPE* DIM2) -{ - npy_intp dims[2] = { *$2, *$3 }; - PyObject * array = PyArray_SimpleNewFromData(2, dims, DATA_TYPECODE, (void*)(*$1)); - if (!array) SWIG_fail; - $result = SWIG_Python_AppendOutput($result,array); -} - -/* Typemap suite for (DIM_TYPE* DIM1, DIM_TYPE* DIM2, DATA_TYPE** ARGOUTVIEW_ARRAY2) - */ -%typemap(in,numinputs=0) - (DIM_TYPE* DIM1 , DIM_TYPE* DIM2 , DATA_TYPE** ARGOUTVIEW_ARRAY2) - (DIM_TYPE dim1_temp, DIM_TYPE dim2_temp, DATA_TYPE* data_temp ) -{ - $1 = &dim1_temp; - $2 = &dim2_temp; - $3 = &data_temp; -} 
-%typemap(argout, - fragment="NumPy_Backward_Compatibility") - (DIM_TYPE* DIM1, DIM_TYPE* DIM2, DATA_TYPE** ARGOUTVIEW_ARRAY2) -{ - npy_intp dims[2] = { *$1, *$2 }; - PyObject * array = PyArray_SimpleNewFromData(2, dims, DATA_TYPECODE, (void*)(*$3)); - if (!array) SWIG_fail; - $result = SWIG_Python_AppendOutput($result,array); -} - -/* Typemap suite for (DATA_TYPE** ARGOUTVIEW_FARRAY2, DIM_TYPE* DIM1, DIM_TYPE* DIM2) - */ -%typemap(in,numinputs=0) - (DATA_TYPE** ARGOUTVIEW_FARRAY2, DIM_TYPE* DIM1 , DIM_TYPE* DIM2 ) - (DATA_TYPE* data_temp , DIM_TYPE dim1_temp, DIM_TYPE dim2_temp) -{ - $1 = &data_temp; - $2 = &dim1_temp; - $3 = &dim2_temp; -} -%typemap(argout, - fragment="NumPy_Backward_Compatibility,NumPy_Array_Requirements") - (DATA_TYPE** ARGOUTVIEW_FARRAY2, DIM_TYPE* DIM1, DIM_TYPE* DIM2) -{ - npy_intp dims[2] = { *$2, *$3 }; - PyObject * obj = PyArray_SimpleNewFromData(2, dims, DATA_TYPECODE, (void*)(*$1)); - PyArrayObject * array = (PyArrayObject*) obj; - if (!array || !require_fortran(array)) SWIG_fail; - $result = SWIG_Python_AppendOutput($result,obj); -} - -/* Typemap suite for (DIM_TYPE* DIM1, DIM_TYPE* DIM2, DATA_TYPE** ARGOUTVIEW_FARRAY2) - */ -%typemap(in,numinputs=0) - (DIM_TYPE* DIM1 , DIM_TYPE* DIM2 , DATA_TYPE** ARGOUTVIEW_FARRAY2) - (DIM_TYPE dim1_temp, DIM_TYPE dim2_temp, DATA_TYPE* data_temp ) -{ - $1 = &dim1_temp; - $2 = &dim2_temp; - $3 = &data_temp; -} -%typemap(argout, - fragment="NumPy_Backward_Compatibility,NumPy_Array_Requirements") - (DIM_TYPE* DIM1, DIM_TYPE* DIM2, DATA_TYPE** ARGOUTVIEW_FARRAY2) -{ - npy_intp dims[2] = { *$1, *$2 }; - PyObject * obj = PyArray_SimpleNewFromData(2, dims, DATA_TYPECODE, (void*)(*$3)); - PyArrayObject * array = (PyArrayObject*) obj; - if (!array || !require_fortran(array)) SWIG_fail; - $result = SWIG_Python_AppendOutput($result,obj); -} - -/* Typemap suite for (DATA_TYPE** ARGOUTVIEW_ARRAY3, DIM_TYPE* DIM1, DIM_TYPE* DIM2, - DIM_TYPE* DIM3) - */ -%typemap(in,numinputs=0) - (DATA_TYPE** ARGOUTVIEW_ARRAY3, DIM_TYPE* DIM1, DIM_TYPE* DIM2, DIM_TYPE* DIM3) - (DATA_TYPE* data_temp, DIM_TYPE dim1_temp, DIM_TYPE dim2_temp, DIM_TYPE dim3_temp) -{ - $1 = &data_temp; - $2 = &dim1_temp; - $3 = &dim2_temp; - $4 = &dim3_temp; -} -%typemap(argout, - fragment="NumPy_Backward_Compatibility") - (DATA_TYPE** ARGOUTVIEW_ARRAY3, DIM_TYPE* DIM1, DIM_TYPE* DIM2, DIM_TYPE* DIM3) -{ - npy_intp dims[3] = { *$2, *$3, *$4 }; - PyObject * array = PyArray_SimpleNewFromData(3, dims, DATA_TYPECODE, (void*)(*$1)); - if (!array) SWIG_fail; - $result = SWIG_Python_AppendOutput($result,array); -} - -/* Typemap suite for (DIM_TYPE* DIM1, DIM_TYPE* DIM2, DIM_TYPE* DIM3, - DATA_TYPE** ARGOUTVIEW_ARRAY3) - */ -%typemap(in,numinputs=0) - (DIM_TYPE* DIM1, DIM_TYPE* DIM2, DIM_TYPE* DIM3, DATA_TYPE** ARGOUTVIEW_ARRAY3) - (DIM_TYPE dim1_temp, DIM_TYPE dim2_temp, DIM_TYPE dim3_temp, DATA_TYPE* data_temp) -{ - $1 = &dim1_temp; - $2 = &dim2_temp; - $3 = &dim3_temp; - $4 = &data_temp; -} -%typemap(argout, - fragment="NumPy_Backward_Compatibility") - (DIM_TYPE* DIM1, DIM_TYPE* DIM2, DIM_TYPE* DIM3, DATA_TYPE** ARGOUTVIEW_ARRAY3) -{ - npy_intp dims[3] = { *$1, *$2, *$3 }; - PyObject * array = PyArray_SimpleNewFromData(3, dims, DATA_TYPECODE, (void*)(*$3)); - if (!array) SWIG_fail; - $result = SWIG_Python_AppendOutput($result,array); -} - -/* Typemap suite for (DATA_TYPE** ARGOUTVIEW_FARRAY3, DIM_TYPE* DIM1, DIM_TYPE* DIM2, - DIM_TYPE* DIM3) - */ -%typemap(in,numinputs=0) - (DATA_TYPE** ARGOUTVIEW_FARRAY3, DIM_TYPE* DIM1, DIM_TYPE* DIM2, DIM_TYPE* DIM3) - (DATA_TYPE* data_temp, 
DIM_TYPE dim1_temp, DIM_TYPE dim2_temp, DIM_TYPE dim3_temp) -{ - $1 = &data_temp; - $2 = &dim1_temp; - $3 = &dim2_temp; - $4 = &dim3_temp; -} -%typemap(argout, - fragment="NumPy_Backward_Compatibility,NumPy_Array_Requirements") - (DATA_TYPE** ARGOUTVIEW_FARRAY3, DIM_TYPE* DIM1, DIM_TYPE* DIM2, DIM_TYPE* DIM3) -{ - npy_intp dims[3] = { *$2, *$3, *$4 }; - PyObject * obj = PyArray_SimpleNewFromData(3, dims, DATA_TYPECODE, (void*)(*$1)); - PyArrayObject * array = (PyArrayObject*) obj; - if (!array || require_fortran(array)) SWIG_fail; - $result = SWIG_Python_AppendOutput($result,obj); -} - -/* Typemap suite for (DIM_TYPE* DIM1, DIM_TYPE* DIM2, DIM_TYPE* DIM3, - DATA_TYPE** ARGOUTVIEW_FARRAY3) - */ -%typemap(in,numinputs=0) - (DIM_TYPE* DIM1, DIM_TYPE* DIM2, DIM_TYPE* DIM3, DATA_TYPE** ARGOUTVIEW_FARRAY3) - (DIM_TYPE dim1_temp, DIM_TYPE dim2_temp, DIM_TYPE dim3_temp, DATA_TYPE* data_temp) -{ - $1 = &dim1_temp; - $2 = &dim2_temp; - $3 = &dim3_temp; - $4 = &data_temp; -} -%typemap(argout, - fragment="NumPy_Backward_Compatibility,NumPy_Array_Requirements") - (DIM_TYPE* DIM1, DIM_TYPE* DIM2, DIM_TYPE* DIM3, DATA_TYPE** ARGOUTVIEW_FARRAY3) -{ - npy_intp dims[3] = { *$1, *$2, *$3 }; - PyObject * obj = PyArray_SimpleNewFromData(3, dims, DATA_TYPECODE, (void*)(*$3)); - PyArrayObject * array = (PyArrayObject*) obj; - if (!array || require_fortran(array)) SWIG_fail; - $result = SWIG_Python_AppendOutput($result,obj); -} - -%enddef /* %numpy_typemaps() macro */ -/* *************************************************************** */ - -/* Concrete instances of the %numpy_typemaps() macro: Each invocation - * below applies all of the typemaps above to the specified data type. - */ -%numpy_typemaps(signed char , NPY_BYTE , int) -%numpy_typemaps(unsigned char , NPY_UBYTE , int) -%numpy_typemaps(short , NPY_SHORT , int) -%numpy_typemaps(unsigned short , NPY_USHORT , int) -%numpy_typemaps(int , NPY_INT , int) -%numpy_typemaps(unsigned int , NPY_UINT , int) -%numpy_typemaps(long , NPY_LONG , int) -%numpy_typemaps(unsigned long , NPY_ULONG , int) -%numpy_typemaps(long long , NPY_LONGLONG , int) -%numpy_typemaps(unsigned long long, NPY_ULONGLONG, int) -%numpy_typemaps(float , NPY_FLOAT , int) -%numpy_typemaps(double , NPY_DOUBLE , int) - -/* *************************************************************** - * The follow macro expansion does not work, because C++ bool is 4 - * bytes and NPY_BOOL is 1 byte - * - * %numpy_typemaps(bool, NPY_BOOL, int) - */ - -/* *************************************************************** - * On my Mac, I get the following warning for this macro expansion: - * 'swig/python detected a memory leak of type 'long double *', no destructor found.' 
- *
- * %numpy_typemaps(long double, NPY_LONGDOUBLE, int)
- */
-
-/* ***************************************************************
- * Swig complains about a syntax error for the following macro
- * expansions:
- *
- * %numpy_typemaps(complex float, NPY_CFLOAT , int)
- *
- * %numpy_typemaps(complex double, NPY_CDOUBLE, int)
- *
- * %numpy_typemaps(complex long double, NPY_CLONGDOUBLE, int)
- */
-
-#endif /* SWIGPYTHON */
diff -Nru lilv-0.24.4~dfsg0/bindings/python/lilv.py lilv-0.24.6/bindings/python/lilv.py
--- lilv-0.24.4~dfsg0/bindings/python/lilv.py 2017-01-04 17:29:52.000000000 +0000
+++ lilv-0.24.6/bindings/python/lilv.py 2019-11-09 19:08:55.000000000 +0000
@@ -1,246 +1,130 @@
 """Lilv Python interface"""
-__author__ = "David Robillard"
-__copyright__ = "Copyright 2016 David Robillard"
-__license__ = "ISC"
-__version__ = "0.22.1"
+__author__ = "David Robillard"
+__copyright__ = "Copyright 2016-2019 David Robillard"
+__license__ = "ISC"
+__version__ = "0.24.5"
 __maintainer__ = "David Robillard"
-__email__ = "d@drobilla.net"
-__status__ = "Production"
+__email__ = "d@drobilla.net"
+__status__ = "Production"
 
-import ctypes
-import os
 import sys
 
 from ctypes import Structure, CDLL, POINTER, CFUNCTYPE
 from ctypes import c_bool, c_double, c_float, c_int, c_size_t, c_uint, c_uint32
 from ctypes import c_char, c_char_p, c_void_p
-from ctypes import byref
+from ctypes import byref, cast
 
-# Load lilv library
+# Option constants
+OPTION_FILTER_LANG = "http://drobilla.net/ns/lilv#filter-lang"
+OPTION_DYN_MANIFEST = "http://drobilla.net/ns/lilv#dyn-manifest"
 
-_lib = CDLL("liblilv-0.so")
 
-# Set namespaced aliases for all lilv functions
+class _LilvLib:
+    """Object that represents the liblilv C library"""
 
-class String(str):
-    # Wrapper for string parameters to pass as raw C UTF-8 strings
-    def from_param(cls, obj):
-        return obj.encode('utf-8')
+    def __init__(self):
+        if sys.platform == "darwin":
+            self.lib = CDLL("liblilv-0.dylib")
+        elif sys.platform == "win32":
+            self.lib = CDLL("lilv-0.dll")
+        else:
+            self.lib = CDLL("liblilv-0.so")
+
+
+# Load lilv C library and define library global (which is populated below)
+c = _LilvLib()
 
-    from_param = classmethod(from_param)
 
 def _as_uri(obj):
+    """Utility function for converting some object into a URI node"""
     if type(obj) in [Plugin, PluginClass, UI]:
         return obj.get_uri()
     else:
-        return obj
+        assert type(obj) == Node
+        assert obj.node
+        return Node(obj.world, c.node_duplicate(obj.node))
 
-free = _lib.lilv_free
-# uri_to_path = _lib.lilv_uri_to_path
-file_uri_parse = _lib.lilv_file_uri_parse
-new_uri = _lib.lilv_new_uri
-new_file_uri = _lib.lilv_new_file_uri
-new_string = _lib.lilv_new_string
-new_int = _lib.lilv_new_int
-new_float = _lib.lilv_new_float
-new_bool = _lib.lilv_new_bool
-node_free = _lib.lilv_node_free
-node_duplicate = _lib.lilv_node_duplicate
-node_equals = _lib.lilv_node_equals
-node_get_turtle_token = _lib.lilv_node_get_turtle_token
-node_is_uri = _lib.lilv_node_is_uri
-node_as_uri = _lib.lilv_node_as_uri
-node_is_blank = _lib.lilv_node_is_blank
-node_as_blank = _lib.lilv_node_as_blank
-node_is_literal = _lib.lilv_node_is_literal
-node_is_string = _lib.lilv_node_is_string
-node_as_string = _lib.lilv_node_as_string
-node_get_path = _lib.lilv_node_get_path
-node_is_float = _lib.lilv_node_is_float
-node_as_float = _lib.lilv_node_as_float
-node_is_int = _lib.lilv_node_is_int
-node_as_int = _lib.lilv_node_as_int
-node_is_bool = _lib.lilv_node_is_bool
-node_as_bool = _lib.lilv_node_as_bool
-plugin_classes_free = _lib.lilv_plugin_classes_free
-plugin_classes_size = _lib.lilv_plugin_classes_size -plugin_classes_begin = _lib.lilv_plugin_classes_begin -plugin_classes_get = _lib.lilv_plugin_classes_get -plugin_classes_next = _lib.lilv_plugin_classes_next -plugin_classes_is_end = _lib.lilv_plugin_classes_is_end -plugin_classes_get_by_uri = _lib.lilv_plugin_classes_get_by_uri -scale_points_free = _lib.lilv_scale_points_free -scale_points_size = _lib.lilv_scale_points_size -scale_points_begin = _lib.lilv_scale_points_begin -scale_points_get = _lib.lilv_scale_points_get -scale_points_next = _lib.lilv_scale_points_next -scale_points_is_end = _lib.lilv_scale_points_is_end -uis_free = _lib.lilv_uis_free -uis_size = _lib.lilv_uis_size -uis_begin = _lib.lilv_uis_begin -uis_get = _lib.lilv_uis_get -uis_next = _lib.lilv_uis_next -uis_is_end = _lib.lilv_uis_is_end -uis_get_by_uri = _lib.lilv_uis_get_by_uri -nodes_free = _lib.lilv_nodes_free -nodes_size = _lib.lilv_nodes_size -nodes_begin = _lib.lilv_nodes_begin -nodes_get = _lib.lilv_nodes_get -nodes_next = _lib.lilv_nodes_next -nodes_is_end = _lib.lilv_nodes_is_end -nodes_get_first = _lib.lilv_nodes_get_first -nodes_contains = _lib.lilv_nodes_contains -nodes_merge = _lib.lilv_nodes_merge -plugins_size = _lib.lilv_plugins_size -plugins_begin = _lib.lilv_plugins_begin -plugins_get = _lib.lilv_plugins_get -plugins_next = _lib.lilv_plugins_next -plugins_is_end = _lib.lilv_plugins_is_end -plugins_get_by_uri = _lib.lilv_plugins_get_by_uri -world_new = _lib.lilv_world_new -world_set_option = _lib.lilv_world_set_option -world_free = _lib.lilv_world_free -world_load_all = _lib.lilv_world_load_all -world_load_bundle = _lib.lilv_world_load_bundle -world_load_specifications = _lib.lilv_world_load_specifications -world_load_plugin_classes = _lib.lilv_world_load_plugin_classes -world_unload_bundle = _lib.lilv_world_unload_bundle -world_load_resource = _lib.lilv_world_load_resource -world_unload_resource = _lib.lilv_world_unload_resource -world_get_plugin_class = _lib.lilv_world_get_plugin_class -world_get_plugin_classes = _lib.lilv_world_get_plugin_classes -world_get_all_plugins = _lib.lilv_world_get_all_plugins -world_find_nodes = _lib.lilv_world_find_nodes -world_get = _lib.lilv_world_get -world_ask = _lib.lilv_world_ask -plugin_verify = _lib.lilv_plugin_verify -plugin_get_uri = _lib.lilv_plugin_get_uri -plugin_get_bundle_uri = _lib.lilv_plugin_get_bundle_uri -plugin_get_data_uris = _lib.lilv_plugin_get_data_uris -plugin_get_library_uri = _lib.lilv_plugin_get_library_uri -plugin_get_name = _lib.lilv_plugin_get_name -plugin_get_class = _lib.lilv_plugin_get_class -plugin_get_value = _lib.lilv_plugin_get_value -plugin_has_feature = _lib.lilv_plugin_has_feature -plugin_get_supported_features = _lib.lilv_plugin_get_supported_features -plugin_get_required_features = _lib.lilv_plugin_get_required_features -plugin_get_optional_features = _lib.lilv_plugin_get_optional_features -plugin_has_extension_data = _lib.lilv_plugin_has_extension_data -plugin_get_extension_data = _lib.lilv_plugin_get_extension_data -plugin_get_num_ports = _lib.lilv_plugin_get_num_ports -plugin_get_port_ranges_float = _lib.lilv_plugin_get_port_ranges_float -plugin_has_latency = _lib.lilv_plugin_has_latency -plugin_get_latency_port_index = _lib.lilv_plugin_get_latency_port_index -plugin_get_port_by_index = _lib.lilv_plugin_get_port_by_index -plugin_get_port_by_symbol = _lib.lilv_plugin_get_port_by_symbol -plugin_get_port_by_designation = _lib.lilv_plugin_get_port_by_designation -plugin_get_project = _lib.lilv_plugin_get_project 
-plugin_get_author_name = _lib.lilv_plugin_get_author_name -plugin_get_author_email = _lib.lilv_plugin_get_author_email -plugin_get_author_homepage = _lib.lilv_plugin_get_author_homepage -plugin_is_replaced = _lib.lilv_plugin_is_replaced -plugin_get_related = _lib.lilv_plugin_get_related -port_get_node = _lib.lilv_port_get_node -port_get_value = _lib.lilv_port_get_value -port_get = _lib.lilv_port_get -port_get_properties = _lib.lilv_port_get_properties -port_has_property = _lib.lilv_port_has_property -port_supports_event = _lib.lilv_port_supports_event -port_get_index = _lib.lilv_port_get_index -port_get_symbol = _lib.lilv_port_get_symbol -port_get_name = _lib.lilv_port_get_name -port_get_classes = _lib.lilv_port_get_classes -port_is_a = _lib.lilv_port_is_a -port_get_range = _lib.lilv_port_get_range -port_get_scale_points = _lib.lilv_port_get_scale_points -state_new_from_world = _lib.lilv_state_new_from_world -state_new_from_file = _lib.lilv_state_new_from_file -state_new_from_string = _lib.lilv_state_new_from_string -state_new_from_instance = _lib.lilv_state_new_from_instance -state_free = _lib.lilv_state_free -state_equals = _lib.lilv_state_equals -state_get_num_properties = _lib.lilv_state_get_num_properties -state_get_plugin_uri = _lib.lilv_state_get_plugin_uri -state_get_uri = _lib.lilv_state_get_uri -state_get_label = _lib.lilv_state_get_label -state_set_label = _lib.lilv_state_set_label -state_set_metadata = _lib.lilv_state_set_metadata -state_emit_port_values = _lib.lilv_state_emit_port_values -state_restore = _lib.lilv_state_restore -state_save = _lib.lilv_state_save -state_to_string = _lib.lilv_state_to_string -state_delete = _lib.lilv_state_delete -scale_point_get_label = _lib.lilv_scale_point_get_label -scale_point_get_value = _lib.lilv_scale_point_get_value -plugin_class_get_parent_uri = _lib.lilv_plugin_class_get_parent_uri -plugin_class_get_uri = _lib.lilv_plugin_class_get_uri -plugin_class_get_label = _lib.lilv_plugin_class_get_label -plugin_class_get_children = _lib.lilv_plugin_class_get_children -plugin_instantiate = _lib.lilv_plugin_instantiate -instance_free = _lib.lilv_instance_free -plugin_get_uis = _lib.lilv_plugin_get_uis -ui_get_uri = _lib.lilv_ui_get_uri -ui_get_classes = _lib.lilv_ui_get_classes -ui_is_a = _lib.lilv_ui_is_a -ui_is_supported = _lib.lilv_ui_is_supported -ui_get_bundle_uri = _lib.lilv_ui_get_bundle_uri -ui_get_binary_uri = _lib.lilv_ui_get_binary_uri -## LV2 types +# LV2 types -LV2_Handle = POINTER(None) -LV2_URID_Map_Handle = POINTER(None) +LV2_Handle = POINTER(None) +LV2_URID_Map_Handle = POINTER(None) LV2_URID_Unmap_Handle = POINTER(None) -LV2_URID = c_uint32 +LV2_URID = c_uint32 + class LV2_Feature(Structure): - __slots__ = [ 'URI', 'data' ] - _fields_ = [('URI', c_char_p), - ('data', POINTER(None))] + __slots__ = ["URI", "data"] + _fields_ = [("URI", c_char_p), ("data", POINTER(None))] + class LV2_Descriptor(Structure): - __slots__ = [ 'URI', - 'instantiate', - 'connect_port', - 'activate', - 'run', - 'deactivate', - 'cleanup', - 'extension_data' ] + __slots__ = [ + "URI", + "instantiate", + "connect_port", + "activate", + "run", + "deactivate", + "cleanup", + "extension_data", + ] + LV2_Descriptor._fields_ = [ - ('URI', c_char_p), - ('instantiate', CFUNCTYPE(LV2_Handle, POINTER(LV2_Descriptor), - c_double, c_char_p, POINTER(POINTER(LV2_Feature)))), - ('connect_port', CFUNCTYPE(None, LV2_Handle, c_uint32, POINTER(None))), - ('activate', CFUNCTYPE(None, LV2_Handle)), - ('run', CFUNCTYPE(None, LV2_Handle, c_uint32)), - ('deactivate', 
CFUNCTYPE(None, LV2_Handle)), - ('cleanup', CFUNCTYPE(None, LV2_Handle)), - ('extension_data', CFUNCTYPE(c_void_p, c_char_p)), + ("URI", c_char_p), + ( + "instantiate", + CFUNCTYPE( + LV2_Handle, + POINTER(LV2_Descriptor), + c_double, + c_char_p, + POINTER(POINTER(LV2_Feature)), + ), + ), + ("connect_port", CFUNCTYPE(None, LV2_Handle, c_uint32, POINTER(None))), + ("activate", CFUNCTYPE(None, LV2_Handle)), + ("run", CFUNCTYPE(None, LV2_Handle, c_uint32)), + ("deactivate", CFUNCTYPE(None, LV2_Handle)), + ("cleanup", CFUNCTYPE(None, LV2_Handle)), + ("extension_data", CFUNCTYPE(c_void_p, c_char_p)), ] + class LV2_URID_Map(Structure): - __slots__ = [ 'handle', 'map' ] - _fields_ = [ - ('handle', LV2_URID_Map_Handle), - ('map', CFUNCTYPE(LV2_URID, LV2_URID_Map_Handle, c_char_p)), + __slots__ = ["handle", "map"] + _fields_ = [ + ("handle", LV2_URID_Map_Handle), + ("map", CFUNCTYPE(LV2_URID, LV2_URID_Map_Handle, c_char_p)), ] + class LV2_URID_Unmap(Structure): - __slots__ = [ 'handle', 'unmap' ] - _fields_ = [ - ('handle', LV2_URID_Unmap_Handle), - ('unmap', CFUNCTYPE(c_char_p, LV2_URID_Unmap_Handle, LV2_URID)), + __slots__ = ["handle", "unmap"] + _fields_ = [ + ("handle", LV2_URID_Unmap_Handle), + ("unmap", CFUNCTYPE(c_char_p, LV2_URID_Unmap_Handle, LV2_URID)), ] + # Lilv types + class Plugin(Structure): """LV2 Plugin.""" + + @classmethod + def wrap(cls, world, plugin): + return Plugin(world, plugin) if world is not None and plugin else None + def __init__(self, world, plugin): - self.world = world + assert isinstance(world, World) + assert type(plugin) == POINTER(Plugin) + assert plugin + + self.world = world self.plugin = plugin def __eq__(self, other): @@ -249,21 +133,22 @@ def verify(self): """Check if `plugin` is valid. - This is not a rigorous validator, but can be used to reject some malformed - plugins that could cause bugs (e.g. plugins with missing required fields). - - Note that normal hosts do NOT need to use this - lilv does not - load invalid plugins into plugin lists. This is included for plugin - testing utilities, etc. + This is not a rigorous validator, but can be used to reject some + malformed plugins that could cause bugs (e.g. plugins with missing + required fields). + + Note that normal hosts do NOT need to use this - lilv does not load + invalid plugins into plugin lists. This is included for plugin testing + utilities, etc. """ - return plugin_verify(self.plugin) + return c.plugin_verify(self.plugin) def get_uri(self): """Get the URI of `plugin`. Any serialization that refers to plugins should refer to them by this. - Hosts SHOULD NOT save any filesystem paths, plugin indexes, etc. in saved - files pass save only the URI. + Hosts SHOULD NOT save any filesystem paths, plugin indexes, etc. in + saved files; save only the URI. The URI is a globally unique identifier for one specific plugin. Two plugins with the same URI are compatible in port signature, and should @@ -271,29 +156,33 @@ is upgraded in an incompatible way (e.g. if it has different ports), it MUST have a different URI than its predecessor. """ - return Node.wrap(node_duplicate(plugin_get_uri(self.plugin))) + return Node.wrap( + self.world, c.node_duplicate(c.plugin_get_uri(self.plugin)) + ) def get_bundle_uri(self): """Get the (resolvable) URI of the plugin's "main" bundle. - This returns the URI of the bundle where the plugin itself was found. Note - that the data for a plugin may be spread over many bundles, that is, - get_data_uris() may return URIs which are not within this bundle.
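As an aside on the identity rules above: the stable identifier for a plugin is its URI, while the bundle URI only records where the data was found. A minimal sketch of the distinction, assuming only that lilv and these Python bindings are installed:

    import lilv

    world = lilv.World()
    world.load_all()

    for plugin in world.get_all_plugins():
        # get_uri() is the identifier to persist; get_bundle_uri() is just
        # where this plugin's Turtle data happened to be installed.
        print(str(plugin.get_uri()), "from", str(plugin.get_bundle_uri()))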
- - Typical hosts should not need to use this function. - Note this always returns a fully qualified URI. If you want a local - filesystem path, use lilv.file_uri_parse(). - """ - return Node.wrap(node_duplicate(plugin_get_bundle_uri(self.plugin))) + This returns the URI of the bundle where the plugin itself was found. + Note that the data for a plugin may be spread over many bundles, that + is, get_data_uris() may return URIs which are not within this bundle. + + Typical hosts should not need to use this function. Note this always + returns a fully qualified URI. If you want a local filesystem path, + use lilv.file_uri_parse(). + """ + return Node.wrap( + self.world, c.node_duplicate(c.plugin_get_bundle_uri(self.plugin)) + ) def get_data_uris(self): """Get the (resolvable) URIs of the RDF data files that define a plugin. - Typical hosts should not need to use this function. - Note this always returns fully qualified URIs. If you want local - filesystem paths, use lilv.file_uri_parse(). + Typical hosts should not need to use this function. Note this always + returns fully qualified URIs. If you want local filesystem paths, use + lilv.file_uri_parse(). """ - return Nodes(plugin_get_data_uris(self.plugin)) + return Nodes(self.world, c.plugin_get_data_uris(self.plugin), False) def get_library_uri(self): """Get the (resolvable) URI of the shared library for `plugin`. @@ -301,7 +190,9 @@ Note this always returns a fully qualified URI. If you want a local filesystem path, use lilv.file_uri_parse(). """ - return Node.wrap(node_duplicate(plugin_get_library_uri(self.plugin))) + return Node.wrap( + self.world, c.node_duplicate(c.plugin_get_library_uri(self.plugin)) + ) def get_name(self): """Get the name of `plugin`. @@ -310,11 +201,11 @@ translated according to the current locale, this value MUST NOT be used as a plugin identifier (use the URI for that). """ - return Node.wrap(plugin_get_name(self.plugin)) + return Node.wrap(self.world, c.plugin_get_name(self.plugin)) def get_class(self): """Get the class this plugin belongs to (e.g. Filters).""" - return PluginClass(plugin_get_class(self.plugin)) + return PluginClass(self.world, c.plugin_get_class(self.plugin)) def get_value(self, predicate): """Get a value associated with the plugin in a plugin's data files. @@ -328,7 +219,9 @@ May return None if the property was not found, or if object(s) is not sensibly represented as a LilvNodes (e.g. blank nodes). """ - return Nodes(plugin_get_value(self.plugin, predicate.node)) + return Nodes( + self.world, c.plugin_get_value(self.plugin, predicate.node), True + ) def has_feature(self, feature_uri): """Return whether a feature is supported by a plugin. @@ -336,43 +229,50 @@ This will return true if the feature is an optional or required feature of the plugin. """ - return plugin_has_feature(self.plugin, feature_uri.node) + return c.plugin_has_feature(self.plugin, feature_uri.node) def get_supported_features(self): """Get the LV2 Features supported (required or optionally) by a plugin. A feature is "supported" by a plugin if it is required OR optional. - Since required features have special rules the host must obey, this function - probably shouldn't be used by normal hosts. Using get_optional_features() - and get_required_features() separately is best in most cases. - """ - return Nodes(plugin_get_supported_features(self.plugin)) + Since required features have special rules the host must obey, this + function probably shouldn't be used by normal hosts. 
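To make the required/optional distinction above concrete, a host can filter out plugins whose required features it does not implement. A sketch; the supported set below is a hypothetical host-defined list, and `world` is assumed from the earlier sketch:

    # Hypothetical URIs of the features this host implements.
    SUPPORTED_FEATURES = {"http://lv2plug.in/ns/ext/urid#map"}

    def can_use(plugin):
        # Every required feature must be understood; optional features
        # may simply be ignored, so they do not affect the decision.
        required = {str(f) for f in plugin.get_required_features()}
        return required.issubset(SUPPORTED_FEATURES)

    usable = [p for p in world.get_all_plugins() if can_use(p)]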
Using + get_optional_features() and get_required_features() separately is best + in most cases. + """ + return Nodes( + self.world, c.plugin_get_supported_features(self.plugin), True + ) def get_required_features(self): """Get the LV2 Features required by a plugin. - If a feature is required by a plugin, hosts MUST NOT use the plugin if they do not - understand (or are unable to support) that feature. + If a feature is required by a plugin, hosts MUST NOT use the plugin if + they do not understand (or are unable to support) that feature. - All values returned here MUST be return plugin_(self.plugin)ed to the plugin's instantiate method - (along with data, if necessary, as defined by the feature specification) - or plugin instantiation will fail. - """ - return Nodes(plugin_get_required_features(self.plugin)) + All values returned here MUST be passed to the plugin's + instantiate method (along with data, if necessary, as defined + by the feature specification) or plugin instantiation will fail. + """ + return Nodes( + self.world, c.plugin_get_required_features(self.plugin), True + ) def get_optional_features(self): """Get the LV2 Features optionally supported by a plugin. - Hosts MAY ignore optional plugin features for whatever reasons. Plugins - MUST operate (at least somewhat) if they are instantiated without being - passed optional features. + Hosts MAY ignore optional plugin features for whatever reasons. + Plugins MUST operate (at least somewhat) if they are instantiated + without being passed optional features. + """ + return Nodes( + self.world, c.plugin_get_optional_features(self.plugin), True + ) def has_extension_data(self, uri): - """Return whether or not a plugin provides a specific extension data.""" - return plugin_has_extension_data(self.plugin, uri.node) + """Return whether or not a plugin provides specific extension data.""" + return c.plugin_has_extension_data(self.plugin, uri.node) def get_extension_data(self): """Get a sequence of all extension data provided by a plugin. @@ -380,34 +280,19 @@ This can be used to find which URIs get_extension_data() will return a value for without instantiating the plugin. """ - return Nodes(plugin_get_extension_data(self.plugin)) + return Nodes( + self.world, c.plugin_get_extension_data(self.plugin), True + ) def get_num_ports(self): """Get the number of ports on this plugin.""" - return plugin_get_num_ports(self.plugin) - - # def get_port_ranges_float(self, min_values, max_values, def_values): - # """Get the port ranges (minimum, maximum and default values) for all ports. - - # `min_values`, `max_values` and `def_values` must either point to an array - # of N floats, where N is the value returned by get_num_ports() - # for this plugin, or None. The elements of the array will be set to the - # the minimum, maximum and default values of the ports on this plugin, - # with array index corresponding to port index. If a port doesn't have a - # minimum, maximum or default value, or the port's type is not float, the - # corresponding array element will be set to NAN. - - # This is a convenience method for the common case of getting the range of - # all float ports on a plugin, and may be significantly faster than - # repeated calls to Port.get_range().
- # """ - # plugin_get_port_ranges_float(self.plugin, min_values, max_values, def_values) + return c.plugin_get_num_ports(self.plugin) def get_num_ports_of_class(self, *args): - """Get the number of ports on this plugin that are members of some class(es).""" - args = list(map(lambda x: x.node, args)) - args += (None,) - return plugin_get_num_ports_of_class(self.plugin, *args) + """Get the number of ports of some class(es) on this plugin.""" + return c.plugin_get_num_ports_of_class( + self.plugin, *(list(map(lambda n: n.node, args)) + [None]) + ) def has_latency(self): """Return whether or not the plugin introduces (and reports) latency. @@ -415,18 +300,23 @@ The index of the latency port can be found with get_latency_port() ONLY if this function returns true. """ - return plugin_has_latency(self.plugin) + return c.plugin_has_latency(self.plugin) def get_latency_port_index(self): """Return the index of the plugin's latency port. Returns None if the plugin has no latency port. - Any plugin that introduces unwanted latency that should be compensated for - (by hosts with the ability/need) MUST provide this port, which is a control - rate output port that reports the latency for each cycle in frames. - """ - return plugin_get_latency_port_index(self.plugin) if self.has_latency() else None + Any plugin that introduces unwanted latency that should be compensated + for (by hosts with the ability/need) MUST provide this port, which is a + control rate output port that reports the latency for each cycle in + frames. + """ + return ( + c.plugin_get_latency_port_index(self.plugin) + if self.has_latency() + else None + ) def get_port(self, key): """Get a port on `plugin` by index or symbol.""" @@ -437,7 +327,8 @@ def get_port_by_index(self, index): """Get a port on `plugin` by `index`.""" - return Port.wrap(self, plugin_get_port_by_index(self.plugin, index)) + assert type(index) == int + return Port.wrap(self, c.plugin_get_port_by_index(self.plugin, index)) def get_port_by_symbol(self, symbol): """Get a port on `plugin` by `symbol`. @@ -445,24 +336,32 @@ Note this function is slower than get_port_by_index(), especially on plugins with a very large number of ports. """ + assert type(symbol) == str or isinstance(symbol, Node) if type(symbol) == str: symbol = self.world.new_string(symbol) - return Port.wrap(self, plugin_get_port_by_symbol(self.plugin, symbol.node)) + + assert isinstance(symbol, Node) + assert symbol.node is not None + return Port.wrap( + self, c.plugin_get_port_by_symbol(self.plugin, symbol.node) + ) def get_port_by_designation(self, port_class, designation): """Get a port on `plugin` by its lv2:designation. - The designation of a port describes the meaning, assignment, allocation or - role of the port, e.g. "left channel" or "gain". If found, the port with - matching `port_class` and `designation` is be returned, otherwise None is - returned. The `port_class` can be used to distinguish the input and output - ports for a particular designation. If `port_class` is None, any port with - the given designation will be returned. - """ - return Port.wrap(self, - plugin_get_port_by_designation(self.plugin, - port_class.node, - designation.node)) + The designation of a port describes the meaning, assignment, allocation + or role of the port, e.g. "left channel" or "gain". If found, the port + with matching `port_class` and `designation` is be returned, otherwise + None is returned. The `port_class` can be used to distinguish the + input and output ports for a particular designation. 
If `port_class` + is None, any port with the given designation will be returned. + """ + return Port.wrap( + self, + c.plugin_get_port_by_designation( + self.plugin, port_class.node, designation.node + ), + ) def get_project(self): """Get the project the plugin is a part of. @@ -470,28 +369,28 @@ More information about the project can be read via find_nodes(), typically using properties from DOAP (e.g. doap:name). """ - return Node.wrap(plugin_get_project(self.plugin)) + return Node.wrap(self.world, c.plugin_get_project(self.plugin)) def get_author_name(self): """Get the full name of the plugin's author. Returns None if author name is not present. """ - return Node.wrap(plugin_get_author_name(self.plugin)) + return Node.wrap(self.world, c.plugin_get_author_name(self.plugin)) def get_author_email(self): """Get the email address of the plugin's author. Returns None if author email address is not present. """ - return Node.wrap(plugin_get_author_email(self.plugin)) + return Node.wrap(self.world, c.plugin_get_author_email(self.plugin)) def get_author_homepage(self): """Get the address of the plugin author's home page. Returns None if author homepage is not present. """ - return Node.wrap(plugin_get_author_homepage(self.plugin)) + return Node.wrap(self.world, c.plugin_get_author_homepage(self.plugin)) def is_replaced(self): """Return true iff `plugin` has been replaced by another plugin. @@ -499,29 +398,41 @@ The plugin will still be usable, but hosts should hide them from their user interfaces to prevent users from using deprecated plugins. """ - return plugin_is_replaced(self.plugin) + return c.plugin_is_replaced(self.plugin) def get_related(self, resource_type): """Get the resources related to `plugin` with lv2:appliesTo. - Some plugin-related resources are not linked directly to the plugin with - rdfs:seeAlso and thus will not be automatically loaded along with the plugin - data (usually for performance reasons). All such resources of the given @c - type related to `plugin` can be accessed with this function. - - If `resource_type` is None, all such resources will be returned, regardless of type. - - To actually load the data for each returned resource, use world.load_resource(). - """ - return Nodes(plugin_get_related(self.plugin, resource_type)) + Some plugin-related resources are not linked directly to the plugin + with rdfs:seeAlso and thus will not be automatically loaded along with + the plugin data (usually for performance reasons). All such resources + of the given @c type related to `plugin` can be accessed with this + function. + + If `resource_type` is None, all such resources will be returned, + regardless of type. + + To actually load the data for each returned resource, use + world.load_resource(). + """ + return Nodes( + self.world, c.plugin_get_related(self.plugin, resource_type), True + ) def get_uis(self): """Get all UIs for `plugin`.""" - return UIs(plugin_get_uis(self.plugin)) + return UIs(self.world, c.plugin_get_uis(self.plugin)) + class PluginClass(Structure): """Plugin Class (type/category).""" - def __init__(self, plugin_class): + + def __init__(self, world, plugin_class): + assert isinstance(world, World) + assert type(plugin_class) == POINTER(PluginClass) + assert plugin_class + + self.world = world self.plugin_class = plugin_class def __str__(self): @@ -530,90 +441,133 @@ def get_parent_uri(self): """Get the URI of this class' superclass. - May return None if class has no parent. + May return None if class has no parent. 
""" - return Node.wrap(node_duplicate(plugin_class_get_parent_uri(self.plugin_class))) + return Node.wrap( + self.world, + c.node_duplicate(c.plugin_class_get_parent_uri(self.plugin_class)), + ) def get_uri(self): """Get the URI of this plugin class.""" - return Node.wrap(node_duplicate(plugin_class_get_uri(self.plugin_class))) + return Node.wrap( + self.world, + c.node_duplicate(c.plugin_class_get_uri(self.plugin_class)), + ) def get_label(self): """Get the label of this plugin class, ie "Oscillators".""" - return Node.wrap(node_duplicate(plugin_class_get_label(self.plugin_class))) + return Node.wrap( + self.world, + c.node_duplicate(c.plugin_class_get_label(self.plugin_class)), + ) def get_children(self): """Get the subclasses of this plugin class.""" - return PluginClasses(plugin_class_get_children(self.plugin_class)) + return PluginClasses( + self.world, c.plugin_class_get_children(self.plugin_class), True + ) + class Port(Structure): """Port on a Plugin.""" + @classmethod def wrap(cls, plugin, port): - return Port(plugin, port) if plugin and port else None + if plugin is not None and port: + return Port(plugin, port) + + return None def __init__(self, plugin, port): + assert isinstance(plugin, Plugin) + assert type(port) == POINTER(Port) + assert port + self.plugin = plugin - self.port = port + self.port = port def get_node(self): """Get the RDF node of `port`. Ports nodes may be may be URIs or blank nodes. """ - return Node.wrap(node_duplicate(port_get_node(self.plugin, self.port))) + return Node.wrap( + self.plugin.world, + c.node_duplicate(c.port_get_node(self.plugin, self.port)), + ) def get_value(self, predicate): """Port analog of Plugin.get_value().""" - return Nodes(port_get_value(self.plugin.plugin, self.port, predicate.node)) + return Nodes( + self.plugin.world, + c.port_get_value(self.plugin.plugin, self.port, predicate.node), + True, + ) def get(self, predicate): """Get a single property value of a port. - This is equivalent to lilv_nodes_get_first(lilv_port_get_value(...)) but is - simpler to use in the common case of only caring about one value. The - caller is responsible for freeing the returned node. - """ - return Node.wrap(port_get(self.plugin.plugin, self.port, predicate.node)) + This is equivalent to lilv_nodes_get_first(lilv_port_get_value(...)) + but is simpler to use in the common case of only caring about one + value. The caller is responsible for freeing the returned node. + """ + return Node.wrap( + self.plugin.world, + c.port_get(self.plugin.plugin, self.port, predicate.node), + ) def get_properties(self): """Return the LV2 port properties of a port.""" - return Nodes(port_get_properties(self.plugin.plugin, self.port)) + return Nodes( + self.plugin.world, + c.port_get_properties(self.plugin.plugin, self.port), + True, + ) def has_property(self, property_uri): """Return whether a port has a certain property.""" - return port_has_property(self.plugin.plugin, self.port, property_uri.node) + return c.port_has_property( + self.plugin.plugin, self.port, property_uri.node + ) def supports_event(self, event_type): """Return whether a port supports a certain event type. - More precisely, this returns true iff the port has an atom:supports or an - ev:supportsEvent property with `event_type` as the value. + More precisely, this returns true iff the port has an atom:supports or + an ev:supportsEvent property with `event_type` as the value. 
""" - return port_supports_event(self.plugin.plugin, self.port, event_type.node) + return c.port_supports_event( + self.plugin.plugin, self.port, event_type.node + ) def get_index(self): """Get the index of a port. - The index is only valid for the life of the plugin and may change between - versions. For a stable identifier, use the symbol. + The index is only valid for the life of the plugin and may change + between versions. For a stable identifier, use the symbol. """ - return port_get_index(self.plugin.plugin, self.port) + return c.port_get_index(self.plugin.plugin, self.port) def get_symbol(self): """Get the symbol of a port. The 'symbol' is a short string, a valid C identifier. """ - return Node.wrap(node_duplicate(port_get_symbol(self.plugin.plugin, self.port))) + return Node.wrap( + self.plugin.world, + c.node_duplicate(c.port_get_symbol(self.plugin.plugin, self.port)), + ) def get_name(self): """Get the name of a port. - This is guaranteed to return the untranslated name (the doap:name in the - data file without a language tag). + This is guaranteed to return the untranslated name (the doap:name in + the data file without a language tag). """ - return Node.wrap(port_get_name(self.plugin.plugin, self.port)) + return Node.wrap( + self.plugin.world, c.port_get_name(self.plugin.plugin, self.port) + ) def get_classes(self): """Get all the classes of a port. @@ -623,65 +577,108 @@ The returned list does not include lv2:Port, which is implied. Returned value is shared and must not be destroyed by caller. """ - return Nodes(port_get_classes(self.plugin.plugin, self.port)) + return Nodes( + self.plugin.world, + c.port_get_classes(self.plugin.plugin, self.port), + False, + ) def is_a(self, port_class): """Determine if a port is of a given class (input, output, audio, etc). - For convenience/performance/extensibility reasons, hosts are expected to - create a LilvNode for each port class they "care about". Well-known type - URI strings are defined (e.g. LILV_URI_INPUT_PORT) for convenience, but - this function is designed so that Lilv is usable with any port types - without requiring explicit support in Lilv. + For convenience/performance/extensibility reasons, hosts are expected + to create a LilvNode for each port class they "care about". Well-known + type URI strings are defined (e.g. LILV_URI_INPUT_PORT) for + convenience, but this function is designed so that Lilv is usable with + any port types without requiring explicit support in Lilv. """ - return port_is_a(self.plugin.plugin, self.port, port_class.node) + return c.port_is_a(self.plugin.plugin, self.port, port_class.node) def get_range(self): - """Return the default, minimum, and maximum values of a port as a tuple.""" + """Return the default, minimum, and maximum values of a port as a tuple. + """ pdef = POINTER(Node)() pmin = POINTER(Node)() pmax = POINTER(Node)() - port_get_range(self.plugin.plugin, self.port, byref(pdef), byref(pmin), byref(pmax)) - return (Node(pdef.contents) if pdef else None, - Node(pmin.contents) if pmin else None, - Node(pmax.contents) if pmax else None) + c.port_get_range( + self.plugin.plugin, + self.port, + byref(pdef), + byref(pmin), + byref(pmax), + ) + return ( + Node.wrap(self.plugin.world, pdef), + Node.wrap(self.plugin.world, pmin), + Node.wrap(self.plugin.world, pmax), + ) def get_scale_points(self): - """Get the scale points (enumeration values) of a port. + """Get a list of the scale points (enumeration values) of a port. This returns a collection of 'interesting' named values of a port (e.g. 
appropriate entries for a UI selector associated with this port). - Returned value may be None if `port` has no scale points. """ - return ScalePoints(port_get_scale_points(self.plugin.plugin, self.port)) + + cpoints = c.port_get_scale_points(self.plugin.plugin, self.port) + points = [] + it = c.scale_points_begin(cpoints) + while not c.scale_points_is_end(cpoints, it): + points += [ + ScalePoint(self.plugin.world, c.scale_points_get(cpoints, it)) + ] + it = c.scale_points_next(cpoints, it) + + c.scale_points_free(cpoints) + return points + class ScalePoint(Structure): """Scale point (detent).""" - def __init__(self, point): - self.point = point + + def __init__(self, world, point): + assert isinstance(world, World) + assert type(point) == POINTER(ScalePoint) + assert point + + self.label = Node.wrap( + world, c.node_duplicate(c.scale_point_get_label(point)) + ) + self.value = Node.wrap( + world, c.node_duplicate(c.scale_point_get_value(point)) + ) def get_label(self): """Get the label of this scale point (enumeration value).""" - return Node.wrap(scale_point_get_label(self.point)) + return self.label def get_value(self): """Get the value of this scale point (enumeration value).""" - return Node.wrap(scale_point_get_value(self.point)) + return self.value + class UI(Structure): """Plugin UI.""" - def __init__(self, ui): + + def __init__(self, world, ui): + assert isinstance(world, World) + assert type(ui) == POINTER(UI) + assert ui + self.world = world self.ui = ui def __str__(self): return str(self.get_uri()) def __eq__(self, other): - return self.get_uri() == _as_uri(other) + if type(other) == str or type(other) == Node: + return self.get_uri() == other + + return self.get_uri() == other.get_uri() def get_uri(self): """Get the URI of a Plugin UI.""" - return Node.wrap(node_duplicate(ui_get_uri(self.ui))) + return Node.wrap(self.world, c.node_duplicate(c.ui_get_uri(self.ui))) def get_classes(self): """Get the types (URIs of RDF classes) of a Plugin UI. @@ -689,19 +686,24 @@ Note that in most cases is_supported() should be used, which avoids the need to use this function (and type specific logic). """ - return Nodes(ui_get_classes(self.ui)) + return Nodes(self.world, c.ui_get_classes(self.ui), False) def is_a(self, class_uri): """Check whether a plugin UI has a given type.""" - return ui_is_a(self.ui, class_uri.node) + return c.ui_is_a(self.ui, class_uri.node) def get_bundle_uri(self): """Get the URI of the UI's bundle.""" - return Node.wrap(node_duplicate(ui_get_bundle_uri(self.ui))) + return Node.wrap( + self.world, c.node_duplicate(c.ui_get_bundle_uri(self.ui)) + ) def get_binary_uri(self): """Get the URI for the UI's shared library.""" - return Node.wrap(node_duplicate(ui_get_binary_uri(self.ui))) + return Node.wrap( + self.world, c.node_duplicate(c.ui_get_binary_uri(self.ui)) + ) + class Node(Structure): """Data node (URI, string, integer, etc.). 
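Since get_scale_points() now returns a plain Python list rather than a wrapped collection, enumerating a port's named values needs no iterator bookkeeping; labels and values are Node objects. Assuming `port` from the earlier sketches:

    for point in port.get_scale_points():
        # Each ScalePoint caches its label and value as Node objects.
        print(str(point.get_label()), "=", str(point.get_value()))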
@@ -716,89 +718,128 @@ >>> int(i) * 2 84 """ - @classmethod - def wrap(cls, node): - return Node(node) if node else None - def __init__(self, node): + @classmethod + def wrap(cls, world, node): + assert isinstance(world, World) + assert (node is None) or (type(node) == POINTER(Node)) + if node: + return Node(world, node) + + return None + + def __init__(self, world, node): + assert type(node) == POINTER(Node) + assert node + self.world = world self.node = node def __del__(self): - if hasattr(self, 'node'): - node_free(self.node) + # Note that since Python 3.4, cycles are deleted and the world can be + # destroyed before nodes (which contain a pointer to it). This causes + # a crash, so we only free here if the world is still alive. It does + # not seem possible to enforce the right order (it happens even if + # everything has a reference to the world), but this normally only + # happens on exit anyway so it shouldn't matter much. + if self.world.world: + c.node_free(self.node) def __eq__(self, other): + if other is None: + return False + otype = type(other) - if otype in [str, int, float]: - return otype(self) == other - return node_equals(self.node, other.node) + if otype == Node: + return c.node_equals(self.node, other.node) + + return otype(self) == other def __ne__(self, other): - return not node_equals(self.node, other.node) + return not c.node_equals(self.node, other.node) def __str__(self): - return node_as_string(self.node).decode('utf-8') + return c.node_as_string(self.node).decode("utf-8") def __int__(self): if not self.is_int(): - raise ValueError('node %s is not an integer' % str(self)) - return node_as_int(self.node) + raise ValueError("node %s is not an integer" % str(self)) + return c.node_as_int(self.node) def __float__(self): if not self.is_float(): - raise ValueError('node %s is not a float' % str(self)) - return node_as_float(self.node) + raise ValueError("node %s is not a float" % str(self)) + return c.node_as_float(self.node) def __bool__(self): if not self.is_bool(): - raise ValueError('node %s is not a bool' % str(self)) - return node_as_bool(self.node) + raise ValueError("node %s is not a bool" % str(self)) + return c.node_as_bool(self.node) + __nonzero__ = __bool__ def get_turtle_token(self): """Return this value as a Turtle/SPARQL token.""" - return node_get_turtle_token(self.node).decode('utf-8') + c_str = c.node_get_turtle_token(self.node) + string = cast(c_str, c_char_p).value.decode("utf-8") + c.free(c_str) + return string def is_uri(self): """Return whether the value is a URI (resource).""" - return node_is_uri(self.node) + return c.node_is_uri(self.node) def is_blank(self): """Return whether the value is a blank node (resource with no URI).""" - return node_is_blank(self.node) + return c.node_is_blank(self.node) def is_literal(self): """Return whether this value is a literal (i.e. not a URI).""" - return node_is_literal(self.node) + return c.node_is_literal(self.node) def is_string(self): """Return whether this value is a string literal. Returns true if value is a string value (and not numeric). """ - return node_is_string(self.node) + return c.node_is_string(self.node) def get_path(self, hostname=None): """Return the path of a file URI node. 
Returns None if value is not a file URI.""" - return node_get_path(self.node, hostname).decode('utf-8') + c_str = c.node_get_path(self.node, hostname) + string = cast(c_str, c_char_p).value.decode("utf-8") + if sys.platform != 'win32': # TODO: Memory comes from libserd + c.free(c_str) + return string def is_float(self): """Return whether this value is a decimal literal.""" - return node_is_float(self.node) + return c.node_is_float(self.node) def is_int(self): """Return whether this value is an integer literal.""" - return node_is_int(self.node) + return c.node_is_int(self.node) def is_bool(self): """Return whether this value is a boolean.""" - return node_is_bool(self.node) + return c.node_is_bool(self.node) + class Iter(Structure): """Collection iterator.""" - def __init__(self, collection, iterator, constructor, iter_get, iter_next, iter_is_end): + + def __init__( + self, + collection, + iterator, + constructor, + iter_get, + iter_next, + iter_is_end, + ): + assert isinstance(collection, Collection) + self.collection = collection self.iterator = iterator self.constructor = constructor @@ -808,25 +849,42 @@ def get(self): """Get the current item.""" - return self.constructor(self.iter_get(self.collection, self.iterator)) + return self.constructor( + self.collection.world, + self.iter_get(self.collection.collection, self.iterator), + ) def next(self): """Move to and return the next item.""" if self.is_end(): raise StopIteration elem = self.get() - self.iterator = self.iter_next(self.collection, self.iterator) + self.iterator = self.iter_next( + self.collection.collection, self.iterator + ) return elem def is_end(self): """Return true if the end of the collection has been reached.""" - return self.iter_is_end(self.collection, self.iterator) + return self.iter_is_end(self.collection.collection, self.iterator) __next__ = next + class Collection(Structure): # Base class for all lilv collection wrappers. 
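The Node accessors above map onto Python protocols (__int__, __float__, __str__), and file URIs round-trip to filesystem paths via get_path(). A short sketch; the path is illustrative and `world` is assumed from before:

    three = world.new_int(3)
    print(int(three) * 2)        # 6, via the __int__ protocol

    node = world.new_file_uri(None, "/tmp/example.lv2")
    if node.is_uri():
        print(node.get_path())   # back to "/tmp/example.lv2"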
- def __init__(self, collection, iter_begin, constructor, iter_get, iter_next, is_end): + def __init__( + self, + world, + collection, + iter_begin, + constructor, + iter_get, + iter_next, + is_end, + ): + assert isinstance(world, World) + self.world = world self.collection = collection self.constructor = constructor self.iter_begin = iter_begin @@ -835,17 +893,29 @@ self.is_end = is_end def __iter__(self): - return Iter(self.collection, self.iter_begin(self.collection), self.constructor, - self.iter_get, self.iter_next, self.is_end) + return Iter( + self, + self.iter_begin(self.collection), + self.constructor, + self.iter_get, + self.iter_next, + self.is_end, + ) def __getitem__(self, index): - if index >= len(self): - raise IndexError pos = 0 - for i in self: + it = self.iter_begin(self.collection) + + while not self.is_end(self.collection, it): if pos == index: - return i - pos += 1 + return self.constructor( + self.world, self.iter_get(self.collection, it) + ) + + it = self.iter_next(self.collection, it) + pos = pos + 1 + + raise IndexError(index) def begin(self): return self.__iter__() @@ -853,103 +923,189 @@ def get(self, iterator): return iterator.get() + class Plugins(Collection): """Collection of plugins.""" + def __init__(self, world, collection): - def constructor(plugin): - return Plugin(world, plugin) + assert type(collection) == POINTER(Plugins) + assert collection - super(Plugins, self).__init__(collection, plugins_begin, constructor, plugins_get, plugins_next, plugins_is_end) + def constructor(world, plugin): + return Plugin.wrap(world, plugin) + + super(Plugins, self).__init__( + world, + collection, + c.plugins_begin, + constructor, + c.plugins_get, + c.plugins_next, + c.plugins_is_end, + ) self.world = world def __contains__(self, key): return bool(self.get_by_uri(_as_uri(key))) def __len__(self): - return plugins_size(self.collection) + return c.plugins_size(self.collection) def __getitem__(self, key): if type(key) == int: return super(Plugins, self).__getitem__(key) - return self.get_by_uri(key) + + plugin = self.get_by_uri(key) + if plugin is None: + raise KeyError("Plugin not found: " + str(key)) + + return plugin def get_by_uri(self, uri): - plugin = plugins_get_by_uri(self.collection, uri.node) - return Plugin(self.world, plugin) if plugin else None + if type(uri) == str: + uri = self.world.new_uri(uri) + + return Plugin.wrap( + self.world, c.plugins_get_by_uri(self.collection, uri.node) + ) + class PluginClasses(Collection): """Collection of plugin classes.""" - def __init__(self, collection): + + def __init__(self, world, collection, owning=False): + assert type(collection) == POINTER(PluginClasses) + assert collection + + self.owning = owning super(PluginClasses, self).__init__( - collection, plugin_classes_begin, PluginClass, - plugin_classes_get, plugin_classes_next, plugin_classes_is_end) + world, + collection, + c.plugin_classes_begin, + PluginClass, + c.plugin_classes_get, + c.plugin_classes_next, + c.plugin_classes_is_end, + ) + + def __del__(self): + if self.owning: + c.plugin_classes_free(self.collection) def __contains__(self, key): return bool(self.get_by_uri(_as_uri(key))) def __len__(self): - return plugin_classes_size(self.collection) + return c.plugin_classes_size(self.collection) def __getitem__(self, key): if type(key) == int: return super(PluginClasses, self).__getitem__(key) - return self.get_by_uri(key) + + klass = self.get_by_uri(key) + if klass is None: + raise KeyError("Plugin class not found: " + str(key)) + + return klass def 
get_by_uri(self, uri): - plugin_class = plugin_classes_get_by_uri(self.collection, uri.node) - return PluginClass(plugin_class) if plugin_class else None + if type(uri) == str: + uri = self.world.new_uri(uri) + + plugin_class = c.plugin_classes_get_by_uri(self.collection, uri.node) + return PluginClass(self.world, plugin_class) if plugin_class else None -class ScalePoints(Collection): + +class ScalePoints(Structure): """Collection of scale points.""" - def __init__(self, collection): - super(ScalePoints, self).__init__( - collection, scale_points_begin, ScalePoint, - scale_points_get, scale_points_next, scale_points_is_end) - def __len__(self): - return scale_points_size(self.collection) + pass + class UIs(Collection): """Collection of plugin UIs.""" - def __init__(self, collection): - super(UIs, self).__init__(collection, uis_begin, UI, - uis_get, uis_next, uis_is_end) + + def __init__(self, world, collection): + assert type(collection) == POINTER(UIs) + assert collection + super(UIs, self).__init__( + world, + collection, + c.uis_begin, + UI, + c.uis_get, + c.uis_next, + c.uis_is_end, + ) + + def __del__(self): + if self.world.world: + c.uis_free(self.collection) def __contains__(self, uri): return bool(self.get_by_uri(_as_uri(uri))) def __len__(self): - return uis_size(self.collection) + return c.uis_size(self.collection) def __getitem__(self, key): if type(key) == int: return super(UIs, self).__getitem__(key) - return self.get_by_uri(key) + + ui = self.get_by_uri(key) + if ui is None: + raise KeyError("Plugin UI not found: " + str(key)) + + return ui def get_by_uri(self, uri): - ui = uis_get_by_uri(self.collection, uri.node) - return UI(ui) if ui else None + if type(uri) == str: + uri = self.world.new_uri(uri) + + ui = c.uis_get_by_uri(self.collection, uri.node) + return UI(self.world, ui) if ui else None + class Nodes(Collection): """Collection of data nodes.""" + @classmethod - def constructor(ignore, node): - return Node(node_duplicate(node)) + def constructor(cls, world, node): + assert isinstance(world, World) + assert type(node) == POINTER(Node) + return Node.wrap(world, c.node_duplicate(node)) + + def __init__(self, world, collection, owning=False): + assert type(collection) == POINTER(Nodes) + + self.owning = owning + super(Nodes, self).__init__( + world, + collection, + c.nodes_begin, + Nodes.constructor, + c.nodes_get, + c.nodes_next, + c.nodes_is_end, + ) - def __init__(self, collection): - super(Nodes, self).__init__(collection, nodes_begin, Nodes.constructor, - nodes_get, nodes_next, nodes_is_end) + def __del__(self): + if self.owning and self.world.world: + c.nodes_free(self.collection) def __contains__(self, value): - return nodes_contains(self.collection, value.node) + return c.nodes_contains(self.collection, value.node) def __len__(self): - return nodes_size(self.collection) + return c.nodes_size(self.collection) def merge(self, b): - return Nodes(nodes_merge(self.collection, b.collection)) + return Nodes( + self.world, c.nodes_merge(self.collection, b.collection), True + ) + -class Namespace(): +class Namespace: """Namespace prefix. 
Use attribute syntax to easily create URIs within this namespace, for @@ -960,8 +1116,12 @@ >>> print(ns.foo) http://example.org/foo """ + def __init__(self, world, prefix): - self.world = world + assert isinstance(world, World) + assert type(prefix) == str + + self.world = world self.prefix = prefix def __eq__(self, other): @@ -973,41 +1133,49 @@ def __getattr__(self, suffix): return self.world.new_uri(self.prefix + suffix) + +class Namespaces: + """Set of namespaces. + + Use to easily construct uris, like: ns.lv2.InputPort""" + + def __init__(self, world): + assert isinstance(world, World) + self.world = world + self.atom = Namespace(world, "http://lv2plug.in/ns/ext/atom#") + self.doap = Namespace(world, "http://usefulinc.com/ns/doap#") + self.foaf = Namespace(world, "http://xmlns.com/foaf/0.1/") + self.lilv = Namespace(world, "http://drobilla.net/ns/lilv#") + self.lv2 = Namespace(world, "http://lv2plug.in/ns/lv2core#") + self.midi = Namespace(world, "http://lv2plug.in/ns/ext/midi#") + self.owl = Namespace(world, "http://www.w3.org/2002/07/owl#") + self.rdf = Namespace( + world, "http://www.w3.org/1999/02/22-rdf-syntax-ns#" + ) + self.rdfs = Namespace(world, "http://www.w3.org/2000/01/rdf-schema#") + self.ui = Namespace(world, "http://lv2plug.in/ns/extensions/ui#") + self.xsd = Namespace(world, "http://www.w3.org/2001/XMLSchema#") + + class World(Structure): """Library context. - Includes a set of namespaces as the instance variable `ns`, so URIs can be constructed like:: + Includes a set of namespaces as the instance variable `ns`, so URIs can be + constructed like:: uri = world.ns.lv2.Plugin - :ivar ns: Common LV2 namespace prefixes: atom, doap, foaf, lilv, lv2, midi, owl, rdf, rdfs, ui, xsd. + Common LV2 namespace prefixes: atom, doap, foaf, lilv, lv2, midi, owl, rdf, + rdfs, ui, xsd. """ - def __init__(self): - world = self - - # Define Namespaces class locally so available prefixes are documented - class Namespaces(): - """Set of namespaces. - - Use to easily construct uris, like: ns.lv2.InputPort""" - - atom = Namespace(world, 'http://lv2plug.in/ns/ext/atom#') - doap = Namespace(world, 'http://usefulinc.com/ns/doap#') - foaf = Namespace(world, 'http://xmlns.com/foaf/0.1/') - lilv = Namespace(world, 'http://drobilla.net/ns/lilv#') - lv2 = Namespace(world, 'http://lv2plug.in/ns/lv2core#') - midi = Namespace(world, 'http://lv2plug.in/ns/ext/midi#') - owl = Namespace(world, 'http://www.w3.org/2002/07/owl#') - rdf = Namespace(world, 'http://www.w3.org/1999/02/22-rdf-syntax-ns#') - rdfs = Namespace(world, 'http://www.w3.org/2000/01/rdf-schema#') - ui = Namespace(world, 'http://lv2plug.in/ns/extensions/ui#') - xsd = Namespace(world, 'http://www.w3.org/2001/XMLSchema#') - self.world = _lib.lilv_world_new() - self.ns = Namespaces() + def __init__(self): + self.world = c.world_new() + self.ns = Namespaces(self) def __del__(self): - world_free(self.world) + c.world_free(self.world) + self.world = None def set_option(self, uri, value): """Set a world option. @@ -1016,21 +1184,22 @@ lilv.OPTION_FILTER_LANG lilv.OPTION_DYN_MANIFEST """ - return world_set_option(self, uri, value.node) + return c.world_set_option(self.world, uri, value.node) def load_all(self): """Load all installed LV2 bundles on the system. - This is the recommended way for hosts to load LV2 data. It implements the - established/standard best practice for discovering all LV2 data on the - system. The environment variable LV2_PATH may be used to control where - this function will look for bundles. 
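As the load_all() documentation notes, LV2_PATH controls where bundles are discovered. Since lilv reads the variable when load_all() runs, it can be set from Python beforehand; the search path below is purely illustrative:

    import os

    os.environ["LV2_PATH"] = os.path.expanduser("~/.lv2")  # hypothetical path
    world = lilv.World()
    world.load_all()    # discovery is now limited to bundles under ~/.lv2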
- - Hosts should use this function rather than explicitly load bundles, except - in special circumstances (e.g. development utilities, or hosts that ship - with special plugin bundles which are installed to a known location). + This is the recommended way for hosts to load LV2 data. It implements + the established/standard best practice for discovering all LV2 data on + the system. The environment variable LV2_PATH may be used to control + where this function will look for bundles. + + Hosts should use this function rather than explicitly load bundles, + except in special circumstances (e.g. development utilities, or hosts + that ship with special plugin bundles which are installed to a known + location). """ - world_load_all(self.world) + c.world_load_all(self.world) def load_bundle(self, bundle_uri): """Load a specific bundle. @@ -1045,7 +1214,7 @@ unchanged between (or even during) program invocations. Plugins (among other things) MUST be identified by URIs (not paths) in save files. """ - world_load_bundle(self.world, bundle_uri.node) + c.world_load_bundle(self.world, bundle_uri.node) def load_specifications(self): """Load all specifications from currently loaded bundles. @@ -1054,26 +1223,26 @@ necessary when using load_all(). This function parses the specifications and adds them to the model. """ - world_load_specifications(self.world) + c.world_load_specifications(self.world) def load_plugin_classes(self): """Load all plugin classes from currently loaded specifications. - Must be called after load_specifications(). This is for hosts - that explicitly load specific bundles, its use is not necessary when using + Must be called after load_specifications(). This is for hosts that + explicitly load specific bundles, its use is not necessary when using load_all(). """ - world_load_plugin_classes(self.world) + c.world_load_plugin_classes(self.world) def unload_bundle(self, bundle_uri): """Unload a specific bundle. - This unloads statements loaded by load_bundle(). Note that this - is not necessarily all information loaded from the bundle. If any resources + This unloads statements loaded by load_bundle(). Note that this is not + necessarily all information loaded from the bundle. If any resources have been separately loaded with load_resource(), they must be separately unloaded with unload_resource(). """ - return world_unload_bundle(self.world, bundle_uri.node) + return c.world_unload_bundle(self.world, bundle_uri.node) def load_resource(self, resource): """Load all the data associated with the given `resource`. @@ -1081,10 +1250,12 @@ The resource must be a subject (i.e. a URI or a blank node). Returns the number of files parsed, or -1 on error. - All accessible data files linked to `resource` with rdfs:seeAlso will be - loaded into the world model. + All accessible data files linked to `resource` with rdfs:seeAlso will + be loaded into the world model. """ - return world_load_resource(self.world, _as_uri(resource).node) + uri = _as_uri(resource) + ret = c.world_load_resource(self.world, uri.node) + return ret def unload_resource(self, resource): """Unload all the data associated with the given `resource`. @@ -1094,15 +1265,17 @@ This unloads all data loaded by a previous call to load_resource() with the given `resource`. 
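One common use of load_resource()/unload_resource() together with Plugin.get_related() is preset discovery. A sketch using the standard LV2 presets vocabulary; the preset class URI is written out because presets is not among the built-in namespace prefixes, and `world` and `plugin` are assumed from earlier:

    preset_class = world.new_uri("http://lv2plug.in/ns/ext/presets#Preset")
    for preset in plugin.get_related(preset_class):
        world.load_resource(preset)   # pull the preset's data into the model
        label = world.get(preset, world.ns.rdfs.label, None)
        print(str(label if label is not None else preset))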
""" - return world_unload_resource(self.world, _as_uri(resource).node) + uri = _as_uri(resource) + ret = c.world_unload_resource(self.world, uri.node) + return ret def get_plugin_class(self): """Get the parent of all other plugin classes, lv2:Plugin.""" - return PluginClass(world_get_plugin_class(self.world)) + return PluginClass(self, c.world_get_plugin_class(self.world)) def get_plugin_classes(self): """Return a list of all found plugin classes.""" - return PluginClasses(world_get_plugin_classes(self.world)) + return PluginClasses(self, c.world_get_plugin_classes(self.world)) def get_all_plugins(self): """Return a list of all found plugins. @@ -1116,18 +1289,24 @@ The returned list and the plugins it contains are owned by `world` and must not be freed by caller. """ - return Plugins(self, _lib.lilv_world_get_all_plugins(self.world)) + return Plugins(self, c.world_get_all_plugins(self.world)) def find_nodes(self, subject, predicate, obj): """Find nodes matching a triple pattern. - Either `subject` or `object` may be None (i.e. a wildcard), but not both. - Returns all matches for the wildcard field, or None. + Either `subject` or `object` may be None (i.e. a wildcard), but not + both. Returns all matches for the wildcard field, or None. """ - return Nodes(world_find_nodes(self.world, - subject.node if subject is not None else None, - predicate.node if predicate is not None else None, - obj.node if obj is not None else None)) + return Nodes( + self, + c.world_find_nodes( + self.world, + subject.node if subject is not None else None, + predicate.node if predicate is not None else None, + obj.node if obj is not None else None, + ), + True, + ) def get(self, subject, predicate, obj): """Find a single node that matches a pattern. @@ -1136,66 +1315,111 @@ Returns the first matching node, or None if no matches are found. """ - return Node.wrap(world_get(self.world, - subject.node if subject is not None else None, - predicate.node if predicate is not None else None, - obj.node if obj is not None else None)) + return Node.wrap( + self, + c.world_get( + self.world, + subject.node if subject is not None else None, + predicate.node if predicate is not None else None, + obj.node if obj is not None else None, + ), + ) def ask(self, subject, predicate, obj): """Return true iff a statement matching a certain pattern exists. - This is useful for checking if particular statement exists without having to - bother with collections and memory management. + This is useful for checking if particular statement exists without + having to bother with collections and memory management. """ - return world_ask(self.world, - subject.node if subject is not None else None, - predicate.node if predicate is not None else None, - obj.node if obj is not None else None) + return c.world_ask( + self.world, + subject.node if subject is not None else None, + predicate.node if predicate is not None else None, + obj.node if obj is not None else None, + ) + + def get_symbol(self, subject): + """Get an LV2 symbol for some subject. + + This will return the lv2:symbol property of the subject if it is given + explicitly, and otherwise will attempt to derive a symbol from the URI. + + Returns a string, which is possibly empty on error. 
+ """ + if isinstance(subject, Port): + return subject.get_symbol() + + uri = _as_uri(subject) + ret = "" + if uri is not None: + node = c.world_get_symbol(self.world, uri.node) + ret = c.node_as_string(node).decode("ascii") if node else "" + c.node_free(node) + + return ret def new_uri(self, uri): """Create a new URI node.""" - return Node.wrap(_lib.lilv_new_uri(self.world, uri)) + c_node = c.new_uri(self.world, uri) + if not c_node: + raise ValueError("Invalid URI '%s'" % uri) + + return Node.wrap(self, c_node) def new_file_uri(self, host, path): """Create a new file URI node. The host may be None.""" - return Node.wrap(_lib.lilv_new_file_uri(self.world, host, path)) + return Node.wrap(self, c.new_file_uri(self.world, host, path)) def new_string(self, string): """Create a new string node.""" - return Node.wrap(_lib.lilv_new_string(self.world, string)) + return Node.wrap(self, c.new_string(self.world, string)) def new_int(self, val): """Create a new int node.""" - return Node.wrap(_lib.lilv_new_int(self.world, val)) + return Node.wrap(self, c.new_int(self.world, val)) def new_float(self, val): """Create a new float node.""" - return Node.wrap(_lib.lilv_new_float(self.world, val)) + return Node.wrap(self, c.new_float(self.world, val)) def new_bool(self, val): """Create a new bool node.""" - return Node.wrap(_lib.lilv_new_bool(self.world, val)) + return Node.wrap(self, c.new_bool(self.world, val)) + class Instance(Structure): """Plugin instance.""" - __slots__ = [ 'lv2_descriptor', 'lv2_handle', 'pimpl', 'plugin', 'rate', 'instance' ] - _fields_ = [ - ('lv2_descriptor', POINTER(LV2_Descriptor)), - ('lv2_handle', LV2_Handle), - ('pimpl', POINTER(None)), + + __slots__ = [ + "lv2_descriptor", + "lv2_handle", + "pimpl", + "plugin", + "rate", + "instance", + ] + _fields_ = [ + ("lv2_descriptor", POINTER(LV2_Descriptor)), + ("lv2_handle", LV2_Handle), + ("pimpl", POINTER(None)), ] def __init__(self, plugin, rate, features=None): - self.plugin = plugin - self.rate = rate - self.instance = plugin_instantiate(plugin.plugin, rate, features) + assert isinstance(plugin, Plugin) + self.plugin = plugin + self.rate = rate + self.instance = c.plugin_instantiate(plugin.plugin, rate, features) + + def __del__(self): + if hasattr(self, "instance"): + c.instance_free(self.instance[0]) def get_uri(self): """Get the URI of the plugin which `instance` is an instance of. Returned string is shared and must not be modified or deleted. """ - return self.get_descriptor().URI + return self.get_descriptor().URI.decode("utf-8") def connect_port(self, port_index, data): """Connect a port to a data location. @@ -1204,25 +1428,26 @@ activation and deactivation does not destroy port connections. """ import numpy + if data is None: self.get_descriptor().connect_port( - self.get_handle(), - port_index, - data) + self.get_handle(), port_index, data + ) elif type(data) == numpy.ndarray: self.get_descriptor().connect_port( self.get_handle(), port_index, - data.ctypes.data_as(POINTER(c_float))) + data.ctypes.data_as(POINTER(c_float)), + ) else: raise Exception("Unsupported data type") def activate(self): """Activate a plugin instance. - This resets all state information in the plugin, except for port data - locations (as set by connect_port()). This MUST be called - before calling run(). + This resets all state information in the plugin, except for port data + locations (as set by connect_port()). This MUST be called before + calling run(). 
""" if self.get_descriptor().activate: self.get_descriptor().activate(self.get_handle()) @@ -1230,16 +1455,16 @@ def run(self, sample_count): """Run `instance` for `sample_count` frames. - If the hint lv2:hardRTCapable is set for this plugin, this function is - guaranteed not to block. + If the hint lv2:hardRTCapable is set for this plugin, this function is + guaranteed not to block. """ self.get_descriptor().run(self.get_handle(), sample_count) def deactivate(self): """Deactivate a plugin instance. - Note that to run the plugin after this you must activate it, which will - reset all state information (except port connections). + Note that to run the plugin after this you must activate it, which will + reset all state information (except port connections). """ if self.get_descriptor().deactivate: self.get_descriptor().deactivate(self.get_handle()) @@ -1247,529 +1472,372 @@ def get_extension_data(self, uri): """Get extension data from the plugin instance. - The type and semantics of the data returned is specific to the particular - extension, though in all cases it is shared and must not be deleted. + The type and semantics of the data returned is specific to the + particular extension, though in all cases it is shared and must not be + deleted. """ if self.get_descriptor().extension_data: - return self.get_descriptor().extension_data(str(uri)) + return self.get_descriptor().extension_data( + str(uri).encode("utf-8") + ) def get_descriptor(self): """Get the LV2_Descriptor of the plugin instance. - Normally hosts should not need to access the LV2_Descriptor directly, - use the lilv_instance_* functions. + Normally hosts should not need to access the LV2_Descriptor directly, + use the lilv_instance_* functions. """ return self.instance[0].lv2_descriptor[0] def get_handle(self): """Get the LV2_Handle of the plugin instance. - Normally hosts should not need to access the LV2_Handle directly, - use the lilv_instance_* functions. + Normally hosts should not need to access the LV2_Handle directly, use + the lilv_instance_* functions. 
""" return self.instance[0].lv2_handle + class State(Structure): """Plugin state (TODO).""" + pass + class VariadicFunction(object): # Wrapper for calling C variadic functions def __init__(self, function, restype, argtypes): - self.function = function + self.function = function self.function.restype = restype - self.argtypes = argtypes + self.argtypes = argtypes def __call__(self, *args): fixed_args = [] - i = 0 + i = 0 for argtype in self.argtypes: fixed_args.append(argtype.from_param(args[i])) i += 1 - return self.function(*fixed_args + list(args[i:])) - -# Set return and argument types for lilv C functions - -free.argtypes = [POINTER(None)] -free.restype = None - -# uri_to_path.argtypes = [String] -# uri_to_path.restype = c_char_p - -file_uri_parse.argtypes = [String, POINTER(POINTER(c_char))] -file_uri_parse.restype = c_char_p - -new_uri.argtypes = [POINTER(World), String] -new_uri.restype = POINTER(Node) - -new_file_uri.argtypes = [POINTER(World), c_char_p, String] -new_file_uri.restype = POINTER(Node) - -new_string.argtypes = [POINTER(World), String] -new_string.restype = POINTER(Node) - -new_int.argtypes = [POINTER(World), c_int] -new_int.restype = POINTER(Node) - -new_float.argtypes = [POINTER(World), c_float] -new_float.restype = POINTER(Node) - -new_bool.argtypes = [POINTER(World), c_bool] -new_bool.restype = POINTER(Node) - -node_free.argtypes = [POINTER(Node)] -node_free.restype = None - -node_duplicate.argtypes = [POINTER(Node)] -node_duplicate.restype = POINTER(Node) + return self.function(*(fixed_args + list(args[i:]))) -node_equals.argtypes = [POINTER(Node), POINTER(Node)] -node_equals.restype = c_bool -node_get_turtle_token.argtypes = [POINTER(Node)] -node_get_turtle_token.restype = c_char_p +# Set up C bindings -node_is_uri.argtypes = [POINTER(Node)] -node_is_uri.restype = c_bool -node_as_uri.argtypes = [POINTER(Node)] -node_as_uri.restype = c_char_p - -node_is_blank.argtypes = [POINTER(Node)] -node_is_blank.restype = c_bool - -node_as_blank.argtypes = [POINTER(Node)] -node_as_blank.restype = c_char_p - -node_is_literal.argtypes = [POINTER(Node)] -node_is_literal.restype = c_bool - -node_is_string.argtypes = [POINTER(Node)] -node_is_string.restype = c_bool - -node_as_string.argtypes = [POINTER(Node)] -node_as_string.restype = c_char_p - -node_get_path.argtypes = [POINTER(Node), POINTER(POINTER(c_char))] -node_get_path.restype = c_char_p - -node_is_float.argtypes = [POINTER(Node)] -node_is_float.restype = c_bool - -node_as_float.argtypes = [POINTER(Node)] -node_as_float.restype = c_float - -node_is_int.argtypes = [POINTER(Node)] -node_is_int.restype = c_bool - -node_as_int.argtypes = [POINTER(Node)] -node_as_int.restype = c_int - -node_is_bool.argtypes = [POINTER(Node)] -node_is_bool.restype = c_bool - -node_as_bool.argtypes = [POINTER(Node)] -node_as_bool.restype = c_bool - -plugin_classes_free.argtypes = [POINTER(PluginClasses)] -plugin_classes_free.restype = None - -plugin_classes_size.argtypes = [POINTER(PluginClasses)] -plugin_classes_size.restype = c_uint - -plugin_classes_begin.argtypes = [POINTER(PluginClasses)] -plugin_classes_begin.restype = POINTER(Iter) - -plugin_classes_get.argtypes = [POINTER(PluginClasses), POINTER(Iter)] -plugin_classes_get.restype = POINTER(PluginClass) - -plugin_classes_next.argtypes = [POINTER(PluginClasses), POINTER(Iter)] -plugin_classes_next.restype = POINTER(Iter) - -plugin_classes_is_end.argtypes = [POINTER(PluginClasses), POINTER(Iter)] -plugin_classes_is_end.restype = c_bool - -plugin_classes_get_by_uri.argtypes = 
[POINTER(PluginClasses), POINTER(Node)] -plugin_classes_get_by_uri.restype = POINTER(PluginClass) - -scale_points_free.argtypes = [POINTER(ScalePoints)] -scale_points_free.restype = None - -scale_points_size.argtypes = [POINTER(ScalePoints)] -scale_points_size.restype = c_uint - -scale_points_begin.argtypes = [POINTER(ScalePoints)] -scale_points_begin.restype = POINTER(Iter) - -scale_points_get.argtypes = [POINTER(ScalePoints), POINTER(Iter)] -scale_points_get.restype = POINTER(ScalePoint) - -scale_points_next.argtypes = [POINTER(ScalePoints), POINTER(Iter)] -scale_points_next.restype = POINTER(Iter) - -scale_points_is_end.argtypes = [POINTER(ScalePoints), POINTER(Iter)] -scale_points_is_end.restype = c_bool - -uis_free.argtypes = [POINTER(UIs)] -uis_free.restype = None - -uis_size.argtypes = [POINTER(UIs)] -uis_size.restype = c_uint - -uis_begin.argtypes = [POINTER(UIs)] -uis_begin.restype = POINTER(Iter) - -uis_get.argtypes = [POINTER(UIs), POINTER(Iter)] -uis_get.restype = POINTER(UI) - -uis_next.argtypes = [POINTER(UIs), POINTER(Iter)] -uis_next.restype = POINTER(Iter) - -uis_is_end.argtypes = [POINTER(UIs), POINTER(Iter)] -uis_is_end.restype = c_bool - -uis_get_by_uri.argtypes = [POINTER(UIs), POINTER(Node)] -uis_get_by_uri.restype = POINTER(UI) - -nodes_free.argtypes = [POINTER(Nodes)] -nodes_free.restype = None - -nodes_size.argtypes = [POINTER(Nodes)] -nodes_size.restype = c_uint - -nodes_begin.argtypes = [POINTER(Nodes)] -nodes_begin.restype = POINTER(Iter) - -nodes_get.argtypes = [POINTER(Nodes), POINTER(Iter)] -nodes_get.restype = POINTER(Node) - -nodes_next.argtypes = [POINTER(Nodes), POINTER(Iter)] -nodes_next.restype = POINTER(Iter) - -nodes_is_end.argtypes = [POINTER(Nodes), POINTER(Iter)] -nodes_is_end.restype = c_bool - -nodes_get_first.argtypes = [POINTER(Nodes)] -nodes_get_first.restype = POINTER(Node) - -nodes_contains.argtypes = [POINTER(Nodes), POINTER(Node)] -nodes_contains.restype = c_bool - -nodes_merge.argtypes = [POINTER(Nodes), POINTER(Nodes)] -nodes_merge.restype = POINTER(Nodes) - -plugins_size.argtypes = [POINTER(Plugins)] -plugins_size.restype = c_uint - -plugins_begin.argtypes = [POINTER(Plugins)] -plugins_begin.restype = POINTER(Iter) - -plugins_get.argtypes = [POINTER(Plugins), POINTER(Iter)] -plugins_get.restype = POINTER(Plugin) - -plugins_next.argtypes = [POINTER(Plugins), POINTER(Iter)] -plugins_next.restype = POINTER(Iter) - -plugins_is_end.argtypes = [POINTER(Plugins), POINTER(Iter)] -plugins_is_end.restype = c_bool - -plugins_get_by_uri.argtypes = [POINTER(Plugins), POINTER(Node)] -plugins_get_by_uri.restype = POINTER(Plugin) - -world_new.argtypes = [] -world_new.restype = POINTER(World) - -world_set_option.argtypes = [POINTER(World), String, POINTER(Node)] -world_set_option.restype = None - -world_free.argtypes = [POINTER(World)] -world_free.restype = None - -world_load_all.argtypes = [POINTER(World)] -world_load_all.restype = None - -world_load_bundle.argtypes = [POINTER(World), POINTER(Node)] -world_load_bundle.restype = None - -world_load_specifications.argtypes = [POINTER(World)] -world_load_specifications.restype = None - -world_load_plugin_classes.argtypes = [POINTER(World)] -world_load_plugin_classes.restype = None - -world_unload_bundle.argtypes = [POINTER(World), POINTER(Node)] -world_unload_bundle.restype = c_int - -world_load_resource.argtypes = [POINTER(World), POINTER(Node)] -world_load_resource.restype = c_int - -world_unload_resource.argtypes = [POINTER(World), POINTER(Node)] -world_unload_resource.restype = c_int - 
-world_get_plugin_class.argtypes = [POINTER(World)] -world_get_plugin_class.restype = POINTER(PluginClass) - -world_get_plugin_classes.argtypes = [POINTER(World)] -world_get_plugin_classes.restype = POINTER(PluginClasses) - -world_get_all_plugins.argtypes = [POINTER(World)] -world_get_all_plugins.restype = POINTER(Plugins) - -world_find_nodes.argtypes = [POINTER(World), POINTER(Node), POINTER(Node), POINTER(Node)] -world_find_nodes.restype = POINTER(Nodes) - -world_get.argtypes = [POINTER(World), POINTER(Node), POINTER(Node), POINTER(Node)] -world_get.restype = POINTER(Node) - -world_ask.argtypes = [POINTER(World), POINTER(Node), POINTER(Node), POINTER(Node)] -world_ask.restype = c_bool - -plugin_verify.argtypes = [POINTER(Plugin)] -plugin_verify.restype = c_bool - -plugin_get_uri.argtypes = [POINTER(Plugin)] -plugin_get_uri.restype = POINTER(Node) - -plugin_get_bundle_uri.argtypes = [POINTER(Plugin)] -plugin_get_bundle_uri.restype = POINTER(Node) - -plugin_get_data_uris.argtypes = [POINTER(Plugin)] -plugin_get_data_uris.restype = POINTER(Nodes) - -plugin_get_library_uri.argtypes = [POINTER(Plugin)] -plugin_get_library_uri.restype = POINTER(Node) - -plugin_get_name.argtypes = [POINTER(Plugin)] -plugin_get_name.restype = POINTER(Node) - -plugin_get_class.argtypes = [POINTER(Plugin)] -plugin_get_class.restype = POINTER(PluginClass) - -plugin_get_value.argtypes = [POINTER(Plugin), POINTER(Node)] -plugin_get_value.restype = POINTER(Nodes) - -plugin_has_feature.argtypes = [POINTER(Plugin), POINTER(Node)] -plugin_has_feature.restype = c_bool - -plugin_get_supported_features.argtypes = [POINTER(Plugin)] -plugin_get_supported_features.restype = POINTER(Nodes) - -plugin_get_required_features.argtypes = [POINTER(Plugin)] -plugin_get_required_features.restype = POINTER(Nodes) - -plugin_get_optional_features.argtypes = [POINTER(Plugin)] -plugin_get_optional_features.restype = POINTER(Nodes) - -plugin_has_extension_data.argtypes = [POINTER(Plugin), POINTER(Node)] -plugin_has_extension_data.restype = c_bool - -plugin_get_extension_data.argtypes = [POINTER(Plugin)] -plugin_get_extension_data.restype = POINTER(Nodes) - -plugin_get_num_ports.argtypes = [POINTER(Plugin)] -plugin_get_num_ports.restype = c_uint32 - -plugin_get_port_ranges_float.argtypes = [POINTER(Plugin), POINTER(c_float), POINTER(c_float), POINTER(c_float)] -plugin_get_port_ranges_float.restype = None - -plugin_get_num_ports_of_class = VariadicFunction(_lib.lilv_plugin_get_num_ports_of_class, - c_uint32, - [POINTER(Plugin), POINTER(Node)]) - -plugin_has_latency.argtypes = [POINTER(Plugin)] -plugin_has_latency.restype = c_bool - -plugin_get_latency_port_index.argtypes = [POINTER(Plugin)] -plugin_get_latency_port_index.restype = c_uint32 - -plugin_get_port_by_index.argtypes = [POINTER(Plugin), c_uint32] -plugin_get_port_by_index.restype = POINTER(Port) - -plugin_get_port_by_symbol.argtypes = [POINTER(Plugin), POINTER(Node)] -plugin_get_port_by_symbol.restype = POINTER(Port) - -plugin_get_port_by_designation.argtypes = [POINTER(Plugin), POINTER(Node), POINTER(Node)] -plugin_get_port_by_designation.restype = POINTER(Port) - -plugin_get_project.argtypes = [POINTER(Plugin)] -plugin_get_project.restype = POINTER(Node) - -plugin_get_author_name.argtypes = [POINTER(Plugin)] -plugin_get_author_name.restype = POINTER(Node) - -plugin_get_author_email.argtypes = [POINTER(Plugin)] -plugin_get_author_email.restype = POINTER(Node) - -plugin_get_author_homepage.argtypes = [POINTER(Plugin)] -plugin_get_author_homepage.restype = POINTER(Node) - 
-plugin_is_replaced.argtypes = [POINTER(Plugin)] -plugin_is_replaced.restype = c_bool - -plugin_get_related.argtypes = [POINTER(Plugin), POINTER(Node)] -plugin_get_related.restype = POINTER(Nodes) - -port_get_node.argtypes = [POINTER(Plugin), POINTER(Port)] -port_get_node.restype = POINTER(Node) - -port_get_value.argtypes = [POINTER(Plugin), POINTER(Port), POINTER(Node)] -port_get_value.restype = POINTER(Nodes) - -port_get.argtypes = [POINTER(Plugin), POINTER(Port), POINTER(Node)] -port_get.restype = POINTER(Node) - -port_get_properties.argtypes = [POINTER(Plugin), POINTER(Port)] -port_get_properties.restype = POINTER(Nodes) - -port_has_property.argtypes = [POINTER(Plugin), POINTER(Port), POINTER(Node)] -port_has_property.restype = c_bool - -port_supports_event.argtypes = [POINTER(Plugin), POINTER(Port), POINTER(Node)] -port_supports_event.restype = c_bool - -port_get_index.argtypes = [POINTER(Plugin), POINTER(Port)] -port_get_index.restype = c_uint32 - -port_get_symbol.argtypes = [POINTER(Plugin), POINTER(Port)] -port_get_symbol.restype = POINTER(Node) - -port_get_name.argtypes = [POINTER(Plugin), POINTER(Port)] -port_get_name.restype = POINTER(Node) - -port_get_classes.argtypes = [POINTER(Plugin), POINTER(Port)] -port_get_classes.restype = POINTER(Nodes) - -port_is_a.argtypes = [POINTER(Plugin), POINTER(Port), POINTER(Node)] -port_is_a.restype = c_bool - -port_get_range.argtypes = [POINTER(Plugin), POINTER(Port), POINTER(POINTER(Node)), POINTER(POINTER(Node)), POINTER(POINTER(Node))] -port_get_range.restype = None - -port_get_scale_points.argtypes = [POINTER(Plugin), POINTER(Port)] -port_get_scale_points.restype = POINTER(ScalePoints) - -state_new_from_world.argtypes = [POINTER(World), POINTER(LV2_URID_Map), POINTER(Node)] -state_new_from_world.restype = POINTER(State) - -state_new_from_file.argtypes = [POINTER(World), POINTER(LV2_URID_Map), POINTER(Node), String] -state_new_from_file.restype = POINTER(State) - -state_new_from_string.argtypes = [POINTER(World), POINTER(LV2_URID_Map), String] -state_new_from_string.restype = POINTER(State) - -LilvGetPortValueFunc = CFUNCTYPE(c_void_p, c_char_p, POINTER(None), POINTER(c_uint32), POINTER(c_uint32)) - -state_new_from_instance.argtypes = [POINTER(Plugin), POINTER(Instance), POINTER(LV2_URID_Map), c_char_p, c_char_p, c_char_p, String, LilvGetPortValueFunc, POINTER(None), c_uint32, POINTER(POINTER(LV2_Feature))] -state_new_from_instance.restype = POINTER(State) - -state_free.argtypes = [POINTER(State)] -state_free.restype = None - -state_equals.argtypes = [POINTER(State), POINTER(State)] -state_equals.restype = c_bool - -state_get_num_properties.argtypes = [POINTER(State)] -state_get_num_properties.restype = c_uint - -state_get_plugin_uri.argtypes = [POINTER(State)] -state_get_plugin_uri.restype = POINTER(Node) - -state_get_uri.argtypes = [POINTER(State)] -state_get_uri.restype = POINTER(Node) - -state_get_label.argtypes = [POINTER(State)] -state_get_label.restype = c_char_p - -state_set_label.argtypes = [POINTER(State), String] -state_set_label.restype = None - -state_set_metadata.argtypes = [POINTER(State), c_uint32, POINTER(None), c_size_t, c_uint32, c_uint32] -state_set_metadata.restype = c_int - -LilvSetPortValueFunc = CFUNCTYPE(None, c_char_p, POINTER(None), POINTER(None), c_uint32, c_uint32) -state_emit_port_values.argtypes = [POINTER(State), LilvSetPortValueFunc, POINTER(None)] -state_emit_port_values.restype = None - -state_restore.argtypes = [POINTER(State), POINTER(Instance), LilvSetPortValueFunc, POINTER(None), c_uint32, 
POINTER(POINTER(LV2_Feature))] -state_restore.restype = None - -state_save.argtypes = [POINTER(World), POINTER(LV2_URID_Map), POINTER(LV2_URID_Unmap), POINTER(State), c_char_p, c_char_p, String] -state_save.restype = c_int - -state_to_string.argtypes = [POINTER(World), POINTER(LV2_URID_Map), POINTER(LV2_URID_Unmap), POINTER(State), c_char_p, String] -state_to_string.restype = c_char_p - -state_delete.argtypes = [POINTER(World), POINTER(State)] -state_delete.restype = c_int - -scale_point_get_label.argtypes = [POINTER(ScalePoint)] -scale_point_get_label.restype = POINTER(Node) - -scale_point_get_value.argtypes = [POINTER(ScalePoint)] -scale_point_get_value.restype = POINTER(Node) - -plugin_class_get_parent_uri.argtypes = [POINTER(PluginClass)] -plugin_class_get_parent_uri.restype = POINTER(Node) - -plugin_class_get_uri.argtypes = [POINTER(PluginClass)] -plugin_class_get_uri.restype = POINTER(Node) - -plugin_class_get_label.argtypes = [POINTER(PluginClass)] -plugin_class_get_label.restype = POINTER(Node) - -plugin_class_get_children.argtypes = [POINTER(PluginClass)] -plugin_class_get_children.restype = POINTER(PluginClasses) - -plugin_instantiate.argtypes = [POINTER(Plugin), c_double, POINTER(POINTER(LV2_Feature))] -plugin_instantiate.restype = POINTER(Instance) - -instance_free.argtypes = [POINTER(Instance)] -instance_free.restype = None - -plugin_get_uis.argtypes = [POINTER(Plugin)] -plugin_get_uis.restype = POINTER(UIs) +class String(str): + # Wrapper for string parameters to pass as raw C UTF-8 strings + def from_param(cls, obj): + assert isinstance(obj, str) + return obj.encode("utf-8") -ui_get_uri.argtypes = [POINTER(UI)] -ui_get_uri.restype = POINTER(Node) + from_param = classmethod(from_param) -ui_get_classes.argtypes = [POINTER(UI)] -ui_get_classes.restype = POINTER(Nodes) -ui_is_a.argtypes = [POINTER(UI), POINTER(Node)] -ui_is_a.restype = c_bool +def _cfunc(name, restype, *argtypes): + """Set the `name` attribute of the `c` global to a C function""" + assert isinstance(c, _LilvLib) + f = getattr(c.lib, "lilv_" + name) + f.restype = restype + f.argtypes = argtypes + setattr(c, name, f) + + +def P(x): + """Shorthand for ctypes.POINTER""" + return POINTER(x) + + +_cfunc("free", None, c_void_p) + +# Node + +_cfunc("file_uri_parse", c_char_p, String, P(c_char_p)) +_cfunc("new_uri", P(Node), P(World), String) +_cfunc("new_file_uri", P(Node), P(World), c_char_p, String) +_cfunc("new_string", P(Node), P(World), String) +_cfunc("new_int", P(Node), P(World), c_int) +_cfunc("new_float", P(Node), P(World), c_float) +_cfunc("new_bool", P(Node), P(World), c_bool) +_cfunc("node_free", None, P(Node)) +_cfunc("node_duplicate", P(Node), P(Node)) +_cfunc("node_equals", c_bool, P(Node), P(Node)) +_cfunc("node_get_turtle_token", P(c_char), P(Node)) +_cfunc("node_is_uri", c_bool, P(Node)) +_cfunc("node_as_uri", c_char_p, P(Node)) +_cfunc("node_is_blank", c_bool, P(Node)) +_cfunc("node_as_blank", c_char_p, P(Node)) +_cfunc("node_is_literal", c_bool, P(Node)) +_cfunc("node_is_string", c_bool, P(Node)) +_cfunc("node_as_string", c_char_p, P(Node)) +_cfunc("node_get_path", P(c_char), P(Node), P(P(c_char))) +_cfunc("node_is_float", c_bool, P(Node)) +_cfunc("node_as_float", c_float, P(Node)) +_cfunc("node_is_int", c_bool, P(Node)) +_cfunc("node_as_int", c_int, P(Node)) +_cfunc("node_is_bool", c_bool, P(Node)) +_cfunc("node_as_bool", c_bool, P(Node)) + +# Collections + +_cfunc("plugin_classes_free", None, P(PluginClasses)) +_cfunc("plugin_classes_size", c_uint, P(PluginClasses)) +_cfunc("plugin_classes_begin", 
P(Iter), P(PluginClasses)) +_cfunc("plugin_classes_get", P(PluginClass), P(PluginClasses), P(Iter)) +_cfunc("plugin_classes_next", P(Iter), P(PluginClasses), P(Iter)) +_cfunc("plugin_classes_is_end", c_bool, P(PluginClasses), P(Iter)) +_cfunc("plugin_classes_get_by_uri", P(PluginClass), P(PluginClasses), P(Node)) +_cfunc("scale_points_free", None, P(ScalePoints)) +_cfunc("scale_points_size", c_uint, P(ScalePoints)) +_cfunc("scale_points_begin", P(Iter), P(ScalePoints)) +_cfunc("scale_points_get", P(ScalePoint), P(ScalePoints), P(Iter)) +_cfunc("scale_points_next", P(Iter), P(ScalePoints), P(Iter)) +_cfunc("scale_points_is_end", c_bool, P(ScalePoints), P(Iter)) +_cfunc("uis_free", None, P(UIs)) +_cfunc("uis_size", c_uint, P(UIs)) +_cfunc("uis_begin", P(Iter), P(UIs)) +_cfunc("uis_get", P(UI), P(UIs), P(Iter)) +_cfunc("uis_next", P(Iter), P(UIs), P(Iter)) +_cfunc("uis_is_end", c_bool, P(UIs), P(Iter)) +_cfunc("uis_get_by_uri", P(UI), P(UIs), P(Node)) +_cfunc("nodes_free", None, P(Nodes)) +_cfunc("nodes_size", c_uint, P(Nodes)) +_cfunc("nodes_begin", P(Iter), P(Nodes)) +_cfunc("nodes_get", P(Node), P(Nodes), P(Iter)) +_cfunc("nodes_next", P(Iter), P(Nodes), P(Iter)) +_cfunc("nodes_is_end", c_bool, P(Nodes), P(Iter)) +_cfunc("nodes_get_first", P(Node), P(Nodes)) +_cfunc("nodes_contains", c_bool, P(Nodes), P(Node)) +_cfunc("nodes_merge", P(Nodes), P(Nodes), P(Nodes)) +_cfunc("plugins_size", c_uint, P(Plugins)) +_cfunc("plugins_begin", P(Iter), P(Plugins)) +_cfunc("plugins_get", P(Plugin), P(Plugins), P(Iter)) +_cfunc("plugins_next", P(Iter), P(Plugins), P(Iter)) +_cfunc("plugins_is_end", c_bool, P(Plugins), P(Iter)) +_cfunc("plugins_get_by_uri", P(Plugin), P(Plugins), P(Node)) + +# World + +_cfunc("world_new", P(World)) +_cfunc("world_set_option", None, P(World), String, P(Node)) +_cfunc("world_free", None, P(World)) +_cfunc("world_load_all", None, P(World)) +_cfunc("world_load_bundle", None, P(World), P(Node)) +_cfunc("world_load_specifications", None, P(World)) +_cfunc("world_load_plugin_classes", None, P(World)) +_cfunc("world_unload_bundle", c_int, P(World), P(Node)) +_cfunc("world_load_resource", c_int, P(World), P(Node)) +_cfunc("world_unload_resource", c_int, P(World), P(Node)) +_cfunc("world_get_plugin_class", P(PluginClass), P(World)) +_cfunc("world_get_plugin_classes", P(PluginClasses), P(World)) +_cfunc("world_get_all_plugins", P(Plugins), P(World)) +_cfunc("world_find_nodes", P(Nodes), P(World), P(Node), P(Node), P(Node)) +_cfunc("world_get", P(Node), P(World), P(Node), P(Node), P(Node)) +_cfunc("world_ask", c_bool, P(World), P(Node), P(Node), P(Node)) +_cfunc("world_get_symbol", P(Node), P(World), P(Node)) + +# Plugin + +_cfunc("plugin_verify", c_bool, P(Plugin)) +_cfunc("plugin_get_uri", P(Node), P(Plugin)) +_cfunc("plugin_get_bundle_uri", P(Node), P(Plugin)) +_cfunc("plugin_get_data_uris", P(Nodes), P(Plugin)) +_cfunc("plugin_get_library_uri", P(Node), P(Plugin)) +_cfunc("plugin_get_name", P(Node), P(Plugin)) +_cfunc("plugin_get_class", P(PluginClass), P(Plugin)) +_cfunc("plugin_get_value", P(Nodes), P(Plugin), P(Node)) +_cfunc("plugin_has_feature", c_bool, P(Plugin), P(Node)) +_cfunc("plugin_get_supported_features", P(Nodes), P(Plugin)) +_cfunc("plugin_get_required_features", P(Nodes), P(Plugin)) +_cfunc("plugin_get_optional_features", P(Nodes), P(Plugin)) +_cfunc("plugin_has_extension_data", c_bool, P(Plugin), P(Node)) +_cfunc("plugin_get_extension_data", P(Nodes), P(Plugin)) +_cfunc("plugin_get_num_ports", c_uint32, P(Plugin)) + +c.plugin_get_num_ports_of_class = 
VariadicFunction( + c.lib.lilv_plugin_get_num_ports_of_class, c_uint32, [P(Plugin), P(Node)] +) + +_cfunc("plugin_has_latency", c_bool, P(Plugin)) +_cfunc("plugin_get_latency_port_index", c_uint32, P(Plugin)) +_cfunc("plugin_get_port_by_index", P(Port), P(Plugin), c_uint32) +_cfunc("plugin_get_port_by_symbol", P(Port), P(Plugin), P(Node)) +_cfunc("plugin_get_port_by_designation", P(Port), P(Plugin), P(Node), P(Node)) +_cfunc("plugin_get_project", P(Node), P(Plugin)) +_cfunc("plugin_get_author_name", P(Node), P(Plugin)) +_cfunc("plugin_get_author_email", P(Node), P(Plugin)) +_cfunc("plugin_get_author_homepage", P(Node), P(Plugin)) +_cfunc("plugin_is_replaced", c_bool, P(Plugin)) +_cfunc("plugin_get_related", P(Nodes), P(Plugin), P(Node)) + +# Port + +_cfunc("port_get_node", P(Node), P(Plugin), P(Port)) +_cfunc("port_get_value", P(Nodes), P(Plugin), P(Port), P(Node)) +_cfunc("port_get", P(Node), P(Plugin), P(Port), P(Node)) +_cfunc("port_get_properties", P(Nodes), P(Plugin), P(Port)) +_cfunc("port_has_property", c_bool, P(Plugin), P(Port), P(Node)) +_cfunc("port_supports_event", c_bool, P(Plugin), P(Port), P(Node)) +_cfunc("port_get_index", c_uint32, P(Plugin), P(Port)) +_cfunc("port_get_symbol", P(Node), P(Plugin), P(Port)) +_cfunc("port_get_name", P(Node), P(Plugin), P(Port)) +_cfunc("port_get_classes", P(Nodes), P(Plugin), P(Port)) +_cfunc("port_is_a", c_bool, P(Plugin), P(Port), P(Node)) + +_cfunc( + "port_get_range", + None, + P(Plugin), + P(Port), + P(P(Node)), + P(P(Node)), + P(P(Node)), +) + +_cfunc("port_get_scale_points", P(ScalePoints), P(Plugin), P(Port)) + +# Plugin State + +_cfunc("state_new_from_world", P(State), P(World), P(LV2_URID_Map), P(Node)) + +_cfunc( + "state_new_from_file", P(State), P(World), P(LV2_URID_Map), P(Node), String +) + +_cfunc("state_new_from_string", P(State), P(World), P(LV2_URID_Map), String) + +LilvGetPortValueFunc = CFUNCTYPE( + c_void_p, c_char_p, P(None), P(c_uint32), P(c_uint32) +) + +_cfunc( + "state_new_from_instance", + P(State), + P(Plugin), + P(Instance), + P(LV2_URID_Map), + c_char_p, + c_char_p, + c_char_p, + String, + LilvGetPortValueFunc, + P(None), + c_uint32, + P(P(LV2_Feature)), +) + +_cfunc("state_free", None, P(State)) +_cfunc("state_equals", c_bool, P(State), P(State)) +_cfunc("state_get_num_properties", c_uint, P(State)) +_cfunc("state_get_plugin_uri", P(Node), P(State)) +_cfunc("state_get_uri", P(Node), P(State)) +_cfunc("state_get_label", c_char_p, P(State)) +_cfunc("state_set_label", None, P(State), String) + +_cfunc( + "state_set_metadata", + c_int, + P(State), + c_uint32, + P(None), + c_size_t, + c_uint32, + c_uint32, +) + +LilvSetPortValueFunc = CFUNCTYPE( + None, c_char_p, P(None), P(None), c_uint32, c_uint32 +) +_cfunc("state_emit_port_values", None, P(State), LilvSetPortValueFunc, P(None)) + +_cfunc( + "state_restore", + None, + P(State), + P(Instance), + LilvSetPortValueFunc, + P(None), + c_uint32, + P(P(LV2_Feature)), +) + +_cfunc( + "state_save", + c_int, + P(World), + P(LV2_URID_Map), + P(LV2_URID_Unmap), + P(State), + c_char_p, + c_char_p, + String, +) + +_cfunc( + "state_to_string", + c_char_p, + P(World), + P(LV2_URID_Map), + P(LV2_URID_Unmap), + P(State), + c_char_p, + String, +) + +_cfunc("state_delete", c_int, P(World), P(State)) + +# Scale Point + +_cfunc("scale_point_get_label", P(Node), P(ScalePoint)) +_cfunc("scale_point_get_value", P(Node), P(ScalePoint)) + +# Plugin Class + +_cfunc("plugin_class_get_parent_uri", P(Node), P(PluginClass)) +_cfunc("plugin_class_get_uri", P(Node), P(PluginClass)) 
+_cfunc("plugin_class_get_label", P(Node), P(PluginClass)) +_cfunc("plugin_class_get_children", P(PluginClasses), P(PluginClass)) + +# Plugin Instance + +_cfunc( + "plugin_instantiate", P(Instance), P(Plugin), c_double, P(P(LV2_Feature)) +) + +_cfunc("instance_free", None, P(Instance)) +_cfunc("plugin_get_uis", P(UIs), P(Plugin)) + +# Plugin UI + +_cfunc("ui_get_uri", P(Node), P(UI)) +_cfunc("ui_get_classes", P(Nodes), P(UI)) +_cfunc("ui_is_a", c_bool, P(UI), P(Node)) LilvUISupportedFunc = CFUNCTYPE(c_uint, c_char_p, c_char_p) -ui_is_supported.argtypes = [POINTER(UI), LilvUISupportedFunc, POINTER(Node), POINTER(POINTER(Node))] -ui_is_supported.restype = c_uint - -ui_get_bundle_uri.argtypes = [POINTER(UI)] -ui_get_bundle_uri.restype = POINTER(Node) - -ui_get_binary_uri.argtypes = [POINTER(UI)] -ui_get_binary_uri.restype = POINTER(Node) +_cfunc( + "ui_is_supported", c_uint, P(UI), LilvUISupportedFunc, P(Node), P(P(Node)) +) -OPTION_FILTER_LANG = 'http://drobilla.net/ns/lilv#filter-lang' -OPTION_DYN_MANIFEST = 'http://drobilla.net/ns/lilv#dyn-manifest' +_cfunc("ui_get_bundle_uri", P(Node), P(UI)) +_cfunc("ui_get_binary_uri", P(Node), P(UI)) # Define URI constants for compatibility with old Python bindings -LILV_NS_DOAP = 'http://usefulinc.com/ns/doap#' -LILV_NS_FOAF = 'http://xmlns.com/foaf/0.1/' -LILV_NS_LILV = 'http://drobilla.net/ns/lilv#' -LILV_NS_LV2 = 'http://lv2plug.in/ns/lv2core#' -LILV_NS_OWL = 'http://www.w3.org/2002/07/owl#' -LILV_NS_RDF = 'http://www.w3.org/1999/02/22-rdf-syntax-ns#' -LILV_NS_RDFS = 'http://www.w3.org/2000/01/rdf-schema#' -LILV_NS_XSD = 'http://www.w3.org/2001/XMLSchema#' -LILV_URI_ATOM_PORT = 'http://lv2plug.in/ns/ext/atom#AtomPort' -LILV_URI_AUDIO_PORT = 'http://lv2plug.in/ns/lv2core#AudioPort' -LILV_URI_CONTROL_PORT = 'http://lv2plug.in/ns/lv2core#ControlPort' -LILV_URI_CV_PORT = 'http://lv2plug.in/ns/lv2core#CVPort' -LILV_URI_EVENT_PORT = 'http://lv2plug.in/ns/ext/event#EventPort' -LILV_URI_INPUT_PORT = 'http://lv2plug.in/ns/lv2core#InputPort' -LILV_URI_MIDI_EVENT = 'http://lv2plug.in/ns/ext/midi#MidiEvent' -LILV_URI_OUTPUT_PORT = 'http://lv2plug.in/ns/lv2core#OutputPort' -LILV_URI_PORT = 'http://lv2plug.in/ns/lv2core#Port' -LILV_OPTION_FILTER_LANG = 'http://drobilla.net/ns/lilv#filter-lang' -LILV_OPTION_DYN_MANIFEST = 'http://drobilla.net/ns/lilv#dyn-manifest' +LILV_NS_DOAP = "http://usefulinc.com/ns/doap#" +LILV_NS_FOAF = "http://xmlns.com/foaf/0.1/" +LILV_NS_LILV = "http://drobilla.net/ns/lilv#" +LILV_NS_LV2 = "http://lv2plug.in/ns/lv2core#" +LILV_NS_OWL = "http://www.w3.org/2002/07/owl#" +LILV_NS_RDF = "http://www.w3.org/1999/02/22-rdf-syntax-ns#" +LILV_NS_RDFS = "http://www.w3.org/2000/01/rdf-schema#" +LILV_NS_XSD = "http://www.w3.org/2001/XMLSchema#" +LILV_URI_ATOM_PORT = "http://lv2plug.in/ns/ext/atom#AtomPort" +LILV_URI_AUDIO_PORT = "http://lv2plug.in/ns/lv2core#AudioPort" +LILV_URI_CONTROL_PORT = "http://lv2plug.in/ns/lv2core#ControlPort" +LILV_URI_CV_PORT = "http://lv2plug.in/ns/lv2core#CVPort" +LILV_URI_EVENT_PORT = "http://lv2plug.in/ns/ext/event#EventPort" +LILV_URI_INPUT_PORT = "http://lv2plug.in/ns/lv2core#InputPort" +LILV_URI_MIDI_EVENT = "http://lv2plug.in/ns/ext/midi#MidiEvent" +LILV_URI_OUTPUT_PORT = "http://lv2plug.in/ns/lv2core#OutputPort" +LILV_URI_PORT = "http://lv2plug.in/ns/lv2core#Port" +LILV_OPTION_FILTER_LANG = "http://drobilla.net/ns/lilv#filter-lang" +LILV_OPTION_DYN_MANIFEST = "http://drobilla.net/ns/lilv#dyn-manifest" diff -Nru lilv-0.24.4~dfsg0/bindings/python/lv2_list_presets.py lilv-0.24.6/bindings/python/lv2_list_presets.py 
--- lilv-0.24.4~dfsg0/bindings/python/lv2_list_presets.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/bindings/python/lv2_list_presets.py 2019-11-09 19:18:09.000000000 +0000 @@ -0,0 +1,46 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +import sys +import lilv + + +NS_PRESETS = "http://lv2plug.in/ns/ext/presets#" + + +def print_presets(uri): + """Print all presets of an LV2 plugin to stdout.""" + + world = lilv.World() + world.load_all() + world.ns.presets = lilv.Namespace(world, NS_PRESETS) + plugins = world.get_all_plugins() + plugin = plugins[uri] + presets = plugin.get_related(world.ns.presets.Preset) + + preset_list = [] + for preset in presets: + world.load_resource(preset) + labels = world.find_nodes(preset, world.ns.rdfs.label, None) + label = str(labels[0]) if len(labels) > 0 else "" + + if not label: + sys.stderr.write("warning: Preset <%s> has no label\n" % preset) + + preset_list.append((str(preset), str(label))) + + for preset in sorted(preset_list): + print('<%s> "%s"' % preset) + + +if __name__ == "__main__": + if len(sys.argv) != 2: + sys.stderr.write("Usage: %s PLUGIN_URI\n" % (sys.argv[0])) + sys.exit(1) + + try: + print_presets(sys.argv[1]) + except ValueError as e: + sys.stderr.write("error: %s\n" % e) + except KeyError as e: + sys.stderr.write("error: %s\n" % str(e).strip("'")) diff -Nru lilv-0.24.4~dfsg0/bindings/test/bindings_test_plugin.c lilv-0.24.6/bindings/test/bindings_test_plugin.c --- lilv-0.24.4~dfsg0/bindings/test/bindings_test_plugin.c 2017-01-04 17:29:52.000000000 +0000 +++ lilv-0.24.6/bindings/test/bindings_test_plugin.c 2019-10-19 21:34:09.000000000 +0000 @@ -1,5 +1,5 @@ /* - Copyright 2006-2011 David Robillard + Copyright 2006-2019 David Robillard Copyright 2006 Steve Harris Permission to use, copy, modify, and/or distribute this software for any @@ -15,10 +15,6 @@ OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. */ -/** Include standard C headers */ -#include -#include - /** LV2 headers are based on the URI of the specification they come from, so a consistent convention can be used even for unofficial extensions. The URI @@ -26,7 +22,11 @@ replacing `http:/` with `lv2` any header in the specification bundle can be included, in this case `lv2.h`. 
*/ -#include "lv2/lv2plug.in/ns/lv2core/lv2.h" +#include "lv2/core/lv2.h" + +/** Include standard C headers */ +#include +#include /** The URI is the identifier for a plugin, and how the host associates this diff -Nru lilv-0.24.4~dfsg0/bindings/test/bindings_test_plugin.ttl.in lilv-0.24.6/bindings/test/bindings_test_plugin.ttl.in --- lilv-0.24.4~dfsg0/bindings/test/bindings_test_plugin.ttl.in 2017-01-04 17:29:52.000000000 +0000 +++ lilv-0.24.6/bindings/test/bindings_test_plugin.ttl.in 2019-10-19 21:34:09.000000000 +0000 @@ -1,5 +1,5 @@ # Lilv Bindings Test Plugin -# Copyright 2011 David Robillard +# Copyright 2011-2016 David Robillard # # Permission to use, copy, modify, and/or distribute this software for any # purpose with or without fee is hereby granted, provided that the above diff -Nru lilv-0.24.4~dfsg0/bindings/test/python/test_api.py lilv-0.24.6/bindings/test/python/test_api.py --- lilv-0.24.4~dfsg0/bindings/test/python/test_api.py 2017-01-04 17:29:52.000000000 +0000 +++ lilv-0.24.6/bindings/test/python/test_api.py 2019-11-10 16:19:29.000000000 +0000 @@ -1,4 +1,4 @@ -# Copyright 2016 David Robillard +# Copyright 2016-2019 David Robillard # Copyright 2013 Kaspar Emanuel # # Permission to use, copy, modify, and/or distribute this software for any @@ -14,25 +14,39 @@ # OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. import lilv -import unittest import os +import sys +import unittest + +path = os.path.abspath("bindings/bindings_test_plugin.lv2/") + +if sys.version_info[0] == 2: + import urllib + import urlparse + + location = urlparse.urljoin("file:", urllib.pathname2url(path) + "/") +else: + from urllib.parse import urljoin + from urllib.request import pathname2url + + location = urljoin("file:", pathname2url(path) + "/") -location = "file://" + os.getcwd() + "/bindings/bindings_test_plugin.lv2/" class NodeTests(unittest.TestCase): def setUp(self): self.world = lilv.World() + def testNodes(self): - aint = self.world.new_int(1) - aint2 = self.world.new_int(1) - aint3 = self.world.new_int(3) - afloat = self.world.new_float(2.0) - atrue = self.world.new_bool(True) - afalse = self.world.new_bool(False) - auri = self.world.new_uri("http://example.org") - afile = self.world.new_file_uri(None, "/foo/bar") + aint = self.world.new_int(1) + aint2 = self.world.new_int(1) + aint3 = self.world.new_int(3) + afloat = self.world.new_float(2.0) + atrue = self.world.new_bool(True) + afalse = self.world.new_bool(False) + auri = self.world.new_uri("http://example.org") + afile = self.world.new_file_uri(None, "/foo/bar") astring = self.world.new_string("hello") - self.assertEqual(auri.get_turtle_token(), '') + self.assertEqual(auri.get_turtle_token(), "") self.assertTrue(aint.is_int()) self.assertTrue(afloat.is_float()) self.assertTrue(auri.is_uri()) @@ -54,29 +68,38 @@ with self.assertRaises(ValueError): bool(astring) + class UriTests(unittest.TestCase): def setUp(self): self.world = lilv.World() - self.world.load_all(); + self.world.load_all() + def testInvalidURI(self): - self.plugin_uri = self.world.new_uri("invalid_uri") - self.assertIsNone(self.plugin_uri) + with self.assertRaises(ValueError): + self.plugin_uri = self.world.new_uri("invalid_uri") + def testNonExistentURI(self): self.plugin_uri = self.world.new_uri("exist:does_not") self.plugin = self.world.get_all_plugins().get_by_uri(self.plugin_uri) self.assertEqual(self.plugin, None) + def testPortTypes(self): self.assertIsNotNone(self.world.new_uri(lilv.LILV_URI_INPUT_PORT)) + def testPortTypes2(self): 
self.assertIsNotNone(self.world.new_uri(lilv.LILV_URI_OUTPUT_PORT)) + def testPortTypes3(self): self.assertIsNotNone(self.world.new_uri(lilv.LILV_URI_AUDIO_PORT)) + def testPortTypes4(self): self.assertIsNotNone(self.world.new_uri(lilv.LILV_URI_CONTROL_PORT)) + class PluginClassTests(unittest.TestCase): def setUp(self): self.world = lilv.World() + def testPluginClasses(self): pclass = self.world.get_plugin_class() self.assertIsNotNone(pclass) @@ -89,10 +112,12 @@ self.assertIsNotNone(i.get_uri()) self.assertIsNotNone(i.get_label()) + class PluginClassesTests(unittest.TestCase): def setUp(self): self.world = lilv.World() self.world.load_all() + def testPluginClasses(self): classes = self.world.get_plugin_classes() pclass = self.world.get_plugin_class() @@ -103,6 +128,9 @@ self.assertGreater(len(classes), 1) self.assertIsNotNone(classes[0]) self.assertIsNotNone(classes[pclass.get_uri()]) + with self.assertRaises(KeyError): + classes["http://example.org/notaclass"].get_uri() + class LoadTests(unittest.TestCase): def setUp(self): @@ -110,65 +138,92 @@ self.bundle_uri = self.world.new_uri(location) self.world.load_specifications() self.world.load_plugin_classes() - def tearDown(self): - del self.world + def testLoadUnload(self): self.world.load_bundle(self.bundle_uri) plugins = self.world.get_all_plugins() - plugin = plugins.get(plugins.begin()) + plugin = plugins.get(plugins.begin()) self.world.load_resource(plugin) self.world.unload_resource(plugin) self.world.unload_bundle(self.bundle_uri) + class PluginTests(unittest.TestCase): def setUp(self): self.world = lilv.World() - self.world.set_option(lilv.OPTION_FILTER_LANG, self.world.new_bool(True)) + self.world.set_option( + lilv.OPTION_FILTER_LANG, self.world.new_bool(True) + ) self.bundle_uri = self.world.new_uri(location) - self.assertIsNotNone(self.bundle_uri, "Invalid URI: '" + location + "'") + self.assertIsNotNone( + self.bundle_uri, "Invalid URI: '" + location + "'" + ) self.world.load_bundle(self.bundle_uri) self.plugins = self.world.get_all_plugins() - self.plugin = self.plugins.get(self.plugins.begin()) + self.plugin = self.plugins.get(self.plugins.begin()) self.assertTrue(self.plugin.verify()) self.assertTrue(self.plugin in self.plugins) self.assertTrue(self.plugin.get_uri() in self.plugins) self.assertEqual(self.plugins[self.plugin.get_uri()], self.plugin) - self.assertIsNotNone(self.plugin, msg="Test plugin not found at location: '" + location + "'") + with self.assertRaises(KeyError): + self.plugins["http://example.org/notaplugin"].get_uri() + + self.assertIsNotNone( + self.plugin, + msg="Test plugin not found at location: '" + location + "'", + ) self.assertEqual(location, str(self.plugin.get_bundle_uri())) self.plugin_uri = self.plugin.get_uri() - self.assertEqual(self.plugin.get_uri(), self.plugin_uri, "URI equality broken") - self.instance = lilv.Instance(self.plugin, 48000, None) - self.assertIsNotNone(self.instance) - self.lv2_InputPort = self.world.new_uri(lilv.LILV_URI_INPUT_PORT) - self.lv2_OutputPort = self.world.new_uri(lilv.LILV_URI_OUTPUT_PORT) - self.lv2_AudioPort = self.world.new_uri(lilv.LILV_URI_AUDIO_PORT) - self.lv2_ControlPort = self.world.new_uri(lilv.LILV_URI_CONTROL_PORT) + self.assertEqual( + self.plugin.get_uri(), self.plugin_uri, "URI equality broken" + ) + self.lv2_InputPort = self.world.new_uri(lilv.LILV_URI_INPUT_PORT) + self.lv2_OutputPort = self.world.new_uri(lilv.LILV_URI_OUTPUT_PORT) + self.lv2_AudioPort = self.world.new_uri(lilv.LILV_URI_AUDIO_PORT) + self.lv2_ControlPort = 
self.world.new_uri(lilv.LILV_URI_CONTROL_PORT) + def testGetters(self): + self.assertEqual( + self.world.get_symbol(self.plugin), "lilv_bindings_test_plugin" + ) self.assertIsNotNone(self.plugin.get_bundle_uri()) self.assertGreater(len(self.plugin.get_data_uris()), 0) self.assertIsNotNone(self.plugin.get_library_uri()) self.assertTrue(self.plugin.get_name().is_string()) self.assertTrue(self.plugin.get_class().get_uri().is_uri()) - self.assertEqual(len(self.plugin.get_value(self.world.ns.doap.license)), 1) + self.assertEqual( + len(self.plugin.get_value(self.world.ns.doap.license)), 1 + ) licenses = self.plugin.get_value(self.world.ns.doap.license) features = self.plugin.get_value(self.world.ns.lv2.optionalFeature) self.assertEqual(len(licenses), 1) self.assertTrue(licenses[0] in licenses) with self.assertRaises(IndexError): self.assertIsNone(licenses[len(licenses)]) - self.assertEqual(len(licenses) + len(features), - len(licenses.merge(features))) - self.assertEqual(licenses.get(licenses.begin()), self.world.new_uri('http://opensource.org/licenses/isc')) + self.assertEqual( + len(licenses) + len(features), len(licenses.merge(features)) + ) + self.assertEqual( + licenses.get(licenses.begin()), + self.world.new_uri("http://opensource.org/licenses/isc"), + ) self.assertEqual(licenses[0], licenses.get(licenses.begin())) - self.assertTrue(self.plugin.has_feature(self.world.ns.lv2.hardRTCapable)) + self.assertTrue( + self.plugin.has_feature(self.world.ns.lv2.hardRTCapable) + ) self.assertEqual(len(self.plugin.get_supported_features()), 1) self.assertEqual(len(self.plugin.get_optional_features()), 1) self.assertEqual(len(self.plugin.get_required_features()), 0) - self.assertFalse(self.plugin.has_extension_data(self.world.new_uri('http://example.org/nope'))) + self.assertFalse( + self.plugin.has_extension_data( + self.world.new_uri("http://example.org/nope") + ) + ) self.assertEqual(len(self.plugin.get_extension_data()), 0) self.assertEqual(len(self.plugin.get_extension_data()), 0) self.assertFalse(self.plugin.has_latency()) self.assertIsNone(self.plugin.get_latency_port_index()) + def testPorts(self): self.assertEqual(self.plugin.get_num_ports(), 4) self.assertIsNotNone(self.plugin.get_port(0)) @@ -181,15 +236,32 @@ self.assertIsNotNone(self.plugin.get_port("audio_input")) self.assertIsNotNone(self.plugin.get_port("audio_output")) self.assertIsNone(self.plugin.get_port_by_symbol("nonexistent")) - self.assertIsNone(self.plugin.get_port_by_designation(self.world.ns.lv2.InputPort, self.world.ns.lv2.control)) + self.assertIsNone( + self.plugin.get_port_by_designation( + self.world.ns.lv2.InputPort, self.world.ns.lv2.control + ) + ) self.assertIsNone(self.plugin.get_project()) self.assertIsNone(self.plugin.get_author_name()) self.assertIsNone(self.plugin.get_author_email()) self.assertIsNone(self.plugin.get_author_homepage()) self.assertFalse(self.plugin.is_replaced()) - self.assertEqual(0, len(self.plugin.get_related(self.world.new_uri("http://example.org/Type")))) - self.assertEqual(1, self.plugin.get_num_ports_of_class(self.lv2_InputPort, self.lv2_AudioPort)) + self.assertEqual( + 0, + len( + self.plugin.get_related( + self.world.new_uri("http://example.org/Type") + ) + ), + ) + self.assertEqual( + 1, + self.plugin.get_num_ports_of_class( + self.lv2_InputPort, self.lv2_AudioPort + ), + ) port = self.plugin.get_port("input") + self.assertEqual(self.world.get_symbol(port), "input") self.assertTrue(port.get_node().is_blank()) self.assertEqual(0, port.get(self.world.ns.lv2.index)) self.assertEqual(1, 
len(port.get_value(self.world.ns.lv2.symbol))) @@ -199,24 +271,54 @@ self.assertEqual(0, port.get_index()) self.assertEqual("input", port.get_symbol()) self.assertEqual("Input", port.get_name()) - self.assertEqual([self.world.ns.lv2.ControlPort, self.world.ns.lv2.InputPort], - list(port.get_classes())) + self.assertEqual( + [ + str(self.world.ns.lv2.ControlPort), + str(self.world.ns.lv2.InputPort), + ], + sorted(list(map(str, port.get_classes()))), + ) self.assertTrue(port.is_a(self.world.ns.lv2.ControlPort)) self.assertFalse(port.is_a(self.world.ns.lv2.AudioPort)) - self.assertEquals((0.5, 0.0, 1.0), port.get_range()) - self.assertEquals(0, len(port.get_properties())) + self.assertEqual((0.5, 0.0, 1.0), port.get_range()) + self.assertEqual(0, len(port.get_properties())) + def testScalePoints(self): port = self.plugin.get_port("input") points = port.get_scale_points() - self.assertEqual(points[0].get_label(), "off") - self.assertEqual(points[0].get_value(), 0.0) - self.assertEqual(points[1].get_label(), "on") - self.assertEqual(points[1].get_value(), 1.0) + point_dict = { + float(points[0].get_value()): points[0].get_label(), + float(points[1].get_value()): points[1].get_label(), + } + + self.assertEqual(point_dict, {0.0: "off", 1.0: "on"}) + def testPortCount(self): - self.assertEqual(1, self.plugin.get_num_ports_of_class(self.lv2_OutputPort, self.lv2_AudioPort)) - self.assertEqual(1, self.plugin.get_num_ports_of_class(self.lv2_OutputPort, self.lv2_ControlPort)) - self.assertEqual(1, self.plugin.get_num_ports_of_class(self.lv2_InputPort, self.lv2_AudioPort)) - self.assertEqual(1, self.plugin.get_num_ports_of_class(self.lv2_InputPort, self.lv2_ControlPort)) + self.assertEqual( + 1, + self.plugin.get_num_ports_of_class( + self.lv2_OutputPort, self.lv2_AudioPort + ), + ) + self.assertEqual( + 1, + self.plugin.get_num_ports_of_class( + self.lv2_OutputPort, self.lv2_ControlPort + ), + ) + self.assertEqual( + 1, + self.plugin.get_num_ports_of_class( + self.lv2_InputPort, self.lv2_AudioPort + ), + ) + self.assertEqual( + 1, + self.plugin.get_num_ports_of_class( + self.lv2_InputPort, self.lv2_ControlPort + ), + ) + class QueryTests(unittest.TestCase): def setUp(self): @@ -225,20 +327,35 @@ self.bundle_uri = self.world.new_uri(location) self.world.load_bundle(self.bundle_uri) self.plugins = self.world.get_all_plugins() - self.plugin = self.plugins.get(self.plugins.begin()) + self.plugin = self.plugins.get(self.plugins.begin()) + def testNamespaces(self): self.assertEqual(self.world.ns.lv2, "http://lv2plug.in/ns/lv2core#") - self.assertEqual(self.world.ns.lv2.Plugin, "http://lv2plug.in/ns/lv2core#Plugin") + self.assertEqual( + self.world.ns.lv2.Plugin, "http://lv2plug.in/ns/lv2core#Plugin" + ) + def testQuery(self): - self.assertTrue(self.world.ask(None, - self.world.ns.rdf.type, - self.world.ns.lv2.Plugin)) - self.assertLess(0, len(self.world.find_nodes(None, - self.world.ns.rdf.type, - self.world.ns.lv2.Plugin))) - self.assertEqual(self.plugin.get_uri(), self.world.get(None, - self.world.ns.rdf.type, - self.world.ns.lv2.Plugin)) + self.assertTrue( + self.world.ask( + None, self.world.ns.rdf.type, self.world.ns.lv2.Plugin + ) + ) + self.assertLess( + 0, + len( + self.world.find_nodes( + None, self.world.ns.rdf.type, self.world.ns.lv2.Plugin + ) + ), + ) + self.assertEqual( + self.plugin.get_uri(), + self.world.get( + None, self.world.ns.rdf.type, self.world.ns.lv2.Plugin + ), + ) + class InstanceTests(unittest.TestCase): def setUp(self): @@ -249,10 +366,22 @@ self.plugin = self.plugins[0] 
self.instance = lilv.Instance(self.plugin, 48000) self.assertEqual(self.plugin.get_uri(), self.instance.get_uri()) - self.assertIsNone(self.instance.get_extension_data(self.world.new_uri("http://example.org/ext"))) - self.assertIsNone(self.instance.get_extension_data("http://example.org/ext")) + self.assertIsNone( + self.instance.get_extension_data( + self.world.new_uri("http://example.org/ext") + ) + ) + self.assertIsNone( + self.instance.get_extension_data("http://example.org/ext") + ) + def testRun(self): - import numpy + try: + import numpy + except ImportError: + sys.stderr.write("warning: Missing numpy, not testing instance\n") + return + n_samples = 100 buf = numpy.zeros(n_samples) with self.assertRaises(Exception): @@ -265,6 +394,7 @@ self.instance.run(n_samples) self.instance.deactivate() + class UITests(unittest.TestCase): def setUp(self): self.world = lilv.World() @@ -272,19 +402,25 @@ self.world.load_bundle(self.bundle_uri) self.plugins = self.world.get_all_plugins() self.plugin = self.plugins[0] + def testUI(self): uis = self.plugin.get_uis() - ui_uri = self.world.new_uri('http://example.org/lilv-bindings-test-plugin-ui') + ui_uri = self.world.new_uri( + "http://example.org/lilv-bindings-test-plugin-ui" + ) self.assertEqual(1, len(uis)) self.assertEqual(str(uis[0]), str(ui_uri)) + with self.assertRaises(KeyError): + uis["http://example.org/notaui"].get_uri() + self.assertEqual(uis[0], str(ui_uri)) self.assertEqual(uis[0].get_uri(), ui_uri) self.assertEqual(uis[0].get_bundle_uri(), self.bundle_uri) - self.assertEqual(uis[0].get_binary_uri(), str(self.bundle_uri) + "TODO") + self.assertEqual( + uis[0].get_binary_uri(), str(self.bundle_uri) + "TODO" + ) self.assertEqual(uis[uis[0].get_uri()], uis[0]) self.assertTrue(uis[0].is_a(self.world.ns.ui.GtkUI)) self.assertTrue(uis[0] in uis) self.assertTrue(uis[0].get_uri() in uis) self.assertEqual([self.world.ns.ui.GtkUI], list(uis[0].get_classes())) - for ui in uis: - print(ui) diff -Nru lilv-0.24.4~dfsg0/COPYING lilv-0.24.6/COPYING --- lilv-0.24.4~dfsg0/COPYING 2017-02-07 22:10:15.000000000 +0000 +++ lilv-0.24.6/COPYING 2019-10-19 21:34:09.000000000 +0000 @@ -1,4 +1,4 @@ -Copyright 2011-2017 David Robillard +Copyright 2011-2019 David Robillard Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above diff -Nru lilv-0.24.4~dfsg0/debian/changelog lilv-0.24.6/debian/changelog --- lilv-0.24.4~dfsg0/debian/changelog 2020-01-16 12:50:55.000000000 +0000 +++ lilv-0.24.6/debian/changelog 2020-02-02 14:04:42.000000000 +0000 @@ -1,3 +1,23 @@ +lilv (0.24.6-1) unstable; urgency=medium + + * Team upload + + [ Helmut Grohne ] + * Fix FTCBFS: (Closes: #941710) + + Export suitable cross environment from dpkg's buildtools.mk. + + [ Debian Janitor ] + * Set upstream metadata fields: Repository, Repository-Browse. 
+ + [ Dennis Braun ] + * New upstream release 0.24.6 + * d/control: Bump Standards-Version to 4.5.0 + * d/copyright: Update year, remove comment and repack entry + * Update 1001-dont_run_ldconfig.patch + * Remove d/repack-waf and d/unpack_waf.sh + + -- Dennis Braun Sun, 02 Feb 2020 15:04:42 +0100 + lilv (0.24.4~dfsg0-2) unstable; urgency=medium [ Stuart Prescott ] diff -Nru lilv-0.24.4~dfsg0/debian/control lilv-0.24.6/debian/control --- lilv-0.24.4~dfsg0/debian/control 2020-01-16 12:50:55.000000000 +0000 +++ lilv-0.24.6/debian/control 2020-02-02 13:57:19.000000000 +0000 @@ -15,7 +15,7 @@ lv2-dev (>= 1.14.0~), pkg-config, python3 -Standards-Version: 4.4.1 +Standards-Version: 4.5.0 Homepage: https://drobilla.net/software/lilv/ Vcs-Git: https://salsa.debian.org/multimedia-team/lilv.git Vcs-Browser: https://salsa.debian.org/multimedia-team/lilv diff -Nru lilv-0.24.4~dfsg0/debian/copyright lilv-0.24.6/debian/copyright --- lilv-0.24.4~dfsg0/debian/copyright 2020-01-16 12:50:55.000000000 +0000 +++ lilv-0.24.6/debian/copyright 2020-02-02 14:03:27.000000000 +0000 @@ -2,17 +2,11 @@ Upstream-Name: Lilv Upstream-Contact: David Robillard Source: https://download.drobilla.net -Copyright: 2007-2012 David Robillard License: ISC -Comment: - The tarball is repacked because upstream uses the waf build system, which is - shipped in the tarball as a binary blob. The binary includes the source, - but Debian considers that backwards, so we run a debian/repack-waf script to - fix that. See also: https://wiki.debian.org/UnpackWaf Files: * Copyright: - 2007-2012 David Robillard + 2007-2019 David Robillard 2008 Krzysztof Foltman License: ISC @@ -26,12 +20,6 @@ 2013-2016 Jaromír Mikeš License: ISC -Files: debian/repack-waf -Copyright: 2015 Ximin Luo - 2016-2017 Richard Laager - 2019 IOhannes m zmölnig -License: CC0-1.0 - License: ISC Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above @@ -71,7 +59,3 @@ STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -License: CC0-1.0 - On Debian systems the full text of the CC0-1.0 license can be found in - /usr/share/common-licenses/CC0-1.0 diff -Nru lilv-0.24.4~dfsg0/debian/patches/1001-dont_run_ldconfig.patch lilv-0.24.6/debian/patches/1001-dont_run_ldconfig.patch --- lilv-0.24.4~dfsg0/debian/patches/1001-dont_run_ldconfig.patch 2020-01-16 12:50:55.000000000 +0000 +++ lilv-0.24.6/debian/patches/1001-dont_run_ldconfig.patch 2020-02-02 13:57:19.000000000 +0000 @@ -8,11 +8,12 @@ --- lilv.orig/wscript +++ lilv/wscript -@@ -443,7 +443,6 @@ +@@ -440,7 +440,6 @@ bld.install_as( '${SYSCONFDIR}/bash_completion.d/lilv', 'utils/lilv.bash_completion') - bld.add_post_fun(autowaf.run_ldconfig) - if bld.env.DOCS: - bld.add_post_fun(fix_docs) + + def test(tst): + with tst.group('unit') as check: diff -Nru lilv-0.24.4~dfsg0/debian/repack-waf lilv-0.24.6/debian/repack-waf --- lilv-0.24.4~dfsg0/debian/repack-waf 2020-01-16 12:50:55.000000000 +0000 +++ lilv-0.24.6/debian/repack-waf 1970-01-01 00:00:00.000000000 +0000 @@ -1,106 +0,0 @@ -#!/bin/sh -x -# Repack an upstream tarball, unpacking waf files inside it. -# -# Meant to be run by uscan(1) as the "command param", after repacking -# (if any) by mk-origtargz. So you shouldn't give "repacksuffix" to -# debian/watch; instead you should set it below; however this should -# still match the dversionmangle in that file. 
-# -# Notes from Richard Laager : -# -# On 11/27/2016 08:28 PM, Ximin Luo wrote: -# > Richard Laager: -# >> > I believe you wrote the repack-waf script linked from here: -# >> > https://wiki.debian.org/UnpackWaf -# >> > -# >> > What is the license on that? -# >> > -# > Hi Richard, I don't think that is even copyrightable, but in case it is, -# > I say it's CC0-Licensed. Enjoy! -# > -# > X -# -# This version of repack-waf has been modified to call waf using python3. -# If we use python here (explicitly or implicitly) when python is python2.7, -# we end up with a version of waf that raises SyntaxError on python3. If we -# use python3 here, the result runs in both Python 3 and Python 2.7. -# -# Also, this version detects whether the upstream tarball extracts 1) into -# a PACKAGE-VERSION directory, 2) a PACKAGE directory, or 3) the current -# directory. It normalizes that to PACKAGE-VERSION when repacking. - -echo "$0 $@" 1>&2 - -zipext=tar.bz2 -#repacksuffix="~dfsg1" -unwaf_paths=. - -# You shouldn't need to change anything below here. - -USAGE="Usage: $0 --upstream-version version filename" - -test "$1" = "--upstream-version" || { echo >&2 "$USAGE"; exit 2; } -upstream="$2" -filename="$3" - -source="$(dpkg-parsechangelog -SSource)" -newups="${upstream}${repacksuffix}" - -basedir="$(dirname "$filename")" -if [ -z "$filename" ] -then - # For whatever reason, a filename is not always passed. - if [ -f "${source}_${upstream}.orig.${zipext}" ] - then - basedir=. - elif [ -f "../${source}_${upstream}.orig.${zipext}" ] - then - basedir=.. - fi -fi - -echo "BASEDIR ${basedir} .. $(pwd)" - -unpack_waf() { - local olddir="$PWD" - cd "$1" - test -x ./waf || return 1 - python3 waf --help > /dev/null - mv .waf3-*/* . - sed -i '/^#==>$/,$d' waf - rmdir .waf3-* - find waf* -name "*.pyc" -delete - find waf* -name "__pycache__" -exec rm -rf {} + - cd "$olddir" -} - -set -e - -rm -rf "${source}-${upstream}" -mkdir "${source}-${upstream}" -tar -xf "${basedir}/${source}_${upstream}.orig.${zipext}" -C "${source}-${upstream}" -if [ "x${upstream}" = "x${newups}" ]; then - mv "${basedir}/${source}_${upstream}.orig.${zipext}" "${basedir}/${source}_${upstream}.orig.${zipext}.0" -fi -cd "${source}-${upstream}" -subdir= -if [ -d "${source}-${upstream}" ]; then - subdir="${source}-${upstream}" -elif [ -d "${source}" ]; then - subdir="${source}" -fi -if [ -n "${subdir}" ]; then - cd .. - mv "${source}-${upstream}" "${source}-${upstream}-tmp" - mv "${source}-${upstream}-tmp/${subdir}" "${source}-${upstream}" - rmdir "${source}-${upstream}-tmp" - cd "${source}-${upstream}" -fi - -for i in $unwaf_paths; do unpack_waf "$i"; done -cd .. 
-if [ "x${upstream}" != "x${newups}" ]; then - mv "${source}-${upstream}" "${source}-${newups}" -fi -GZIP="-9fn" tar -caf "$basedir/${source}_${newups}.orig.${zipext}" "${source}-${newups}" -rm -rf "${source}-${newups}" diff -Nru lilv-0.24.4~dfsg0/debian/rules lilv-0.24.6/debian/rules --- lilv-0.24.4~dfsg0/debian/rules 2020-01-16 12:50:55.000000000 +0000 +++ lilv-0.24.6/debian/rules 2020-02-02 13:57:19.000000000 +0000 @@ -1,10 +1,13 @@ #!/usr/bin/make -f include /usr/share/dpkg/architecture.mk +include /usr/share/dpkg/buildtools.mk export DEB_BUILD_MAINT_OPTIONS = hardening=+bindnow export DEB_LDFLAGS_MAINT_APPEND = -Wl,--as-needed export LINKFLAGS += $(LDFLAGS) +export CC +export PKGCONFIG = $(PKG_CONFIG) WAF = python3 ./waf diff -Nru lilv-0.24.4~dfsg0/debian/unpack_waf.sh lilv-0.24.6/debian/unpack_waf.sh --- lilv-0.24.4~dfsg0/debian/unpack_waf.sh 2020-01-16 12:50:55.000000000 +0000 +++ lilv-0.24.6/debian/unpack_waf.sh 1970-01-01 00:00:00.000000000 +0000 @@ -1,33 +0,0 @@ -#!/bin/bash - -# unpack_waf - generate an unpacked instance of the waf all-in-one blob -# Copyright (C) 2012 Alessio Treglia -# Based on: http://wiki.debian.org/UnpackWaf -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation; either version 2 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along -# with this program; if not, write to the Free Software Foundation, Inc., -# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. - -set -e - -echo -n "Unpacking waf... " -./waf --help &>/dev/null -WAFDIR=`ls .waf-*/` -mv .waf-*/${WAFDIR} ${WAFDIR} -sed -i '/^#==>$/,$d' waf -rmdir .waf-* -echo "OK." - -echo -n "Purging .pyc files... " -find ${WAFDIR} -name "*.pyc" -delete -echo "OK." 
diff -Nru lilv-0.24.4~dfsg0/debian/upstream/metadata lilv-0.24.6/debian/upstream/metadata --- lilv-0.24.4~dfsg0/debian/upstream/metadata 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/debian/upstream/metadata 2020-02-02 13:57:19.000000000 +0000 @@ -0,0 +1,2 @@ +Repository: https://github.com/x42/lilv.git +Repository-Browse: https://github.com/x42/lilv diff -Nru lilv-0.24.4~dfsg0/debian/watch lilv-0.24.6/debian/watch --- lilv-0.24.4~dfsg0/debian/watch 2020-01-16 12:50:55.000000000 +0000 +++ lilv-0.24.6/debian/watch 2020-02-02 13:57:19.000000000 +0000 @@ -1,3 +1,3 @@ version=4 -opts="pgpmode=auto,uversionmangle=s/-/./,dversionmangle=s/~dfsg.*//" \ -https://download.drobilla.net/lilv-(.+)\.(?:zip|tgz|tbz|txz|(?:tar\.(?:gz|bz2|xz))) debian debian/repack-waf +opts="pgpmode=auto" \ +https://download.drobilla.net/lilv-(.*)\.(?:zip|tgz|tbz|txz|(?:tar\.(?:gz|bz2|xz))) diff -Nru lilv-0.24.4~dfsg0/doc/footer.html lilv-0.24.6/doc/footer.html --- lilv-0.24.4~dfsg0/doc/footer.html 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/doc/footer.html 2019-10-19 21:34:09.000000000 +0000 @@ -0,0 +1,20 @@ + + + + + + + + + + diff -Nru lilv-0.24.4~dfsg0/doc/header.html lilv-0.24.6/doc/header.html --- lilv-0.24.4~dfsg0/doc/header.html 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/doc/header.html 2019-10-19 21:34:09.000000000 +0000 @@ -0,0 +1,37 @@ + + + + + $projectname: $title + $title + + $extrastylesheet + + +
+ + +
+ +
+ + diff -Nru lilv-0.24.4~dfsg0/doc/layout.xml lilv-0.24.6/doc/layout.xml --- lilv-0.24.4~dfsg0/doc/layout.xml 2015-11-14 19:49:15.000000000 +0000 +++ lilv-0.24.6/doc/layout.xml 2019-10-19 21:34:09.000000000 +0000 @@ -1,4 +1,5 @@ + @@ -8,17 +9,31 @@ + + + + + - + + + + + + + + + + - + @@ -27,10 +42,11 @@ - + + @@ -65,6 +81,8 @@ + + @@ -72,6 +90,7 @@ + @@ -81,8 +100,14 @@ + + + + + + @@ -92,6 +117,8 @@ + + @@ -107,10 +134,16 @@ + + + + + + @@ -121,6 +154,8 @@ + + @@ -130,9 +165,9 @@ - - + + @@ -141,6 +176,8 @@ + + @@ -159,6 +196,8 @@ + + diff -Nru lilv-0.24.4~dfsg0/doc/mainpage.md lilv-0.24.6/doc/mainpage.md --- lilv-0.24.4~dfsg0/doc/mainpage.md 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/doc/mainpage.md 2019-10-19 21:34:09.000000000 +0000 @@ -0,0 +1,6 @@ +Lilv is a C library to make the use of [LV2] plugins as simple as possible for +applications. + +The complete API is documented in the [lilv](@ref lilv) group. + +[LV2]: http://lv2plug.in/ diff -Nru lilv-0.24.4~dfsg0/doc/reference.doxygen.in lilv-0.24.6/doc/reference.doxygen.in --- lilv-0.24.4~dfsg0/doc/reference.doxygen.in 2017-01-04 17:29:52.000000000 +0000 +++ lilv-0.24.6/doc/reference.doxygen.in 2019-10-19 21:34:09.000000000 +0000 @@ -1,4 +1,4 @@ -# Doxyfile 1.8.12 +# Doxyfile 1.8.15 # This file describes the settings to be used by the documentation system # doxygen (www.doxygen.org) for a project. @@ -17,11 +17,11 @@ # Project related configuration options #--------------------------------------------------------------------------- -# This tag specifies the encoding used for all characters in the config file -# that follow. The default is UTF-8 which is also the encoding used for all text -# before the first occurrence of this tag. Doxygen uses libiconv (or the iconv -# built into libc) for the transcoding. See http://www.gnu.org/software/libiconv -# for the list of possible encodings. +# This tag specifies the encoding used for all characters in the configuration +# file that follow. The default is UTF-8 which is also the encoding used for all +# text before the first occurrence of this tag. Doxygen uses libiconv (or the +# iconv built into libc) for the transcoding. See +# https://www.gnu.org/software/libiconv/ for the list of possible encodings. # The default value is: UTF-8. DOXYFILE_ENCODING = UTF-8 @@ -44,7 +44,7 @@ # for a project that appears at the top of each page and should give viewer a # quick idea about the purpose of the project. Keep the description short. -PROJECT_BRIEF = +PROJECT_BRIEF = "A library for simple use of LV2 plugins" # With the PROJECT_LOGO tag one can specify a logo or an icon that is included # in the documentation. The maximum height of the logo should not exceed 55 @@ -93,6 +93,14 @@ OUTPUT_LANGUAGE = English +# The OUTPUT_TEXT_DIRECTION tag is used to specify the direction in which all +# documentation generated by doxygen is written. Doxygen will use this +# information to generate all generated output in the proper direction. +# Possible values are: None, LTR, RTL and Context. +# The default value is: None. + +OUTPUT_TEXT_DIRECTION = None + # If the BRIEF_MEMBER_DESC tag is set to YES, doxygen will include brief member # descriptions after the members that are listed in the file and class # documentation (similar to Javadoc). Set to NO to disable this. @@ -226,7 +234,12 @@ # will allow you to put the command \sideeffect (or @sideeffect) in the # documentation, which will result in a user-defined paragraph with heading # "Side Effects:". 
You can put \n's in the value part of an alias to insert -# newlines. +# newlines (in the resulting output). You can put ^^ in the value part of an +# alias to insert a newline as if a physical newline was in the original file. +# When you need a literal { or } or , in the value part of an alias you have to +# escape them by means of a backslash (\), this can lead to conflicts with the +# commands \{ and \} for these it is advised to use the version @{ and @} or use +# a double escape (\\{ and \\}) ALIASES = @@ -264,17 +277,26 @@ OPTIMIZE_OUTPUT_VHDL = NO +# Set the OPTIMIZE_OUTPUT_SLICE tag to YES if your project consists of Slice +# sources only. Doxygen will then generate output that is more tailored for that +# language. For instance, namespaces will be presented as modules, types will be +# separated into more groups, etc. +# The default value is: NO. + +OPTIMIZE_OUTPUT_SLICE = NO + # Doxygen selects the parser to use depending on the extension of the files it # parses. With this tag you can assign which parser to use for a given # extension. Doxygen has a built-in mapping, but you can override or extend it # using this tag. The format is ext=language, where ext is a file extension, and # language is one of the parsers supported by doxygen: IDL, Java, Javascript, -# C#, C, C++, D, PHP, Objective-C, Python, Fortran (fixed format Fortran: -# FortranFixed, free formatted Fortran: FortranFree, unknown formatted Fortran: -# Fortran. In the later case the parser tries to guess whether the code is fixed -# or free formatted code, this is the default for Fortran type files), VHDL. For -# instance to make doxygen treat .inc files as Fortran files (default is PHP), -# and .f files as C (default is Fortran), use: inc=Fortran f=C. +# Csharp (C#), C, C++, D, PHP, md (Markdown), Objective-C, Python, Slice, +# Fortran (fixed format Fortran: FortranFixed, free formatted Fortran: +# FortranFree, unknown formatted Fortran: Fortran. In the later case the parser +# tries to guess whether the code is fixed or free formatted code, this is the +# default for Fortran type files), VHDL, tcl. For instance to make doxygen treat +# .inc files as Fortran files (default is PHP), and .f files as C (default is +# Fortran), use: inc=Fortran f=C. # # Note: For files without extension you can use no_extension as a placeholder. # @@ -285,7 +307,7 @@ # If the MARKDOWN_SUPPORT tag is enabled then doxygen pre-processes all comments # according to the Markdown format, which allows for more readable -# documentation. See http://daringfireball.net/projects/markdown/ for details. +# documentation. See https://daringfireball.net/projects/markdown/ for details. # The output of markdown processing is further processed by doxygen, so you can # mix doxygen, HTML, and XML commands with Markdown formatting. Disable only in # case of backward compatibilities issues. @@ -327,7 +349,7 @@ CPP_CLI_SUPPORT = NO # Set the SIP_SUPPORT tag to YES if your project consists of sip (see: -# http://www.riverbankcomputing.co.uk/software/sip/intro) sources only. Doxygen +# https://www.riverbankcomputing.com/software/sip/intro) sources only. Doxygen # will parse them like normal C++ but will assume all classes use public instead # of private inheritance when no explicit protection keyword is present. # The default value is: NO. @@ -698,7 +720,7 @@ # The CITE_BIB_FILES tag can be used to specify one or more bib files containing # the reference definitions. This must be a list of .bib files. The .bib # extension is automatically appended if omitted. 
This requires the bibtex tool -# to be installed. See also http://en.wikipedia.org/wiki/BibTeX for more info. +# to be installed. See also https://en.wikipedia.org/wiki/BibTeX for more info. # For LaTeX the style of the bibliography can be controlled using # LATEX_BIB_STYLE. To use this feature you need bibtex and perl available in the # search path. See also \cite for info how to create references. @@ -743,7 +765,8 @@ # This WARN_NO_PARAMDOC option can be enabled to get warnings for functions that # are documented, but have no documentation for their parameters or return # value. If set to NO, doxygen will only warn about wrong or incomplete -# parameter documentation, but not about the absence of documentation. +# parameter documentation, but not about the absence of documentation. If +# EXTRACT_ALL is set to YES then this flag will automatically be disabled. # The default value is: NO. WARN_NO_PARAMDOC = YES @@ -780,12 +803,13 @@ # spaces. See also FILE_PATTERNS and EXTENSION_MAPPING # Note: If this tag is empty the current directory is searched. -INPUT = @LILV_SRCDIR@/lilv/lilv.h +INPUT = @LILV_SRCDIR@/lilv/lilv.h \ + @LILV_SRCDIR@/doc/mainpage.md # This tag can be used to specify the character encoding of the source files # that doxygen parses. Internally doxygen uses the UTF-8 encoding. Doxygen uses # libiconv (or the iconv built into libc) for the transcoding. See the libiconv -# documentation (see: http://www.gnu.org/software/libiconv) for the list of +# documentation (see: https://www.gnu.org/software/libiconv/) for the list of # possible encodings. # The default value is: UTF-8. @@ -803,7 +827,7 @@ # *.c++, *.java, *.ii, *.ixx, *.ipp, *.i++, *.inl, *.idl, *.ddl, *.odl, *.h, # *.hh, *.hxx, *.hpp, *.h++, *.cs, *.d, *.php, *.php4, *.php5, *.phtml, *.inc, # *.m, *.markdown, *.md, *.mm, *.dox, *.py, *.pyw, *.f90, *.f95, *.f03, *.f08, -# *.f, *.for, *.tcl, *.vhd, *.vhdl, *.ucf and *.qsf. +# *.f, *.for, *.tcl, *.vhd, *.vhdl, *.ucf, *.qsf and *.ice. FILE_PATTERNS = @@ -929,7 +953,7 @@ # (index.html). This can be useful if you have a project on for instance GitHub # and want to reuse the introduction page also for the doxygen output. -USE_MDFILE_AS_MAINPAGE = +USE_MDFILE_AS_MAINPAGE = @LILV_SRCDIR@/doc/mainpage.md #--------------------------------------------------------------------------- # Configuration options related to source browsing @@ -958,10 +982,10 @@ STRIP_CODE_COMMENTS = YES # If the REFERENCED_BY_RELATION tag is set to YES then for each documented -# function all documented functions referencing it will be listed. +# entity all documented functions referencing it will be listed. # The default value is: NO. -REFERENCED_BY_RELATION = YES +REFERENCED_BY_RELATION = NO # If the REFERENCES_RELATION tag is set to YES then for each documented function # all documented entities called/used by that function will be listed. @@ -985,17 +1009,17 @@ # The default value is: YES. # This tag requires that the tag SOURCE_BROWSER is set to YES. -SOURCE_TOOLTIPS = YES +SOURCE_TOOLTIPS = NO # If the USE_HTAGS tag is set to YES then the references to source code will # point to the HTML generated by the htags(1) tool instead of doxygen built-in # source browser. The htags tool is part of GNU's global source tagging system -# (see http://www.gnu.org/software/global/global.html). You will need version +# (see https://www.gnu.org/software/global/global.html). You will need version # 4.8.6 or higher. 
# # To use it do the following: # - Install the latest version of global -# - Enable SOURCE_BROWSER and USE_HTAGS in the config file +# - Enable SOURCE_BROWSER and USE_HTAGS in the configuration file # - Make sure the INPUT points to the root of the source tree # - Run doxygen as normal # @@ -1085,7 +1109,7 @@ # of the possible markers and block names see the documentation. # This tag requires that the tag GENERATE_HTML is set to YES. -HTML_HEADER = +HTML_HEADER = @LILV_SRCDIR@/doc/header.html # The HTML_FOOTER tag can be used to specify a user-defined HTML footer for each # generated HTML page. If the tag is left blank doxygen will generate a standard @@ -1095,7 +1119,7 @@ # that doxygen normally uses. # This tag requires that the tag GENERATE_HTML is set to YES. -HTML_FOOTER = +HTML_FOOTER = @LILV_SRCDIR@/doc/footer.html # The HTML_STYLESHEET tag can be used to specify a user-defined cascading style # sheet that is used by each HTML page. It can be used to fine-tune the look of @@ -1135,7 +1159,7 @@ # The HTML_COLORSTYLE_HUE tag controls the color of the HTML output. Doxygen # will adjust the colors in the style sheet and background images according to # this color. Hue is specified as an angle on a colorwheel, see -# http://en.wikipedia.org/wiki/Hue for more information. For instance the value +# https://en.wikipedia.org/wiki/Hue for more information. For instance the value # 0 represents red, 60 is yellow, 120 is green, 180 is cyan, 240 is blue, 300 # purple, and 360 is red again. # Minimum value: 0, maximum value: 359, default value: 220. @@ -1171,6 +1195,17 @@ HTML_TIMESTAMP = NO +# If the HTML_DYNAMIC_MENUS tag is set to YES then the generated HTML +# documentation will contain a main index with vertical navigation menus that +# are dynamically created via Javascript. If disabled, the navigation index will +# consists of multiple levels of tabs that are statically embedded in every HTML +# page. Disable this option to support browsers that do not have Javascript, +# like the Qt help browser. +# The default value is: YES. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_DYNAMIC_MENUS = NO + # If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML # documentation will contain sections that can be hidden and shown after the # page has loaded. @@ -1194,13 +1229,13 @@ # If the GENERATE_DOCSET tag is set to YES, additional index files will be # generated that can be used as input for Apple's Xcode 3 integrated development -# environment (see: http://developer.apple.com/tools/xcode/), introduced with -# OSX 10.5 (Leopard). To create a documentation set, doxygen will generate a +# environment (see: https://developer.apple.com/xcode/), introduced with OSX +# 10.5 (Leopard). To create a documentation set, doxygen will generate a # Makefile in the HTML output directory. Running make will produce the docset in # that directory and running make install will install the docset in # ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find it at -# startup. See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html -# for more information. +# startup. See https://developer.apple.com/library/archive/featuredarticles/Doxy +# genXcode/_index.html for more information. # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. @@ -1239,7 +1274,7 @@ # If the GENERATE_HTMLHELP tag is set to YES then doxygen generates three # additional HTML index files: index.hhp, index.hhc, and index.hhk. 
The # index.hhp is a project file that can be read by Microsoft's HTML Help Workshop -# (see: http://www.microsoft.com/en-us/download/details.aspx?id=21138) on +# (see: https://www.microsoft.com/en-us/download/details.aspx?id=21138) on # Windows. # # The HTML Help Workshop contains a compiler that can convert all HTML output @@ -1315,7 +1350,7 @@ # The QHP_NAMESPACE tag specifies the namespace to use when generating Qt Help # Project output. For more information please see Qt Help Project / Namespace -# (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#namespace). +# (see: http://doc.qt.io/archives/qt-4.8/qthelpproject.html#namespace). # The default value is: org.doxygen.Project. # This tag requires that the tag GENERATE_QHP is set to YES. @@ -1323,7 +1358,7 @@ # The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating Qt # Help Project output. For more information please see Qt Help Project / Virtual -# Folders (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#virtual- +# Folders (see: http://doc.qt.io/archives/qt-4.8/qthelpproject.html#virtual- # folders). # The default value is: doc. # This tag requires that the tag GENERATE_QHP is set to YES. @@ -1332,7 +1367,7 @@ # If the QHP_CUST_FILTER_NAME tag is set, it specifies the name of a custom # filter to add. For more information please see Qt Help Project / Custom -# Filters (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#custom- +# Filters (see: http://doc.qt.io/archives/qt-4.8/qthelpproject.html#custom- # filters). # This tag requires that the tag GENERATE_QHP is set to YES. @@ -1340,7 +1375,7 @@ # The QHP_CUST_FILTER_ATTRS tag specifies the list of the attributes of the # custom filter to add. For more information please see Qt Help Project / Custom -# Filters (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#custom- +# Filters (see: http://doc.qt.io/archives/qt-4.8/qthelpproject.html#custom- # filters). # This tag requires that the tag GENERATE_QHP is set to YES. @@ -1348,7 +1383,7 @@ # The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this # project's filter section matches. Qt Help Project / Filter Attributes (see: -# http://qt-project.org/doc/qt-4.8/qthelpproject.html#filter-attributes). +# http://doc.qt.io/archives/qt-4.8/qthelpproject.html#filter-attributes). # This tag requires that the tag GENERATE_QHP is set to YES. QHP_SECT_FILTER_ATTRS = @@ -1389,7 +1424,7 @@ # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. -DISABLE_INDEX = YES +DISABLE_INDEX = NO # The GENERATE_TREEVIEW tag is used to specify whether a tree-like index # structure should be generated to display hierarchical information. If the tag @@ -1441,7 +1476,7 @@ FORMULA_FONTSIZE = 10 -# Use the FORMULA_TRANPARENT tag to determine whether or not the images +# Use the FORMULA_TRANSPARENT tag to determine whether or not the images # generated for formulas are transparent PNGs. Transparent PNGs are not # supported properly for IE 6.0, but are supported on all modern browsers. # @@ -1453,7 +1488,7 @@ FORMULA_TRANSPARENT = YES # Enable the USE_MATHJAX option to render LaTeX formulas using MathJax (see -# http://www.mathjax.org) which uses client side Javascript for the rendering +# https://www.mathjax.org) which uses client side Javascript for the rendering # instead of using pre-rendered bitmaps. Use this if you do not have LaTeX # installed or if you want to formulas look prettier in the HTML output. 
When # enabled you may also need to install MathJax separately and configure the path @@ -1480,8 +1515,8 @@ # MATHJAX_RELPATH should be ../mathjax. The default value points to the MathJax # Content Delivery Network so you can quickly see the result without installing # MathJax. However, it is strongly recommended to install a local copy of -# MathJax from http://www.mathjax.org before deployment. -# The default value is: http://cdn.mathjax.org/mathjax/latest. +# MathJax from https://www.mathjax.org before deployment. +# The default value is: https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.5/. # This tag requires that the tag USE_MATHJAX is set to YES. MATHJAX_RELPATH = http://cdn.mathjax.org/mathjax/latest @@ -1542,7 +1577,7 @@ # # Doxygen ships with an example indexer (doxyindexer) and search engine # (doxysearch.cgi) which are based on the open source search engine library -# Xapian (see: http://xapian.org/). +# Xapian (see: https://xapian.org/). # # See the section "External Indexing and Searching" for details. # The default value is: NO. @@ -1555,7 +1590,7 @@ # # Doxygen ships with an example indexer (doxyindexer) and search engine # (doxysearch.cgi) which are based on the open source search engine library -# Xapian (see: http://xapian.org/). See the section "External Indexing and +# Xapian (see: https://xapian.org/). See the section "External Indexing and # Searching" for details. # This tag requires that the tag SEARCHENGINE is set to YES. @@ -1607,21 +1642,34 @@ # The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be # invoked. # -# Note that when enabling USE_PDFLATEX this option is only used for generating -# bitmaps for formulas in the HTML output, but not in the Makefile that is -# written to the output directory. -# The default file is: latex. +# Note that when not enabling USE_PDFLATEX the default is latex when enabling +# USE_PDFLATEX the default is pdflatex and when in the later case latex is +# chosen this is overwritten by pdflatex. For specific output languages the +# default can have been set differently, this depends on the implementation of +# the output language. # This tag requires that the tag GENERATE_LATEX is set to YES. LATEX_CMD_NAME = latex # The MAKEINDEX_CMD_NAME tag can be used to specify the command name to generate # index for LaTeX. +# Note: This tag is used in the Makefile / make.bat. +# See also: LATEX_MAKEINDEX_CMD for the part in the generated output file +# (.tex). # The default file is: makeindex. # This tag requires that the tag GENERATE_LATEX is set to YES. MAKEINDEX_CMD_NAME = makeindex +# The LATEX_MAKEINDEX_CMD tag can be used to specify the command name to +# generate index for LaTeX. +# Note: This tag is used in the generated output file (.tex). +# See also: MAKEINDEX_CMD_NAME for the part in the Makefile / make.bat. +# The default value is: \makeindex. +# This tag requires that the tag GENERATE_LATEX is set to YES. + +LATEX_MAKEINDEX_CMD = \makeindex + # If the COMPACT_LATEX tag is set to YES, doxygen generates more compact LaTeX # documents. This may be useful for small projects and may help to save some # trees in general. @@ -1742,7 +1790,7 @@ # The LATEX_BIB_STYLE tag can be used to specify the style to use for the # bibliography, e.g. plainnat, or ieeetr. See -# http://en.wikipedia.org/wiki/BibTeX and \cite for more info. +# https://en.wikipedia.org/wiki/BibTeX and \cite for more info. # The default value is: plain. # This tag requires that the tag GENERATE_LATEX is set to YES. 
@@ -1756,6 +1804,14 @@ LATEX_TIMESTAMP = NO +# The LATEX_EMOJI_DIRECTORY tag is used to specify the (relative or absolute) +# path from which the emoji images will be read. If a relative path is entered, +# it will be relative to the LATEX_OUTPUT directory. If left blank the +# LATEX_OUTPUT directory will be used. +# This tag requires that the tag GENERATE_LATEX is set to YES. + +LATEX_EMOJI_DIRECTORY = + #--------------------------------------------------------------------------- # Configuration options related to the RTF output #--------------------------------------------------------------------------- @@ -1795,9 +1851,9 @@ RTF_HYPERLINKS = NO -# Load stylesheet definitions from file. Syntax is similar to doxygen's config -# file, i.e. a series of assignments. You only have to provide replacements, -# missing definitions are set to their default value. +# Load stylesheet definitions from file. Syntax is similar to doxygen's +# configuration file, i.e. a series of assignments. You only have to provide +# replacements, missing definitions are set to their default value. # # See also section "Doxygen usage" for information on how to generate the # default style sheet that doxygen normally uses. @@ -1806,8 +1862,8 @@ RTF_STYLESHEET_FILE = # Set optional variables used in the generation of an RTF document. Syntax is -# similar to doxygen's config file. A template extensions file can be generated -# using doxygen -e rtf extensionFile. +# similar to doxygen's configuration file. A template extensions file can be +# generated using doxygen -e rtf extensionFile. # This tag requires that the tag GENERATE_RTF is set to YES. RTF_EXTENSIONS_FILE = @@ -1893,6 +1949,13 @@ XML_PROGRAMLISTING = YES +# If the XML_NS_MEMB_FILE_SCOPE tag is set to YES, doxygen will include +# namespace members in file scope as well, matching the HTML output. +# The default value is: NO. +# This tag requires that the tag GENERATE_XML is set to YES. + +XML_NS_MEMB_FILE_SCOPE = NO + #--------------------------------------------------------------------------- # Configuration options related to the DOCBOOK output #--------------------------------------------------------------------------- @@ -1925,9 +1988,9 @@ #--------------------------------------------------------------------------- # If the GENERATE_AUTOGEN_DEF tag is set to YES, doxygen will generate an -# AutoGen Definitions (see http://autogen.sf.net) file that captures the -# structure of the code including all documentation. Note that this feature is -# still experimental and incomplete at the moment. +# AutoGen Definitions (see http://autogen.sourceforge.net/) file that captures +# the structure of the code including all documentation. Note that this feature +# is still experimental and incomplete at the moment. # The default value is: NO. GENERATE_AUTOGEN_DEF = NO @@ -1987,7 +2050,7 @@ # The default value is: NO. # This tag requires that the tag ENABLE_PREPROCESSING is set to YES. -MACRO_EXPANSION = NO +MACRO_EXPANSION = YES # If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES then # the macro expansion is limited to the macros specified with the PREDEFINED and @@ -1995,7 +2058,7 @@ # The default value is: NO. # This tag requires that the tag ENABLE_PREPROCESSING is set to YES. -EXPAND_ONLY_PREDEF = NO +EXPAND_ONLY_PREDEF = YES # If the SEARCH_INCLUDES tag is set to YES, the include files in the # INCLUDE_PATH will be searched if a #include is found. @@ -2027,7 +2090,7 @@ # recursively expanded use the := operator instead of the = operator. 
# This tag requires that the tag ENABLE_PREPROCESSING is set to YES. -PREDEFINED = +PREDEFINED = LILV_API # If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then this # tag can be used to specify a list of macro names that should be expanded. The @@ -2349,6 +2412,11 @@ PLANTUML_JAR_PATH = +# When using plantuml, the PLANTUML_CFG_FILE tag can be used to specify a +# configuration file for plantuml. + +PLANTUML_CFG_FILE = + # When using plantuml, the specified paths are searched for files specified by # the !include statement in a plantuml block. diff -Nru lilv-0.24.4~dfsg0/doc/style.css lilv-0.24.6/doc/style.css --- lilv-0.24.4~dfsg0/doc/style.css 2017-01-04 17:29:52.000000000 +0000 +++ lilv-0.24.6/doc/style.css 2019-10-19 21:34:09.000000000 +0000 @@ -1,68 +1,113 @@ body { - max-width: 80em; - margin: 0; + background: #FFF; + color: #222; + font-style: normal; + line-height: 1.6em; margin-left: auto; margin-right: auto; - background: #FFF; - color: #000; + padding: 1em; + max-width: 60em; + font-family: "DejaVu Serif",Palatino,serif; + text-rendering: optimizeLegibility; } -#titlearea { - display: none; +h1, .title, #projectname, h2, h3, h4, h5, h6 { + line-height: 1.0125em; + color: #444; + font-family: "DejaVu Sans",Helvetica,Arial,sans-serif; + margin: 1em 0 0.5em 0; +} + +h1, .titlearea .header .titlebox, #projectname { + font-size: 300%; + font-weight: 400; + margin-bottom: 0.25em; + margin-top: 0; } -h1 { +.header .headertitle .title { font-size: 180%; - font-weight: 900; + font-weight: 400; + margin: 0.75em 0.25em 0.5em 0; } -h2 { - font-size: 140%; - font-weight: 700; +.ingroups { + display: inline; +} +.title .ingroups a { + font-size: small; + margin-left: 1em; } -h3 { - font-size: 120%; - font-weight: 700; +#titlebox, #metabox { + display: inline-block; +} +#titlebox{ + display: inline-block; + width: 75%; + left: 0; + top: 0; } -h4 { - font-size: 110%; - font-weight: 700; +#title { + margin-bottom: 0.25em; } -h5 { - font-size: 100%; - font-weight: 700; +#shortdesc { + margin: 0; + color: #666; + display: inline-block; + font-style: italic; + padding: 0; } -h6 { - font-size: 100%; - font-weight: 600; +#titlearea { + margin: 0.25em auto 0.25em auto; + padding: 0; + position: relative; + clear: both; + line-height: 1.0em; } -p { - margin: 0 0 1em 0; +h2 { + font-size: 160%; + font-weight: 400; } -dt { - font-weight: 700; +h3 { + font-size: 140%; + font-weight: 400; } -p.startli,p.startdd,p.starttd { - margin-top: 2px; +h4 { + font-size: 120%; + font-weight: 500; } -p.endli { - margin-bottom: 0; +h5, h6 { + font-size: 110%; + font-weight: 600; } -p.enddd { - margin-bottom: 4px; +h1 a, h1 a:link, h1 a:visited , +h2 a, h2 a:link, h2 a:visited , +h3 a, h3 a:link, h3 a:visited , +h4 a, h4 a:link, h4 a:visited , +h5 a, h5 a:link, h5 a:visited , +h6 a, h6 a:link, h6 a:visited { + color: #444; } -p.endtd { - margin-bottom: 2px; +p { + margin: 0.5em 0 0.5em 0; +} + +dt { + font-weight: 700; +} + +dd { + margin-left: 2em; } caption { @@ -115,30 +160,20 @@ color: #444; } -a.code { - color: #4665A2; -} - -a.codeRef { - color: #4665A2; -} - /* @end */ dl.el { margin-left: -1cm; } .fragment { - font-family: monospace, fixed; + font-family: "DejaVu Sans Mono",monospace,fixed; } pre.fragment { border: 1px solid #C4C4C4; background-color: #F9F9F9; - padding: 4px 6px; - margin: 4px 8px 4px 2px; + padding: 0.5em; overflow: auto; - line-height: 125%; } div.ah { @@ -167,10 +202,11 @@ font-style: italic; } -div.contents { - margin-top: 10px; - margin-left: 10px; - margin-right: 10px; 
+div.contents, #content { + padding: 0 0.25em 0 0.25em; + max-width: 60em; + margin-left: auto; + margin-right: auto; } td.indexkey { @@ -188,6 +224,10 @@ margin: 2px 0; } +table.memname { + font-family: "DejaVu Sans Mono",monospace,fixed; +} + tr.memlist { background-color: #EEF1F7; } @@ -213,7 +253,6 @@ address.footer { text-align: right; - padding-right: 12px; } img.footer { @@ -223,43 +262,31 @@ /* @group Code Colorization */ span.keyword { - color: green; + color: #586E75; } span.keywordtype { - color: #3E873E; + color: #546E00; } span.keywordflow { - color: #e08000; + color: #586E75; } span.comment { - color: maroon; + color: #6C71C4; } span.preprocessor { - color: #806020; + color: #D33682; } span.stringliteral { - color: #002080; + color: #CB4B16; } span.charliteral { - color: teal; -} - -span.vhdldigit { - color: #F0F; -} - -span.vhdlkeyword { - color: #700070; -} - -span.vhdllogic { - color: red; + color: #CB4B16; } /* @end */ @@ -285,17 +312,20 @@ margin: 2em 0 1em; } -hr.footer { - height: 1px; +#footer { + bottom: 0; + clear: both; + font-size: x-small; + margin: 2em 0 0; + padding: 0 1em 1em 1em; + vertical-align: top; + color: #888; } /* @group Member Descriptions */ table.memberdecls { border-spacing: 0.125em; -} - -h2.groupheader { - margin: 0.5em 0 0.25em 0; + line-height: 1.3em; } .mdescLeft,.mdescRight,.memItemLeft,.memItemRight,.memTemplItemLeft,.memTemplItemRight,.memTemplParams { @@ -309,7 +339,7 @@ .memItemLeft,.memItemRight,.memTemplParams { border: 0; - font-family: monospace, fixed; + font-family: "DejaVu Sans Mono",monospace,fixed; } .memItemLeft,.memTemplItemLeft { @@ -334,7 +364,7 @@ td.mlabels-right { vertical-align: top; padding-top: 4px; - color: #AA6; + color: #B4C342; } .memtitle { @@ -345,13 +375,15 @@ /* @group Member Details */ /* Styles for detailed member documentation */ .memtemplate { - color: #4665A2; - font-weight: bold; + color: #888; + font-style: italic; + font-family: "DejaVu Sans Mono",monospace,fixed; + font-size: small; } .memnav { - background-color: #EBEFF6; - border: 1px solid #A3B4D7; + background-color: #EEE; + border: 1px solid #B4C342; text-align: center; margin: 2px; margin-right: 15px; @@ -359,23 +391,25 @@ } .memitem { - padding: 0; - margin: 1em 0 1em 0; + padding: 0.25em 0.5em 0.25em 0.5em; + margin: 0 0 1em 0; + border-radius: 6px; + border: 1px solid #DDD; } .memproto { - padding: 0; - font-weight: bold; - color: #000; + font-size: 110%; + font-weight: 400; + line-height: 1em; + color: #000; } .memproto .paramname { - color: #444; font-style: normal; } .memdoc { - padding: 0 0 0.5em 2em; + padding: 0 0.25em 0 0.25em; } .paramkey { @@ -383,37 +417,54 @@ } .paramtype { - color: #3E873E; + color: #666; + padding-right: 0.5em; white-space: nowrap; } .paramname { - color: #444; + color: #111; white-space: nowrap; - font-weight: bold; -} - -td.paramname { - vertical-align: top; + font-family: "DejaVu Sans Mono",monospace,fixed; + font-style: italic; + padding-right: 0.5em; } .fieldname { color: #000; } +.fieldtable { + padding-top: 0.25em; + border-top: 1px dashed #DDD; +} + +.fieldtable tbody tr:first-child { + display: none; +} + td.fieldname { - padding-right: 1em; + padding: 0 0.5em 0 0.25em; vertical-align: top; + font-family: "DejaVu Sans Mono",monospace,fixed; } td.fieldtype { + color: #666; + padding: 0 0.5em 0 0; vertical-align: top; - color: #444; - padding-right: 0.5em; + font-family: "DejaVu Sans Mono",monospace,fixed; } td.fielddoc p { margin: 0; + vertical-align: top; + padding: 0 0.5em 0 0; +} + +p.reference { + 
font-size: x-small; + font-style: italic; } /* @end */ @@ -454,6 +505,22 @@ vertical-align: -30%; } +td.entry { + font-family: "DejaVu Sans",Helvetica,Arial,sans-serif; + font-weight: 400; + padding-right: 1em; +} + +td.entry .arrow { + display: none; +} + +td.entry b { + font-family: "DejaVu Sans",Helvetica,Arial,sans-serif; + font-weight: 400; + font-size: 130%; +} + /* these are for tree view when not used as main index */ .directory-alt { font-size: 100%; @@ -491,7 +558,7 @@ address { font-style: normal; - color: #2A3D61; + color: #444; } table.doxtable { @@ -523,23 +590,20 @@ } div.navpath { - padding: 0.25em; + color: #DDD; } .navpath ul { - font-size: x-small; - color: #8AA0CC; overflow: hidden; margin: 0; padding: 0; } .navpath li { - list-style-type: none; float: left; - padding-left: 10px; - padding-right: 15px; - color: #364D7C; + padding-left: 0; + margin-left: 0.5em; + padding-right: 1em; } .navpath a { @@ -548,62 +612,68 @@ outline: none; } -.navpath a:hover { - color: #6884BD; -} - div.summary { - float: right; - font-size: x-small; - padding: 0.25em 0.5em 0 0; - width: 50%; - text-align: right; + font-size: small; + font-family: "DejaVu Sans",Helvetica,Arial,sans-serif; + margin: 0; + color: #FFF; /* Hide separator bars */ + border-bottom: 1px solid #DDD; + padding: 0.25em 0; } div.summary a { white-space: nowrap; } -div.header { - background-color: #F3F3F3; - margin: 0; +/* Metadata box (right aligned next to title) */ + +#metabox { + display: inline-block; + font-size: x-small; + margin: 0 0 0.25em 0; + position: absolute; + right: 0; + top: 0; + color: #666; + font-style: italic; + padding: 0 1em; +} + +#meta { + border-style: hidden; + margin-right: 0.25em; +} + +#meta tr, #meta th, #meta td { + background-color: transparent; border: 0; + margin: 0; + font-weight: normal; } -div.headertitle { - font-size: 180%; - font-weight: bold; - color: #FFF; - padding: 0.125em 0.25em 0.125em 0.25em; - background-color: #333; - background: linear-gradient(to bottom, #333 0%, #111 100%); - border: solid 1px #444; - border-top: 0; - border-radius: 0 0 6px 6px; +#meta th { + text-align: right; +} + +#meta th:after { + content: ":"; } div.line { - font-family: monospace, fixed; - font-size: 13px; - min-height: 13px; - line-height: 1.0; - text-wrap: avoid; + font-family: "DejaVu Sans Mono",monospace,fixed; + line-height: 1.4em; white-space: pre-wrap; - text-indent: -53px; - padding-left: 53px; - padding-bottom: 0; - margin: 0; } .glow { - background-color: cyan; - box-shadow: 0 0 10px cyan; + background-color: #2AA198; + box-shadow: 0 0 10px #2AA198; } span.lineno { padding-right: 4px; text-align: right; - border-right: 2px solid #0F0; + border-right: 2px solid #546E00; background-color: #E8E8E8; white-space: pre; } @@ -616,22 +686,17 @@ } .tabs, .tabs2, .navpath { - background-image: none; - background-color: #333; - background: linear-gradient(to bottom, #333 0%, #111 100%); - border: 0; - border-bottom: solid 2px #000; - padding: 0; - padding-top: 2px; - font-size: small; -} - -#navrow1 { - border: 0; + padding: 0.25em 0; + border-bottom: 1px solid #DDD; + font-size: small; + font-family: "DejaVu Sans",Helvetica,Arial,sans-serif; + margin: 0; } th { text-align: left; + font-size: 110%; + font-weight: 500; } .mlabel { @@ -650,40 +715,23 @@ display: table-cell; line-height: 2em; list-style: none; - background-color: #333; - background: linear-gradient(to bottom, #444 0%, #222 100%); - border: 1px solid #222; border-bottom: 0; - border-radius: 6px 6px 0 0; - color: #DDD; } .tablist a { 
display: block;
-  padding: 0 20px;
-  font-weight: bold;
-  color: #859900;
+  padding: 0 1em 0 0;
+  font-family: "DejaVu Sans",Helvetica,Arial,sans-serif;
   text-decoration: none;
   outline: none;
 }
 
-.header a {
-  color: #859900;
-}
-
 .tabs3 .tablist a {
   padding: 0 10px;
 }
 
-.tablist a:hover {
-  color: #fff;
-  text-shadow: 0 1px 1px rgba(0, 0, 0, 1.0);
-  text-decoration: none;
-}
-
 .tablist li.current a {
-  color: #fff;
-  text-shadow: 0 1px 1px rgba(0, 0, 0, 1.0);
+  color: #222;
 }
 
 span.icon {
diff -Nru lilv-0.24.4~dfsg0/.gitattributes lilv-0.24.6/.gitattributes
--- lilv-0.24.4~dfsg0/.gitattributes	2018-07-22 16:11:07.000000000 +0000
+++ lilv-0.24.6/.gitattributes	1970-01-01 00:00:00.000000000 +0000
@@ -1 +0,0 @@
-waf binary
\ No newline at end of file
diff -Nru lilv-0.24.4~dfsg0/.gitmodules lilv-0.24.6/.gitmodules
--- lilv-0.24.4~dfsg0/.gitmodules	1970-01-01 00:00:00.000000000 +0000
+++ lilv-0.24.6/.gitmodules	2019-10-19 21:34:09.000000000 +0000
@@ -0,0 +1,3 @@
+[submodule "waflib"]
+	path = waflib
+	url = ../../drobilla/autowaf
diff -Nru lilv-0.24.4~dfsg0/lilv/lilv.h lilv-0.24.6/lilv/lilv.h
--- lilv-0.24.4~dfsg0/lilv/lilv.h	2018-07-21 08:10:18.000000000 +0000
+++ lilv-0.24.6/lilv/lilv.h	2019-10-21 19:16:47.000000000 +0000
@@ -1,5 +1,5 @@
 /*
-  Copyright 2007-2016 David Robillard
+  Copyright 2007-2019 David Robillard
 
   Permission to use, copy, modify, and/or distribute this software for any
   purpose with or without fee is hereby granted, provided that the above
@@ -21,14 +21,15 @@
 #ifndef LILV_LILV_H
 #define LILV_LILV_H
 
+#include "lv2/core/lv2.h"
+#include "lv2/urid/urid.h"
+
 #include <stdarg.h>
+#include <stdbool.h>
 #include <stddef.h>
 #include <stdint.h>
 #include <stdio.h>
 
-#include "lv2/lv2plug.in/ns/lv2core/lv2.h"
-#include "lv2/lv2plug.in/ns/ext/urid/urid.h"
-
 #ifdef LILV_SHARED
 #    ifdef _WIN32
 #        define LILV_LIB_IMPORT __declspec(dllimport)
@@ -45,7 +46,8 @@
 #else
 #    define LILV_API
 #endif
-#if __GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 1)
+#if defined(__GNUC__) && (__GNUC__ > 3 || \
+                          (__GNUC__ == 3 && __GNUC_MINOR__ >= 1))
 #    define LILV_DEPRECATED __attribute__((__deprecated__))
 #else
 #    define LILV_DEPRECATED
@@ -53,8 +55,6 @@
 
 #ifdef __cplusplus
 extern "C" {
-#else
-#    include <stdbool.h>
 #endif
 
 #define LILV_NS_DOAP "http://usefulinc.com/ns/doap#"
@@ -352,7 +352,7 @@
    @endcode
 */
 #define LILV_FOREACH(colltype, iter, collection) \
-	for (LilvIter* (iter) = lilv_ ## colltype ## _begin(collection); \
+	for (LilvIter* iter = lilv_ ## colltype ## _begin(collection); \
 	     !lilv_ ## colltype ## _is_end(collection, iter); \
 	     (iter) = lilv_ ## colltype ## _next(collection, iter))
@@ -531,11 +531,19 @@
 #define LILV_OPTION_DYN_MANIFEST "http://drobilla.net/ns/lilv#dyn-manifest"
 
 /**
+   Set application-specific LV2_PATH.  This overrides the LV2_PATH from the
+   environment, so that lilv will only look inside the given path.  This can be
+   used to make self-contained applications.
+*/
+#define LILV_OPTION_LV2_PATH "http://drobilla.net/ns/lilv#lv2-path"
+
+/**
    Set an option for `world`.
 
    Currently recognized options:
    @ref LILV_OPTION_FILTER_LANG
    @ref LILV_OPTION_DYN_MANIFEST
+   @ref LILV_OPTION_LV2_PATH
 */
 LILV_API void
 lilv_world_set_option(LilvWorld* world,
@@ -918,7 +926,6 @@
 lilv_plugin_get_num_ports_of_class(const LilvPlugin* plugin,
                                    const LilvNode* class_1, ...);
 
-#ifndef SWIG
 /**
    Variant of lilv_plugin_get_num_ports_of_class() that takes a va_list.
 
@@ -928,7 +935,6 @@
 lilv_plugin_get_num_ports_of_class_va(const LilvPlugin* plugin,
                                       const LilvNode* class_1,
                                       va_list args);
-#endif
 
 /**
    Return whether or not the plugin introduces (and reports) latency.
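A minimal sketch of how a self-contained host could use the new LILV_OPTION_LV2_PATH option together with the LILV_FOREACH macro shown above. The bundle directory /opt/myapp/lv2 is a hypothetical placeholder; the option takes a string node, and setting it makes lilv ignore LV2_PATH from the environment entirely:

    #include "lilv/lilv.h"

    #include <stdio.h>

    int
    main(void)
    {
        LilvWorld* world = lilv_world_new();

        /* Search only the application's own bundle directory (hypothetical
           path) instead of the LV2_PATH from the environment. */
        LilvNode* lv2_path = lilv_new_string(world, "/opt/myapp/lv2");
        lilv_world_set_option(world, LILV_OPTION_LV2_PATH, lv2_path);
        lilv_node_free(lv2_path);

        lilv_world_load_all(world);

        /* LILV_FOREACH declares the iterator itself, as in the macro above. */
        const LilvPlugins* plugins = lilv_world_get_all_plugins(world);
        LILV_FOREACH (plugins, i, plugins) {
            const LilvPlugin* plugin = lilv_plugins_get(plugins, i);
            printf("%s\n", lilv_node_as_uri(lilv_plugin_get_uri(plugin)));
        }

        lilv_world_free(world);
        return 0;
    }

Since the option replaces the search path rather than extending it, plugins installed elsewhere on the system are deliberately not discovered.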
@@ -1278,20 +1284,31 @@
 
    @param map The map to use for mapping URIs in state.
 
-   @param file_dir Directory of files created by the plugin earlier (or NULL).
-   This is for hosts that support file creation at any time with
+   @param scratch_dir Directory of files created by the plugin earlier, or
+   NULL.  This is for hosts that support file creation at any time with
    state:makePath.  These files will be copied as necessary to `copy_dir` and
    not be referred to directly in state (a temporary directory is appropriate).
 
-   @param copy_dir Directory of copies of files in `file_dir` (or NULL).  This
-   directory will have the same structure as `file_dir` but with possibly
-   modified file names to distinguish different revisions.  If you only care
-   about saving one state snapshot, it can be the same as `save_dir`.  Plugin
-   state will refer to files in this directory.
+   @param copy_dir Directory of copies of files in `scratch_dir`, or NULL.
+   This directory will have the same structure as `scratch_dir` but with
+   possibly modified file names to distinguish revisions.  This allows the
+   saved state to contain the exact contents of the scratch file at save time,
+   so that the state is not ruined if the file is later modified (for example,
+   by the plugin continuing to record).  This can be the same as `save_dir` to
+   create a copy in the state bundle, but can also be a separate directory
+   which allows multiple state snapshots to share a single copy if the file has
+   not changed.
+
+   @param link_dir Directory of links to external files, or NULL.  A link will
+   be made in this directory to any external files referred to in plugin state.
+   In turn, links will be created in the save directory to these links (e.g.
+   save_dir/file => link_dir/file => /foo/bar/file).  This allows many state
+   snapshots to share a single link to an external file, so archival (e.g. with
+   tar -h) will not create several copies of the file.  If this is not
+   required, it can be the same as `save_dir`.
 
    @param save_dir Directory of files created by plugin during save (or NULL).
-   If the state will be saved, this should be the bundle directory later passed
-   to lilv_state_save().
+   This is typically the bundle directory later passed to lilv_state_save().
 
    @param get_value Function to get port values (or NULL).  If NULL, the
    returned state will not represent port values.  This should only be NULL in
@@ -1299,14 +1316,6 @@
 
    @param user_data User data to pass to `get_value`.
 
-   @param link_dir Directory of links to external files (or NULL).  A link will
-   be made in this directory to any external files referred to in plugin state.
-   In turn, links will be created in the save directory to these links (e.g.
-   save_dir/file => link_dir/file => /foo/bar/file).  This allows many state
-   snapshots to share a single link to an external file, so archival
-   (e.g. with tar -h) will not create several copies of the file.  If this is
-   not required, it can be the same as save_dir.
-
    @param flags Bitwise OR of LV2_State_Flags values.
 
    @param features Features to pass LV2_State_Interface.save().
@@ -1324,12 +1333,12 @@
   saving an instance's state many times while avoiding any duplication of data.
 
   If supported (via state:makePath passed to LV2_Descriptor::instantiate()),
-  `file_dir` should be the directory where any files created by the plugin
+  `scratch_dir` should be the directory where any files created by the plugin
   (not during save time, e.g. during instantiation) are stored.  These files
-  will be copied to preserve their state at this time.
-  Lilv will assume any files within this directory (recursively) are created
-  by the plugin and all other files are immutable.  Note that this function
-  does not save the state, use lilv_state_save() for that.
+  will be copied to preserve their state at this time.  Lilv will assume any
+  files within this directory (recursively) are created by the plugin and all
+  other files are immutable.  Note that this function does not save the
+  state, use lilv_state_save() for that.
 
   See state.h from the LV2 State extension for details on the `flags` and
   `features` parameters.
@@ -1338,7 +1347,7 @@
 lilv_state_new_from_instance(const LilvPlugin* plugin,
                              LilvInstance* instance,
                              LV2_URID_Map* map,
-                             const char* file_dir,
+                             const char* scratch_dir,
                              const char* copy_dir,
                              const char* link_dir,
                              const char* save_dir,
diff -Nru lilv-0.24.4~dfsg0/lilv/lilvmm.hpp lilv-0.24.6/lilv/lilvmm.hpp
--- lilv-0.24.4~dfsg0/lilv/lilvmm.hpp	2018-07-10 19:55:39.000000000 +0000
+++ lilv-0.24.6/lilv/lilvmm.hpp	2019-10-20 23:18:59.000000000 +0000
@@ -1,5 +1,5 @@
 /*
-  Copyright 2007-2014 David Robillard
+  Copyright 2007-2017 David Robillard
 
   Permission to use, copy, modify, and/or distribute this software for any
   purpose with or without fee is hereby granted, provided that the above
@@ -19,20 +19,28 @@
 
 #include "lilv/lilv.h"
 
-#if __GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 1)
-#    define LILV_DEPRECATED __attribute__((__deprecated__))
-#else
-#    define LILV_DEPRECATED
-#endif
-
 namespace Lilv {
 
+#if defined(__clang__)
+#    pragma clang diagnostic push
+#    pragma clang diagnostic ignored "-Wdeprecated-declarations"
+#elif __GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 6)
+#    pragma GCC diagnostic push
+#    pragma GCC diagnostic ignored "-Wdeprecated-declarations"
+#endif
+
 LILV_DEPRECATED
 static inline const char*
 uri_to_path(const char* uri)
 {
 	return lilv_uri_to_path(uri);
 }
 
+#if defined(__clang__)
+#    pragma clang diagnostic pop
+#elif __GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 6)
+#    pragma GCC diagnostic pop
+#endif
+
 #define LILV_WRAP0(RT, prefix, name) \
 	inline RT name() { return lilv_ ## prefix ## _ ## name (me); }
diff -Nru lilv-0.24.4~dfsg0/lilv.pc.in lilv-0.24.6/lilv.pc.in
--- lilv-0.24.4~dfsg0/lilv.pc.in	2018-07-21 08:10:18.000000000 +0000
+++ lilv-0.24.6/lilv.pc.in	2019-11-09 11:29:45.000000000 +0000
@@ -6,6 +6,6 @@
 Name: Lilv
 Version: @LILV_VERSION@
 Description: Simple C library for hosting LV2 plugins
-Requires: lv2 @PKG_serd_0@ @PKG_sord_0@ @PKG_sratom_0@
+Requires: @LILV_PKG_DEPS@
 Libs: -L${libdir} -l@LIB_LILV@ @LILV_PKG_LIBS@
 Cflags: -I${includedir}/lilv-@LILV_MAJOR_VERSION@
diff -Nru lilv-0.24.4~dfsg0/NEWS lilv-0.24.6/NEWS
--- lilv-0.24.4~dfsg0/NEWS	2018-07-22 18:42:40.000000000 +0000
+++ lilv-0.24.6/NEWS	2019-11-10 21:39:44.000000000 +0000
@@ -1,222 +1,236 @@
+lilv (0.24.6) stable;
+
+  * Add more strict error detection when storing plugin state properties
+  * Add option to override LV2_PATH in applications
+  * Don't print errors when saving state if correct links already exist
+  * Fix GCC8 warnings
+  * Fix creating directories across drives on Windows
+  * Fix issues with loading state with saved files from the model
+  * Fix memory errors and Python 3.4+ compatibility in Python bindings
+  * Fix unit tests on Windows
+  * Make Python bindings more Pythonic
+
+ -- David Robillard  Sun, 10 Nov 2019 21:39:37 +0000
+
 lilv (0.24.4) stable;
 
-  * Fix saving state when broken links are
encountered * Don't attempt to load remote or non-Turtle files + * Fix saving state when broken links are encountered + * Gracefully handle plugins with missing binary URIs + * Install Python bindings when configured without tests (thanks Clement + Skau) + * Remove use of deprecated readdir_r * lv2apply: Activate plugin before running * lv2apply: Use default values when they are not nan * lv2bench: Improve support for plugins with sequence ports * lv2bench: Support running a single plugin given on the command line - * Gracefully handle plugins with missing binary URIs - * Remove use of deprecated readdir_r - * Install Python bindings when configured without tests - (thanks Clement Skau) - -- David Robillard Sun, 22 Jul 2018 20:42:00 +0200 + -- David Robillard Sun, 22 Jul 2018 18:42:00 +0000 lilv (0.24.2) stable; - * Fix saving state to paths that contain URI delimiters (#, ?, etc) * Fix comparison of restored states with paths + * Fix saving state to paths that contain URI delimiters (#, ?, etc) - -- David Robillard Wed, 04 Jan 2017 11:48:08 -0500 + -- David Robillard Wed, 04 Jan 2017 16:48:08 +0000 lilv (0.24.0) stable; - * Add new hand-crafted Pythonic bindings with full test coverage - * Add lv2apply utility for applying plugins to audio files + * Add LILV_URI_ATOM_PORT and LILV_URI_CV_PORT defines + * Add lilv_state_set_metadata() for adding state banks/comments/etc (based + on patch from Hanspeter Portner) * Add lilv_world_get_symbol() - * Add lilv_state_set_metadata() for adding state banks/comments/etc - (based on patch from Hanspeter Portner) - * Fix crash when state contains non-POD properties - * Fix crash when NULL predicate is passed to lilv_world_find_nodes() - * Fix state file versioning - * Unload contained resources when bundle is unloaded - * Do not instantiate plugin when data fails to parse - * Support re-loading plugins - * Replace bundles if bundle with newer plugin version is loaded - (based on patch from Robin Gareus) - * Fix loading dyn-manifest from bundles with spaces in their path + * Add lv2apply utility for applying plugins to audio files + * Add new hand-crafted Pythonic bindings with full test coverage * Check lv2:binary predicate for UIs - * Add LILV_URI_ATOM_PORT and LILV_URI_CV_PORT defines + * Do not instantiate plugin when data fails to parse + * Fix crash when NULL predicate is passed to lilv_world_find_nodes() + * Fix crash when state contains non-POD properties * Fix documentation installation + * Fix loading dyn-manifest from bundles with spaces in their path * Fix outdated comment references to lilv_uri_to_path() + * Fix state file versioning + * Replace bundles if bundle with newer plugin version is loaded (based on + patch from Robin Gareus) + * Support re-loading plugins + * Unload contained resources when bundle is unloaded - -- David Robillard Mon, 19 Sep 2016 22:24:57 -0400 + -- David Robillard Tue, 20 Sep 2016 02:24:57 +0000 lilv (0.22.0) stable; - * Fix loading files with spaces in their path * Add lilv_file_uri_parse() for correct URI to path conversion + * Add lilv_free() for systems picky about such things * Add lilv_node_get_path() for convenient file URI path access + * Add lilv_state_delete() for deleting user saved presets * Add lilv_state_emit_port_values() for special port value handling * Add lilv_state_get_uri() - * Add lilv_state_delete() for deleting user saved presets - * Add lilv_free() for systems picky about such things - * Fix lilv_world_ask() to work with wildcards - * Fix creation of duplicate manifest entries when saving 
state - * Fix bindings for Python 3 - * Load discovered owl ontologies as specifications + * Configure based on compiler target OS for cross-compilation * Expose lilv_world_load_specifications() and lilv_world_load_plugin_classes() - * Tolerate passing NULL to lilv_state_restore() - * Preserve absolute paths in state if no link directory is given * Fix a few minor/unlikely memory errors - * Configure based on compiler target OS for cross-compilation - * Fix lilv_realpath() on pre-POSIX-2008 systems + * Fix bindings for Python 3 + * Fix creation of duplicate manifest entries when saving state * Fix directory walking on some systems (thanks Matt Fischer) - * Windows fixes (thanks John Emmas) + * Fix lilv_realpath() on pre-POSIX-2008 systems + * Fix lilv_world_ask() to work with wildcards + * Fix loading files with spaces in their path + * Load discovered owl ontologies as specifications * Minor documentation improvements + * Preserve absolute paths in state if no link directory is given + * Tolerate passing NULL to lilv_state_restore() * Upgrade to waf 1.8.14 + * Windows fixes (thanks John Emmas) - -- David Robillard Thu, 08 Oct 2015 15:39:29 -0400 + -- David Robillard Thu, 08 Oct 2015 19:39:29 +0000 lilv (0.20.0) stable; - * Don't load files multiple times if they are listed as rdfs:seeAlso for - several plugins - * Call lv2_lib_descriptor separately for different bundle paths - (fix loading several dynamic plugins like Ingen at once) - * Tolerate calling lilv_node_as_uri or lilv_node_as_blank on NULL * Add convenient lilv_new_file_uri for creating file URIs - * Fix use of lv2info -m and -p options to write plugin data - (useful for porting plugins bridges with NASPRO) - * Fix issues with lilv_plugin_get_author_name and friends - (thanks Filipe Coelho) - * Improved/working lv2_apply.py to apply plugin to a .wav - (thanks Joe Button) * Add lilv_world_unload_bundle() and lilv_world_unload_resource() + * Call lv2_lib_descriptor separately for different bundle paths (fix loading + several dynamic plugins like Ingen at once) + * Don't load files multiple times if they are listed as rdfs:seeAlso for + several plugins + * Fix issues with lilv_plugin_get_author_name and friends (thanks Filipe + Coelho) * Fix several minor memory leaks + * Fix use of lv2info -m and -p options to write plugin data (useful for + porting plugins bridges with NASPRO) * Improve test coverage + * Improved/working lv2_apply.py to apply plugin to a .wav (thanks Joe + Button) + * Tolerate calling lilv_node_as_uri or lilv_node_as_blank on NULL * Upgrade to waf 1.7.16 - -- David Robillard Fri, 08 Aug 2014 18:21:32 -0400 + -- David Robillard Fri, 08 Aug 2014 22:21:32 +0000 lilv (0.18.0) stable; - * Allow lilv_state_restore() to be used without passing an instance, - for restoring port values via a callback only - * Fix unlikely memory leak in lilv_plugin_instantiate() - * Support denoting latency ports with lv2:designation lv2:latency + * Add lilv_port_get_node() for using world query functions with ports + * Add support for running plugins from Python by Kaspar Emanuel + * Allow lilv_state_restore() to be used without passing an instance, for + restoring port values via a callback only * Allow passing NULL port_class to lilv_plugin_get_port_by_designation * Call GetProcAddress with correct calling convention on Windows - * Add support for running plugins from Python by Kaspar Emanuel * Clean up after test suite so multiple runs are successful - * Add lilv_port_get_node() for using world query functions with ports - * lv2info: 
Don't display invalid control maxes and defaults - (patch from Robin Gareus) + * Fix unlikely memory leak in lilv_plugin_instantiate() + * Support denoting latency ports with lv2:designation lv2:latency * lilvmm.hpp: Add wrappers for UI API + * lv2info: Don't display invalid control maxes and defaults (patch from + Robin Gareus) - -- David Robillard Sat, 04 Jan 2014 16:06:42 -0500 + -- David Robillard Sat, 04 Jan 2014 21:06:42 +0000 lilv (0.16.0) stable; + * Add lilv_nodes_merge() * Add lilv_world_ask() for easily checking if a statement exists * Add lilv_world_get() and lilv_port_get() for easily getting one value - * Add lilv_nodes_merge() - * Make lilv_plugin_get_port_by_designation() return a const pointer - * Require a URI for lilv_state_to_string() and fail gracefully otherwise - * Fail gracefully when lilv_state_new_from_string() is called on NULL - * Make state loading functions fall back to lv2:default for port values, - so a plugin description can be loaded as default state - * Ignore state ports with no value instead of printing an error - * Support atom:supports in lilv_port_supports_event() * Add va_list variant of lilv_plugin_get_num_ports_of_class() - * Fix several plugin functions that failed to load data if called first * Correctly depend on serd at build time (fix compilation in odd cases) * Disable timestamps in HTML documentation for reproducible build - * lilvmm.hpp: Support varargs for Plugin::get_num_ports_of_class() - * lilvmm.hpp: Add several missing methods + * Fail gracefully when lilv_state_new_from_string() is called on NULL + * Fix several plugin functions that failed to load data if called first + * Ignore state ports with no value instead of printing an error + * Make lilv_plugin_get_port_by_designation() return a const pointer + * Make state loading functions fall back to lv2:default for port values, so + a plugin description can be loaded as default state + * Require a URI for lilv_state_to_string() and fail gracefully otherwise + * Support atom:supports in lilv_port_supports_event() * Update to waf 1.7.8 and autowaf r90 (install docs to versioned directory) + * lilvmm.hpp: Add several missing methods + * lilvmm.hpp: Support varargs for Plugin::get_num_ports_of_class() - -- David Robillard Mon, 18 Feb 2013 16:43:10 -0500 + -- David Robillard Mon, 18 Feb 2013 21:43:10 +0000 lilv (0.14.4) stable; + * Add lv2bench utility + * Add option to build utilities as static binaries * Deprecate old flawed Lilv::Instance constructors - * Fix documentation for ui_type parameter of lilv_ui_is_supported() + * Do not require a C++ compiler to build * Fix crash when lv2info is run with an invalid URI argument + * Fix documentation for ui_type parameter of lilv_ui_is_supported() + * Fix various hyper-strict warnings * Gracefully handle failure to save plugin state and print error message + * Make all 'zix' symbols private to avoid symbol clashes in static builds * Reduce memory usage (per node) * Simpler node implementation always backed by a SordNode - * Make all 'zix' symbols private to avoid symbol clashes in static builds - * Add lv2bench utility - * Fix various hyper-strict warnings - * Do not require a C++ compiler to build - * Add option to build utilities as static binaries * Upgrade to waf 1.7.2 - * lilvmm.hpp: Make Lilv::Instance handle features and failed instantiations - * lilvmm.hpp: Add Lilv::Instance::get_handle() * lilvmm.hpp: Add Lilv::Instance::get_extension_data() + * lilvmm.hpp: Add Lilv::Instance::get_handle() + * lilvmm.hpp: Make Lilv::Instance handle 
features and failed instantiations - -- David Robillard Thu, 23 Aug 2012 01:38:29 -0400 + -- David Robillard Thu, 23 Aug 2012 05:38:29 +0000 lilv (0.14.2) stable; * Fix dynmanifest support - -- David Robillard Thu, 19 Apr 2012 16:11:31 -0400 + -- David Robillard Thu, 19 Apr 2012 20:11:31 +0000 lilv (0.14.0) stable; + * Add LilvState API for handling plugin state. This makes it simple to save + and restore plugin state both in memory and on disk, as well as save + presets in a host-sharable way since the disk format is identical to the + LV2 presets format. * Add lilv_plugin_get_extension_data - * Use path variables in pkgconfig files + * Add lilv_plugin_get_port_by_designation() and lilv_port_get_index() as an + improved generic alternative to lilv_plugin_get_latency_port_index(). + * Add lilv_plugin_get_project() and get author information from project if + it is not given directly on the plugin. + * Add lilv_plugin_get_related to get resources related to plugins that are + not directly rdfs:seeAlso linked (e.g. presets) + * Add lilv_world_load_resource for related resources (e.g. presets) + * Fix compilation on BSD + * Fix lilv_world_find_nodes to work with wildcard subjects * Install man page to DATADIR (e.g. PREFIX/share/man, not PREFIX/man) * Make Lilv::uri_to_path static inline (fix linking errors) - * Use correct URI for dcterms:replaces (for hiding old plugins): - "http://purl.org/dc/terms/replaces" - * Fix compilation on BSD * Only load dynmanifest libraries once per bundle, not once per plugin - * Fix lilv_world_find_nodes to work with wildcard subjects - * Add lilv_plugin_get_related to get resources related to plugins that - are not directly rdfs:seeAlso linked (e.g. presets) - * Add lilv_world_load_resource for related resources (e.g. presets) * Print presets in lv2info * Remove locale smashing kludges and use new serd functions for converting nodes to/from numbers. - * Add LilvState API for handling plugin state. This makes it simple to - save and restore plugin state both in memory and on disk, as well as - save presets in a host-sharable way since the disk format is identical - to the LV2 presets format. - * Update old references to lv2_list (now lv2ls) - * Support compilation as C++ under MSVC++. * Remove use of wordexp. - * Add lilv_plugin_get_port_by_designation() and lilv_port_get_index() as an - improved generic alternative to lilv_plugin_get_latency_port_index(). - * Add lilv_plugin_get_project() and get author information from project if - it is not given directly on the plugin. + * Support compilation as C++ under MSVC++. 
+ * Update old references to lv2_list (now lv2ls) + * Use correct URI for dcterms:replaces (for hiding old plugins): + "http://purl.org/dc/terms/replaces" + * Use path variables in pkgconfig files - -- David Robillard Wed, 18 Apr 2012 20:06:28 -0400 + -- David Robillard Thu, 19 Apr 2012 00:06:28 +0000 lilv (0.5.0) stable; - * Remove glib dependency + * Add ability to build static library * Add lv2core as a pkg-config dependency (for lv2.h header include) * Obey prefix when installing bash completion script + * Remove glib dependency * Support integer minimum, maximum, and default port values in lilv_plugin_get_port_ranges_float - * Add ability to build static library - -- David Robillard Thu, 29 Sep 2011 00:00:00 -0400 + -- David Robillard Thu, 29 Sep 2011 04:00:00 +0000 lilv (0.4.4) stable; * Fix building python bindings + * Fix lilv_ui_is_supported when Lilv is built independently + * Fix parsing extra plugin data files in other bundles * Make free methods tolerate being called on NULL * Remove lv2jack (replaced by jalv) - * Fix parsing extra plugin data files in other bundles - * Fix lilv_ui_is_supported when Lilv is built independently - -- David Robillard Sat, 11 Jun 2011 11:20:11 -0400 + -- David Robillard Sat, 11 Jun 2011 15:20:11 +0000 lilv (0.4.2) stable; - * Fix compilation issues on some systems * Fix build system Python 3 compatibility + * Fix compilation issues on some systems - -- David Robillard Wed, 25 May 2011 19:00:00 -0400 + -- David Robillard Wed, 25 May 2011 23:00:00 +0000 lilv (0.4.0) stable; * Initial version (forked from SLV2) - -- David Robillard Tue, 24 May 2011 23:00:00 -0400 + -- David Robillard Wed, 25 May 2011 03:00:00 +0000 diff -Nru lilv-0.24.4~dfsg0/README lilv-0.24.6/README --- lilv-0.24.4~dfsg0/README 2017-01-04 17:29:52.000000000 +0000 +++ lilv-0.24.6/README 1970-01-01 00:00:00.000000000 +0000 @@ -1,8 +0,0 @@ -Lilv -==== - -Lilv is a C library to make the use of LV2 plugins as simple as possible for -applications. -For more information, see . - - -- David Robillard diff -Nru lilv-0.24.4~dfsg0/README.md lilv-0.24.6/README.md --- lilv-0.24.4~dfsg0/README.md 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/README.md 2019-10-19 21:34:09.000000000 +0000 @@ -0,0 +1,8 @@ +Lilv +==== + +Lilv is a C library to make the use of LV2 plugins as simple as possible for +applications. +For more information, see . 
+ + -- David Robillard diff -Nru lilv-0.24.4~dfsg0/src/collections.c lilv-0.24.6/src/collections.c --- lilv-0.24.4~dfsg0/src/collections.c 2018-07-21 08:10:18.000000000 +0000 +++ lilv-0.24.6/src/collections.c 2019-10-20 23:18:59.000000000 +0000 @@ -1,5 +1,5 @@ /* - Copyright 2008-2014 David Robillard + Copyright 2008-2019 David Robillard Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above @@ -16,6 +16,15 @@ #include "lilv_internal.h" +#include "lilv/lilv.h" +#include "sord/sord.h" +#include "zix/common.h" +#include "zix/tree.h" + +#include +#include +#include + int lilv_ptr_cmp(const void* a, const void* b, void* user_data) { diff -Nru lilv-0.24.4~dfsg0/src/instance.c lilv-0.24.6/src/instance.c --- lilv-0.24.4~dfsg0/src/instance.c 2018-07-21 08:10:18.000000000 +0000 +++ lilv-0.24.6/src/instance.c 2019-10-20 23:18:59.000000000 +0000 @@ -1,5 +1,5 @@ /* - Copyright 2007-2016 David Robillard + Copyright 2007-2019 David Robillard Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above @@ -14,12 +14,17 @@ OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. */ +#include "lilv_internal.h" + +#include "lilv/lilv.h" +#include "lv2/core/lv2.h" + +#include +#include #include #include #include -#include "lilv_internal.h" - LILV_API LilvInstance* lilv_plugin_instantiate(const LilvPlugin* plugin, double sample_rate, @@ -42,7 +47,7 @@ LilvLib* lib = lilv_lib_open(plugin->world, lib_uri, bundle_path, features); if (!lib) { - lilv_free(bundle_path); + serd_free(bundle_path); return NULL; } @@ -76,7 +81,7 @@ } free(local_features); - lilv_free(bundle_path); + serd_free(bundle_path); if (result) { if (result->lv2_handle == NULL) { diff -Nru lilv-0.24.4~dfsg0/src/lib.c lilv-0.24.6/src/lib.c --- lilv-0.24.4~dfsg0/src/lib.c 2017-01-04 17:29:52.000000000 +0000 +++ lilv-0.24.6/src/lib.c 2019-10-20 23:18:59.000000000 +0000 @@ -1,5 +1,5 @@ /* - Copyright 2012-2016 David Robillard + Copyright 2012-2019 David Robillard Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above @@ -16,6 +16,13 @@ #include "lilv_internal.h" +#include "lilv/lilv.h" +#include "lv2/core/lv2.h" +#include "zix/tree.h" + +#include +#include + LilvLib* lilv_lib_open(LilvWorld* world, const LilvNode* uri, @@ -33,7 +40,8 @@ } const char* const lib_uri = lilv_node_as_uri(uri); - char* const lib_path = lilv_file_uri_parse(lib_uri, NULL); + char* const lib_path = (char*)serd_file_uri_parse( + (const uint8_t*)lib_uri, NULL); if (!lib_path) { return NULL; } @@ -42,7 +50,7 @@ void* lib = dlopen(lib_path, RTLD_NOW); if (!lib) { LILV_ERRORF("Failed to open library %s (%s)\n", lib_path, dlerror()); - lilv_free(lib_path); + serd_free(lib_path); return NULL; } @@ -58,17 +66,17 @@ if (!desc) { LILV_ERRORF("Call to %s:lv2_lib_descriptor failed\n", lib_path); dlclose(lib); - lilv_free(lib_path); + serd_free(lib_path); return NULL; } } else if (!df) { LILV_ERRORF("No `lv2_descriptor' or `lv2_lib_descriptor' in %s\n", lib_path); dlclose(lib); - lilv_free(lib_path); + serd_free(lib_path); return NULL; } - lilv_free(lib_path); + serd_free(lib_path); LilvLib* llib = (LilvLib*)malloc(sizeof(LilvLib)); llib->world = world; diff -Nru lilv-0.24.4~dfsg0/src/lilv_internal.h lilv-0.24.6/src/lilv_internal.h --- lilv-0.24.4~dfsg0/src/lilv_internal.h 2018-07-21 08:10:18.000000000 +0000 +++ 
lilv-0.24.6/src/lilv_internal.h 2019-10-20 23:18:59.000000000 +0000 @@ -1,5 +1,5 @@ /* - Copyright 2007-2016 David Robillard + Copyright 2007-2019 David Robillard Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above @@ -21,10 +21,17 @@ extern "C" { #endif +#include "lilv_config.h" + +#include "lilv/lilv.h" +#include "serd/serd.h" +#include "sord/sord.h" +#include "zix/tree.h" + +#include #include #include #include -#include #ifdef _WIN32 # include @@ -52,16 +59,8 @@ # include #endif -#include "serd/serd.h" -#include "sord/sord.h" - -#include "zix/tree.h" - -#include "lilv_config.h" -#include "lilv/lilv.h" - #ifdef LILV_DYN_MANIFEST -# include "lv2/lv2plug.in/ns/ext/dynmanifest/dynmanifest.h" +# include "lv2/dynmanifest/dynmanifest.h" #endif /* @@ -147,8 +146,9 @@ }; typedef struct { - bool dyn_manifest; - bool filter_language; + bool dyn_manifest; + bool filter_language; + char* lv2_path; } LilvOptions; struct LilvWorldImpl { @@ -383,7 +383,7 @@ char* lilv_expand(const char* path); char* lilv_dirname(const char* path); int lilv_copy_file(const char* src, const char* dst); -bool lilv_path_exists(const char* path, void* ignored); +bool lilv_path_exists(const char* path, const void* ignored); char* lilv_path_absolute(const char* path); bool lilv_path_is_absolute(const char* path); char* lilv_get_latest_copy(const char* path, const char* copy_path); @@ -398,7 +398,8 @@ char* lilv_find_free_path(const char* in_path, - bool (*exists)(const char*, void*), void* user_data); + bool (*exists)(const char*, const void*), + const void* user_data); void lilv_dir_for_each(const char* path, diff -Nru lilv-0.24.4~dfsg0/src/node.c lilv-0.24.6/src/node.c --- lilv-0.24.4~dfsg0/src/node.c 2018-07-21 08:10:18.000000000 +0000 +++ lilv-0.24.6/src/node.c 2019-10-21 11:53:03.000000000 +0000 @@ -1,5 +1,5 @@ /* - Copyright 2007-2016 David Robillard + Copyright 2007-2019 David Robillard Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above @@ -14,13 +14,19 @@ OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
*/ +#include "lilv_internal.h" + +#include "lilv/lilv.h" +#include "serd/serd.h" +#include "sord/sord.h" + #include +#include +#include #include #include #include -#include "lilv_internal.h" - static void lilv_node_set_numerics_from_string(LilvNode* val) { @@ -277,11 +283,13 @@ break; case LILV_VALUE_INT: node = serd_node_new_integer(value->val.int_val); - result = (char*)node.buf; + result = lilv_strdup((char*)node.buf); + serd_node_free(&node); break; case LILV_VALUE_FLOAT: node = serd_node_new_decimal(value->val.float_val, 8); - result = (char*)node.buf; + result = lilv_strdup((char*)node.buf); + serd_node_free(&node); break; } diff -Nru lilv-0.24.4~dfsg0/src/plugin.c lilv-0.24.6/src/plugin.c --- lilv-0.24.4~dfsg0/src/plugin.c 2018-07-21 08:10:18.000000000 +0000 +++ lilv-0.24.6/src/plugin.c 2019-10-20 23:18:59.000000000 +0000 @@ -1,5 +1,5 @@ /* - Copyright 2007-2016 David Robillard + Copyright 2007-2019 David Robillard Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above @@ -16,19 +16,30 @@ #define __STDC_LIMIT_MACROS -#include +#include "lilv_config.h" +#include "lilv_internal.h" + +#include "lilv/lilv.h" +#include "serd/serd.h" +#include "sord/sord.h" +#include "zix/tree.h" + +#include "lv2/core/lv2.h" +#include "lv2/ui/ui.h" + +#ifdef LILV_DYN_MANIFEST +# include "lv2/dynmanifest/dynmanifest.h" +# include +#endif + #include #include +#include #include #include #include #include -#include "lilv_config.h" -#include "lilv_internal.h" - -#include "lv2/lv2plug.in/ns/extensions/ui/ui.h" - #define NS_DOAP (const uint8_t*)"http://usefulinc.com/ns/doap#" #define NS_FOAF (const uint8_t*)"http://xmlns.com/foaf/0.1/" @@ -720,10 +731,19 @@ LILV_API uint32_t lilv_plugin_get_latency_port_index(const LilvPlugin* plugin) { + LilvNode* lv2_OutputPort = + lilv_new_uri(plugin->world, LV2_CORE__OutputPort); + LilvNode* lv2_latency = + lilv_new_uri(plugin->world, LV2_CORE__latency); + const LilvPort* prop_port = lilv_plugin_get_port_by_property( plugin, plugin->world->uris.lv2_reportsLatency); - const LilvPort* des_port = lilv_plugin_get_port_by_property( - plugin, plugin->world->uris.lv2_latency); + const LilvPort* des_port = lilv_plugin_get_port_by_designation( + plugin, lv2_OutputPort, lv2_latency); + + lilv_node_free(lv2_latency); + lilv_node_free(lv2_OutputPort); + if (prop_port) { return prop_port->index; } else if (des_port) { diff -Nru lilv-0.24.4~dfsg0/src/pluginclass.c lilv-0.24.6/src/pluginclass.c --- lilv-0.24.4~dfsg0/src/pluginclass.c 2018-07-21 08:10:18.000000000 +0000 +++ lilv-0.24.6/src/pluginclass.c 2019-10-20 23:18:59.000000000 +0000 @@ -1,5 +1,5 @@ /* - Copyright 2007-2015 David Robillard + Copyright 2007-2019 David Robillard Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above @@ -14,11 +14,15 @@ OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
*/ -#include -#include - #include "lilv_internal.h" +#include "lilv/lilv.h" +#include "sord/sord.h" +#include "zix/tree.h" + +#include +#include + LilvPluginClass* lilv_plugin_class_new(LilvWorld* world, const SordNode* parent_node, diff -Nru lilv-0.24.4~dfsg0/src/port.c lilv-0.24.6/src/port.c --- lilv-0.24.4~dfsg0/src/port.c 2018-07-21 08:10:18.000000000 +0000 +++ lilv-0.24.6/src/port.c 2019-10-20 23:18:59.000000000 +0000 @@ -1,5 +1,5 @@ /* - Copyright 2007-2014 David Robillard + Copyright 2007-2019 David Robillard Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above @@ -14,16 +14,21 @@ OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. */ +#include "lilv_internal.h" + +#include "lv2/atom/atom.h" +#include "lv2/core/lv2.h" +#include "lv2/event/event.h" + +#include "lilv/lilv.h" +#include "sord/sord.h" +#include "zix/tree.h" + #include -#include +#include +#include #include #include -#include - -#include "lv2/lv2plug.in/ns/ext/atom/atom.h" -#include "lv2/lv2plug.in/ns/ext/event/event.h" - -#include "lilv_internal.h" LilvPort* lilv_port_new(LilvWorld* world, diff -Nru lilv-0.24.4~dfsg0/src/query.c lilv-0.24.6/src/query.c --- lilv-0.24.4~dfsg0/src/query.c 2018-07-21 08:10:18.000000000 +0000 +++ lilv-0.24.6/src/query.c 2019-10-20 23:18:59.000000000 +0000 @@ -1,5 +1,5 @@ /* - Copyright 2007-2015 David Robillard + Copyright 2007-2019 David Robillard Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above @@ -14,12 +14,15 @@ OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. */ -#include +#include "lilv_internal.h" + +#include "lilv/lilv.h" +#include "sord/sord.h" +#include "zix/tree.h" + #include #include -#include "lilv_internal.h" - typedef enum { LILV_LANG_MATCH_NONE, ///< Language does not match at all LILV_LANG_MATCH_PARTIAL, ///< Partial (language, but not country) match diff -Nru lilv-0.24.4~dfsg0/src/scalepoint.c lilv-0.24.6/src/scalepoint.c --- lilv-0.24.4~dfsg0/src/scalepoint.c 2018-07-10 19:55:39.000000000 +0000 +++ lilv-0.24.6/src/scalepoint.c 2019-10-19 21:34:09.000000000 +0000 @@ -1,5 +1,5 @@ /* - Copyright 2007-2014 David Robillard + Copyright 2007-2019 David Robillard Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above @@ -16,6 +16,10 @@ #include "lilv_internal.h" +#include "lilv/lilv.h" + +#include + /** Ownership of value and label is taken */ LilvScalePoint* lilv_scale_point_new(LilvNode* value, LilvNode* label) diff -Nru lilv-0.24.4~dfsg0/src/state.c lilv-0.24.6/src/state.c --- lilv-0.24.4~dfsg0/src/state.c 2018-07-21 08:10:18.000000000 +0000 +++ lilv-0.24.6/src/state.c 2019-10-20 23:18:59.000000000 +0000 @@ -1,5 +1,5 @@ /* - Copyright 2007-2017 David Robillard + Copyright 2007-2019 David Robillard Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above @@ -14,19 +14,29 @@ OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
*/ +#include "lilv_internal.h" + +#include "lilv/lilv.h" +#include "serd/serd.h" +#include "sord/sord.h" +#include "sratom/sratom.h" +#include "zix/tree.h" + +#include "lv2/atom/atom.h" +#include "lv2/atom/forge.h" +#include "lv2/core/lv2.h" +#include "lv2/presets/presets.h" +#include "lv2/state/state.h" +#include "lv2/urid/urid.h" + +#include #include +#include +#include #include +#include #include -#include "lv2/lv2plug.in/ns/ext/atom/atom.h" -#include "lv2/lv2plug.in/ns/ext/atom/forge.h" -#include "lv2/lv2plug.in/ns/ext/presets/presets.h" -#include "lv2/lv2plug.in/ns/ext/state/state.h" - -#include "lilv_config.h" -#include "lilv_internal.h" -#include "sratom/sratom.h" - #define USTR(s) ((const uint8_t*)(s)) typedef struct { @@ -38,10 +48,8 @@ } Property; typedef struct { - char* symbol; ///< Symbol of port - void* value; ///< Value of port - uint32_t size; ///< Size of value - uint32_t type; ///< Type of value (URID) + char* symbol; ///< Symbol of port + LV2_Atom* atom; ///< Value in port } PortValue; typedef struct { @@ -58,7 +66,7 @@ LilvNode* plugin_uri; ///< Plugin URI LilvNode* uri; ///< State/preset URI char* dir; ///< Save directory (if saved) - char* file_dir; ///< Directory for files created by plugin + char* scratch_dir; ///< Directory for files created by plugin char* copy_dir; ///< Directory for snapshots of external files char* link_dir; ///< Directory for links to external files char* label; ///< State/Preset label @@ -115,12 +123,13 @@ if (value) { state->values = (PortValue*)realloc( state->values, (++state->n_values) * sizeof(PortValue)); - pv = &state->values[state->n_values - 1]; - pv->symbol = lilv_strdup(port_symbol); - pv->value = malloc(size); - pv->size = size; - pv->type = type; - memcpy(pv->value, value, size); + + pv = &state->values[state->n_values - 1]; + pv->symbol = lilv_strdup(port_symbol); + pv->atom = (LV2_Atom*)malloc(sizeof(LV2_Atom) + size); + pv->atom->size = size; + pv->atom->type = type; + memcpy(pv->atom + 1, value, size); } return pv; } @@ -162,6 +171,18 @@ prop->flags = flags; } +static const Property* +find_property(const LilvState* const state, const uint32_t key) +{ + const Property search_key = {NULL, 0, key, 0, 0}; + + return (const Property*)bsearch(&search_key, + state->props.props, + state->props.n, + sizeof(Property), + property_cmp); +} + static LV2_State_Status store_callback(LV2_State_Handle handle, uint32_t key, @@ -171,6 +192,15 @@ uint32_t flags) { LilvState* const state = (LilvState*)handle; + + if (!key) { + return LV2_STATE_ERR_UNKNOWN; // TODO: Add status for bad arguments + } + + if (find_property((const LilvState*)handle, key)) { + return LV2_STATE_ERR_UNKNOWN; // TODO: Add status for duplicate keys + } + append_property(state, &state->props, key, value, size, type, flags); return LV2_STATE_SUCCESS; } @@ -182,11 +212,7 @@ uint32_t* type, uint32_t* flags) { - const LilvState* const state = (LilvState*)handle; - const Property search_key = { NULL, 0, key, 0, 0 }; - const Property* const prop = (Property*)bsearch( - &search_key, state->props.props, state->props.n, - sizeof(Property), property_cmp); + const Property* const prop = find_property((const LilvState*)handle, key); if (prop) { *size = prop->size; @@ -198,9 +224,9 @@ } static bool -lilv_state_has_path(const char* path, void* state) +lilv_state_has_path(const char* path, const void* state) { - return lilv_state_rel2abs((LilvState*)state, path) != path; + return lilv_state_rel2abs((const LilvState*)state, path) != path; } static char* @@ -232,9 +258,9 @@ } else if 
(lilv_path_is_child(real_path, state->dir)) { // File in state directory (loaded, or created by plugin during save) path = lilv_path_relative_to(real_path, state->dir); - } else if (lilv_path_is_child(real_path, state->file_dir)) { + } else if (lilv_path_is_child(real_path, state->scratch_dir)) { // File created by plugin earlier - path = lilv_path_relative_to(real_path, state->file_dir); + path = lilv_path_relative_to(real_path, state->scratch_dir); if (state->copy_dir) { int st = lilv_mkdir_p(state->copy_dir); if (st) { @@ -347,7 +373,7 @@ lilv_state_new_from_instance(const LilvPlugin* plugin, LilvInstance* instance, LV2_URID_Map* map, - const char* file_dir, + const char* scratch_dir, const char* copy_dir, const char* link_dir, const char* save_dir, @@ -359,14 +385,14 @@ const LV2_Feature** sfeatures = NULL; LilvWorld* const world = plugin->world; LilvState* const state = (LilvState*)calloc(1, sizeof(LilvState)); - state->plugin_uri = lilv_node_duplicate(lilv_plugin_get_uri(plugin)); - state->abs2rel = zix_tree_new(false, abs_cmp, NULL, path_rel_free); - state->rel2abs = zix_tree_new(false, rel_cmp, NULL, NULL); - state->file_dir = file_dir ? absolute_dir(file_dir) : NULL; - state->copy_dir = copy_dir ? absolute_dir(copy_dir) : NULL; - state->link_dir = link_dir ? absolute_dir(link_dir) : NULL; - state->dir = save_dir ? absolute_dir(save_dir) : NULL; - state->atom_Path = map->map(map->handle, LV2_ATOM__Path); + state->plugin_uri = lilv_node_duplicate(lilv_plugin_get_uri(plugin)); + state->abs2rel = zix_tree_new(false, abs_cmp, NULL, path_rel_free); + state->rel2abs = zix_tree_new(false, rel_cmp, NULL, NULL); + state->scratch_dir = scratch_dir ? absolute_dir(scratch_dir) : NULL; + state->copy_dir = copy_dir ? absolute_dir(copy_dir) : NULL; + state->link_dir = link_dir ? absolute_dir(link_dir) : NULL; + state->dir = save_dir ? 
absolute_dir(save_dir) : NULL; + state->atom_Path = map->map(map->handle, LV2_ATOM__Path); LV2_State_Map_Path pmap = { state, abstract_path, absolute_path }; LV2_Feature pmap_feature = { LV2_STATE__mapPath, &pmap }; @@ -424,8 +450,9 @@ void* user_data) { for (uint32_t i = 0; i < state->n_values; ++i) { - const PortValue* val = &state->values[i]; - set_value(val->symbol, user_data, val->value, val->size, val->type); + const PortValue* value = &state->values[i]; + const LV2_Atom* atom = value->atom; + set_value(value->symbol, user_data, atom + 1, atom->size, atom->type); } } @@ -470,6 +497,16 @@ } } +static void +set_state_dir_from_model(LilvState* state, const SordNode* graph) +{ + if (!state->dir && graph) { + const char* uri = (const char*)sord_node_get_string(graph); + state->dir = lilv_file_uri_parse(uri, NULL); + } + assert(!state->dir || lilv_path_is_absolute(state->dir)); +} + static LilvState* new_state_from_model(LilvWorld* world, LV2_URID_Map* map, @@ -494,9 +531,7 @@ const SordNode* object = sord_iter_get_node(i, SORD_OBJECT); const SordNode* graph = sord_iter_get_node(i, SORD_GRAPH); state->plugin_uri = lilv_node_new_from_node(world, object); - if (!state->dir && graph) { - state->dir = lilv_strdup((const char*)sord_node_get_string(graph)); - } + set_state_dir_from_model(state, graph); sord_iter_free(i); } else if (sord_ask(model, node, @@ -515,9 +550,7 @@ const SordNode* object = sord_iter_get_node(i, SORD_OBJECT); const SordNode* graph = sord_iter_get_node(i, SORD_GRAPH); state->label = lilv_strdup((const char*)sord_node_get_string(object)); - if (!state->dir && graph) { - state->dir = lilv_strdup((const char*)sord_node_get_string(graph)); - } + set_state_dir_from_model(state, graph); sord_iter_free(i); } @@ -602,7 +635,7 @@ sord_node_free(world->world, state_node); sord_node_free(world->world, statep); - free((void*)chunk.buf); + serd_free((void*)chunk.buf); sratom_free(sratom); if (state->props.props) { @@ -872,7 +905,7 @@ } static bool -link_exists(const char* path, void* data) +link_exists(const char* path, const void* data) { const char* target = (const char*)data; if (!lilv_path_exists(path, NULL)) { @@ -884,6 +917,12 @@ return !matches; } +static int +maybe_symlink(const char* oldpath, const char* newpath) +{ + return link_exists(newpath, oldpath) ? 0 : lilv_symlink(oldpath, newpath); +} + static void write_property_array(const LilvState* state, const PropertyArray* array, @@ -988,7 +1027,7 @@ // _:symbol pset:value value p = serd_node_from_string(SERD_URI, USTR(LV2_PRESETS__value)); sratom_write(sratom, unmap, SERD_ANON_CONT, &port, &p, - value->type, value->size, value->value); + value->atom->type, value->atom->size, value->atom + 1); serd_writer_end_anon(writer, &port); } @@ -1026,7 +1065,7 @@ && strcmp(state->copy_dir, dir)) { // Link directly to snapshot in the copy directory char* target = lilv_path_relative_to(pm->abs, dir); - lilv_symlink(target, path); + maybe_symlink(target, path); free(target); } else if (!lilv_path_is_child(pm->abs, dir)) { const char* link_dir = state->link_dir ? 
state->link_dir : dir; @@ -1034,7 +1073,7 @@ if (!strcmp(dir, link_dir)) { // Link directory is save directory, make link at exact path remove(pat); - lilv_symlink(pm->abs, pat); + maybe_symlink(pm->abs, pat); } else { // Make a link in the link directory to external file char* lpath = lilv_find_free_path(pat, link_exists, pm->abs); @@ -1044,7 +1083,7 @@ // Make a link in the save directory to the external link char* target = lilv_path_relative_to(lpath, dir); - lilv_symlink(target, path); + maybe_symlink(target, path); free(target); free(lpath); } @@ -1089,10 +1128,9 @@ world, map, unmap, state, ttl, (const char*)node.buf, dir); // Set saved dir and uri (FIXME: const violation) - SerdNode dir_uri = serd_node_new_file_uri(USTR(abs_dir), NULL, NULL, true); free(state->dir); lilv_node_free(state->uri); - ((LilvState*)state)->dir = (char*)dir_uri.buf; + ((LilvState*)state)->dir = lilv_strdup(abs_dir); ((LilvState*)state)->uri = lilv_new_uri(world, (const char*)node.buf); serd_node_free(&file); @@ -1132,7 +1170,18 @@ serd_writer_free(writer); serd_env_free(env); - return (char*)serd_chunk_sink_finish(&chunk); + char* str = (char*)serd_chunk_sink_finish(&chunk); + char* result = lilv_strdup(str); + serd_free(str); + return result; +} + +static void +try_unlink(const char* path) +{ + if (unlink(path)) { + LILV_ERRORF("Failed to remove %s (%s)\n", path, strerror(errno)); + } } LILV_API int @@ -1144,7 +1193,7 @@ return -1; } - LilvNode* bundle = lilv_new_uri(world, state->dir); + LilvNode* bundle = lilv_new_file_uri(world, NULL, state->dir); LilvNode* manifest = lilv_world_get_manifest_uri(world, bundle); char* manifest_path = lilv_node_get_path(manifest, NULL); SordModel* model = sord_new(world->world, SORD_SPO, false); @@ -1162,12 +1211,10 @@ model, state->uri->node, world->uris.rdfs_seeAlso, NULL, NULL); if (file) { // Remove state file - char* path = lilv_file_uri_parse( - (const char*)sord_node_get_string(file), NULL); - if (unlink(path)) { - LILV_ERRORF("Failed to remove %s (%s)\n", path, strerror(errno)); - } - lilv_free(path); + char* path = + (char*)serd_file_uri_parse(sord_node_get_string(file), NULL); + try_unlink(path); + serd_free(path); } // Remove any existing manifest entries for this state @@ -1181,16 +1228,22 @@ if (sord_num_quads(model) == 0) { // Manifest is empty, attempt to remove bundle entirely - if (unlink(manifest_path)) { - LILV_ERRORF("Failed to remove %s (%s)\n", - manifest_path, strerror(errno)); + try_unlink(manifest_path); + + // Remove all known files from state bundle + for (ZixTreeIter* i = zix_tree_begin(state->abs2rel); + i != zix_tree_end(state->abs2rel); + i = zix_tree_iter_next(i)) { + const PathMap* pm = (const PathMap*)zix_tree_get(i); + char* path = lilv_path_join(state->dir, pm->rel); + try_unlink(path); + free(path); } - char* dir_path = lilv_file_uri_parse(state->dir, NULL); - if (rmdir(dir_path)) { - LILV_ERRORF("Failed to remove %s (%s)\n", - dir_path, strerror(errno)); + + if (rmdir(state->dir)) { + LILV_ERRORF("Failed to remove directory %s (%s)\n", + state->dir, strerror(errno)); } - lilv_free(dir_path); } else { // Still something in the manifest, reload bundle lilv_world_load_bundle(world, bundle); @@ -1224,7 +1277,7 @@ free_property_array(state, &state->props); free_property_array(state, &state->metadata); for (uint32_t i = 0; i < state->n_values; ++i) { - free(state->values[i].value); + free(state->values[i].atom); free(state->values[i].symbol); } lilv_node_free(state->plugin_uri); @@ -1234,7 +1287,7 @@ free(state->values); free(state->label); 
free(state->dir); - free(state->file_dir); + free(state->scratch_dir); free(state->copy_dir); free(state->link_dir); free(state); @@ -1256,9 +1309,10 @@ for (uint32_t i = 0; i < a->n_values; ++i) { PortValue* const av = &a->values[i]; PortValue* const bv = &b->values[i]; - if (av->size != bv->size || av->type != bv->type - || strcmp(av->symbol, bv->symbol) - || memcmp(av->value, bv->value, av->size)) { + if (av->atom->size != bv->atom->size || + av->atom->type != bv->atom->type || + strcmp(av->symbol, bv->symbol) || + memcmp(av->atom + 1, bv->atom + 1, av->atom->size)) { return false; } } diff -Nru lilv-0.24.4~dfsg0/src/ui.c lilv-0.24.6/src/ui.c --- lilv-0.24.4~dfsg0/src/ui.c 2018-07-10 19:55:39.000000000 +0000 +++ lilv-0.24.6/src/ui.c 2019-10-19 21:34:09.000000000 +0000 @@ -1,5 +1,5 @@ /* - Copyright 2007-2014 David Robillard + Copyright 2007-2019 David Robillard Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above @@ -14,12 +14,16 @@ OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. */ +#include "lilv_internal.h" + +#include "lilv/lilv.h" +#include "zix/tree.h" + #include +#include #include #include -#include "lilv_internal.h" - LilvUI* lilv_ui_new(LilvWorld* world, LilvNode* uri, diff -Nru lilv-0.24.4~dfsg0/src/util.c lilv-0.24.6/src/util.c --- lilv-0.24.4~dfsg0/src/util.c 2018-07-21 08:10:18.000000000 +0000 +++ lilv-0.24.6/src/util.c 2019-10-21 11:53:03.000000000 +0000 @@ -1,5 +1,5 @@ /* - Copyright 2007-2016 David Robillard + Copyright 2007-2019 David Robillard Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above @@ -22,13 +22,11 @@ # define _DARWIN_C_SOURCE 1 /* for flock */ #endif -#include -#include -#include -#include -#include -#include -#include +#include "lilv_config.h" +#include "lilv_internal.h" + +#include "lilv/lilv.h" +#include "serd/serd.h" #ifdef _WIN32 #ifndef _WIN32_WINNT @@ -39,9 +37,12 @@ # include # define F_OK 0 # define mkdir(path, flags) _mkdir(path) -# if defined(_MSC_VER) && _MSC_VER <= 1400 +# if (defined(_MSC_VER) && _MSC_VER <= 1400) || defined(__MINGW64__) || defined(__MINGW32__) /** Implement 'CreateSymbolicLink()' for MSVC 8 or earlier */ -extern "C" BOOLEAN WINAPI +#ifdef __cplusplus +extern "C" +#endif +static BOOLEAN WINAPI CreateSymbolicLink(LPCTSTR linkpath, LPCTSTR targetpath, DWORD flags) { typedef BOOLEAN (WINAPI* PFUNC)(LPCTSTR, LPCTSTR, DWORD); @@ -53,19 +54,26 @@ # endif #else # include -# include # include #endif -#include -#include - -#include "lilv_internal.h" - #if defined(HAVE_FLOCK) && defined(HAVE_FILENO) # include #endif +#include +#include + +#include +#include +#include +#include +#include +#include +#include +#include +#include + #ifndef PAGE_SIZE # define PAGE_SIZE 4096 #endif @@ -172,6 +180,8 @@ return lang; } +#ifndef _WIN32 + /** Append suffix to dst, update dst_len, and return the realloc'd result. */ static char* strappend(char* dst, size_t* dst_len, const char* suffix, size_t suffix_len) @@ -196,6 +206,8 @@ } } +#endif + /** Expand variables (e.g. POSIX ~ or $FOO, Windows %FOO%) in `path`. 
*/ char* lilv_expand(const char* path) @@ -270,7 +282,7 @@ } bool -lilv_path_exists(const char* path, void* ignored) +lilv_path_exists(const char* path, const void* ignored) { #ifdef HAVE_LSTAT struct stat st; @@ -282,7 +294,8 @@ char* lilv_find_free_path(const char* in_path, - bool (*exists)(const char*, void*), void* user_data) + bool (*exists)(const char*, const void*), + const void* user_data) { const size_t in_path_len = strlen(in_path); char* path = (char*)malloc(in_path_len + 7); @@ -333,6 +346,13 @@ return st; } +static inline bool +is_windows_path(const char* path) +{ + return (isalpha(path[0]) && (path[1] == ':' || path[1] == '|') && + (path[2] == '/' || path[2] == '\\')); +} + bool lilv_path_is_absolute(const char* path) { @@ -341,7 +361,7 @@ } #ifdef _WIN32 - if (isalpha(path[0]) && path[1] == ':' && lilv_is_dir_sep(path[2])) { + if (is_windows_path(path)) { return true; } #endif @@ -443,19 +463,9 @@ char* out = (char*)malloc(MAX_PATH); GetFullPathName(path, MAX_PATH, out, NULL); return out; -#elif _POSIX_VERSION >= 200809L +#else char* real_path = realpath(path, NULL); return real_path ? real_path : lilv_strdup(path); -#else - // OSX <= 10.5, if anyone cares. I sure don't. - char* out = (char*)malloc(PATH_MAX); - char* real_path = realpath(path, out); - if (!real_path) { - free(out); - return lilv_strdup(path); - } else { - return real_path; - } #endif } @@ -572,14 +582,23 @@ { char* path = lilv_strdup(dir_path); const size_t path_len = strlen(path); - for (size_t i = 1; i <= path_len; ++i) { - if (path[i] == LILV_DIR_SEP[0] || path[i] == '\0') { + size_t i = 1; + +#ifdef _WIN32 + if (is_windows_path(dir_path)) { + i = 3; + } +#endif + + for (; i <= path_len; ++i) { + const char c = path[i]; + if (c == LILV_DIR_SEP[0] || c == '/' || c == '\0') { path[i] = '\0'; if (mkdir(path, 0755) && errno != EEXIST) { free(path); return errno; } - path[i] = LILV_DIR_SEP[0]; + path[i] = c; } } diff -Nru lilv-0.24.4~dfsg0/src/world.c lilv-0.24.6/src/world.c --- lilv-0.24.4~dfsg0/src/world.c 2018-07-21 08:10:18.000000000 +0000 +++ lilv-0.24.6/src/world.c 2019-11-10 14:26:13.000000000 +0000 @@ -1,5 +1,5 @@ /* - Copyright 2007-2016 David Robillard + Copyright 2007-2019 David Robillard Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above @@ -14,14 +14,29 @@ OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
*/ +#include "lilv_config.h" +#include "lilv_internal.h" + +#include "lilv/lilv.h" +#include "serd/serd.h" +#include "sord/sord.h" +#include "zix/common.h" +#include "zix/tree.h" + +#include "lv2/core/lv2.h" +#include "lv2/presets/presets.h" + +#ifdef LILV_DYN_MANIFEST +# include "lv2/dynmanifest/dynmanifest.h" +# include +#endif + #include -#include +#include #include #include - -#include "lv2/lv2plug.in/ns/ext/presets/presets.h" - -#include "lilv_internal.h" +#include +#include static int lilv_world_drop_graph(LilvWorld* world, const SordNode* graph); @@ -29,7 +44,7 @@ LILV_API LilvWorld* lilv_world_new(void) { - LilvWorld* world = (LilvWorld*)malloc(sizeof(LilvWorld)); + LilvWorld* world = (LilvWorld*)calloc(1, sizeof(LilvWorld)); world->world = sord_world_new(); if (!world->world) { @@ -163,6 +178,7 @@ sord_world_free(world->world); world->world = NULL; + free(world->opt.lv2_path); free(world); } @@ -181,6 +197,11 @@ world->opt.filter_language = lilv_node_as_bool(value); return; } + } else if (!strcmp(uri, LILV_OPTION_LV2_PATH)) { + if (lilv_node_is_string(value)) { + world->opt.lv2_path = lilv_strdup(lilv_node_as_string(value)); + return; + } } LILV_WARNF("Unrecognized or invalid option `%s'\n", uri); } @@ -556,7 +577,7 @@ continue; } - LilvDynManifest* desc = malloc(sizeof(LilvDynManifest)); + LilvDynManifest* desc = (LilvDynManifest*)malloc(sizeof(LilvDynManifest)); desc->bundle = lilv_node_new_from_node(world, bundle_node); desc->lib = lib; desc->handle = handle; @@ -1016,7 +1037,10 @@ LILV_API void lilv_world_load_all(LilvWorld* world) { - const char* lv2_path = getenv("LV2_PATH"); + const char* lv2_path = world->opt.lv2_path; + if (!lv2_path) { + lv2_path = getenv("LV2_PATH"); + } if (!lv2_path) { lv2_path = LILV_DEFAULT_LV2_PATH; } diff -Nru lilv-0.24.4~dfsg0/src/zix/tree.c lilv-0.24.6/src/zix/tree.c --- lilv-0.24.4~dfsg0/src/zix/tree.c 2016-09-19 01:02:26.000000000 +0000 +++ lilv-0.24.6/src/zix/tree.c 2019-10-19 21:34:09.000000000 +0000 @@ -1,5 +1,5 @@ /* - Copyright 2011-2014 David Robillard + Copyright 2011-2019 David Robillard Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above @@ -14,15 +14,14 @@ OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
*/ +#include "zix/common.h" +#include "zix/tree.h" + #include #include -#include #include #include -#include "zix/common.h" -#include "zix/tree.h" - typedef struct ZixTreeNodeImpl ZixTreeNode; struct ZixTreeImpl { diff -Nru lilv-0.24.4~dfsg0/src/zix/tree.h lilv-0.24.6/src/zix/tree.h --- lilv-0.24.4~dfsg0/src/zix/tree.h 2016-09-19 01:02:26.000000000 +0000 +++ lilv-0.24.6/src/zix/tree.h 2019-10-19 21:34:09.000000000 +0000 @@ -1,5 +1,5 @@ /* - Copyright 2011-2014 David Robillard + Copyright 2011-2019 David Robillard Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above @@ -17,10 +17,11 @@ #ifndef ZIX_TREE_H #define ZIX_TREE_H -#include - #include "zix/common.h" +#include +#include + #ifdef __cplusplus extern "C" { #endif diff -Nru lilv-0.24.4~dfsg0/test/bad_syntax.lv2/bad_syntax.c lilv-0.24.6/test/bad_syntax.lv2/bad_syntax.c --- lilv-0.24.4~dfsg0/test/bad_syntax.lv2/bad_syntax.c 2017-01-04 17:29:52.000000000 +0000 +++ lilv-0.24.6/test/bad_syntax.lv2/bad_syntax.c 2019-10-19 21:34:09.000000000 +0000 @@ -1,6 +1,6 @@ /* Lilv Test Plugin - Bad syntax in plugin data file - Copyright 2011-2016 David Robillard + Copyright 2011-2019 David Robillard Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above @@ -15,9 +15,10 @@ OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. */ -#include +#include "lv2/core/lv2.h" -#include "lv2/lv2plug.in/ns/lv2core/lv2.h" +#include +#include #define PLUGIN_URI "http://example.org/bad-syntax" diff -Nru lilv-0.24.4~dfsg0/test/bad_syntax.lv2/test_bad_syntax.c lilv-0.24.6/test/bad_syntax.lv2/test_bad_syntax.c --- lilv-0.24.4~dfsg0/test/bad_syntax.lv2/test_bad_syntax.c 2018-07-21 08:10:18.000000000 +0000 +++ lilv-0.24.6/test/bad_syntax.lv2/test_bad_syntax.c 2019-10-20 23:18:59.000000000 +0000 @@ -1,6 +1,13 @@ -#include "lilv/lilv.h" #include "../src/lilv_internal.h" +#include "serd/serd.h" +#include "lilv/lilv.h" + +#include +#include +#include +#include + #define PLUGIN_URI "http://example.org/bad-syntax" #define TEST_ASSERT(check) do {\ diff -Nru lilv-0.24.4~dfsg0/test/core.lv2/lv2core.ttl lilv-0.24.6/test/core.lv2/lv2core.ttl --- lilv-0.24.4~dfsg0/test/core.lv2/lv2core.ttl 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/test/core.lv2/lv2core.ttl 2019-10-19 21:34:09.000000000 +0000 @@ -0,0 +1,30 @@ +@prefix lv2: . +@prefix owl: . +@prefix rdfs: . + + + a owl:Ontology . + +lv2:PluginBase + a rdfs:Class , + owl:Class ; + rdfs:label "Plugin Base" . + +lv2:Plugin + a rdfs:Class , + owl:Class ; + rdfs:subClassOf lv2:PluginBase ; + rdfs:label "Plugin" . + +lv2:DynamicsPlugin + a rdfs:Class , + owl:Class ; + rdfs:subClassOf lv2:Plugin ; + rdfs:label "Dynamics" . + +lv2:CompressorPlugin + a rdfs:Class , + owl:Class ; + rdfs:subClassOf lv2:DynamicsPlugin ; + rdfs:label "Compressor" . + diff -Nru lilv-0.24.4~dfsg0/test/core.lv2/manifest.ttl lilv-0.24.6/test/core.lv2/manifest.ttl --- lilv-0.24.4~dfsg0/test/core.lv2/manifest.ttl 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/test/core.lv2/manifest.ttl 2019-10-19 21:34:09.000000000 +0000 @@ -0,0 +1,9 @@ +@prefix doap: . +@prefix lv2: . +@prefix rdfs: . + + + a lv2:Specification ; + lv2:minorVersion 16 ; + lv2:microVersion 0 ; + rdfs:seeAlso . 
diff -Nru lilv-0.24.4~dfsg0/test/failed_instantiation.lv2/failed_instantiation.c lilv-0.24.6/test/failed_instantiation.lv2/failed_instantiation.c --- lilv-0.24.4~dfsg0/test/failed_instantiation.lv2/failed_instantiation.c 2017-01-04 17:29:52.000000000 +0000 +++ lilv-0.24.6/test/failed_instantiation.lv2/failed_instantiation.c 2019-10-19 21:34:09.000000000 +0000 @@ -1,6 +1,6 @@ /* Lilv Test Plugin - Failed instantiation - Copyright 2011-2016 David Robillard + Copyright 2011-2019 David Robillard Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above @@ -15,9 +15,10 @@ OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. */ -#include +#include "lv2/core/lv2.h" -#include "lv2/lv2plug.in/ns/lv2core/lv2.h" +#include +#include #define PLUGIN_URI "http://example.org/failed-instantiation" diff -Nru lilv-0.24.4~dfsg0/test/failed_instantiation.lv2/test_failed_instantiation.c lilv-0.24.6/test/failed_instantiation.lv2/test_failed_instantiation.c --- lilv-0.24.4~dfsg0/test/failed_instantiation.lv2/test_failed_instantiation.c 2018-07-21 08:10:18.000000000 +0000 +++ lilv-0.24.6/test/failed_instantiation.lv2/test_failed_instantiation.c 2019-10-20 23:18:59.000000000 +0000 @@ -1,6 +1,13 @@ -#include "lilv/lilv.h" #include "../src/lilv_internal.h" +#include "lilv/lilv.h" +#include "serd/serd.h" + +#include +#include +#include +#include + #define PLUGIN_URI "http://example.org/failed-instantiation" #define TEST_ASSERT(check) do {\ diff -Nru lilv-0.24.4~dfsg0/test/failed_lib_descriptor.lv2/failed_lib_descriptor.c lilv-0.24.6/test/failed_lib_descriptor.lv2/failed_lib_descriptor.c --- lilv-0.24.4~dfsg0/test/failed_lib_descriptor.lv2/failed_lib_descriptor.c 2016-09-19 01:02:26.000000000 +0000 +++ lilv-0.24.6/test/failed_lib_descriptor.lv2/failed_lib_descriptor.c 2019-10-19 21:34:09.000000000 +0000 @@ -1,6 +1,6 @@ /* Lilv Test Plugin - Failed lib descriptor - Copyright 2011-2015 David Robillard + Copyright 2011-2019 David Robillard Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above @@ -15,9 +15,9 @@ OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
*/ -#include +#include "lv2/core/lv2.h" -#include "lv2/lv2plug.in/ns/lv2core/lv2.h" +#include #define PLUGIN_URI "http://example.org/failed-lib-descriptor" diff -Nru lilv-0.24.4~dfsg0/test/failed_lib_descriptor.lv2/test_failed_lib_descriptor.c lilv-0.24.6/test/failed_lib_descriptor.lv2/test_failed_lib_descriptor.c --- lilv-0.24.4~dfsg0/test/failed_lib_descriptor.lv2/test_failed_lib_descriptor.c 2018-07-21 08:10:18.000000000 +0000 +++ lilv-0.24.6/test/failed_lib_descriptor.lv2/test_failed_lib_descriptor.c 2019-10-20 23:18:59.000000000 +0000 @@ -1,6 +1,13 @@ -#include "lilv/lilv.h" #include "../src/lilv_internal.h" +#include "serd/serd.h" +#include "lilv/lilv.h" + +#include +#include +#include +#include + #define PLUGIN_URI "http://example.org/failed-lib-descriptor" #define TEST_ASSERT(check) do {\ diff -Nru lilv-0.24.4~dfsg0/test/lib_descriptor.lv2/lib_descriptor.c lilv-0.24.6/test/lib_descriptor.lv2/lib_descriptor.c --- lilv-0.24.4~dfsg0/test/lib_descriptor.lv2/lib_descriptor.c 2016-09-19 01:02:26.000000000 +0000 +++ lilv-0.24.6/test/lib_descriptor.lv2/lib_descriptor.c 2019-10-19 21:34:09.000000000 +0000 @@ -1,6 +1,6 @@ /* Lilv Test Plugin - Missing descriptor - Copyright 2011-2015 David Robillard + Copyright 2011-2019 David Robillard Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above @@ -15,9 +15,10 @@ OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. */ -#include +#include "lv2/core/lv2.h" -#include "lv2/lv2plug.in/ns/lv2core/lv2.h" +#include +#include #define PLUGIN_URI "http://example.org/lib-descriptor" diff -Nru lilv-0.24.4~dfsg0/test/lib_descriptor.lv2/test_lib_descriptor.c lilv-0.24.6/test/lib_descriptor.lv2/test_lib_descriptor.c --- lilv-0.24.4~dfsg0/test/lib_descriptor.lv2/test_lib_descriptor.c 2018-07-21 08:10:18.000000000 +0000 +++ lilv-0.24.6/test/lib_descriptor.lv2/test_lib_descriptor.c 2019-10-20 23:18:59.000000000 +0000 @@ -1,6 +1,13 @@ -#include "lilv/lilv.h" #include "../src/lilv_internal.h" +#include "serd/serd.h" +#include "lilv/lilv.h" + +#include +#include +#include +#include + #define PLUGIN_URI "http://example.org/lib-descriptor" #define TEST_ASSERT(check) do {\ diff -Nru lilv-0.24.4~dfsg0/test/lilv_cxx_test.cpp lilv-0.24.6/test/lilv_cxx_test.cpp --- lilv-0.24.4~dfsg0/test/lilv_cxx_test.cpp 2018-07-10 19:55:39.000000000 +0000 +++ lilv-0.24.6/test/lilv_cxx_test.cpp 2019-10-19 21:34:09.000000000 +0000 @@ -1,5 +1,5 @@ /* - Copyright 2017 David Robillard + Copyright 2017-2019 David Robillard Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above @@ -19,5 +19,7 @@ int main() { + Lilv::World world; + return 0; } diff -Nru lilv-0.24.4~dfsg0/test/lilv_test.c lilv-0.24.6/test/lilv_test.c --- lilv-0.24.4~dfsg0/test/lilv_test.c 2018-07-21 08:10:18.000000000 +0000 +++ lilv-0.24.6/test/lilv_test.c 2019-10-20 23:18:59.000000000 +0000 @@ -1,5 +1,5 @@ /* - Copyright 2007-2016 David Robillard + Copyright 2007-2019 David Robillard Copyright 2008 Krzysztof Foltman Permission to use, copy, modify, and/or distribute this software for any @@ -18,16 +18,7 @@ #define _POSIX_C_SOURCE 200809L /* for setenv */ #define _XOPEN_SOURCE 600 /* for mkstemp */ -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include +#include "../src/lilv_internal.h" #ifdef _WIN32 # include @@ -37,16 +28,27 @@ # define unsetenv(n) SetEnvironmentVariable((n), NULL) # define 
mkstemp(pat) _mktemp(pat) #else -# include # include #endif #include "lilv/lilv.h" -#include "../src/lilv_internal.h" +#include "lv2/core/lv2.h" +#include "lv2/presets/presets.h" +#include "lv2/state/state.h" +#include "lv2/urid/urid.h" +#include "serd/serd.h" -#include "lv2/lv2plug.in/ns/ext/presets/presets.h" -#include "lv2/lv2plug.in/ns/ext/state/state.h" -#include "lv2/lv2plug.in/ns/ext/urid/urid.h" +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include #define TEST_PATH_MAX 1024 @@ -58,10 +60,10 @@ # define SHLIB_EXT ".so" #endif -static char bundle_dir_name[TEST_PATH_MAX]; -static char bundle_dir_uri[TEST_PATH_MAX]; -static char manifest_name[TEST_PATH_MAX]; -static char content_name[TEST_PATH_MAX]; +static char test_bundle_path[TEST_PATH_MAX + sizeof("/.lv2/lilv-test.lv2")]; +static char test_bundle_uri[sizeof(test_bundle_path) + sizeof("file:///")]; +static char test_manifest_path[sizeof(test_bundle_path) + sizeof("/manifest.ttl")]; +static char test_content_path[sizeof(test_bundle_path) + sizeof("plugin.ttl")]; static LilvWorld* world; @@ -71,32 +73,44 @@ static void delete_bundle(void) { - unlink(content_name); - unlink(manifest_name); - remove(bundle_dir_name); + unlink(test_content_path); + unlink(test_manifest_path); + remove(test_bundle_path); } static void init_tests(void) { - snprintf(bundle_dir_name, TEST_PATH_MAX, "%s/.lv2/lilv-test.lv2", - getenv("HOME")); - lilv_mkdir_p(bundle_dir_name); - - snprintf(bundle_dir_uri, TEST_PATH_MAX, "file://%s/", bundle_dir_name); - snprintf(manifest_name, TEST_PATH_MAX, "%s/manifest.ttl", bundle_dir_name); - snprintf(content_name, TEST_PATH_MAX, "%s/plugin.ttl", bundle_dir_name); + char* test_path = lilv_realpath(LILV_TEST_DIR); + + snprintf(test_bundle_path, sizeof(test_bundle_path), + "%s/test_lv2_path/lilv-test.lv2", test_path); + lilv_mkdir_p(test_bundle_path); + + SerdNode s = serd_node_new_file_uri( + (const uint8_t*)test_bundle_path, NULL, NULL, true); + + snprintf(test_bundle_uri, sizeof(test_bundle_uri), "%s/", + (const char*)s.buf); + snprintf(test_manifest_path, sizeof(test_manifest_path), "%s/manifest.ttl", + test_bundle_path); + snprintf(test_content_path, sizeof(test_content_path), "%s/plugin.ttl", + test_bundle_path); + + serd_node_free(&s); + lilv_free(test_path); delete_bundle(); } static void -fatal_error(const char* err, const char* arg) +fatal_error(const char* fmt, ...) 
{ - /* TODO: possibly change to vfprintf later */ - fprintf(stderr, err, arg); - /* IMHO, the bundle should be left in place after an error, for possible investigation */ - /* delete_bundle(); */ + va_list args; + va_start(args, fmt); + fprintf(stderr, "error: "); + vfprintf(stderr, fmt, args); + va_end(args); exit(1); } @@ -106,7 +120,8 @@ FILE* f = fopen(name, "w"); size_t len = strlen(content); if (fwrite(content, 1, len, f) != len) { - fatal_error("Cannot write file %s\n", name); + fatal_error("Failed to write to file '%s' (%s)\n", + name, strerror(errno)); } fclose(f); } @@ -115,6 +130,16 @@ init_world(void) { world = lilv_world_new(); + + // Set custom LV2_PATH in build directory to only use test data + char* test_path = lilv_realpath(LILV_TEST_DIR); + char* lv2_path = lilv_strjoin(test_path, "/test_lv2_path", NULL); + LilvNode* path = lilv_new_string(world, lv2_path); + lilv_world_set_option(world, LILV_OPTION_LV2_PATH, path); + free(lv2_path); + free(test_path); + lilv_node_free(path); + return world != NULL; } @@ -131,11 +156,12 @@ static void create_bundle(const char* manifest, const char* content) { - if (mkdir(bundle_dir_name, 0700) && errno != EEXIST) { - fatal_error("Cannot create directory %s\n", bundle_dir_name); + if (mkdir(test_bundle_path, 0700) && errno != EEXIST) { + fatal_error("Failed to create directory '%s' (%s)\n", + test_bundle_path, strerror(errno)); } - write_file(manifest_name, manifest); - write_file(content_name, content); + write_file(test_manifest_path, manifest); + write_file(test_content_path, content); } static int @@ -160,6 +186,21 @@ delete_bundle(); } +static void +set_env(const char* name, const char* value) +{ +#ifdef _WIN32 + // setenv on Windows does not modify the current process' environment + const size_t len = strlen(name) + 1 + strlen(value) + 1; + char* str = (char*)calloc(1, len); + snprintf(str, len, "%s=%s", name, value); + putenv(str); + free(str); +#else + setenv(name, value, 1); +#endif +} + /*****************************************************************************/ #define TEST_CASE(name) { #name, test_##name } @@ -462,42 +503,6 @@ /*****************************************************************************/ static int -test_lv2_path(void) -{ -#ifndef _WIN32 - char* orig_lv2_path = lilv_strdup(getenv("LV2_PATH")); - - setenv("LV2_PATH", "~/.lv2:/usr/local/lib/lv2:/usr/lib/lv2", 1); - - world = lilv_world_new(); - lilv_world_load_all(world); - - const LilvPlugins* plugins = lilv_world_get_all_plugins(world); - const size_t n_plugins = lilv_plugins_size(plugins); - - lilv_world_free(world); - - setenv("LV2_PATH", "$HOME/.lv2:/usr/local/lib/lv2:/usr/lib/lv2", 1); - world = lilv_world_new(); - lilv_world_load_all(world); - plugins = lilv_world_get_all_plugins(world); - TEST_ASSERT(lilv_plugins_size(plugins) == n_plugins); - lilv_world_free(world); - world = NULL; - - if (orig_lv2_path) { - setenv("LV2_PATH", orig_lv2_path, 1); - } else { - unsetenv("LV2_PATH"); - } - free(orig_lv2_path); -#endif - return 1; -} - -/*****************************************************************************/ - -static int test_verify(void) { if (!start_bundle(MANIFEST_PREFIXES @@ -582,6 +587,8 @@ lilv_plugin_classes_free(children); + lilv_plugin_class_free(NULL); + cleanup_uris(); return 1; } @@ -646,7 +653,7 @@ TEST_ASSERT(!lilv_plugin_get_related(plug, NULL)); const LilvNode* plug_bundle_uri = lilv_plugin_get_bundle_uri(plug); - TEST_ASSERT(!strcmp(lilv_node_as_string(plug_bundle_uri), bundle_dir_uri)); + 
TEST_ASSERT(!strcmp(lilv_node_as_string(plug_bundle_uri), test_bundle_uri)); const LilvNodes* data_uris = lilv_plugin_get_data_uris(plug); TEST_ASSERT(lilv_nodes_size(data_uris) == 2); @@ -1092,7 +1099,7 @@ " a lv2:ControlPort ; a lv2:InputPort ; " " lv2:index 0 ; lv2:symbol \"foo\" ; " " lv2:name \"store\" ; " - " lv2:name \"dépanneur\"@fr-ca ; lv2:name \"épicerie\"@fr-fr ; " + " lv2:name \"Laden\"@de-de ; lv2:name \"Geschaeft\"@de-at ; " " lv2:name \"tienda\"@es ; " " rdfs:comment \"comment\"@en , \"commentaires\"@fr ; " " lv2:portProperty lv2:integer ; " @@ -1167,43 +1174,43 @@ lilv_node_free(name); // Exact language match - setenv("LANG", "fr_FR", 1); + set_env("LANG", "de_DE"); name = lilv_port_get_name(plug, p); - TEST_ASSERT(!strcmp(lilv_node_as_string(name), "épicerie")); + TEST_ASSERT(!strcmp(lilv_node_as_string(name), "Laden")); lilv_node_free(name); // Exact language match (with charset suffix) - setenv("LANG", "fr_CA.utf8", 1); + set_env("LANG", "de_AT.utf8"); name = lilv_port_get_name(plug, p); - TEST_ASSERT(!strcmp(lilv_node_as_string(name), "dépanneur")); + TEST_ASSERT(!strcmp(lilv_node_as_string(name), "Geschaeft")); lilv_node_free(name); // Partial language match (choose value translated for different country) - setenv("LANG", "fr_BE", 1); + set_env("LANG", "de_CH"); name = lilv_port_get_name(plug, p); - TEST_ASSERT((!strcmp(lilv_node_as_string(name), "dépanneur")) - ||(!strcmp(lilv_node_as_string(name), "épicerie"))); + TEST_ASSERT((!strcmp(lilv_node_as_string(name), "Laden")) + ||(!strcmp(lilv_node_as_string(name), "Geschaeft"))); lilv_node_free(name); // Partial language match (choose country-less language tagged value) - setenv("LANG", "es_MX", 1); + set_env("LANG", "es_MX"); name = lilv_port_get_name(plug, p); TEST_ASSERT(!strcmp(lilv_node_as_string(name), "tienda")); lilv_node_free(name); // No language match (choose untranslated value) - setenv("LANG", "cn", 1); + set_env("LANG", "cn"); name = lilv_port_get_name(plug, p); TEST_ASSERT(!strcmp(lilv_node_as_string(name), "store")); lilv_node_free(name); // Invalid language - setenv("LANG", "1!", 1); + set_env("LANG", "1!"); name = lilv_port_get_name(plug, p); TEST_ASSERT(!strcmp(lilv_node_as_string(name), "store")); lilv_node_free(name); - setenv("LANG", "en_CA.utf-8", 1); + set_env("LANG", "en_CA.utf-8"); // Language tagged value with no untranslated values LilvNode* rdfs_comment = lilv_new_uri(world, LILV_NS_RDFS "comment"); @@ -1215,14 +1222,14 @@ lilv_node_free(comment); lilv_nodes_free(comments); - setenv("LANG", "fr", 1); + set_env("LANG", "fr"); comments = lilv_port_get_value(plug, p, rdfs_comment); TEST_ASSERT(!strcmp(lilv_node_as_string(lilv_nodes_get_first(comments)), "commentaires")); lilv_nodes_free(comments); - setenv("LANG", "cn", 1); + set_env("LANG", "cn"); comments = lilv_port_get_value(plug, p, rdfs_comment); TEST_ASSERT(!comments); @@ -1230,7 +1237,7 @@ lilv_node_free(rdfs_comment); - setenv("LANG", "C", 1); // Reset locale + set_env("LANG", "C"); // Reset locale LilvScalePoints* points = lilv_port_get_scale_points(plug, p); TEST_ASSERT(lilv_scale_points_size(points) == 2); @@ -1475,6 +1482,8 @@ /*****************************************************************************/ +#ifndef _WIN32 + uint32_t atom_Float = 0; float in = 1.0; float out = 42.0; @@ -1604,21 +1613,21 @@ temp_dir = lilv_realpath("temp"); - const char* file_dir = NULL; - char* copy_dir = NULL; - char* link_dir = NULL; - char* save_dir = NULL; + const char* scratch_dir = NULL; + char* copy_dir = NULL; + char* link_dir = NULL; + char* 
save_dir = NULL; // Get instance state state LilvState* state = lilv_state_new_from_instance( plugin, instance, &map, - file_dir, copy_dir, link_dir, save_dir, + scratch_dir, copy_dir, link_dir, save_dir, get_port_value, world, 0, NULL); // Get another instance state LilvState* state2 = lilv_state_new_from_instance( plugin, instance, &map, - file_dir, copy_dir, link_dir, save_dir, + scratch_dir, copy_dir, link_dir, save_dir, get_port_value, world, 0, NULL); // Ensure they are equal @@ -1648,7 +1657,7 @@ // Ensure they are equal TEST_ASSERT(lilv_state_equals(state, from_str)); - free(state1_str); + lilv_free(state1_str); const LilvNode* state_plugin_uri = lilv_state_get_plugin_uri(state); TEST_ASSERT(lilv_node_equals(state_plugin_uri, plugin_uri)); @@ -1666,7 +1675,7 @@ lilv_instance_run(instance, 1); LilvState* state3 = lilv_state_new_from_instance( plugin, instance, &map, - file_dir, copy_dir, link_dir, save_dir, + scratch_dir, copy_dir, link_dir, save_dir, get_port_value, world, 0, NULL); TEST_ASSERT(!lilv_state_equals(state2, state3)); // num_runs changed @@ -1676,7 +1685,7 @@ // Take a new snapshot and ensure it matches the set state LilvState* state4 = lilv_state_new_from_instance( plugin, instance, &map, - file_dir, copy_dir, link_dir, save_dir, + scratch_dir, copy_dir, link_dir, save_dir, get_port_value, world, 0, NULL); TEST_ASSERT(lilv_state_equals(state2, state4)); @@ -1727,7 +1736,7 @@ lilv_world_load_bundle(world, test_state_bundle); lilv_world_load_resource(world, test_state_node); serd_node_free(&state6_uri); - free(state6_path); + lilv_free(state6_path); LilvState* state6 = lilv_state_new_from_world(world, &map, test_state_node); TEST_ASSERT(lilv_state_equals(state, state6)); // Round trip accuracy @@ -1751,7 +1760,7 @@ // Make directories and test files support mkdir("temp", 0700); - file_dir = temp_dir; + scratch_dir = temp_dir; mkdir("files", 0700); copy_dir = lilv_realpath("files"); mkdir("links", 0700); @@ -1781,13 +1790,13 @@ // Get instance state state LilvState* fstate = lilv_state_new_from_instance( plugin, instance, &map, - file_dir, copy_dir, link_dir, "state/fstate.lv2", + scratch_dir, copy_dir, link_dir, "state/fstate.lv2", get_port_value, world, 0, ffeatures); // Get another instance state LilvState* fstate2 = lilv_state_new_from_instance( plugin, instance, &map, - file_dir, copy_dir, link_dir, "state/fstate2.lv2", + scratch_dir, copy_dir, link_dir, "state/fstate2.lv2", get_port_value, world, 0, ffeatures); // Should be identical @@ -1798,7 +1807,8 @@ // Get yet another instance state LilvState* fstate3 = lilv_state_new_from_instance( - plugin, instance, &map, file_dir, copy_dir, link_dir, "state/fstate3.lv2", + plugin, instance, &map, + scratch_dir, copy_dir, link_dir, "state/fstate3.lv2", get_port_value, world, 0, ffeatures); // Should be different @@ -1810,8 +1820,8 @@ TEST_ASSERT(!ret); // Load state from directory - LilvState* fstate4 = lilv_state_new_from_file(world, &map, NULL, - "state/fstate.lv2/fstate.ttl"); + LilvState* fstate4 = lilv_state_new_from_file( + world, &map, NULL, "state/fstate.lv2/fstate.ttl"); TEST_ASSERT(lilv_state_equals(fstate, fstate4)); // Round trip accuracy // Restore instance state to loaded state @@ -1820,7 +1830,7 @@ // Take a new snapshot and ensure it matches LilvState* fstate5 = lilv_state_new_from_instance( plugin, instance, &map, - file_dir, copy_dir, link_dir, "state/fstate5.lv2", + scratch_dir, copy_dir, link_dir, "state/fstate5.lv2", get_port_value, world, 0, ffeatures); TEST_ASSERT(lilv_state_equals(fstate3, 
@@ -1830,8 +1840,8 @@
 	TEST_ASSERT(!ret);
 
 	// Reload it and ensure it's identical to the other loaded version
-	LilvState* fstate6 = lilv_state_new_from_file(world, &map, NULL,
-	                                              "state/fstate6.lv2/fstate6.ttl");
+	LilvState* fstate6 = lilv_state_new_from_file(
+		world, &map, NULL, "state/fstate6.lv2/fstate6.ttl");
 	TEST_ASSERT(lilv_state_equals(fstate4, fstate6));
 
 	// Run, changing rec file (without changing size)
@@ -1840,7 +1850,7 @@
 	// Take a new snapshot
 	LilvState* fstate7 = lilv_state_new_from_instance(
 		plugin, instance, &map,
-		file_dir, copy_dir, link_dir, "state/fstate7.lv2",
+		scratch_dir, copy_dir, link_dir, "state/fstate7.lv2",
 		get_port_value, world, 0, ffeatures);
 	TEST_ASSERT(!lilv_state_equals(fstate6, fstate7));
@@ -1850,8 +1860,8 @@
 	TEST_ASSERT(!ret);
 
 	// Reload it and ensure it's changed
-	LilvState* fstate72 = lilv_state_new_from_file(world, &map, NULL,
-	                                               "state/fstate7.lv2/fstate7.ttl");
+	LilvState* fstate72 = lilv_state_new_from_file(
+		world, &map, NULL, "state/fstate7.lv2/fstate7.ttl");
 	TEST_ASSERT(lilv_state_equals(fstate72, fstate7));
 	TEST_ASSERT(!lilv_state_equals(fstate6, fstate72));
@@ -1895,6 +1905,7 @@
 		cleanup_uris();
 		return 1;
 	}
+#endif
 
 /*****************************************************************************/
 
@@ -2050,7 +2061,7 @@
 	lilv_world_load_specifications(world);
 
 	// Load bundle
-	LilvNode* bundle_uri = lilv_new_uri(world, bundle_dir_uri);
+	LilvNode* bundle_uri = lilv_new_uri(world, test_bundle_uri);
 	lilv_world_load_bundle(world, bundle_uri);
 
 	// Check that plugin is present
@@ -2238,7 +2249,6 @@
 	TEST_CASE(verify),
 	TEST_CASE(no_verify),
 	TEST_CASE(discovery),
-	TEST_CASE(lv2_path),
 	TEST_CASE(classes),
 	TEST_CASE(plugin),
 	TEST_CASE(project),
@@ -2253,7 +2263,10 @@
 	TEST_CASE(bad_port_index),
 	TEST_CASE(string),
 	TEST_CASE(world),
+	// FIXME: State is not currently working on Windows
+#ifndef _WIN32
 	TEST_CASE(state),
+#endif
 	TEST_CASE(reload_bundle),
 	TEST_CASE(replace_version),
 	TEST_CASE(get_symbol),
@@ -2284,7 +2297,7 @@
 		printf("Syntax: %s\n", argv[0]);
 		return 0;
 	}
-	setenv("LANG", "C", 1);
+	set_env("LANG", "C");
 	init_tests();
 	run_tests();
 	cleanup();
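Every setenv() in the hunks above becomes set_env() because setenv() does not exist in the Windows C runtime. The wrapper's definition lies outside this excerpt; a plausible portable implementation along the lines the test suite needs:

#include <stdlib.h>

// Hypothetical portable environment setter: _putenv_s() on Windows,
// POSIX setenv() elsewhere.  The actual lilv definition may differ.
static void
set_env(const char* name, const char* value)
{
#ifdef _WIN32
	_putenv_s(name, value);
#else
	setenv(name, value, 1);
#endif
}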
diff -Nru lilv-0.24.4~dfsg0/test/missing_descriptor.lv2/missing_descriptor.c lilv-0.24.6/test/missing_descriptor.lv2/missing_descriptor.c
--- lilv-0.24.4~dfsg0/test/missing_descriptor.lv2/missing_descriptor.c	2018-07-21 08:10:18.000000000 +0000
+++ lilv-0.24.6/test/missing_descriptor.lv2/missing_descriptor.c	2019-10-19 21:34:09.000000000 +0000
@@ -1,6 +1,6 @@
 /*
   Lilv Test Plugin - Missing descriptor
-  Copyright 2011-2015 David Robillard
+  Copyright 2011-2018 David Robillard
 
   Permission to use, copy, modify, and/or distribute this software for any
   purpose with or without fee is hereby granted, provided that the above
@@ -15,7 +15,7 @@
   OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
 */
 
-#include "lv2/lv2plug.in/ns/lv2core/lv2.h"
+#include "lv2/core/lv2.h"
 
 LV2_SYMBOL_EXPORT
 const char* msg = "this is not the thing you're looking for";
diff -Nru lilv-0.24.4~dfsg0/test/missing_descriptor.lv2/test_missing_descriptor.c lilv-0.24.6/test/missing_descriptor.lv2/test_missing_descriptor.c
--- lilv-0.24.4~dfsg0/test/missing_descriptor.lv2/test_missing_descriptor.c	2018-07-21 08:10:18.000000000 +0000
+++ lilv-0.24.6/test/missing_descriptor.lv2/test_missing_descriptor.c	2019-10-20 23:18:59.000000000 +0000
@@ -1,6 +1,13 @@
-#include "lilv/lilv.h"
 #include "../src/lilv_internal.h"
+#include "serd/serd.h"
+#include "lilv/lilv.h"
+
+#include
+#include
+#include
+#include
+
 #define PLUGIN_URI "http://example.org/missing-descriptor"
 
 #define TEST_ASSERT(check) do {\
diff -Nru lilv-0.24.4~dfsg0/test/missing_name.lv2/missing_name.c lilv-0.24.6/test/missing_name.lv2/missing_name.c
--- lilv-0.24.4~dfsg0/test/missing_name.lv2/missing_name.c	2017-01-04 17:29:52.000000000 +0000
+++ lilv-0.24.6/test/missing_name.lv2/missing_name.c	2019-10-19 21:34:09.000000000 +0000
@@ -1,6 +1,6 @@
 /*
   Lilv Test Plugin - Missing name
-  Copyright 2011-2015 David Robillard
+  Copyright 2011-2019 David Robillard
 
   Permission to use, copy, modify, and/or distribute this software for any
   purpose with or without fee is hereby granted, provided that the above
@@ -15,9 +15,10 @@
   OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
 */
 
-#include
+#include "lv2/core/lv2.h"
 
-#include "lv2/lv2plug.in/ns/lv2core/lv2.h"
+#include
+#include
 
 #define PLUGIN_URI "http://example.org/missing-name"
diff -Nru lilv-0.24.4~dfsg0/test/missing_name.lv2/test_missing_name.c lilv-0.24.6/test/missing_name.lv2/test_missing_name.c
--- lilv-0.24.4~dfsg0/test/missing_name.lv2/test_missing_name.c	2018-07-21 08:10:18.000000000 +0000
+++ lilv-0.24.6/test/missing_name.lv2/test_missing_name.c	2019-10-20 23:18:59.000000000 +0000
@@ -1,6 +1,13 @@
-#include "lilv/lilv.h"
 #include "../src/lilv_internal.h"
+#include "serd/serd.h"
+#include "lilv/lilv.h"
+
+#include
+#include
+#include
+#include
+
 #define PLUGIN_URI "http://example.org/missing-name"
 
 #define TEST_ASSERT(check) do {\
diff -Nru lilv-0.24.4~dfsg0/test/missing_plugin.lv2/missing_plugin.c lilv-0.24.6/test/missing_plugin.lv2/missing_plugin.c
--- lilv-0.24.4~dfsg0/test/missing_plugin.lv2/missing_plugin.c	2017-01-04 17:29:52.000000000 +0000
+++ lilv-0.24.6/test/missing_plugin.lv2/missing_plugin.c	2019-10-19 21:34:09.000000000 +0000
@@ -1,6 +1,6 @@
 /*
   Lilv Test Plugin - Missing plugin
-  Copyright 2011-2015 David Robillard
+  Copyright 2011-2019 David Robillard
 
   Permission to use, copy, modify, and/or distribute this software for any
   purpose with or without fee is hereby granted, provided that the above
@@ -15,9 +15,10 @@
   OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/ -#include +#include "lv2/core/lv2.h" -#include "lv2/lv2plug.in/ns/lv2core/lv2.h" +#include +#include #define PLUGIN_URI "http://example.org/missing-plugin" diff -Nru lilv-0.24.4~dfsg0/test/missing_plugin.lv2/test_missing_plugin.c lilv-0.24.6/test/missing_plugin.lv2/test_missing_plugin.c --- lilv-0.24.4~dfsg0/test/missing_plugin.lv2/test_missing_plugin.c 2018-07-21 08:10:18.000000000 +0000 +++ lilv-0.24.6/test/missing_plugin.lv2/test_missing_plugin.c 2019-10-20 23:18:59.000000000 +0000 @@ -1,6 +1,13 @@ -#include "lilv/lilv.h" #include "../src/lilv_internal.h" +#include "serd/serd.h" +#include "lilv/lilv.h" + +#include +#include +#include +#include + #define PLUGIN_URI "http://example.org/missing-plugin" #define TEST_ASSERT(check) do {\ diff -Nru lilv-0.24.4~dfsg0/test/missing_port.lv2/missing_port.c lilv-0.24.6/test/missing_port.lv2/missing_port.c --- lilv-0.24.4~dfsg0/test/missing_port.lv2/missing_port.c 2017-01-04 17:29:52.000000000 +0000 +++ lilv-0.24.6/test/missing_port.lv2/missing_port.c 2019-10-19 21:34:09.000000000 +0000 @@ -1,6 +1,6 @@ /* Lilv Test Plugin - Missing port - Copyright 2011-2016 David Robillard + Copyright 2011-2019 David Robillard Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above @@ -15,9 +15,10 @@ OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. */ -#include +#include "lv2/core/lv2.h" -#include "lv2/lv2plug.in/ns/lv2core/lv2.h" +#include +#include #define PLUGIN_URI "http://example.org/missing-port" diff -Nru lilv-0.24.4~dfsg0/test/missing_port.lv2/test_missing_port.c lilv-0.24.6/test/missing_port.lv2/test_missing_port.c --- lilv-0.24.4~dfsg0/test/missing_port.lv2/test_missing_port.c 2018-07-21 08:10:18.000000000 +0000 +++ lilv-0.24.6/test/missing_port.lv2/test_missing_port.c 2019-10-20 23:18:59.000000000 +0000 @@ -1,6 +1,13 @@ -#include "lilv/lilv.h" #include "../src/lilv_internal.h" +#include "serd/serd.h" +#include "lilv/lilv.h" + +#include +#include +#include +#include + #define PLUGIN_URI "http://example.org/missing-port" #define TEST_ASSERT(check) do {\ diff -Nru lilv-0.24.4~dfsg0/test/missing_port_name.lv2/missing_port_name.c lilv-0.24.6/test/missing_port_name.lv2/missing_port_name.c --- lilv-0.24.4~dfsg0/test/missing_port_name.lv2/missing_port_name.c 2016-09-19 01:02:26.000000000 +0000 +++ lilv-0.24.6/test/missing_port_name.lv2/missing_port_name.c 2019-10-19 21:34:09.000000000 +0000 @@ -1,6 +1,6 @@ /* Lilv Test Plugin - Missing port name - Copyright 2011-2015 David Robillard + Copyright 2011-2019 David Robillard Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above @@ -15,9 +15,10 @@ OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
*/ -#include +#include "lv2/core/lv2.h" -#include "lv2/lv2plug.in/ns/lv2core/lv2.h" +#include +#include #define PLUGIN_URI "http://example.org/missing-port-name" diff -Nru lilv-0.24.4~dfsg0/test/missing_port_name.lv2/test_missing_port_name.c lilv-0.24.6/test/missing_port_name.lv2/test_missing_port_name.c --- lilv-0.24.4~dfsg0/test/missing_port_name.lv2/test_missing_port_name.c 2018-07-21 08:10:18.000000000 +0000 +++ lilv-0.24.6/test/missing_port_name.lv2/test_missing_port_name.c 2019-10-20 23:18:59.000000000 +0000 @@ -1,6 +1,13 @@ -#include "lilv/lilv.h" #include "../src/lilv_internal.h" +#include "serd/serd.h" +#include "lilv/lilv.h" + +#include +#include +#include +#include + #define PLUGIN_URI "http://example.org/missing-port-name" #define TEST_ASSERT(check) do {\ diff -Nru lilv-0.24.4~dfsg0/test/new_version.lv2/new_version.c lilv-0.24.6/test/new_version.lv2/new_version.c --- lilv-0.24.4~dfsg0/test/new_version.lv2/new_version.c 2017-01-04 17:29:52.000000000 +0000 +++ lilv-0.24.6/test/new_version.lv2/new_version.c 2019-10-19 21:34:09.000000000 +0000 @@ -1,6 +1,6 @@ /* Lilv Test Plugin - New version - Copyright 2011-2016 David Robillard + Copyright 2011-2019 David Robillard Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above @@ -15,9 +15,10 @@ OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. */ -#include +#include "lv2/core/lv2.h" -#include "lv2/lv2plug.in/ns/lv2core/lv2.h" +#include +#include #define PLUGIN_URI "http://example.org/versioned" diff -Nru lilv-0.24.4~dfsg0/test/old_version.lv2/old_version.c lilv-0.24.6/test/old_version.lv2/old_version.c --- lilv-0.24.4~dfsg0/test/old_version.lv2/old_version.c 2017-01-04 17:29:52.000000000 +0000 +++ lilv-0.24.6/test/old_version.lv2/old_version.c 2019-10-19 21:34:09.000000000 +0000 @@ -1,6 +1,6 @@ /* Lilv Test Plugin - Old version - Copyright 2011-2016 David Robillard + Copyright 2011-2019 David Robillard Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above @@ -15,9 +15,10 @@ OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
*/ -#include +#include "lv2/core/lv2.h" -#include "lv2/lv2plug.in/ns/lv2core/lv2.h" +#include +#include #define PLUGIN_URI "http://example.org/versioned" diff -Nru lilv-0.24.4~dfsg0/test/test.lv2/test.c lilv-0.24.6/test/test.lv2/test.c --- lilv-0.24.4~dfsg0/test/test.lv2/test.c 2018-07-21 08:10:18.000000000 +0000 +++ lilv-0.24.6/test/test.lv2/test.c 2019-10-19 21:34:09.000000000 +0000 @@ -1,6 +1,6 @@ /* Lilv Test Plugin - Copyright 2011-2017 David Robillard + Copyright 2011-2019 David Robillard Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above @@ -17,21 +17,21 @@ #define _POSIX_C_SOURCE 200809L -#include -#include -#include -#include - -#include "lv2/lv2plug.in/ns/ext/atom/atom.h" -#include "lv2/lv2plug.in/ns/ext/state/state.h" -#include "lv2/lv2plug.in/ns/ext/urid/urid.h" -#include "lv2/lv2plug.in/ns/lv2core/lv2.h" +#include "lv2/atom/atom.h" +#include "lv2/core/lv2.h" +#include "lv2/state/state.h" +#include "lv2/urid/urid.h" -#ifdef _WIN32 +#ifdef _MSC_VER # include # define mkstemp(pat) _mktemp(pat) #endif +#include +#include +#include +#include + #define TEST_URI "http://example.org/lilv-test-plugin" #define TMP_TEMPLATE "lilv_testXXXXXX" @@ -59,13 +59,23 @@ } Test; static void +free_path(char* path) +{ + /* FIXME: Temporary hack to avoid mismatched malloc/free crashes on + Windows. The specifications needs a feature for this. */ +#ifndef _WIN32 + free(path); +#endif +} + +static void cleanup(LV2_Handle instance) { Test* test = (Test*)instance; if (test->rec_file) { fclose(test->rec_file); } - free(test->rec_file_path); + free_path(test->rec_file_path); free(instance); } @@ -101,7 +111,7 @@ return NULL; } - strncpy(test->tmp_file_path, TMP_TEMPLATE, sizeof(TMP_TEMPLATE)); + strncpy(test->tmp_file_path, TMP_TEMPLATE, strlen(TMP_TEMPLATE) + 1); mkstemp(test->tmp_file_path); LV2_State_Make_Path* make_path = NULL; @@ -192,6 +202,27 @@ map_uri(plugin, LV2_ATOM__URID), LV2_STATE_IS_POD | LV2_STATE_IS_PORTABLE); + // Try to store second value for the same property (should fail) + const uint32_t urid2 = map_uri(plugin, "http://example.org/urivalue2"); + if (!store(callback_data, + map_uri(plugin, "http://example.org/uri"), + &urid2, + sizeof(uint32_t), + map_uri(plugin, LV2_ATOM__URID), + LV2_STATE_IS_POD | LV2_STATE_IS_PORTABLE)) { + return LV2_STATE_ERR_UNKNOWN; + } + + // Try to store with a null key (should fail) + if (!store(callback_data, + 0, + &urid2, + sizeof(uint32_t), + map_uri(plugin, LV2_ATOM__URID), + LV2_STATE_IS_POD | LV2_STATE_IS_PORTABLE)) { + return LV2_STATE_ERR_UNKNOWN; + } + store(callback_data, map_uri(plugin, "http://example.org/num-runs"), &plugin->num_runs, @@ -250,8 +281,8 @@ map_uri(plugin, LV2_ATOM__Path), LV2_STATE_IS_POD); - free(apath); - free(apath2); + free_path(apath); + free_path(apath2); if (plugin->rec_file) { fflush(plugin->rec_file); @@ -265,7 +296,7 @@ map_uri(plugin, LV2_ATOM__Path), LV2_STATE_IS_POD); - free(apath); + free_path(apath); } if (make_path) { @@ -281,8 +312,8 @@ strlen(apath) + 1, map_uri(plugin, LV2_ATOM__Path), LV2_STATE_IS_POD); - free(apath); - free(spath); + free_path(apath); + free_path(spath); } } @@ -338,7 +369,7 @@ fprintf(stderr, "error: Restored bad file contents `%s' != `Hello'\n", str); } - free(path); + free_path(path); } apath = (char*)retrieve( @@ -353,7 +384,7 @@ } else { fclose(sfile); } - free(spath); + free_path(spath); } else { fprintf(stderr, "error: Failed to restore save file.\n"); } diff -Nru 
lilv-0.24.4~dfsg0/utils/bench.h lilv-0.24.6/utils/bench.h --- lilv-0.24.4~dfsg0/utils/bench.h 2017-01-04 17:29:52.000000000 +0000 +++ lilv-0.24.6/utils/bench.h 2019-10-19 21:34:09.000000000 +0000 @@ -1,5 +1,5 @@ /* - Copyright 2011-2014 David Robillard + Copyright 2011-2019 David Robillard Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above @@ -23,8 +23,8 @@ #define _POSIX_C_SOURCE 200809L -#include #include +#include static inline double bench_elapsed_s(const struct timespec* start, const struct timespec* end) diff -Nru lilv-0.24.4~dfsg0/utils/lilv-bench.c lilv-0.24.6/utils/lilv-bench.c --- lilv-0.24.4~dfsg0/utils/lilv-bench.c 2016-09-19 01:02:26.000000000 +0000 +++ lilv-0.24.6/utils/lilv-bench.c 2019-10-19 21:34:09.000000000 +0000 @@ -14,12 +14,8 @@ OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. */ -#include - #include "lilv/lilv.h" -#include "lilv_config.h" - int main(int argc, char** argv) { diff -Nru lilv-0.24.4~dfsg0/utils/lv2apply.c lilv-0.24.6/utils/lv2apply.c --- lilv-0.24.4~dfsg0/utils/lv2apply.c 2018-07-21 10:26:23.000000000 +0000 +++ lilv-0.24.6/utils/lv2apply.c 2019-10-19 21:34:09.000000000 +0000 @@ -1,5 +1,5 @@ /* - Copyright 2007-2016 David Robillard + Copyright 2007-2019 David Robillard Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above @@ -14,16 +14,19 @@ OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. */ -#include +#include "lilv/lilv.h" + +#include "lv2/core/lv2.h" + #include #include #include +#include +#include #include #include #include -#include "lilv/lilv.h" - /** Control port value set from the command line */ typedef struct Param { const char* sym; ///< Port symbol @@ -199,7 +202,7 @@ { printf( "lv2apply (lilv) " LILV_VERSION "\n" - "Copyright 2007-2016 David Robillard \n" + "Copyright 2007-2019 David Robillard \n" "License: \n" "This is free software: you are free to change and redistribute it.\n" "There is NO WARRANTY, to the extent permitted by law.\n"); diff -Nru lilv-0.24.4~dfsg0/utils/lv2bench.c lilv-0.24.6/utils/lv2bench.c --- lilv-0.24.4~dfsg0/utils/lv2bench.c 2018-07-10 19:55:39.000000000 +0000 +++ lilv-0.24.6/utils/lv2bench.c 2019-10-19 21:34:09.000000000 +0000 @@ -1,5 +1,5 @@ /* - Copyright 2012-2017 David Robillard + Copyright 2012-2019 David Robillard Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above @@ -16,18 +16,23 @@ #define _POSIX_C_SOURCE 200809L -#include -#include -#include -#include - #include "lilv/lilv.h" -#include "lv2/lv2plug.in/ns/ext/atom/atom.h" +#include "lv2/atom/atom.h" +#include "lv2/core/lv2.h" +#include "lv2/urid/urid.h" -#include "lilv_config.h" #include "bench.h" +#include "lilv_config.h" #include "uri_table.h" +#include +#include +#include +#include +#include +#include +#include + static LilvNode* atom_AtomPort = NULL; static LilvNode* atom_Sequence = NULL; static LilvNode* lv2_AudioPort = NULL; @@ -44,7 +49,7 @@ { printf( "lv2bench (lilv) " LILV_VERSION "\n" - "Copyright 2012-2017 David Robillard \n" + "Copyright 2012-2019 David Robillard \n" "License: \n" "This is free software: you are free to change and redistribute it.\n" "There is NO WARRANTY, to the extent permitted by law.\n"); diff -Nru lilv-0.24.4~dfsg0/utils/lv2info.c lilv-0.24.6/utils/lv2info.c --- lilv-0.24.4~dfsg0/utils/lv2info.c 2018-07-10 19:55:39.000000000 
+0000 +++ lilv-0.24.6/utils/lv2info.c 2019-10-20 23:18:59.000000000 +0000 @@ -1,5 +1,5 @@ /* - Copyright 2007-2014 David Robillard + Copyright 2007-2019 David Robillard Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above @@ -14,24 +14,21 @@ OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. */ -#include +#include "lilv_config.h" + +#include "lilv/lilv.h" +#include "lv2/core/lv2.h" +#include "lv2/event/event.h" +#include "lv2/port-groups/port-groups.h" +#include "lv2/presets/presets.h" + #include +#include +#include #include #include #include -#include "lv2/lv2plug.in/ns/ext/port-groups/port-groups.h" -#include "lv2/lv2plug.in/ns/ext/presets/presets.h" -#include "lv2/lv2plug.in/ns/ext/event/event.h" - -#include "lilv/lilv.h" - -#include "lilv_config.h" - -#ifdef _MSC_VER -# define isnan _isnan -#endif - LilvNode* applies_to_pred = NULL; LilvNode* control_class = NULL; LilvNode* event_class = NULL; @@ -338,7 +335,7 @@ { printf( "lv2info (lilv) " LILV_VERSION "\n" - "Copyright 2007-2014 David Robillard \n" + "Copyright 2007-2019 David Robillard \n" "License: \n" "This is free software: you are free to change and redistribute it.\n" "There is NO WARRANTY, to the extent permitted by law.\n"); diff -Nru lilv-0.24.4~dfsg0/utils/lv2ls.c lilv-0.24.6/utils/lv2ls.c --- lilv-0.24.4~dfsg0/utils/lv2ls.c 2016-09-19 01:02:26.000000000 +0000 +++ lilv-0.24.6/utils/lv2ls.c 2019-10-19 21:34:09.000000000 +0000 @@ -1,5 +1,5 @@ /* - Copyright 2007-2012 David Robillard + Copyright 2007-2019 David Robillard Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above @@ -14,12 +14,13 @@ OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. */ -#include -#include +#include "lilv_config.h" #include "lilv/lilv.h" -#include "lilv_config.h" +#include +#include +#include static void list_plugins(const LilvPlugins* list, bool show_names) @@ -41,7 +42,7 @@ { printf( "lv2ls (lilv) " LILV_VERSION "\n" - "Copyright 2007-2012 David Robillard \n" + "Copyright 2007-2019 David Robillard \n" "License: \n" "This is free software: you are free to change and redistribute it.\n" "There is NO WARRANTY, to the extent permitted by law.\n"); diff -Nru lilv-0.24.4~dfsg0/utils/uri_table.h lilv-0.24.6/utils/uri_table.h --- lilv-0.24.4~dfsg0/utils/uri_table.h 2017-01-04 17:29:52.000000000 +0000 +++ lilv-0.24.6/utils/uri_table.h 2019-10-19 21:34:09.000000000 +0000 @@ -1,5 +1,5 @@ /* - Copyright 2011-2012 David Robillard + Copyright 2011-2019 David Robillard Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above @@ -23,6 +23,9 @@ #ifndef URI_TABLE_H #define URI_TABLE_H +#include +#include + typedef struct { char** uris; size_t n_uris; diff -Nru lilv-0.24.4~dfsg0/waf lilv-0.24.6/waf --- lilv-0.24.4~dfsg0/waf 2019-08-22 09:55:19.000000000 +0000 +++ lilv-0.24.6/waf 2019-10-19 21:34:09.000000000 +0000 @@ -1,166 +1,27 @@ #!/usr/bin/env python -# encoding: latin-1 -# Thomas Nagy, 2005-2018 -# -""" -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions -are met: - -1. Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - -2. 
Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - -3. The name of the author may not be used to endorse or promote products - derived from this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR -IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, -INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) -HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, -STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING -IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -POSSIBILITY OF SUCH DAMAGE. -""" - -import os, sys, inspect - -VERSION="2.0.9" -REVISION="7ea8f0d5002fd7cec19aa8d10c4784ce" -GIT="x" -INSTALL='' -C1='#W' -C2='#3' -C3='#0' -cwd = os.getcwd() -join = os.path.join - - -WAF='waf' -def b(x): - return x -if sys.hexversion>0x300000f: - WAF='waf3' - def b(x): - return x.encode() - -def err(m): - print(('\033[91mError: %s\033[0m' % m)) - sys.exit(1) - -def unpack_wafdir(dir, src): - f = open(src,'rb') - c = 'corrupt archive (%d)' - while 1: - line = f.readline() - if not line: err('run waf-light from a folder containing waflib') - if line == b('#==>\n'): - txt = f.readline() - if not txt: err(c % 1) - if f.readline() != b('#<==\n'): err(c % 2) - break - if not txt: err(c % 3) - txt = txt[1:-1].replace(b(C1), b('\n')).replace(b(C2), b('\r')).replace(b(C3), b('\x00')) - - import shutil, tarfile - try: shutil.rmtree(dir) - except OSError: pass - try: - for x in ('Tools', 'extras'): - os.makedirs(join(dir, 'waflib', x)) - except OSError: - err("Cannot unpack waf lib into %s\nMove waf in a writable directory" % dir) - - os.chdir(dir) - tmp = 't.bz2' - t = open(tmp,'wb') - try: t.write(txt) - finally: t.close() - - try: - t = tarfile.open(tmp) - except: - try: - os.system('bunzip2 t.bz2') - t = tarfile.open('t') - tmp = 't' - except: - os.chdir(cwd) - try: shutil.rmtree(dir) - except OSError: pass - err("Waf cannot be unpacked, check that bzip2 support is present") - - try: - for x in t: t.extract(x) - finally: - t.close() - - for x in ('Tools', 'extras'): - os.chmod(join('waflib',x), 493) - - if sys.hexversion<0x300000f: - sys.path = [join(dir, 'waflib')] + sys.path - import fixpy2 - fixpy2.fixdir(dir) - - os.remove(tmp) - os.chdir(cwd) - - try: dir = unicode(dir, 'mbcs') - except: pass - try: - from ctypes import windll - windll.kernel32.SetFileAttributesW(dir, 2) - except: - pass - -def test(dir): - try: - os.stat(join(dir, 'waflib')) - return os.path.abspath(dir) - except OSError: - pass - -def find_lib(): - src = os.path.abspath(inspect.getfile(inspect.getmodule(err))) - base, name = os.path.split(src) - - #devs use $WAFDIR - w=test(os.environ.get('WAFDIR', '')) - if w: return w - - #waf-light - if name.endswith('waf-light'): - w = test(base) - if w: return w - err('waf-light requires waflib -> export WAFDIR=/folder') - - dirname = '%s-%s-%s' % (WAF, VERSION, REVISION) - for i in (INSTALL,'/usr','/usr/local','/opt'): - w = test(i + '/lib/' + dirname) - if w: return w - - #waf-local - dir = join(base, (sys.platform != 'win32' 
and '.' or '') + dirname) - w = test(dir) - if w: return w - - #unpack - unpack_wafdir(dir, src) - return dir -wafdir = find_lib() -sys.path.insert(0, wafdir) +# Minimal waf script for projects that include waflib directly -if __name__ == '__main__': +import sys +import inspect +import os + +try: + from waflib import Context, Scripting +except Exception as e: + sys.stderr.write('error: Failed to import waf (%s)\n' % e) + if os.path.exists('.git'): + sys.stderr.write("Are submodules up to date? " + "Try 'git submodule update --init --recursive'\n") + + sys.exit(1) + + +def main(): + script_path = os.path.abspath(inspect.getfile(inspect.getmodule(main))) + project_path = os.path.dirname(script_path) + Scripting.waf_entry_point(os.getcwd(), Context.WAFVERSION, project_path) - from waflib import Scripting - Scripting.waf_entry_point(cwd, VERSION, wafdir) +if __name__ == '__main__': + main() diff -Nru lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/ansiterm.py lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/ansiterm.py --- lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/ansiterm.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/ansiterm.py 2019-06-06 20:12:09.000000000 +0000 @@ -0,0 +1,238 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file + +import os,re,sys +from waflib import Utils +wlock=Utils.threading.Lock() +try: + from ctypes import Structure,windll,c_short,c_ushort,c_ulong,c_int,byref,c_wchar,POINTER,c_long +except ImportError: + class AnsiTerm(object): + def __init__(self,stream): + self.stream=stream + try: + self.errors=self.stream.errors + except AttributeError: + pass + self.encoding=self.stream.encoding + def write(self,txt): + try: + wlock.acquire() + self.stream.write(txt) + self.stream.flush() + finally: + wlock.release() + def fileno(self): + return self.stream.fileno() + def flush(self): + self.stream.flush() + def isatty(self): + return self.stream.isatty() +else: + class COORD(Structure): + _fields_=[("X",c_short),("Y",c_short)] + class SMALL_RECT(Structure): + _fields_=[("Left",c_short),("Top",c_short),("Right",c_short),("Bottom",c_short)] + class CONSOLE_SCREEN_BUFFER_INFO(Structure): + _fields_=[("Size",COORD),("CursorPosition",COORD),("Attributes",c_ushort),("Window",SMALL_RECT),("MaximumWindowSize",COORD)] + class CONSOLE_CURSOR_INFO(Structure): + _fields_=[('dwSize',c_ulong),('bVisible',c_int)] + try: + _type=unicode + except NameError: + _type=str + to_int=lambda number,default:number and int(number)or default + STD_OUTPUT_HANDLE=-11 + STD_ERROR_HANDLE=-12 + windll.kernel32.GetStdHandle.argtypes=[c_ulong] + windll.kernel32.GetStdHandle.restype=c_ulong + windll.kernel32.GetConsoleScreenBufferInfo.argtypes=[c_ulong,POINTER(CONSOLE_SCREEN_BUFFER_INFO)] + windll.kernel32.GetConsoleScreenBufferInfo.restype=c_long + windll.kernel32.SetConsoleTextAttribute.argtypes=[c_ulong,c_ushort] + windll.kernel32.SetConsoleTextAttribute.restype=c_long + windll.kernel32.FillConsoleOutputCharacterW.argtypes=[c_ulong,c_wchar,c_ulong,POINTER(COORD),POINTER(c_ulong)] + windll.kernel32.FillConsoleOutputCharacterW.restype=c_long + windll.kernel32.FillConsoleOutputAttribute.argtypes=[c_ulong,c_ushort,c_ulong,POINTER(COORD),POINTER(c_ulong)] + windll.kernel32.FillConsoleOutputAttribute.restype=c_long + windll.kernel32.SetConsoleCursorPosition.argtypes=[c_ulong,POINTER(COORD)] + 
windll.kernel32.SetConsoleCursorPosition.restype=c_long + windll.kernel32.SetConsoleCursorInfo.argtypes=[c_ulong,POINTER(CONSOLE_CURSOR_INFO)] + windll.kernel32.SetConsoleCursorInfo.restype=c_long + class AnsiTerm(object): + def __init__(self,s): + self.stream=s + try: + self.errors=s.errors + except AttributeError: + pass + self.encoding=s.encoding + self.cursor_history=[] + handle=(s.fileno()==2)and STD_ERROR_HANDLE or STD_OUTPUT_HANDLE + self.hconsole=windll.kernel32.GetStdHandle(handle) + self._sbinfo=CONSOLE_SCREEN_BUFFER_INFO() + self._csinfo=CONSOLE_CURSOR_INFO() + windll.kernel32.GetConsoleCursorInfo(self.hconsole,byref(self._csinfo)) + self._orig_sbinfo=CONSOLE_SCREEN_BUFFER_INFO() + r=windll.kernel32.GetConsoleScreenBufferInfo(self.hconsole,byref(self._orig_sbinfo)) + self._isatty=r==1 + def screen_buffer_info(self): + windll.kernel32.GetConsoleScreenBufferInfo(self.hconsole,byref(self._sbinfo)) + return self._sbinfo + def clear_line(self,param): + mode=param and int(param)or 0 + sbinfo=self.screen_buffer_info() + if mode==1: + line_start=COORD(0,sbinfo.CursorPosition.Y) + line_length=sbinfo.Size.X + elif mode==2: + line_start=COORD(sbinfo.CursorPosition.X,sbinfo.CursorPosition.Y) + line_length=sbinfo.Size.X-sbinfo.CursorPosition.X + else: + line_start=sbinfo.CursorPosition + line_length=sbinfo.Size.X-sbinfo.CursorPosition.X + chars_written=c_ulong() + windll.kernel32.FillConsoleOutputCharacterW(self.hconsole,c_wchar(' '),line_length,line_start,byref(chars_written)) + windll.kernel32.FillConsoleOutputAttribute(self.hconsole,sbinfo.Attributes,line_length,line_start,byref(chars_written)) + def clear_screen(self,param): + mode=to_int(param,0) + sbinfo=self.screen_buffer_info() + if mode==1: + clear_start=COORD(0,0) + clear_length=sbinfo.CursorPosition.X*sbinfo.CursorPosition.Y + elif mode==2: + clear_start=COORD(0,0) + clear_length=sbinfo.Size.X*sbinfo.Size.Y + windll.kernel32.SetConsoleCursorPosition(self.hconsole,clear_start) + else: + clear_start=sbinfo.CursorPosition + clear_length=((sbinfo.Size.X-sbinfo.CursorPosition.X)+sbinfo.Size.X*(sbinfo.Size.Y-sbinfo.CursorPosition.Y)) + chars_written=c_ulong() + windll.kernel32.FillConsoleOutputCharacterW(self.hconsole,c_wchar(' '),clear_length,clear_start,byref(chars_written)) + windll.kernel32.FillConsoleOutputAttribute(self.hconsole,sbinfo.Attributes,clear_length,clear_start,byref(chars_written)) + def push_cursor(self,param): + sbinfo=self.screen_buffer_info() + self.cursor_history.append(sbinfo.CursorPosition) + def pop_cursor(self,param): + if self.cursor_history: + old_pos=self.cursor_history.pop() + windll.kernel32.SetConsoleCursorPosition(self.hconsole,old_pos) + def set_cursor(self,param): + y,sep,x=param.partition(';') + x=to_int(x,1)-1 + y=to_int(y,1)-1 + sbinfo=self.screen_buffer_info() + new_pos=COORD(min(max(0,x),sbinfo.Size.X),min(max(0,y),sbinfo.Size.Y)) + windll.kernel32.SetConsoleCursorPosition(self.hconsole,new_pos) + def set_column(self,param): + x=to_int(param,1)-1 + sbinfo=self.screen_buffer_info() + new_pos=COORD(min(max(0,x),sbinfo.Size.X),sbinfo.CursorPosition.Y) + windll.kernel32.SetConsoleCursorPosition(self.hconsole,new_pos) + def move_cursor(self,x_offset=0,y_offset=0): + sbinfo=self.screen_buffer_info() + new_pos=COORD(min(max(0,sbinfo.CursorPosition.X+x_offset),sbinfo.Size.X),min(max(0,sbinfo.CursorPosition.Y+y_offset),sbinfo.Size.Y)) + windll.kernel32.SetConsoleCursorPosition(self.hconsole,new_pos) + def move_up(self,param): + self.move_cursor(y_offset=-to_int(param,1)) + def move_down(self,param): + 
self.move_cursor(y_offset=to_int(param,1)) + def move_left(self,param): + self.move_cursor(x_offset=-to_int(param,1)) + def move_right(self,param): + self.move_cursor(x_offset=to_int(param,1)) + def next_line(self,param): + sbinfo=self.screen_buffer_info() + self.move_cursor(x_offset=-sbinfo.CursorPosition.X,y_offset=to_int(param,1)) + def prev_line(self,param): + sbinfo=self.screen_buffer_info() + self.move_cursor(x_offset=-sbinfo.CursorPosition.X,y_offset=-to_int(param,1)) + def rgb2bgr(self,c): + return((c&1)<<2)|(c&2)|((c&4)>>2) + def set_color(self,param): + cols=param.split(';') + sbinfo=self.screen_buffer_info() + attr=sbinfo.Attributes + for c in cols: + c=to_int(c,0) + if 29>4)|((attr&0x07)<<4) + windll.kernel32.SetConsoleTextAttribute(self.hconsole,attr) + def show_cursor(self,param): + self._csinfo.bVisible=1 + windll.kernel32.SetConsoleCursorInfo(self.hconsole,byref(self._csinfo)) + def hide_cursor(self,param): + self._csinfo.bVisible=0 + windll.kernel32.SetConsoleCursorInfo(self.hconsole,byref(self._csinfo)) + ansi_command_table={'A':move_up,'B':move_down,'C':move_right,'D':move_left,'E':next_line,'F':prev_line,'G':set_column,'H':set_cursor,'f':set_cursor,'J':clear_screen,'K':clear_line,'h':show_cursor,'l':hide_cursor,'m':set_color,'s':push_cursor,'u':pop_cursor,} + ansi_tokens=re.compile('(?:\x1b\[([0-9?;]*)([a-zA-Z])|([^\x1b]+))') + def write(self,text): + try: + wlock.acquire() + if self._isatty: + for param,cmd,txt in self.ansi_tokens.findall(text): + if cmd: + cmd_func=self.ansi_command_table.get(cmd) + if cmd_func: + cmd_func(self,param) + else: + self.writeconsole(txt) + else: + self.stream.write(text) + finally: + wlock.release() + def writeconsole(self,txt): + chars_written=c_ulong() + writeconsole=windll.kernel32.WriteConsoleA + if isinstance(txt,_type): + writeconsole=windll.kernel32.WriteConsoleW + done=0 + todo=len(txt) + chunk=32<<10 + while todo!=0: + doing=min(chunk,todo) + buf=txt[done:done+doing] + r=writeconsole(self.hconsole,buf,doing,byref(chars_written),None) + if r==0: + chunk>>=1 + continue + done+=doing + todo-=doing + def fileno(self): + return self.stream.fileno() + def flush(self): + pass + def isatty(self): + return self._isatty + if sys.stdout.isatty()or sys.stderr.isatty(): + handle=sys.stdout.isatty()and STD_OUTPUT_HANDLE or STD_ERROR_HANDLE + console=windll.kernel32.GetStdHandle(handle) + sbinfo=CONSOLE_SCREEN_BUFFER_INFO() + def get_term_cols(): + windll.kernel32.GetConsoleScreenBufferInfo(console,byref(sbinfo)) + return sbinfo.Size.X-1 +try: + import struct,fcntl,termios +except ImportError: + pass +else: + if(sys.stdout.isatty()or sys.stderr.isatty())and os.environ.get('TERM','')not in('dumb','emacs'): + FD=sys.stdout.isatty()and sys.stdout.fileno()or sys.stderr.fileno() + def fun(): + return struct.unpack("HHHH",fcntl.ioctl(FD,termios.TIOCGWINSZ,struct.pack("HHHH",0,0,0,0)))[1] + try: + fun() + except Exception ,e: + pass + else: + get_term_cols=fun diff -Nru lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Build.py lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Build.py --- lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Build.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Build.py 2019-06-06 20:12:09.000000000 +0000 @@ -0,0 +1,759 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file + +import os,sys,errno,re,shutil,stat +try: + import cPickle +except ImportError: + import pickle as cPickle +from waflib import Runner,TaskGen,Utils,ConfigSet,Task,Logs,Options,Context,Errors +import waflib.Node +CACHE_DIR='c4che' +CACHE_SUFFIX='_cache.py' +INSTALL=1337 +UNINSTALL=-1337 +SAVED_ATTRS='root node_deps raw_deps task_sigs'.split() +CFG_FILES='cfg_files' +POST_AT_ONCE=0 +POST_LAZY=1 +POST_BOTH=2 +PROTOCOL=-1 +if sys.platform=='cli': + PROTOCOL=0 +class BuildContext(Context.Context): + '''executes the build''' + cmd='build' + variant='' + def __init__(self,**kw): + super(BuildContext,self).__init__(**kw) + self.is_install=0 + self.top_dir=kw.get('top_dir',Context.top_dir) + self.run_dir=kw.get('run_dir',Context.run_dir) + self.post_mode=POST_AT_ONCE + self.out_dir=kw.get('out_dir',Context.out_dir) + self.cache_dir=kw.get('cache_dir',None) + if not self.cache_dir: + self.cache_dir=os.path.join(self.out_dir,CACHE_DIR) + self.all_envs={} + self.task_sigs={} + self.node_deps={} + self.raw_deps={} + self.cache_dir_contents={} + self.task_gen_cache_names={} + self.launch_dir=Context.launch_dir + self.jobs=Options.options.jobs + self.targets=Options.options.targets + self.keep=Options.options.keep + self.progress_bar=Options.options.progress_bar + self.deps_man=Utils.defaultdict(list) + self.current_group=0 + self.groups=[] + self.group_names={} + def get_variant_dir(self): + if not self.variant: + return self.out_dir + return os.path.join(self.out_dir,self.variant) + variant_dir=property(get_variant_dir,None) + def __call__(self,*k,**kw): + kw['bld']=self + ret=TaskGen.task_gen(*k,**kw) + self.task_gen_cache_names={} + self.add_to_group(ret,group=kw.get('group',None)) + return ret + def rule(self,*k,**kw): + def f(rule): + ret=self(*k,**kw) + ret.rule=rule + return ret + return f + def __copy__(self): + raise Errors.WafError('build contexts are not supposed to be copied') + def install_files(self,*k,**kw): + pass + def install_as(self,*k,**kw): + pass + def symlink_as(self,*k,**kw): + pass + def load_envs(self): + node=self.root.find_node(self.cache_dir) + if not node: + raise Errors.WafError('The project was not configured: run "waf configure" first!') + lst=node.ant_glob('**/*%s'%CACHE_SUFFIX,quiet=True) + if not lst: + raise Errors.WafError('The cache directory is empty: reconfigure the project') + for x in lst: + name=x.path_from(node).replace(CACHE_SUFFIX,'').replace('\\','/') + env=ConfigSet.ConfigSet(x.abspath()) + self.all_envs[name]=env + for f in env[CFG_FILES]: + newnode=self.root.find_resource(f) + try: + h=Utils.h_file(newnode.abspath()) + except(IOError,AttributeError): + Logs.error('cannot find %r'%f) + h=Utils.SIG_NIL + newnode.sig=h + def init_dirs(self): + if not(os.path.isabs(self.top_dir)and os.path.isabs(self.out_dir)): + raise Errors.WafError('The project was not configured: run "waf configure" first!') + self.path=self.srcnode=self.root.find_dir(self.top_dir) + self.bldnode=self.root.make_node(self.variant_dir) + self.bldnode.mkdir() + def execute(self): + self.restore() + if not self.all_envs: + self.load_envs() + self.execute_build() + def execute_build(self): + Logs.info("Waf: Entering directory `%s'"%self.variant_dir) + self.recurse([self.run_dir]) + self.pre_build() + self.timer=Utils.Timer() + try: + self.compile() + finally: + if self.progress_bar==1 and sys.stderr.isatty(): + c=len(self.returned_tasks)or 1 + m=self.progress_line(c,c,Logs.colors.BLUE,Logs.colors.NORMAL) + 
Logs.info(m,extra={'stream':sys.stderr,'c1':Logs.colors.cursor_off,'c2':Logs.colors.cursor_on}) + Logs.info("Waf: Leaving directory `%s'"%self.variant_dir) + self.post_build() + def restore(self): + try: + env=ConfigSet.ConfigSet(os.path.join(self.cache_dir,'build.config.py')) + except EnvironmentError: + pass + else: + if env['version']').ljust(cols) + msg=Logs.indicator%(left,bar,right) + return msg + def declare_chain(self,*k,**kw): + return TaskGen.declare_chain(*k,**kw) + def pre_build(self): + for m in getattr(self,'pre_funs',[]): + m(self) + def post_build(self): + for m in getattr(self,'post_funs',[]): + m(self) + def add_pre_fun(self,meth): + try: + self.pre_funs.append(meth) + except AttributeError: + self.pre_funs=[meth] + def add_post_fun(self,meth): + try: + self.post_funs.append(meth) + except AttributeError: + self.post_funs=[meth] + def get_group(self,x): + if not self.groups: + self.add_group() + if x is None: + return self.groups[self.current_group] + if x in self.group_names: + return self.group_names[x] + return self.groups[x] + def add_to_group(self,tgen,group=None): + assert(isinstance(tgen,TaskGen.task_gen)or isinstance(tgen,Task.TaskBase)) + tgen.bld=self + self.get_group(group).append(tgen) + def get_group_name(self,g): + if not isinstance(g,list): + g=self.groups[g] + for x in self.group_names: + if id(self.group_names[x])==id(g): + return x + return'' + def get_group_idx(self,tg): + se=id(tg) + for i in range(len(self.groups)): + for t in self.groups[i]: + if id(t)==se: + return i + return None + def add_group(self,name=None,move=True): + if name and name in self.group_names: + Logs.error('add_group: name %s already present'%name) + g=[] + self.group_names[name]=g + self.groups.append(g) + if move: + self.current_group=len(self.groups)-1 + def set_group(self,idx): + if isinstance(idx,str): + g=self.group_names[idx] + for i in range(len(self.groups)): + if id(g)==id(self.groups[i]): + self.current_group=i + break + else: + self.current_group=idx + def total(self): + total=0 + for group in self.groups: + for tg in group: + try: + total+=len(tg.tasks) + except AttributeError: + total+=1 + return total + def get_targets(self): + to_post=[] + min_grp=0 + for name in self.targets.split(','): + tg=self.get_tgen_by_name(name) + m=self.get_group_idx(tg) + if m>min_grp: + min_grp=m + to_post=[tg] + elif m==min_grp: + to_post.append(tg) + return(min_grp,to_post) + def get_all_task_gen(self): + lst=[] + for g in self.groups: + lst.extend(g) + return lst + def post_group(self): + if self.targets=='*': + for tg in self.groups[self.cur]: + try: + f=tg.post + except AttributeError: + pass + else: + f() + elif self.targets: + if self.cur259 and not tgt.startswith('\\\\?\\'): + tgt='\\\\?\\'+tgt + shutil.copy2(src,tgt) + os.chmod(tgt,kw.get('chmod',Utils.O644)) + def do_install(self,src,tgt,**kw): + d,_=os.path.split(tgt) + if not d: + raise Errors.WafError('Invalid installation given %r->%r'%(src,tgt)) + Utils.check_dir(d) + srclbl=src.replace(self.srcnode.abspath()+os.sep,'') + if not Options.options.force: + try: + st1=os.stat(tgt) + st2=os.stat(src) + except OSError: + pass + else: + if st1.st_mtime+2>=st2.st_mtime and st1.st_size==st2.st_size: + if not self.progress_bar: + Logs.info('- install %s (from %s)'%(tgt,srclbl)) + return False + if not self.progress_bar: + Logs.info('+ install %s (from %s)'%(tgt,srclbl)) + try: + os.chmod(tgt,Utils.O644|stat.S_IMODE(os.stat(tgt).st_mode)) + except EnvironmentError: + pass + try: + os.remove(tgt) + except OSError: + pass + try: + 
self.copy_fun(src,tgt,**kw) + except IOError: + try: + os.stat(src) + except EnvironmentError: + Logs.error('File %r does not exist'%src) + raise Errors.WafError('Could not install the file %r'%tgt) + def do_link(self,src,tgt,**kw): + d,_=os.path.split(tgt) + Utils.check_dir(d) + link=False + if not os.path.islink(tgt): + link=True + elif os.readlink(tgt)!=src: + link=True + if link: + try:os.remove(tgt) + except OSError:pass + if not self.progress_bar: + Logs.info('+ symlink %s (to %s)'%(tgt,src)) + os.symlink(src,tgt) + else: + if not self.progress_bar: + Logs.info('- symlink %s (to %s)'%(tgt,src)) + def run_task_now(self,tsk,postpone): + tsk.post() + if not postpone: + if tsk.runnable_status()==Task.ASK_LATER: + raise self.WafError('cannot post the task %r'%tsk) + tsk.run() + tsk.hasrun=True + def install_files(self,dest,files,env=None,chmod=Utils.O644,relative_trick=False,cwd=None,add=True,postpone=True,task=None): + assert(dest) + tsk=inst(env=env or self.env) + tsk.bld=self + tsk.path=cwd or self.path + tsk.chmod=chmod + tsk.task=task + if isinstance(files,waflib.Node.Node): + tsk.source=[files] + else: + tsk.source=Utils.to_list(files) + tsk.dest=dest + tsk.exec_task=tsk.exec_install_files + tsk.relative_trick=relative_trick + if add:self.add_to_group(tsk) + self.run_task_now(tsk,postpone) + return tsk + def install_as(self,dest,srcfile,env=None,chmod=Utils.O644,cwd=None,add=True,postpone=True,task=None): + assert(dest) + tsk=inst(env=env or self.env) + tsk.bld=self + tsk.path=cwd or self.path + tsk.chmod=chmod + tsk.source=[srcfile] + tsk.task=task + tsk.dest=dest + tsk.exec_task=tsk.exec_install_as + if add:self.add_to_group(tsk) + self.run_task_now(tsk,postpone) + return tsk + def symlink_as(self,dest,src,env=None,cwd=None,add=True,postpone=True,relative_trick=False,task=None): + if Utils.is_win32: + return + assert(dest) + tsk=inst(env=env or self.env) + tsk.bld=self + tsk.dest=dest + tsk.path=cwd or self.path + tsk.source=[] + tsk.task=task + tsk.link=src + tsk.relative_trick=relative_trick + tsk.exec_task=tsk.exec_symlink_as + if add:self.add_to_group(tsk) + self.run_task_now(tsk,postpone) + return tsk +class UninstallContext(InstallContext): + '''removes the targets installed''' + cmd='uninstall' + def __init__(self,**kw): + super(UninstallContext,self).__init__(**kw) + self.is_install=UNINSTALL + def rm_empty_dirs(self,tgt): + while tgt: + tgt=os.path.dirname(tgt) + try: + os.rmdir(tgt) + except OSError: + break + def do_install(self,src,tgt,**kw): + if not self.progress_bar: + Logs.info('- remove %s'%tgt) + self.uninstall.append(tgt) + try: + os.remove(tgt) + except OSError ,e: + if e.errno!=errno.ENOENT: + if not getattr(self,'uninstall_error',None): + self.uninstall_error=True + Logs.warn('build: some files could not be uninstalled (retry with -vv to list them)') + if Logs.verbose>1: + Logs.warn('Could not remove %s (error code %r)'%(e.filename,e.errno)) + self.rm_empty_dirs(tgt) + def do_link(self,src,tgt,**kw): + try: + if not self.progress_bar: + Logs.info('- remove %s'%tgt) + os.remove(tgt) + except OSError: + pass + self.rm_empty_dirs(tgt) + def execute(self): + try: + def runnable_status(self): + return Task.SKIP_ME + setattr(Task.Task,'runnable_status_back',Task.Task.runnable_status) + setattr(Task.Task,'runnable_status',runnable_status) + super(UninstallContext,self).execute() + finally: + setattr(Task.Task,'runnable_status',Task.Task.runnable_status_back) +class CleanContext(BuildContext): + '''cleans the project''' + cmd='clean' + def execute(self): + 
self.restore() + if not self.all_envs: + self.load_envs() + self.recurse([self.run_dir]) + try: + self.clean() + finally: + self.store() + def clean(self): + Logs.debug('build: clean called') + if self.bldnode!=self.srcnode: + lst=[] + for e in self.all_envs.values(): + lst.extend(self.root.find_or_declare(f)for f in e[CFG_FILES]) + for n in self.bldnode.ant_glob('**/*',excl='.lock* *conf_check_*/** config.log c4che/*',quiet=True): + if n in lst: + continue + n.delete() + self.root.children={} + for v in'node_deps task_sigs raw_deps'.split(): + setattr(self,v,{}) +class ListContext(BuildContext): + '''lists the targets to execute''' + cmd='list' + def execute(self): + self.restore() + if not self.all_envs: + self.load_envs() + self.recurse([self.run_dir]) + self.pre_build() + self.timer=Utils.Timer() + for g in self.groups: + for tg in g: + try: + f=tg.post + except AttributeError: + pass + else: + f() + try: + self.get_tgen_by_name('') + except Exception: + pass + lst=list(self.task_gen_cache_names.keys()) + lst.sort() + for k in lst: + Logs.pprint('GREEN',k) +class StepContext(BuildContext): + '''executes tasks in a step-by-step fashion, for debugging''' + cmd='step' + def __init__(self,**kw): + super(StepContext,self).__init__(**kw) + self.files=Options.options.files + def compile(self): + if not self.files: + Logs.warn('Add a pattern for the debug build, for example "waf step --files=main.c,app"') + BuildContext.compile(self) + return + targets=None + if self.targets and self.targets!='*': + targets=self.targets.split(',') + for g in self.groups: + for tg in g: + if targets and tg.name not in targets: + continue + try: + f=tg.post + except AttributeError: + pass + else: + f() + for pat in self.files.split(','): + matcher=self.get_matcher(pat) + for tg in g: + if isinstance(tg,Task.TaskBase): + lst=[tg] + else: + lst=tg.tasks + for tsk in lst: + do_exec=False + for node in getattr(tsk,'inputs',[]): + if matcher(node,output=False): + do_exec=True + break + for node in getattr(tsk,'outputs',[]): + if matcher(node,output=True): + do_exec=True + break + if do_exec: + ret=tsk.run() + Logs.info('%s -> exit %r'%(str(tsk),ret)) + def get_matcher(self,pat): + inn=True + out=True + if pat.startswith('in:'): + out=False + pat=pat.replace('in:','') + elif pat.startswith('out:'): + inn=False + pat=pat.replace('out:','') + anode=self.root.find_node(pat) + pattern=None + if not anode: + if not pat.startswith('^'): + pat='^.+?%s'%pat + if not pat.endswith('$'): + pat='%s$'%pat + pattern=re.compile(pat) + def match(node,output): + if output==True and not out: + return False + if output==False and not inn: + return False + if anode: + return anode==node + else: + return pattern.match(node.abspath()) + return match diff -Nru lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/ConfigSet.py lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/ConfigSet.py --- lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/ConfigSet.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/ConfigSet.py 2019-06-06 20:12:09.000000000 +0000 @@ -0,0 +1,155 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file + +import copy,re,os +from waflib import Logs,Utils +re_imp=re.compile('^(#)*?([^#=]*?)\ =\ (.*?)$',re.M) +class ConfigSet(object): + __slots__=('table','parent') + def __init__(self,filename=None): + self.table={} + if filename: + self.load(filename) + def __contains__(self,key): + if key in self.table:return True + try:return self.parent.__contains__(key) + except AttributeError:return False + def keys(self): + keys=set() + cur=self + while cur: + keys.update(cur.table.keys()) + cur=getattr(cur,'parent',None) + keys=list(keys) + keys.sort() + return keys + def __str__(self): + return"\n".join(["%r %r"%(x,self.__getitem__(x))for x in self.keys()]) + def __getitem__(self,key): + try: + while 1: + x=self.table.get(key,None) + if not x is None: + return x + self=self.parent + except AttributeError: + return[] + def __setitem__(self,key,value): + self.table[key]=value + def __delitem__(self,key): + self[key]=[] + def __getattr__(self,name): + if name in self.__slots__: + return object.__getattr__(self,name) + else: + return self[name] + def __setattr__(self,name,value): + if name in self.__slots__: + object.__setattr__(self,name,value) + else: + self[name]=value + def __delattr__(self,name): + if name in self.__slots__: + object.__delattr__(self,name) + else: + del self[name] + def derive(self): + newenv=ConfigSet() + newenv.parent=self + return newenv + def detach(self): + tbl=self.get_merged_dict() + try: + delattr(self,'parent') + except AttributeError: + pass + else: + keys=tbl.keys() + for x in keys: + tbl[x]=copy.deepcopy(tbl[x]) + self.table=tbl + return self + def get_flat(self,key): + s=self[key] + if isinstance(s,str):return s + return' '.join(s) + def _get_list_value_for_modification(self,key): + try: + value=self.table[key] + except KeyError: + try:value=self.parent[key] + except AttributeError:value=[] + if isinstance(value,list): + value=value[:] + else: + value=[value] + else: + if not isinstance(value,list): + value=[value] + self.table[key]=value + return value + def append_value(self,var,val): + if isinstance(val,str): + val=[val] + current_value=self._get_list_value_for_modification(var) + current_value.extend(val) + def prepend_value(self,var,val): + if isinstance(val,str): + val=[val] + self.table[var]=val+self._get_list_value_for_modification(var) + def append_unique(self,var,val): + if isinstance(val,str): + val=[val] + current_value=self._get_list_value_for_modification(var) + for x in val: + if x not in current_value: + current_value.append(x) + def get_merged_dict(self): + table_list=[] + env=self + while 1: + table_list.insert(0,env.table) + try:env=env.parent + except AttributeError:break + merged_table={} + for table in table_list: + merged_table.update(table) + return merged_table + def store(self,filename): + try: + os.makedirs(os.path.split(filename)[0]) + except OSError: + pass + buf=[] + merged_table=self.get_merged_dict() + keys=list(merged_table.keys()) + keys.sort() + try: + fun=ascii + except NameError: + fun=repr + for k in keys: + if k!='undo_stack': + buf.append('%s = %s\n'%(k,fun(merged_table[k]))) + Utils.writef(filename,''.join(buf)) + def load(self,filename): + tbl=self.table + code=Utils.readf(filename,m='rU') + for m in re_imp.finditer(code): + g=m.group + tbl[g(2)]=eval(g(3)) + Logs.debug('env: %s'%str(self.table)) + def update(self,d): + for k,v in d.items(): + self[k]=v + def stash(self): + orig=self.table + tbl=self.table=self.table.copy() + for x in tbl.keys(): + 
tbl[x]=copy.deepcopy(tbl[x]) + self.undo_stack=self.undo_stack+[orig] + def commit(self): + self.undo_stack.pop(-1) + def revert(self): + self.table=self.undo_stack.pop(-1) diff -Nru lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Configure.py lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Configure.py --- lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Configure.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Configure.py 2019-06-06 20:12:09.000000000 +0000 @@ -0,0 +1,379 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file + +import os,shlex,sys,time,re,shutil +from waflib import ConfigSet,Utils,Options,Logs,Context,Build,Errors +BREAK='break' +CONTINUE='continue' +WAF_CONFIG_LOG='config.log' +autoconfig=False +conf_template='''# project %(app)s configured on %(now)s by +# waf %(wafver)s (abi %(abi)s, python %(pyver)x on %(systype)s) +# using %(args)s +#''' +class ConfigurationContext(Context.Context): + '''configures the project''' + cmd='configure' + error_handlers=[] + def __init__(self,**kw): + super(ConfigurationContext,self).__init__(**kw) + self.environ=dict(os.environ) + self.all_envs={} + self.top_dir=None + self.out_dir=None + self.tools=[] + self.hash=0 + self.files=[] + self.tool_cache=[] + self.setenv('') + def setenv(self,name,env=None): + if name not in self.all_envs or env: + if not env: + env=ConfigSet.ConfigSet() + self.prepare_env(env) + else: + env=env.derive() + self.all_envs[name]=env + self.variant=name + def get_env(self): + return self.all_envs[self.variant] + def set_env(self,val): + self.all_envs[self.variant]=val + env=property(get_env,set_env) + def init_dirs(self): + top=self.top_dir + if not top: + top=Options.options.top + if not top: + top=getattr(Context.g_module,Context.TOP,None) + if not top: + top=self.path.abspath() + top=os.path.abspath(top) + self.srcnode=(os.path.isabs(top)and self.root or self.path).find_dir(top) + assert(self.srcnode) + out=self.out_dir + if not out: + out=Options.options.out + if not out: + out=getattr(Context.g_module,Context.OUT,None) + if not out: + out=Options.lockfile.replace('.lock-waf_%s_'%sys.platform,'').replace('.lock-waf','') + out=os.path.realpath(out) + self.bldnode=(os.path.isabs(out)and self.root or self.path).make_node(out) + self.bldnode.mkdir() + if not os.path.isdir(self.bldnode.abspath()): + conf.fatal('Could not create the build directory %s'%self.bldnode.abspath()) + def execute(self): + self.init_dirs() + self.cachedir=self.bldnode.make_node(Build.CACHE_DIR) + self.cachedir.mkdir() + path=os.path.join(self.bldnode.abspath(),WAF_CONFIG_LOG) + self.logger=Logs.make_logger(path,'cfg') + app=getattr(Context.g_module,'APPNAME','') + if app: + ver=getattr(Context.g_module,'VERSION','') + if ver: + app="%s (%s)"%(app,ver) + params={'now':time.ctime(),'pyver':sys.hexversion,'systype':sys.platform,'args':" ".join(sys.argv),'wafver':Context.WAFVERSION,'abi':Context.ABI,'app':app} + self.to_log(conf_template%params) + self.msg('Setting top to',self.srcnode.abspath()) + self.msg('Setting out to',self.bldnode.abspath()) + if id(self.srcnode)==id(self.bldnode): + Logs.warn('Setting top == out (remember to use "update_outputs")') + elif id(self.path)!=id(self.srcnode): + if self.srcnode.is_child_of(self.path): + Logs.warn('Are you certain that you do not want to set top="." 
?') + super(ConfigurationContext,self).execute() + self.store() + Context.top_dir=self.srcnode.abspath() + Context.out_dir=self.bldnode.abspath() + env=ConfigSet.ConfigSet() + env['argv']=sys.argv + env['options']=Options.options.__dict__ + env.run_dir=Context.run_dir + env.top_dir=Context.top_dir + env.out_dir=Context.out_dir + env['hash']=self.hash + env['files']=self.files + env['environ']=dict(self.environ) + if not self.env.NO_LOCK_IN_RUN and not getattr(Options.options,'no_lock_in_run'): + env.store(os.path.join(Context.run_dir,Options.lockfile)) + if not self.env.NO_LOCK_IN_TOP and not getattr(Options.options,'no_lock_in_top'): + env.store(os.path.join(Context.top_dir,Options.lockfile)) + if not self.env.NO_LOCK_IN_OUT and not getattr(Options.options,'no_lock_in_out'): + env.store(os.path.join(Context.out_dir,Options.lockfile)) + def prepare_env(self,env): + if not env.PREFIX: + if Options.options.prefix or Utils.is_win32: + env.PREFIX=Utils.sane_path(Options.options.prefix) + else: + env.PREFIX='' + if not env.BINDIR: + if Options.options.bindir: + env.BINDIR=Utils.sane_path(Options.options.bindir) + else: + env.BINDIR=Utils.subst_vars('${PREFIX}/bin',env) + if not env.LIBDIR: + if Options.options.libdir: + env.LIBDIR=Utils.sane_path(Options.options.libdir) + else: + env.LIBDIR=Utils.subst_vars('${PREFIX}/lib%s'%Utils.lib64(),env) + def store(self): + n=self.cachedir.make_node('build.config.py') + n.write('version = 0x%x\ntools = %r\n'%(Context.HEXVERSION,self.tools)) + if not self.all_envs: + self.fatal('nothing to store in the configuration context!') + for key in self.all_envs: + tmpenv=self.all_envs[key] + tmpenv.store(os.path.join(self.cachedir.abspath(),key+Build.CACHE_SUFFIX)) + def load(self,input,tooldir=None,funs=None,with_sys_path=True): + tools=Utils.to_list(input) + if tooldir:tooldir=Utils.to_list(tooldir) + for tool in tools: + mag=(tool,id(self.env),tooldir,funs) + if mag in self.tool_cache: + self.to_log('(tool %s is already loaded, skipping)'%tool) + continue + self.tool_cache.append(mag) + module=None + try: + module=Context.load_tool(tool,tooldir,ctx=self,with_sys_path=with_sys_path) + except ImportError ,e: + self.fatal('Could not load the Waf tool %r from %r\n%s'%(tool,sys.path,e)) + except Exception ,e: + self.to_log('imp %r (%r & %r)'%(tool,tooldir,funs)) + self.to_log(Utils.ex_stack()) + raise + if funs is not None: + self.eval_rules(funs) + else: + func=getattr(module,'configure',None) + if func: + if type(func)is type(Utils.readf):func(self) + else:self.eval_rules(func) + self.tools.append({'tool':tool,'tooldir':tooldir,'funs':funs}) + def post_recurse(self,node): + super(ConfigurationContext,self).post_recurse(node) + self.hash=Utils.h_list((self.hash,node.read('rb'))) + self.files.append(node.abspath()) + def eval_rules(self,rules): + self.rules=Utils.to_list(rules) + for x in self.rules: + f=getattr(self,x) + if not f:self.fatal("No such method '%s'."%x) + try: + f() + except Exception ,e: + ret=self.err_handler(x,e) + if ret==BREAK: + break + elif ret==CONTINUE: + continue + else: + raise + def err_handler(self,fun,error): + pass +def conf(f): + def fun(*k,**kw): + mandatory=True + if'mandatory'in kw: + mandatory=kw['mandatory'] + del kw['mandatory'] + try: + return f(*k,**kw) + except Errors.ConfigurationError: + if mandatory: + raise + fun.__name__=f.__name__ + setattr(ConfigurationContext,f.__name__,fun) + setattr(Build.BuildContext,f.__name__,fun) + return f +@conf +def add_os_flags(self,var,dest=None,dup=True): + try: + 
flags=shlex.split(self.environ[var]) + except KeyError: + return + if dup or''.join(flags)not in''.join(Utils.to_list(self.env[dest or var])): + self.env.append_value(dest or var,flags) +@conf +def cmd_to_list(self,cmd): + if isinstance(cmd,str)and cmd.find(' '): + try: + os.stat(cmd) + except OSError: + return shlex.split(cmd) + else: + return[cmd] + return cmd +@conf +def check_waf_version(self,mini='1.7.99',maxi='1.9.0',**kw): + self.start_msg('Checking for waf version in %s-%s'%(str(mini),str(maxi)),**kw) + ver=Context.HEXVERSION + if Utils.num2ver(mini)>ver: + self.fatal('waf version should be at least %r (%r found)'%(Utils.num2ver(mini),ver)) + if Utils.num2ver(maxi)<ver: + self.fatal('waf version should be at most %r (%r found)'%(Utils.num2ver(maxi),ver)) + self.end_msg('ok') +@conf +def find_file(self,filename,path_list=[]): + for n in Utils.to_list(filename): + for d in Utils.to_list(path_list): + p=os.path.expanduser(os.path.join(d,n)) + if os.path.exists(p): + return p + self.fatal('Could not find %r'%filename) +@conf +def find_program(self,filename,**kw): + exts=kw.get('exts',Utils.is_win32 and'.exe,.com,.bat,.cmd'or',.sh,.pl,.py') + environ=kw.get('environ',getattr(self,'environ',os.environ)) + ret='' + filename=Utils.to_list(filename) + msg=kw.get('msg',', '.join(filename)) + var=kw.get('var','') + if not var: + var=re.sub(r'[-.]','_',filename[0].upper()) + path_list=kw.get('path_list','') + if path_list: + path_list=Utils.to_list(path_list) + else: + path_list=environ.get('PATH','').split(os.pathsep) + if var in environ: + filename=environ[var] + if os.path.isfile(filename): + ret=[filename] + else: + ret=self.cmd_to_list(filename) + elif self.env[var]: + ret=self.env[var] + ret=self.cmd_to_list(ret) + else: + if not ret: + ret=self.find_binary(filename,exts.split(','),path_list) + if not ret and Utils.winreg: + ret=Utils.get_registry_app_path(Utils.winreg.HKEY_CURRENT_USER,filename) + if not ret and Utils.winreg: + ret=Utils.get_registry_app_path(Utils.winreg.HKEY_LOCAL_MACHINE,filename) + ret=self.cmd_to_list(ret) + if ret: + if len(ret)==1: + retmsg=ret[0] + else: + retmsg=ret + else: + retmsg=False + self.msg("Checking for program '%s'"%msg,retmsg,**kw) + if not kw.get('quiet',None): + self.to_log('find program=%r paths=%r var=%r -> %r'%(filename,path_list,var,ret)) + if not ret: + self.fatal(kw.get('errmsg','')or'Could not find the program %r'%filename) + interpreter=kw.get('interpreter',None) + if interpreter is None: + if not Utils.check_exe(ret[0],env=environ): + self.fatal('Program %r is not executable'%ret) + self.env[var]=ret + else: + self.env[var]=self.env[interpreter]+ret + return ret +@conf +def find_binary(self,filenames,exts,paths): + for f in filenames: + for ext in exts: + exe_name=f+ext + if os.path.isabs(exe_name): + if os.path.isfile(exe_name): + return exe_name + else: + for path in paths: + x=os.path.expanduser(os.path.join(path,exe_name)) + if os.path.isfile(x): + return x + return None +@conf +def run_build(self,*k,**kw): + lst=[str(v)for(p,v)in kw.items()if p!='env'] + h=Utils.h_list(lst) + dir=self.bldnode.abspath()+os.sep+(not Utils.is_win32 and'.'or'')+'conf_check_'+Utils.to_hex(h) + try: + os.makedirs(dir) + except OSError: + pass + try: + os.stat(dir) + except OSError: + self.fatal('cannot use the configuration test folder %r'%dir) + cachemode=getattr(Options.options,'confcache',None) + if cachemode==1: + try: + proj=ConfigSet.ConfigSet(os.path.join(dir,'cache_run_build')) + except OSError: + pass + except IOError: + pass + else: + ret=proj['cache_run_build'] + if isinstance(ret,str)and ret.startswith('Test does not build'): + self.fatal(ret) + return ret + bdir=os.path.join(dir,'testbuild') + if not os.path.exists(bdir): + os.makedirs(bdir) + self.test_bld=bld=Build.BuildContext(top_dir=dir,out_dir=bdir) + bld.init_dirs() + bld.progress_bar=0 + bld.targets='*' + bld.logger=self.logger + bld.all_envs.update(self.all_envs) + bld.env=kw['env'] + bld.kw=kw + bld.conf=self + kw['build_fun'](bld) + ret=-1 + try: + try: + bld.compile() + except Errors.WafError: + ret='Test does not build: %s'%Utils.ex_stack() + self.fatal(ret) + else: + ret=getattr(bld,'retval',0) + finally: + if cachemode==1: + proj=ConfigSet.ConfigSet() + proj['cache_run_build']=ret + proj.store(os.path.join(dir,'cache_run_build')) + else: + shutil.rmtree(dir) + return ret +@conf +def ret_msg(self,msg,args): + if isinstance(msg,str): + return msg + return msg(args) +@conf +def test(self,*k,**kw): + if not'env'in kw: + kw['env']=self.env.derive() + if kw.get('validate',None): + kw['validate'](kw) + self.start_msg(kw['msg'],**kw) + ret=None + try: + ret=self.run_build(*k,**kw) + except self.errors.ConfigurationError: + self.end_msg(kw['errmsg'],'YELLOW',**kw) + if Logs.verbose>1: + raise + else: + self.fatal('The configuration failed') + else: + kw['success']=ret + if kw.get('post_check',None): + ret=kw['post_check'](kw) + if ret: + self.end_msg(kw['errmsg'],'YELLOW',**kw) + self.fatal('The configuration failed %r'%ret) + else: + self.end_msg(self.ret_msg(kw['okmsg'],kw),**kw) + return ret diff -Nru
lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Context.py lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Context.py --- lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Context.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Context.py 2019-06-06 20:12:09.000000000 +0000 @@ -0,0 +1,394 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file + +import os,re,imp,sys +from waflib import Utils,Errors,Logs +import waflib.Node +HEXVERSION=0x1081600 +WAFVERSION="1.8.22" +WAFREVISION="17d4d4faa52c454eb3580e482df69b2a80e19fa7" +ABI=98 +DBFILE='.wafpickle-%s-%d-%d'%(sys.platform,sys.hexversion,ABI) +APPNAME='APPNAME' +VERSION='VERSION' +TOP='top' +OUT='out' +WSCRIPT_FILE='wscript' +launch_dir='' +run_dir='' +top_dir='' +out_dir='' +waf_dir='' +local_repo='' +remote_repo='https://raw.githubusercontent.com/waf-project/waf/master/' +remote_locs=['waflib/extras','waflib/Tools'] +g_module=None +STDOUT=1 +STDERR=-1 +BOTH=0 +classes=[] +def create_context(cmd_name,*k,**kw): + global classes + for x in classes: + if x.cmd==cmd_name: + return x(*k,**kw) + ctx=Context(*k,**kw) + ctx.fun=cmd_name + return ctx +class store_context(type): + def __init__(cls,name,bases,dict): + super(store_context,cls).__init__(name,bases,dict) + name=cls.__name__ + if name=='ctx'or name=='Context': + return + try: + cls.cmd + except AttributeError: + raise Errors.WafError('Missing command for the context class %r (cmd)'%name) + if not getattr(cls,'fun',None): + cls.fun=cls.cmd + global classes + classes.insert(0,cls) +ctx=store_context('ctx',(object,),{}) +class Context(ctx): + errors=Errors + tools={} + def __init__(self,**kw): + try: + rd=kw['run_dir'] + except KeyError: + global run_dir + rd=run_dir + self.node_class=type("Nod3",(waflib.Node.Node,),{}) + self.node_class.__module__="waflib.Node" + self.node_class.ctx=self + self.root=self.node_class('',None) + self.cur_script=None + self.path=self.root.find_dir(rd) + self.stack_path=[] + self.exec_dict={'ctx':self,'conf':self,'bld':self,'opt':self} + self.logger=None + def __hash__(self): + return id(self) + def finalize(self): + try: + logger=self.logger + except AttributeError: + pass + else: + Logs.free_logger(logger) + delattr(self,'logger') + def load(self,tool_list,*k,**kw): + tools=Utils.to_list(tool_list) + path=Utils.to_list(kw.get('tooldir','')) + with_sys_path=kw.get('with_sys_path',True) + for t in tools: + module=load_tool(t,path,with_sys_path=with_sys_path) + fun=getattr(module,kw.get('name',self.fun),None) + if fun: + fun(self) + def execute(self): + global g_module + self.recurse([os.path.dirname(g_module.root_path)]) + def pre_recurse(self,node): + self.stack_path.append(self.cur_script) + self.cur_script=node + self.path=node.parent + def post_recurse(self,node): + self.cur_script=self.stack_path.pop() + if self.cur_script: + self.path=self.cur_script.parent + def recurse(self,dirs,name=None,mandatory=True,once=True,encoding=None): + try: + cache=self.recurse_cache + except AttributeError: + cache=self.recurse_cache={} + for d in Utils.to_list(dirs): + if not os.path.isabs(d): + d=os.path.join(self.path.abspath(),d) + WSCRIPT=os.path.join(d,WSCRIPT_FILE) + WSCRIPT_FUN=WSCRIPT+'_'+(name or self.fun) + node=self.root.find_node(WSCRIPT_FUN) + if node and(not once or node not in cache): + cache[node]=True + self.pre_recurse(node) + try: + function_code=node.read('rU',encoding) 
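recurse() above supports two wscript layouts: a per-command file named wscript_<command>, whose text is exec'd directly with ctx/conf/bld/opt in scope, or a plain wscript module defining a function named after the command. A minimal sketch of the second layout (a hypothetical project, not lilv's actual wscript):

    # wscript -- illustrative sketch only; names and sources are invented
    APPNAME = 'example'
    VERSION = '1.0.0'

    def options(opt):
        opt.load('compiler_c')

    def configure(conf):
        conf.load('compiler_c')

    def build(bld):
        bld.program(source='main.c', target='example')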
+ exec(compile(function_code,node.abspath(),'exec'),self.exec_dict) + finally: + self.post_recurse(node) + elif not node: + node=self.root.find_node(WSCRIPT) + tup=(node,name or self.fun) + if node and(not once or tup not in cache): + cache[tup]=True + self.pre_recurse(node) + try: + wscript_module=load_module(node.abspath(),encoding=encoding) + user_function=getattr(wscript_module,(name or self.fun),None) + if not user_function: + if not mandatory: + continue + raise Errors.WafError('No function %s defined in %s'%(name or self.fun,node.abspath())) + user_function(self) + finally: + self.post_recurse(node) + elif not node: + if not mandatory: + continue + try: + os.listdir(d) + except OSError: + raise Errors.WafError('Cannot read the folder %r'%d) + raise Errors.WafError('No wscript file in directory %s'%d) + def exec_command(self,cmd,**kw): + subprocess=Utils.subprocess + kw['shell']=isinstance(cmd,str) + Logs.debug('runner: %r'%(cmd,)) + Logs.debug('runner_env: kw=%s'%kw) + if self.logger: + self.logger.info(cmd) + if'stdout'not in kw: + kw['stdout']=subprocess.PIPE + if'stderr'not in kw: + kw['stderr']=subprocess.PIPE + if Logs.verbose and not kw['shell']and not Utils.check_exe(cmd[0]): + raise Errors.WafError("Program %s not found!"%cmd[0]) + wargs={} + if'timeout'in kw: + if kw['timeout']is not None: + wargs['timeout']=kw['timeout'] + del kw['timeout'] + if'input'in kw: + if kw['input']: + wargs['input']=kw['input'] + kw['stdin']=subprocess.PIPE + del kw['input'] + try: + if kw['stdout']or kw['stderr']: + p=subprocess.Popen(cmd,**kw) + (out,err)=p.communicate(**wargs) + ret=p.returncode + else: + out,err=(None,None) + ret=subprocess.Popen(cmd,**kw).wait(**wargs) + except Exception ,e: + raise Errors.WafError('Execution failure: %s'%str(e),ex=e) + if out: + if not isinstance(out,str): + out=out.decode(sys.stdout.encoding or'iso8859-1') + if self.logger: + self.logger.debug('out: %s'%out) + else: + Logs.info(out,extra={'stream':sys.stdout,'c1':''}) + if err: + if not isinstance(err,str): + err=err.decode(sys.stdout.encoding or'iso8859-1') + if self.logger: + self.logger.error('err: %s'%err) + else: + Logs.info(err,extra={'stream':sys.stderr,'c1':''}) + return ret + def cmd_and_log(self,cmd,**kw): + subprocess=Utils.subprocess + kw['shell']=isinstance(cmd,str) + Logs.debug('runner: %r'%(cmd,)) + if'quiet'in kw: + quiet=kw['quiet'] + del kw['quiet'] + else: + quiet=None + if'output'in kw: + to_ret=kw['output'] + del kw['output'] + else: + to_ret=STDOUT + if Logs.verbose and not kw['shell']and not Utils.check_exe(cmd[0]): + raise Errors.WafError("Program %s not found!"%cmd[0]) + kw['stdout']=kw['stderr']=subprocess.PIPE + if quiet is None: + self.to_log(cmd) + wargs={} + if'timeout'in kw: + if kw['timeout']is not None: + wargs['timeout']=kw['timeout'] + del kw['timeout'] + if'input'in kw: + if kw['input']: + wargs['input']=kw['input'] + kw['stdin']=subprocess.PIPE + del kw['input'] + try: + p=subprocess.Popen(cmd,**kw) + (out,err)=p.communicate(**wargs) + except Exception ,e: + raise Errors.WafError('Execution failure: %s'%str(e),ex=e) + if not isinstance(out,str): + out=out.decode(sys.stdout.encoding or'iso8859-1') + if not isinstance(err,str): + err=err.decode(sys.stdout.encoding or'iso8859-1') + if out and quiet!=STDOUT and quiet!=BOTH: + self.to_log('out: %s'%out) + if err and quiet!=STDERR and quiet!=BOTH: + self.to_log('err: %s'%err) + if p.returncode: + e=Errors.WafError('Command %r returned %r'%(cmd,p.returncode)) + e.returncode=p.returncode + e.stderr=err + e.stdout=out + raise e 
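The WafError raised just above carries returncode, stdout and stderr as attributes, so configure code can report the captured output instead of losing it. A minimal sketch, assuming a configure context `conf` (written with the modern `except ... as` spelling, unlike the Python 2 syntax of the packed file):

    from waflib import Errors

    def probe_lv2_version(conf):
        # Hypothetical helper built on cmd_and_log()
        try:
            return conf.cmd_and_log(['pkg-config', '--modversion', 'lv2'])
        except Errors.WafError as e:
            # stderr/returncode are only set when the command itself ran
            conf.to_log(getattr(e, 'stderr', '') or str(e))
            return None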
+ if to_ret==BOTH: + return(out,err) + elif to_ret==STDERR: + return err + return out + def fatal(self,msg,ex=None): + if self.logger: + self.logger.info('from %s: %s'%(self.path.abspath(),msg)) + try: + msg='%s\n(complete log in %s)'%(msg,self.logger.handlers[0].baseFilename) + except Exception: + pass + raise self.errors.ConfigurationError(msg,ex=ex) + def to_log(self,msg): + if not msg: + return + if self.logger: + self.logger.info(msg) + else: + sys.stderr.write(str(msg)) + sys.stderr.flush() + def msg(self,*k,**kw): + try: + msg=kw['msg'] + except KeyError: + msg=k[0] + self.start_msg(msg,**kw) + try: + result=kw['result'] + except KeyError: + result=k[1] + color=kw.get('color',None) + if not isinstance(color,str): + color=result and'GREEN'or'YELLOW' + self.end_msg(result,color,**kw) + def start_msg(self,*k,**kw): + if kw.get('quiet',None): + return + msg=kw.get('msg',None)or k[0] + try: + if self.in_msg: + self.in_msg+=1 + return + except AttributeError: + self.in_msg=0 + self.in_msg+=1 + try: + self.line_just=max(self.line_just,len(msg)) + except AttributeError: + self.line_just=max(40,len(msg)) + for x in(self.line_just*'-',msg): + self.to_log(x) + Logs.pprint('NORMAL',"%s :"%msg.ljust(self.line_just),sep='') + def end_msg(self,*k,**kw): + if kw.get('quiet',None): + return + self.in_msg-=1 + if self.in_msg: + return + result=kw.get('result',None)or k[0] + defcolor='GREEN' + if result==True: + msg='ok' + elif result==False: + msg='not found' + defcolor='YELLOW' + else: + msg=str(result) + self.to_log(msg) + try: + color=kw['color'] + except KeyError: + if len(k)>1 and k[1]in Logs.colors_lst: + color=k[1] + else: + color=defcolor + Logs.pprint(color,msg) + def load_special_tools(self,var,ban=[]): + global waf_dir + if os.path.isdir(waf_dir): + lst=self.root.find_node(waf_dir).find_node('waflib/extras').ant_glob(var) + for x in lst: + if not x.name in ban: + load_tool(x.name.replace('.py','')) + else: + from zipfile import PyZipFile + waflibs=PyZipFile(waf_dir) + lst=waflibs.namelist() + for x in lst: + if not re.match("waflib/extras/%s"%var.replace("*",".*"),var): + continue + f=os.path.basename(x) + doban=False + for b in ban: + r=b.replace("*",".*") + if re.match(r,f): + doban=True + if not doban: + f=f.replace('.py','') + load_tool(f) +cache_modules={} +def load_module(path,encoding=None): + try: + return cache_modules[path] + except KeyError: + pass + module=imp.new_module(WSCRIPT_FILE) + try: + code=Utils.readf(path,m='rU',encoding=encoding) + except EnvironmentError: + raise Errors.WafError('Could not read the file %r'%path) + module_dir=os.path.dirname(path) + sys.path.insert(0,module_dir) + try:exec(compile(code,path,'exec'),module.__dict__) + finally:sys.path.remove(module_dir) + cache_modules[path]=module + return module +def load_tool(tool,tooldir=None,ctx=None,with_sys_path=True): + if tool=='java': + tool='javaw' + else: + tool=tool.replace('++','xx') + origSysPath=sys.path + if not with_sys_path:sys.path=[] + try: + if tooldir: + assert isinstance(tooldir,list) + sys.path=tooldir+sys.path + try: + __import__(tool) + finally: + for d in tooldir: + sys.path.remove(d) + ret=sys.modules[tool] + Context.tools[tool]=ret + return ret + else: + if not with_sys_path:sys.path.insert(0,waf_dir) + try: + for x in('waflib.Tools.%s','waflib.extras.%s','waflib.%s','%s'): + try: + __import__(x%tool) + break + except ImportError: + x=None + if x is None: + __import__(tool) + finally: + if not with_sys_path:sys.path.remove(waf_dir) + ret=sys.modules[x%tool] + Context.tools[tool]=ret + 
return ret + finally: + if not with_sys_path:sys.path+=origSysPath diff -Nru lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Errors.py lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Errors.py --- lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Errors.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Errors.py 2019-06-06 20:12:09.000000000 +0000 @@ -0,0 +1,37 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file + +import traceback,sys +class WafError(Exception): + def __init__(self,msg='',ex=None): + self.msg=msg + assert not isinstance(msg,Exception) + self.stack=[] + if ex: + if not msg: + self.msg=str(ex) + if isinstance(ex,WafError): + self.stack=ex.stack + else: + self.stack=traceback.extract_tb(sys.exc_info()[2]) + self.stack+=traceback.extract_stack()[:-1] + self.verbose_msg=''.join(traceback.format_list(self.stack)) + def __str__(self): + return str(self.msg) +class BuildError(WafError): + def __init__(self,error_tasks=[]): + self.tasks=error_tasks + WafError.__init__(self,self.format_error()) + def format_error(self): + lst=['Build failed'] + for tsk in self.tasks: + txt=tsk.format_error() + if txt:lst.append(txt) + return'\n'.join(lst) +class ConfigurationError(WafError): + pass +class TaskRescan(WafError): + pass +class TaskNotReady(WafError): + pass diff -Nru lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/extras/autowaf.py lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/extras/autowaf.py --- lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/extras/autowaf.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/extras/autowaf.py 2019-10-17 11:18:08.000000000 +0000 @@ -0,0 +1,1431 @@ +import glob +import os +import subprocess +import sys +import time + +from waflib import Configure, ConfigSet, Build, Context, Logs, Options, Utils +from waflib.TaskGen import feature, before, after + +NONEMPTY = -10 + +if sys.platform == 'win32': + lib_path_name = 'PATH' +elif sys.platform == 'darwin': + lib_path_name = 'DYLD_LIBRARY_PATH' +else: + lib_path_name = 'LD_LIBRARY_PATH' + +# Compute dependencies globally +# import preproc +# preproc.go_absolute = True + +@feature('c', 'cxx') +@after('apply_incpaths') +def include_config_h(self): + self.env.append_value('INCPATHS', self.bld.bldnode.abspath()) + +class OptionsContext(Options.OptionsContext): + def __init__(self, **kwargs): + super(OptionsContext, self).__init__(**kwargs) + set_options(self) + + def configuration_options(self): + return self.get_option_group('Configuration options') + + def add_flags(self, group, flags): + """Tersely add flags (a dictionary of longname:desc) to a group""" + for name, desc in flags.items(): + group.add_option('--' + name, action='store_true', + dest=name.replace('-', '_'), help=desc) + +def set_options(opt, debug_by_default=False): + "Add standard autowaf options" + opts = opt.get_option_group('Configuration options') + + # Standard directory options + opts.add_option('--bindir', type='string', + help="executable programs [default: PREFIX/bin]") + opts.add_option('--configdir', type='string', + help="configuration data [default: PREFIX/etc]") + opts.add_option('--datadir', type='string', + help="shared data [default: PREFIX/share]") + opts.add_option('--includedir', type='string', + help="header files [default: 
PREFIX/include]") + opts.add_option('--libdir', type='string', + help="libraries [default: PREFIX/lib]") + opts.add_option('--mandir', type='string', + help="manual pages [default: DATADIR/man]") + opts.add_option('--docdir', type='string', + help="HTML documentation [default: DATADIR/doc]") + + # Build options + if debug_by_default: + opts.add_option('--optimize', action='store_false', default=True, + dest='debug', help="build optimized binaries") + else: + opts.add_option('-d', '--debug', action='store_true', default=False, + dest='debug', help="build debuggable binaries") + opts.add_option('--pardebug', action='store_true', default=False, + dest='pardebug', + help="build debug libraries with D suffix") + + opts.add_option('-s', '--strict', action='store_true', default=False, + dest='strict', + help="use strict compiler flags and show all warnings") + opts.add_option('-S', '--ultra-strict', action='store_true', default=False, + dest='ultra_strict', + help="use extremely strict compiler flags (likely noisy)") + opts.add_option('--docs', action='store_true', default=False, dest='docs', + help="build documentation (requires doxygen)") + opts.add_option('-w', '--werror', action='store_true', dest='werror', + help="Treat warnings as errors") + + # Test options + if hasattr(Context.g_module, 'test'): + test_opts = opt.add_option_group('Test options', '') + opts.add_option('-T', '--test', action='store_true', dest='build_tests', + help='build unit tests') + opts.add_option('--no-coverage', action='store_true', + dest='no_coverage', + help='do not instrument code for test coverage') + test_opts.add_option('--wrapper', type='string', + dest='test_wrapper', + help='command prefix for tests (e.g. valgrind)') + test_opts.add_option('--test-filter', type='string', + dest='test_filter', + help='regular expression for tests to run') + + # Run options + run_opts = opt.add_option_group('Run options') + run_opts.add_option('--cmd', type='string', dest='cmd', + help='command to run from build directory') + +class ConfigureContext(Configure.ConfigurationContext): + """configures the project""" + + def __init__(self, **kwargs): + self.line_just = 45 + if hasattr(Context.g_module, 'line_just'): + self.line_just = Context.g_module.line_just + + super(ConfigureContext, self).__init__(**kwargs) + self.run_env = ConfigSet.ConfigSet() + self.system_include_paths = set() + + def pre_recurse(self, node): + if len(self.stack_path) == 1: + Logs.pprint('BOLD', 'Configuring %s' % node.parent.srcpath()) + super(ConfigureContext, self).pre_recurse(node) + + def store(self): + self.env.AUTOWAF_RUN_ENV = self.run_env.get_merged_dict() + for path in sorted(self.system_include_paths): + if 'COMPILER_CC' in self.env: + self.env.append_value('CFLAGS', ['-isystem', path]) + if 'COMPILER_CXX' in self.env: + self.env.append_value('CXXFLAGS', ['-isystem', path]) + + super(ConfigureContext, self).store() + + def check_pkg(self, *args, **kwargs): + return check_pkg(self, *args, **kwargs) + + def check_function(self, *args, **kwargs): + return check_function(self, *args, **kwargs) + + def build_path(self, path='.'): + """Return `path` within the build directory""" + return str(self.path.get_bld().make_node(path)) + +def get_check_func(conf, lang): + if lang == 'c': + return conf.check_cc + elif lang == 'cxx': + return conf.check_cxx + else: + Logs.error("Unknown header language `%s'" % lang) + +def check_header(conf, lang, name, define='', mandatory=True): + "Check for a header" + check_func = get_check_func(conf, lang) + if define != 
'': + check_func(header_name=name, + define_name=define, + mandatory=mandatory) + else: + check_func(header_name=name, mandatory=mandatory) + +def check_function(conf, lang, name, **args): + "Check for a function" + header_names = Utils.to_list(args['header_name']) + includes = ''.join(['#include <%s>\n' % x for x in header_names]) + fragment = ''' +%s +int main() { return !(void(*)())(%s); } +''' % (includes, name) + + check_func = get_check_func(conf, lang) + args['msg'] = 'Checking for %s' % name + check_func(fragment=fragment, **args) + +def nameify(name): + return (name.replace('/', '_').replace('++', 'PP') + .replace('-', '_').replace('.', '_')) + +def define(conf, var_name, value): + conf.define(var_name, value) + conf.env[var_name] = value + +def check_pkg(conf, spec, **kwargs): + "Check for a package iff it hasn't been checked for yet" + + if (kwargs['uselib_store'].lower() in conf.env['AUTOWAF_LOCAL_LIBS'] or + kwargs['uselib_store'].lower() in conf.env['AUTOWAF_LOCAL_HEADERS']): + return + + import re + match = re.match('([^ ]*) >= [0-9\.]*', spec) + args = [] + if match: + name = match.group(1) + args = [spec] + elif spec.find(' ') == -1: + name = spec + else: + Logs.error("Invalid package spec: %s" % spec) + + found = None + pkg_var_name = 'PKG_' + name.replace('-', '_') + pkg_name = name + args += kwargs.pop('args', []) + mandatory = kwargs.get('mandatory', True) + + if conf.env.PARDEBUG: + kwargs['mandatory'] = False # Smash mandatory arg + found = conf.check_cfg(package=pkg_name + 'D', + args=args + ['--cflags', '--libs']) + if found: + pkg_name += 'D' + + kwargs['mandatory'] = mandatory # Unsmash mandatory arg + + if not found: + found = conf.check_cfg(package=spec, + args=args + ['--cflags', '--libs'], + **kwargs) + + if not conf.env.MSVC_COMPILER and 'system' in kwargs and kwargs['system']: + conf.system_include_paths.update( + conf.env['INCLUDES_' + nameify(kwargs['uselib_store'])]) + +def normpath(path): + if sys.platform == 'win32': + return os.path.normpath(path).replace('\\', '/') + else: + return os.path.normpath(path) + +def configure(conf): + def append_cxx_flags(flags): + conf.env.append_value('CFLAGS', flags) + conf.env.append_value('CXXFLAGS', flags) + + if Options.options.docs: + conf.load('doxygen') + + try: + conf.load('clang_compilation_database') + except Exception: + pass + + prefix = normpath(os.path.abspath(os.path.expanduser(conf.env['PREFIX']))) + + conf.env['DOCS'] = Options.options.docs and conf.env.DOXYGEN + conf.env['DEBUG'] = Options.options.debug or Options.options.pardebug + conf.env['PARDEBUG'] = Options.options.pardebug + conf.env['PREFIX'] = prefix + + def config_dir(var, opt, default): + if opt: + conf.env[var] = normpath(opt) + else: + conf.env[var] = normpath(default) + + opts = Options.options + + config_dir('BINDIR', opts.bindir, os.path.join(prefix, 'bin')) + config_dir('SYSCONFDIR', opts.configdir, os.path.join(prefix, 'etc')) + config_dir('DATADIR', opts.datadir, os.path.join(prefix, 'share')) + config_dir('INCLUDEDIR', opts.includedir, os.path.join(prefix, 'include')) + config_dir('LIBDIR', opts.libdir, os.path.join(prefix, 'lib')) + + datadir = conf.env['DATADIR'] + config_dir('MANDIR', opts.mandir, os.path.join(datadir, 'man')) + config_dir('DOCDIR', opts.docdir, os.path.join(datadir, 'doc')) + + if Options.options.debug: + if conf.env['MSVC_COMPILER']: + conf.env['CFLAGS'] = ['/Od', '/Z7', '/MTd', '/FS'] + conf.env['CXXFLAGS'] = ['/Od', '/Z7', '/MTd', '/FS'] + conf.env['LINKFLAGS'] = ['/DEBUG', '/MANIFEST'] + else: + conf.env['CFLAGS'] = ['-O0', '-g'] + conf.env['CXXFLAGS'] =
['-O0', '-g'] + else: + if conf.env['MSVC_COMPILER']: + append_cxx_flags(['/MD', '/FS', '/DNDEBUG']) + else: + append_cxx_flags(['-DNDEBUG']) + + if conf.env.MSVC_COMPILER: + Options.options.no_coverage = True + append_cxx_flags(['/nologo', + '/FS', + '/DNDEBUG', + '/D_CRT_SECURE_NO_WARNINGS', + '/experimental:external', + '/external:W0', + '/external:anglebrackets']) + conf.env.append_value('LINKFLAGS', '/nologo') + if Options.options.strict or Options.options.ultra_strict: + ms_strict_flags = ['/Wall', + '/wd4061', + '/wd4200', + '/wd4514', + '/wd4571', + '/wd4625', + '/wd4626', + '/wd4706', + '/wd4710', + '/wd4820', + '/wd5026', + '/wd5027', + '/wd5045'] + conf.env.append_value('CFLAGS', ms_strict_flags) + conf.env.append_value('CXXFLAGS', ms_strict_flags) + conf.env.append_value('CXXFLAGS', ['/EHsc']) + else: + if Options.options.ultra_strict: + Options.options.strict = True + conf.env.append_value('CFLAGS', ['-Wredundant-decls', + '-Wstrict-prototypes', + '-Wmissing-prototypes', + '-Wcast-qual']) + conf.env.append_value('CXXFLAGS', ['-Wcast-qual']) + + if Options.options.strict: + conf.env.append_value('CFLAGS', ['-pedantic', '-Wshadow']) + if conf.env.DEST_OS != "darwin": + conf.env.append_value('LINKFLAGS', ['-Wl,--no-undefined']) + conf.env.append_value('CXXFLAGS', ['-Wnon-virtual-dtor', + '-Woverloaded-virtual']) + append_cxx_flags(['-Wall', + '-Wcast-align', + '-Wextra', + '-Wmissing-declarations', + '-Wno-unused-parameter', + '-Wno-parentheses', + '-Wstrict-overflow', + '-Wundef', + '-Wwrite-strings', + '-fstrict-overflow']) + + # Add less universal flags after checking they work + extra_flags = ['-Wlogical-op', + '-Wsuggest-attribute=noreturn', + '-Wunsafe-loop-optimizations'] + if conf.check_cc(cflags=['-Werror'] + extra_flags, mandatory=False, + msg="Checking for extra C warning flags"): + conf.env.append_value('CFLAGS', extra_flags) + if 'COMPILER_CXX' in conf.env: + if conf.check_cxx(cxxflags=['-Werror'] + extra_flags, + mandatory=False, + msg="Checking for extra C++ warning flags"): + conf.env.append_value('CXXFLAGS', extra_flags) + + if not conf.env['MSVC_COMPILER']: + append_cxx_flags(['-fshow-column']) + + if Options.options.werror: + if conf.env.MSVC_COMPILER: + append_cxx_flags('/WX') + else: + append_cxx_flags('-Werror') + + conf.env.NO_COVERAGE = True + conf.env.BUILD_TESTS = False + try: + conf.env.BUILD_TESTS = Options.options.build_tests + conf.env.NO_COVERAGE = Options.options.no_coverage + if not Options.options.no_coverage: + # Set up unit test code coverage + if conf.is_defined('CLANG'): + for cov in [conf.env.CC[0].replace('clang', 'llvm-cov'), + 'llvm-cov']: + if conf.find_program(cov, var='LLVM_COV', mandatory=False): + break + else: + conf.check_cc(lib='gcov', define_name='HAVE_GCOV', + mandatory=False) + except Exception: + pass # Test options do not exist + + # Define version in configuration + appname = getattr(Context.g_module, Context.APPNAME, 'noname') + version = getattr(Context.g_module, Context.VERSION, '0.0.0') + defname = appname.upper().replace('-', '_').replace('.', '_') + define(conf, defname + '_VERSION', version) + + conf.env.prepend_value('CFLAGS', '-I' + os.path.abspath('.')) + conf.env.prepend_value('CXXFLAGS', '-I' + os.path.abspath('.')) + +def display_summary(conf, msgs=None): + if len(conf.stack_path) == 1: + display_msg(conf, "Install prefix", conf.env['PREFIX']) + if 'COMPILER_CC' in conf.env: + display_msg(conf, "C Flags", ' '.join(conf.env['CFLAGS'])) + if 'COMPILER_CXX' in conf.env: + display_msg(conf, "C++ Flags", ' 
'.join(conf.env['CXXFLAGS'])) + display_msg(conf, "Debuggable", bool(conf.env['DEBUG'])) + display_msg(conf, "Build documentation", bool(conf.env['DOCS'])) + + if msgs is not None: + display_msgs(conf, msgs) + +def set_c_lang(conf, lang): + "Set a specific C language standard, like 'c99' or 'c11'" + if conf.env.MSVC_COMPILER: + # MSVC has no hope or desire to compile C99, just compile as C++ + conf.env.append_unique('CFLAGS', ['/TP']) + else: + flag = '-std=%s' % lang + conf.check(cflags=['-Werror', flag], + msg="Checking for flag '%s'" % flag) + conf.env.append_unique('CFLAGS', [flag]) + +def set_cxx_lang(conf, lang): + "Set a specific C++ language standard, like 'c++11', 'c++14', or 'c++17'" + if conf.env.MSVC_COMPILER: + if lang != 'c++14': + lang = 'c++latest' + conf.env.append_unique('CXXFLAGS', ['/std:%s' % lang]) + else: + flag = '-std=%s' % lang + conf.check(cxxflags=['-Werror', flag], + msg="Checking for flag '%s'" % flag) + conf.env.append_unique('CXXFLAGS', [flag]) + +def set_modern_c_flags(conf): + "Use the most modern C language available" + if 'COMPILER_CC' in conf.env: + if conf.env.MSVC_COMPILER: + # MSVC has no hope or desire to compile C99, just compile as C++ + conf.env.append_unique('CFLAGS', ['/TP']) + else: + for flag in ['-std=c11', '-std=c99']: + if conf.check(cflags=['-Werror', flag], mandatory=False, + msg="Checking for flag '%s'" % flag): + conf.env.append_unique('CFLAGS', [flag]) + break + +def set_modern_cxx_flags(conf, mandatory=False): + "Use the most modern C++ language available" + if 'COMPILER_CXX' in conf.env: + if conf.env.MSVC_COMPILER: + conf.env.append_unique('CXXFLAGS', ['/std:c++latest']) + else: + for lang in ['c++14', 'c++1y', 'c++11', 'c++0x']: + flag = '-std=%s' % lang + if conf.check(cxxflags=['-Werror', flag], mandatory=False, + msg="Checking for flag '%s'" % flag): + conf.env.append_unique('CXXFLAGS', [flag]) + break + +def set_local_lib(conf, name, has_objects): + var_name = 'HAVE_' + nameify(name.upper()) + define(conf, var_name, 1) + if has_objects: + if type(conf.env['AUTOWAF_LOCAL_LIBS']) != dict: + conf.env['AUTOWAF_LOCAL_LIBS'] = {} + conf.env['AUTOWAF_LOCAL_LIBS'][name.lower()] = True + else: + if type(conf.env['AUTOWAF_LOCAL_HEADERS']) != dict: + conf.env['AUTOWAF_LOCAL_HEADERS'] = {} + conf.env['AUTOWAF_LOCAL_HEADERS'][name.lower()] = True + +def append_property(obj, key, val): + if hasattr(obj, key): + setattr(obj, key, getattr(obj, key) + val) + else: + setattr(obj, key, val) + +@feature('c', 'cxx') +@before('apply_link') +def version_lib(self): + if self.env.DEST_OS == 'win32': + self.vnum = None # Prevent waf from automatically appending -0 + if self.env['PARDEBUG']: + applicable = ['cshlib', 'cxxshlib', 'cstlib', 'cxxstlib'] + if [x for x in applicable if x in self.features]: + self.target = self.target + 'D' + +def set_lib_env(conf, + name, + version, + has_objects=True, + include_path=None, + lib_path=None): + "Set up environment for local library as if found via pkg-config." 
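set_lib_env() fills in the same variables a successful check_cfg() would (INCLUDES_<NAME>, LIBPATH_<NAME>, LIB_<NAME>) and extends the run-time library path, so an in-tree library is consumed like any external one. A sketch of typical wscript usage (library name and sources hypothetical):

    import waflib.extras.autowaf as autowaf

    def configure(conf):
        # Register in-tree library 'foo' 1.2.3 under the FOO variables
        autowaf.set_lib_env(conf, 'foo', '1.2.3')

    def build(bld):
        # Consume it like a pkg-config result via use/uselib
        bld.program(source='test_foo.c', target='test_foo', use='FOO')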
+ NAME = name.upper() + major_ver = version.split('.')[0] + pkg_var_name = 'PKG_' + name.replace('-', '_') + '_' + major_ver + lib_name = '%s-%s' % (name, major_ver) + + if lib_path is None: + lib_path = str(conf.path.get_bld()) + + if include_path is None: + include_path = str(conf.path) + + if conf.env.PARDEBUG: + lib_name += 'D' + + conf.env[pkg_var_name] = lib_name + conf.env['INCLUDES_' + NAME] = [include_path] + conf.env['LIBPATH_' + NAME] = [lib_path] + if has_objects: + conf.env['LIB_' + NAME] = [lib_name] + + conf.run_env.append_unique(lib_path_name, [lib_path]) + conf.define(NAME + '_VERSION', version) + +def display_msg(conf, msg, status=None, color=None): + color = 'CYAN' + if type(status) == bool and status: + color = 'GREEN' + status = 'yes' + elif type(status) == bool and not status or status == "False": + color = 'YELLOW' + status = 'no' + Logs.pprint('BOLD', '%s' % msg.ljust(conf.line_just), sep='') + Logs.pprint('BOLD', ":", sep='') + Logs.pprint(color, status) + +def display_msgs(conf, msgs): + for k, v in msgs.items(): + display_msg(conf, k, v) + +def link_flags(env, lib): + return ' '.join(map(lambda x: env['LIB_ST'] % x, + env['LIB_' + lib])) + +def compile_flags(env, lib): + return ' '.join(map(lambda x: env['CPPPATH_ST'] % x, + env['INCLUDES_' + lib])) + +def build_pc(bld, name, version, version_suffix, libs, subst_dict={}): + """Build a pkg-config file for a library. + + name -- uppercase variable name (e.g. 'SOMENAME') + version -- version string (e.g. '1.2.3') + version_suffix -- name version suffix (e.g. '2') + libs -- string/list of dependencies (e.g. 'LIBFOO GLIB') + """ + + pkg_prefix = bld.env['PREFIX'] + if len(pkg_prefix) > 1 and pkg_prefix[-1] == '/': + pkg_prefix = pkg_prefix[:-1] + + target = name.lower() + if version_suffix != '': + target += '-' + version_suffix + + if bld.env['PARDEBUG']: + target += 'D' + + target += '.pc' + + libdir = bld.env['LIBDIR'] + if libdir.startswith(pkg_prefix): + libdir = libdir.replace(pkg_prefix, '${exec_prefix}') + + includedir = bld.env['INCLUDEDIR'] + if includedir.startswith(pkg_prefix): + includedir = includedir.replace(pkg_prefix, '${prefix}') + + obj = bld(features='subst', + source='%s.pc.in' % name.lower(), + target=target, + install_path=os.path.join(bld.env['LIBDIR'], 'pkgconfig'), + exec_prefix='${prefix}', + PREFIX=pkg_prefix, + EXEC_PREFIX='${prefix}', + LIBDIR=libdir, + INCLUDEDIR=includedir) + + if type(libs) != list: + libs = libs.split() + + subst_dict[name + '_VERSION'] = version + subst_dict[name + '_MAJOR_VERSION'] = version[0:version.find('.')] + for i in libs: + subst_dict[i + '_LIBS'] = link_flags(bld.env, i) + lib_cflags = compile_flags(bld.env, i) + if lib_cflags == '': + lib_cflags = ' ' + subst_dict[i + '_CFLAGS'] = lib_cflags + + obj.__dict__.update(subst_dict) + +def make_simple_dox(name): + "Clean up messy Doxygen documentation after it is built" + name = name.lower() + NAME = name.upper() + try: + top = os.getcwd() + os.chdir(build_dir(name, 'doc/html')) + page = 'group__%s.html' % name + if not os.path.exists(page): + return + for i in [ + ['%s_API ' % NAME, ''], + ['%s_DEPRECATED ' % NAME, ''], + ['group__%s.html' % name, ''], + ['&#160;', ''], + [r'<script.*><\/script>', ''], + [r'<hr\/><a name="details" id="details"><\/a><h2>.*<\/h2>', ''], + [r'<link href=\"tabs.css\" rel=\"stylesheet\" type=\"text\/css\"\/>', + ''], + [r'<img class=\"footer\" src=\"doxygen.png\" alt=\"doxygen\"\/>', + 'Doxygen']]: + os.system("sed -i 's/%s/%s/g' %s" % (i[0], i[1], page)) + os.rename('group__%s.html' % name, 'index.html') + for i in (glob.glob('*.png') + + glob.glob('*.html') + + glob.glob('*.js') + + glob.glob('*.css')): + if i != 'index.html' and i != 'style.css': + os.remove(i) + os.chdir(top) + os.chdir(build_dir(name, 'doc/man/man3')) + for i in glob.glob('*.3'): + os.system("sed -i 's/%s_API //' %s" % (NAME, i)) + for i in glob.glob('_*'): + os.remove(i) + os.chdir(top) + except Exception as e: + Logs.error("Failed to fix up %s documentation: %s" % (name, e)) + finally: + os.chdir(top) + +def build_dox(bld, name, version, srcdir, blddir, outdir='', versioned=True): + """Build Doxygen API documentation""" + if not bld.env['DOCS']: + return + + # Doxygen input paths are relative to the doxygen file + src_dir = bld.path.srcpath() + subst_tg = bld(features='subst', + source='doc/reference.doxygen.in', + target='doc/reference.doxygen', + install_path='', + name='doxyfile') + + subst_dict = { + name + '_VERSION': version, + name + '_SRCDIR': os.path.abspath(src_dir), + name + '_DOC_DIR': '' + } + + subst_tg.__dict__.update(subst_dict) + + subst_tg.post() + + docs = bld(features='doxygen', + doxyfile='doc/reference.doxygen') + + docs.post() + + outname = name.lower() + if versioned: + outname += '-%d' % int(version[0:version.find('.')]) + bld.install_files( + os.path.join('${DOCDIR}', outname, outdir, 'html'), + bld.path.get_bld().ant_glob('doc/html/*')) + for i in range(1, 8): + bld.install_files('${MANDIR}/man%d' % i, + bld.path.get_bld().ant_glob('doc/man/man%d/*' % i, + excl='**/_*')) + + +def build_version_files(header_path, source_path, domain, major, minor, micro): + """Generate version code header""" + header_path = os.path.abspath(header_path) + source_path = os.path.abspath(source_path) + text = "int " + domain + "_major_version = " + str(major) + ";\n" + text += "int " + domain + "_minor_version = " + str(minor) + ";\n" + text += "int " + domain + "_micro_version = " + str(micro) + ";\n" + try: + o = open(source_path, 'w') + o.write(text) + o.close() + except IOError: + Logs.error('Failed to open %s for writing\n' % source_path) + sys.exit(-1) + + text = "#ifndef __" + domain + "_version_h__\n" + text += "#define __" + domain + "_version_h__\n" + text += "extern const char* " + domain + "_revision;\n" + text += "extern int " + domain + "_major_version;\n" + text += "extern int " + domain + "_minor_version;\n" + text += "extern int " + domain + "_micro_version;\n" + text += "#endif /* __" + domain + "_version_h__ */\n" + try: + o = open(header_path, 'w') + o.write(text) + o.close() + except IOError: + Logs.warn('Failed to open %s for writing\n' % header_path) + sys.exit(-1) + + return None + +def build_i18n_pot(bld, srcdir, dir, name, sources, copyright_holder=None): + Logs.info('Generating pot file from %s' % name) + pot_file = '%s.pot' % name + + cmd = ['xgettext', + '--keyword=_', + '--keyword=N_', + '--keyword=S_', + '--from-code=UTF-8', + '-o', pot_file] + + if copyright_holder: + cmd += ['--copyright-holder="%s"' % copyright_holder] + + cmd += sources + Logs.info('Updating ' + pot_file) + subprocess.call(cmd, cwd=os.path.join(srcdir, dir)) + +def build_i18n_po(bld, srcdir, dir, name, sources, copyright_holder=None): + pwd = os.getcwd() + os.chdir(os.path.join(srcdir, dir)) + pot_file = '%s.pot' % name + po_files = glob.glob('po/*.po') + for po_file in po_files: + cmd = ['msgmerge', + '--update', + po_file, + pot_file] +
Logs.info('Updating ' + po_file) + subprocess.call(cmd) + os.chdir(pwd) + +def build_i18n_mo(bld, srcdir, dir, name, sources, copyright_holder=None): + pwd = os.getcwd() + os.chdir(os.path.join(srcdir, dir)) + po_files = glob.glob('po/*.po') + for po_file in po_files: + mo_file = po_file.replace('.po', '.mo') + cmd = ['msgfmt', + '-c', + '-f', + '-o', + mo_file, + po_file] + Logs.info('Generating ' + po_file) + subprocess.call(cmd) + os.chdir(pwd) + +def build_i18n(bld, srcdir, dir, name, sources, copyright_holder=None): + build_i18n_pot(bld, srcdir, dir, name, sources, copyright_holder) + build_i18n_po(bld, srcdir, dir, name, sources, copyright_holder) + build_i18n_mo(bld, srcdir, dir, name, sources, copyright_holder) + +class ExecutionEnvironment: + """Context that sets system environment variables for program execution""" + def __init__(self, changes): + self.original_environ = os.environ.copy() + + self.diff = {} + for path_name, paths in changes.items(): + value = os.pathsep.join(paths) + if path_name in os.environ: + value += os.pathsep + os.environ[path_name] + + self.diff[path_name] = value + + os.environ.update(self.diff) + + def __str__(self): + return '\n'.join({'%s="%s"' % (k, v) for k, v in self.diff.items()}) + + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + os.environ = self.original_environ + +class RunContext(Build.BuildContext): + "runs an executable from the build directory" + cmd = 'run' + + def execute(self): + self.restore() + if not self.all_envs: + self.load_envs() + + with ExecutionEnvironment(self.env.AUTOWAF_RUN_ENV) as env: + if Options.options.verbose: + Logs.pprint('GREEN', str(env) + '\n') + + if Options.options.cmd: + Logs.pprint('GREEN', 'Running %s' % Options.options.cmd) + subprocess.call(Options.options.cmd, shell=True) + else: + Logs.error("error: Missing --cmd option for run command") + +def show_diff(from_lines, to_lines, from_filename, to_filename): + import difflib + import sys + + same = True + for line in difflib.unified_diff( + from_lines, to_lines, + fromfile=os.path.abspath(from_filename), + tofile=os.path.abspath(to_filename)): + sys.stderr.write(line) + same = False + + return same + +def test_file_equals(patha, pathb): + import filecmp + import io + + for path in (patha, pathb): + if not os.access(path, os.F_OK): + Logs.pprint('RED', 'error: missing file %s' % path) + return False + + if filecmp.cmp(patha, pathb, shallow=False): + return True + + with io.open(patha, 'rU', encoding='utf-8') as fa: + with io.open(pathb, 'rU', encoding='utf-8') as fb: + return show_diff(fa.readlines(), fb.readlines(), patha, pathb) + +def bench_time(): + if hasattr(time, 'perf_counter'): # Added in Python 3.3 + return time.perf_counter() + else: + return time.time() + +class TestOutput: + """Test output that is truthy if result is as expected""" + def __init__(self, expected, result=None): + self.stdout = self.stderr = None + self.expected = expected + self.result = result + + def __bool__(self): + return self.expected is None or self.result == self.expected + + __nonzero__ = __bool__ + +def is_string(s): + if sys.version_info[0] < 3: + return isinstance(s, basestring) + return isinstance(s, str) + +class TestScope: + """Scope for running tests that maintains pass/fail statistics""" + def __init__(self, tst, name, defaults): + self.tst = tst + self.name = name + self.defaults = defaults + self.n_failed = 0 + self.n_total = 0 + + def run(self, test, **kwargs): + if type(test) == list and 'name' not in kwargs: + import pipes 
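TestScope.run() dispatches on the test's type: an argv list is executed as a subprocess (expected return code 0 by default), while a callable is simply invoked (expected result True by default). Combined with TestContext.__call__ and group() defined below, a project's test wscript can read like this sketch (test names and files invented):

    def test(tst):
        with tst.group('unit') as check:
            check(['./test_plugin', '--fast'])            # argv list
            check(lambda: 2 + 2 == 4, name='arithmetic')  # callable
            check.file_equals('expected.ttl', 'output.ttl')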
+ kwargs['name'] = ' '.join(map(pipes.quote, test)) + + if Options.options.test_filter and 'name' in kwargs: + import re + found = False + for scope in self.tst.stack: + if re.search(Options.options.test_filter, scope.name): + found = True + break + + if (not found and + not re.search(Options.options.test_filter, self.name) and + not re.search(Options.options.test_filter, kwargs['name'])): + return True + + if callable(test): + output = self._run_callable(test, **kwargs) + elif type(test) == list: + + output = self._run_command(test, **kwargs) + else: + raise Exception("Unknown test type") + + if not output: + self.tst.log_bad('FAILED', kwargs['name']) + + return self.tst.test_result(output) + + def _run_callable(self, test, **kwargs): + expected = kwargs['expected'] if 'expected' in kwargs else True + return TestOutput(expected, test()) + + def _run_command(self, test, **kwargs): + if 'stderr' in kwargs and kwargs['stderr'] == NONEMPTY: + # Run with a temp file for stderr and check that it is non-empty + import tempfile + with tempfile.TemporaryFile() as stderr: + kwargs['stderr'] = stderr + output = self.run(test, **kwargs) + stderr.seek(0, 2) # Seek to end + return (output if not output else + self.run( + lambda: stderr.tell() > 0, + name=kwargs['name'] + ' error message')) + + try: + # Run with stdout and stderr set to the appropriate streams + out_stream = self._stream('stdout', kwargs) + err_stream = self._stream('stderr', kwargs) + return self._exec(test, **kwargs) + finally: + out_stream = out_stream.close() if out_stream else None + err_stream = err_stream.close() if err_stream else None + + def _stream(self, stream_name, kwargs): + s = kwargs[stream_name] if stream_name in kwargs else None + if is_string(s): + kwargs[stream_name] = open(s, 'wb') + return kwargs[stream_name] + return None + + def _exec(self, + test, + expected=0, + name='', + stdin=None, + stdout=None, + stderr=None, + verbosity=1): + def stream(s): + return open(s, 'wb') if type(s) == str else s + + if verbosity > 1: + self.tst.log_good('RUN ', name) + + if Options.options.test_wrapper: + test = [Options.options.test_wrapper] + test + + output = TestOutput(expected) + with open(os.devnull, 'wb') as null: + out = null if verbosity < 3 and not stdout else stdout + err = null if verbosity < 2 and not stderr else stderr + proc = subprocess.Popen(test, stdin=stdin, stdout=out, stderr=err) + output.stdout, output.stderr = proc.communicate() + output.result = proc.returncode + + if output and verbosity > 0: + self.tst.log_good(' OK', name) + + return output + +class TestContext(Build.BuildContext): + "runs test suite" + fun = cmd = 'test' + + def __init__(self, **kwargs): + super(TestContext, self).__init__(**kwargs) + self.start_time = bench_time() + self.max_depth = 1 + + defaults = {'verbosity': Options.options.verbose} + self.stack = [TestScope(self, Context.g_module.APPNAME, defaults)] + + def defaults(self): + return self.stack[-1].defaults + + def finalize(self): + if self.stack[-1].n_failed > 0: + sys.exit(1) + + super(TestContext, self).finalize() + + def __call__(self, test, **kwargs): + return self.stack[-1].run(test, **self.args(**kwargs)) + + def file_equals(self, from_path, to_path, **kwargs): + kwargs.update({'expected': True, + 'name': '%s == %s' % (from_path, to_path)}) + return self(lambda: test_file_equals(from_path, to_path), **kwargs) + + def log_good(self, title, fmt, *args): + Logs.pprint('GREEN', '[%s] %s' % (title.center(10), fmt % args)) + + def log_bad(self, title, fmt, *args): + 
Logs.pprint('RED', '[%s] %s' % (title.center(10), fmt % args)) + + def pre_recurse(self, node): + wscript_module = Context.load_module(node.abspath()) + group_name = wscript_module.APPNAME + self.stack.append(TestScope(self, group_name, self.defaults())) + self.max_depth = max(self.max_depth, len(self.stack) - 1) + + bld_dir = node.get_bld().parent + if bld_dir != self.path.get_bld(): + Logs.info('') + + self.original_dir = os.getcwd() + Logs.info("Waf: Entering directory `%s'\n", bld_dir) + os.chdir(str(bld_dir)) + + if not self.env.NO_COVERAGE and str(node.parent) == Context.top_dir: + self.clear_coverage() + + self.log_good('=' * 10, 'Running %s tests', group_name) + super(TestContext, self).pre_recurse(node) + + def test_result(self, success): + self.stack[-1].n_total += 1 + self.stack[-1].n_failed += 1 if not success else 0 + return success + + def pop(self): + scope = self.stack.pop() + self.stack[-1].n_total += scope.n_total + self.stack[-1].n_failed += scope.n_failed + return scope + + def post_recurse(self, node): + super(TestContext, self).post_recurse(node) + + scope = self.pop() + duration = (bench_time() - self.start_time) * 1000.0 + is_top = str(node.parent) == str(Context.top_dir) + + if is_top and self.max_depth > 1: + Logs.info('') + + self.log_good('=' * 10, '%d tests from %s ran (%d ms total)', + scope.n_total, scope.name, duration) + + if not self.env.NO_COVERAGE: + if is_top: + self.gen_coverage() + + if os.path.exists('coverage/index.html'): + self.log_good('REPORT', '', + os.path.abspath('coverage/index.html')) + + successes = scope.n_total - scope.n_failed + Logs.pprint('GREEN', '[ PASSED ] %d tests' % successes) + if scope.n_failed > 0: + Logs.pprint('RED', '[ FAILED ] %d tests' % scope.n_failed) + if is_top: + Logs.info("\nWaf: Leaving directory `%s'" % os.getcwd()) + + os.chdir(self.original_dir) + + def execute(self): + self.restore() + if not self.all_envs: + self.load_envs() + + if not self.env.BUILD_TESTS: + self.fatal('Configuration does not include tests') + + with ExecutionEnvironment(self.env.AUTOWAF_RUN_ENV) as env: + if self.defaults()['verbosity'] > 0: + Logs.pprint('GREEN', str(env) + '\n') + self.recurse([self.run_dir]) + + def src_path(self, path): + return os.path.relpath(os.path.join(str(self.path), path)) + + def args(self, **kwargs): + all_kwargs = self.defaults().copy() + all_kwargs.update(kwargs) + return all_kwargs + + def group(self, name, **kwargs): + return TestGroup( + self, self.stack[-1].name, name, **self.args(**kwargs)) + + def set_test_defaults(self, **kwargs): + """Set default arguments to be passed to all tests""" + self.stack[-1].defaults.update(kwargs) + + def clear_coverage(self): + """Zero old coverage data""" + try: + with open('cov-clear.log', 'w') as log: + subprocess.call(['lcov', '-z', '-d', str(self.path)], + stdout=log, stderr=log) + + except Exception: + Logs.warn('Failed to run lcov to clear old coverage data') + + def gen_coverage(self): + """Generate coverage data and report""" + try: + with open('cov.lcov', 'w') as out: + with open('cov.log', 'w') as err: + subprocess.call(['lcov', '-c', '--no-external', + '--rc', 'lcov_branch_coverage=1', + '-b', '.', + '-d', str(self.path)], + stdout=out, stderr=err) + + if not os.path.isdir('coverage'): + os.makedirs('coverage') + + with open('genhtml.log', 'w') as log: + subprocess.call(['genhtml', + '-o', 'coverage', + '--rc', 'genhtml_branch_coverage=1', + 'cov.lcov'], + stdout=log, stderr=log) + + summary = subprocess.check_output( + ['lcov', '--summary', + '--rc', 
'lcov_branch_coverage=1', + 'cov.lcov'], + stderr=subprocess.STDOUT).decode('ascii') + + import re + lines = re.search('lines\.*: (.*)%.*', summary).group(1) + functions = re.search('functions\.*: (.*)%.*', summary).group(1) + branches = re.search('branches\.*: (.*)%.*', summary).group(1) + self.log_good('COVERAGE', '%s%% lines, %s%% functions, %s%% branches', + lines, functions, branches) + + except Exception: + Logs.warn('Failed to run lcov to generate coverage report') + +class TestGroup: + def __init__(self, tst, suitename, name, **kwargs): + self.tst = tst + self.suitename = suitename + self.name = name + self.kwargs = kwargs + self.start_time = bench_time() + tst.stack.append(TestScope(tst, name, tst.defaults())) + + def label(self): + return self.suitename + '.%s' % self.name if self.name else '' + + def args(self, **kwargs): + all_kwargs = self.tst.args(**self.kwargs) + all_kwargs.update(kwargs) + return all_kwargs + + def __enter__(self): + if 'verbosity' in self.kwargs and self.kwargs['verbosity'] > 0: + self.tst.log_good('-' * 10, self.label()) + return self + + def __call__(self, test, **kwargs): + return self.tst(test, **self.args(**kwargs)) + + def file_equals(self, from_path, to_path, **kwargs): + return self.tst.file_equals(from_path, to_path, **kwargs) + + def __exit__(self, type, value, traceback): + duration = (bench_time() - self.start_time) * 1000.0 + scope = self.tst.pop() + n_passed = scope.n_total - scope.n_failed + if scope.n_failed == 0: + self.tst.log_good('-' * 10, '%d tests from %s (%d ms total)', + scope.n_total, self.label(), duration) + else: + self.tst.log_bad('-' * 10, '%d/%d tests from %s (%d ms total)', + n_passed, scope.n_total, self.label(), duration) + +def run_ldconfig(ctx): + should_run = (ctx.cmd == 'install' and + not ctx.env['RAN_LDCONFIG'] and + ctx.env['LIBDIR'] and + 'DESTDIR' not in os.environ and + not Options.options.destdir) + + if should_run: + try: + Logs.info("Waf: Running `/sbin/ldconfig %s'" % ctx.env['LIBDIR']) + subprocess.call(['/sbin/ldconfig', ctx.env['LIBDIR']]) + ctx.env['RAN_LDCONFIG'] = True + except Exception: + pass + +def get_rdf_news(name, + in_files, + top_entries=None, + extra_entries=None, + dev_dist=None): + import rdflib + from time import strptime + + doap = rdflib.Namespace('http://usefulinc.com/ns/doap#') + dcs = rdflib.Namespace('http://ontologi.es/doap-changeset#') + rdfs = rdflib.Namespace('http://www.w3.org/2000/01/rdf-schema#') + foaf = rdflib.Namespace('http://xmlns.com/foaf/0.1/') + rdf = rdflib.Namespace('http://www.w3.org/1999/02/22-rdf-syntax-ns#') + m = rdflib.ConjunctiveGraph() + + try: + for i in in_files: + m.parse(i, format='n3') + except Exception: + Logs.warn('Error parsing data, unable to generate NEWS') + return + + proj = m.value(None, rdf.type, doap.Project) + for f in m.triples([proj, rdfs.seeAlso, None]): + if f[2].endswith('.ttl'): + m.parse(f[2], format='n3') + + entries = {} + for r in m.triples([proj, doap.release, None]): + release = r[2] + revision = m.value(release, doap.revision, None) + date = m.value(release, doap.created, None) + blamee = m.value(release, dcs.blame, None) + changeset = m.value(release, dcs.changeset, None) + dist = m.value(release, doap['file-release'], None) + + if not dist: + Logs.warn('No file release for %s %s' % (proj, revision)) + dist = dev_dist + + if revision and date and blamee and changeset: + entry = {} + entry['name'] = str(name) + entry['revision'] = str(revision) + entry['date'] = strptime(str(date), '%Y-%m-%d') + entry['status'] = 'stable' if dist 
!= dev_dist else 'unstable' + entry['dist'] = str(dist) + entry['items'] = [] + + for i in m.triples([changeset, dcs.item, None]): + item = str(m.value(i[2], rdfs.label, None)) + entry['items'] += [item] + if dist and top_entries is not None: + if not str(dist) in top_entries: + top_entries[str(dist)] = {'items': []} + top_entries[str(dist)]['items'] += [ + '%s: %s' % (name, item)] + + if extra_entries and dist: + for i in extra_entries[str(dist)]: + entry['items'] += extra_entries[str(dist)]['items'] + + entry['blamee_name'] = str(m.value(blamee, foaf.name, None)) + entry['blamee_mbox'] = str(m.value(blamee, foaf.mbox, None)) + + entries[(str(date), str(revision))] = entry + else: + Logs.warn('Ignored incomplete %s release description' % name) + + return entries + +def write_news(entries, out_file): + import textwrap + from time import strftime + + if len(entries) == 0: + return + + news = open(out_file, 'w') + for e in sorted(entries.keys(), reverse=True): + entry = entries[e] + news.write('%s (%s) %s;\n' % (entry['name'], entry['revision'], entry['status'])) + for item in entry['items']: + wrapped = textwrap.wrap(item, width=79) + news.write('\n * ' + '\n '.join(wrapped)) + + news.write('\n\n --') + news.write(' %s <%s>' % (entry['blamee_name'], + entry['blamee_mbox'].replace('mailto:', ''))) + + news.write(' %s\n\n' % ( + strftime('%a, %d %b %Y %H:%M:%S +0000', entry['date']))) + + news.close() + +def write_posts(entries, meta, out_dir, status='stable'): + "write news posts in Pelican Markdown format" + from time import strftime + try: + os.mkdir(out_dir) + except Exception: + pass + + for i in entries: + entry = entries[i] + revision = i[1] + if entry['status'] != status: + continue + + date_str = strftime('%Y-%m-%d', entry['date']) + datetime_str = strftime('%Y-%m-%d %H:%M', entry['date']) + + path = os.path.join(out_dir, '%s-%s-%s.md' % ( + date_str, entry['name'], revision.replace('.', '-'))) + post = open(path, 'w') + title = entry['title'] if 'title' in entry else entry['name'] + post.write('Title: %s %s\n' % (title, revision)) + post.write('Date: %s\n' % datetime_str) + post.write('Slug: %s-%s\n' % (entry['name'], revision.replace('.', '-'))) + for k in meta: + post.write('%s: %s\n' % (k, meta[k])) + post.write('\n') + + url = entry['dist'] + if entry['status'] == status: + post.write('[%s %s](%s) has been released.' % ( + (entry['name'], revision, url))) + + if 'description' in entry: + post.write(' ' + entry['description']) + + post.write('\n') + if (len(entry['items']) > 0 and + not (len(entry['items']) == 1 and + entry['items'][0] == 'Initial release')): + post.write('\nChanges:\n\n') + for i in entry['items']: + post.write(' * %s\n' % i) + + post.close() + +def get_blurb(in_file): + "Get the first paragram of a Markdown formatted file, skipping the title" + f = open(in_file, 'r') + f.readline() # Title + f.readline() # Title underline + f.readline() # Blank + out = '' + line = f.readline() + while len(line) > 0 and line != '\n': + out += line.replace('\n', ' ') + line = f.readline() + return out.strip() + +def get_news(in_file, entry_props={}): + """Get NEWS entries in the format expected by write_posts(). + + Properties that should be set on every entry can be passed in + `entry_props`. If `entry_props` has a 'dist_pattern' value, it is used to + set the 'dist' entry of entries by substituting the version number. 
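    The expected input is the Debian-changelog-like layout produced by
    write_news(); a hypothetical entry:

        libfoo (1.2.3) stable;

         * Fix crash on load

         -- Jane Doe <jane@example.org> Sat, 09 Nov 2019 19:17:05 +0000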
+ """ + + import re + import email.utils + + f = open(in_file, 'r') + entries = {} + while True: + # Read header line + head = f.readline() + matches = re.compile(r'([^ ]*) \((.*)\) ([a-zA-z]*);').match(head) + if matches is None: + break + + entry = {} + entry['name'] = matches.group(1) + entry['revision'] = matches.group(2) + entry['status'] = matches.group(3) + entry['items'] = [] + if 'dist_pattern' in entry_props: + entry['dist'] = entry_props['dist_pattern'] % entry['revision'] + + # Read blank line after header + if f.readline() != '\n': + raise SyntaxError('expected blank line after NEWS header') + + def add_item(item): + if len(item) > 0: + entry['items'] += [item.replace('\n', ' ').strip()] + + # Read entries for this revision + item = '' + line = '' + while line != '\n': + line = f.readline() + if line.startswith(' * '): + add_item(item) + item = line[3:].lstrip() + else: + item += line.lstrip() + add_item(item) + + # Read footer line + foot = f.readline() + matches = re.compile(' -- (.*) <(.*)> (.*)').match(foot) + entry['date'] = email.utils.parsedate(matches.group(3)) + entry['blamee_name'] = matches.group(1) + entry['blamee_mbox'] = matches.group(2) + entry.update(entry_props) + entries[(entry['date'], entry['revision'])] = entry + + # Skip trailing blank line before next entry + f.readline() + + f.close() + + return entries + +def news_to_posts(news_file, entry_props, post_meta, default_post_dir): + post_dir = os.getenv('POST_DIR') + if not post_dir: + post_dir = default_post_dir + sys.stderr.write('POST_DIR not set in environment, writing to %s\n' % post_dir) + else: + sys.stderr.write('writing posts to %s\n' % post_dir) + + entries = get_news(news_file, entry_props) + write_posts(entries, post_meta, post_dir) + +def run_script(cmds): + for cmd in cmds: + subprocess.check_call(cmd, shell=True) + +def release(name, version, dist_name=None): + if dist_name is None: + dist_name = name.lower() + + dist = '%s-%s.tar.bz2' % (dist_name or name.lower(), version) + try: + os.remove(dist) + os.remove(dist + '.sig') + except Exception: + pass + + status = subprocess.check_output('git status --porcelain', shell=True) + if status: + Logs.error('error: git working copy is dirty\n' + status) + raise Exception('git working copy is dirty') + + head = subprocess.check_output('git show -s --oneline', shell=True) + head_summary = head[8:].strip().lower() + expected_summary = '%s %s' % (name.lower(), version) + if head_summary != expected_summary: + raise Exception('latest commit "%s" does not match "%s"' % ( + head_summary, expected_summary)) + + run_script(['./waf configure --docs', + './waf', + './waf distcheck', + './waf posts', + 'gpg -b %s' % dist, + 'git tag -s v%s -m "%s %s"' % (version, name, version)]) diff -Nru lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/extras/doxygen.py lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/extras/doxygen.py --- lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/extras/doxygen.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/extras/doxygen.py 2019-06-06 20:12:09.000000000 +0000 @@ -0,0 +1,167 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
diff -Nru lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/extras/doxygen.py lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/extras/doxygen.py
--- lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/extras/doxygen.py	1970-01-01 00:00:00.000000000 +0000
+++ lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/extras/doxygen.py	2019-06-06 20:12:09.000000000 +0000
@@ -0,0 +1,167 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+import os,os.path,re
+from waflib import Task,Utils,Node
+from waflib.TaskGen import feature
+DOXY_STR='"${DOXYGEN}" - '
+DOXY_FMTS='html latex man rft xml'.split()
+DOXY_FILE_PATTERNS='*.'+' *.'.join('''
+c cc cxx cpp c++ java ii ixx ipp i++ inl h hh hxx hpp h++ idl odl cs php php3
+inc m mm py f90
+'''.split())
+re_rl=re.compile('\\\\\r*\n',re.MULTILINE)
+re_nl=re.compile('\r*\n',re.M)
+def parse_doxy(txt):
+	tbl={}
+	txt=re_rl.sub('',txt)
+	lines=re_nl.split(txt)
+	for x in lines:
+		x=x.strip()
+		if not x or x.startswith('#')or x.find('=')<0:
+			continue
+		if x.find('+=')>=0:
+			tmp=x.split('+=')
+			key=tmp[0].strip()
+			if key in tbl:
+				tbl[key]+=' '+'+='.join(tmp[1:]).strip()
+			else:
+				tbl[key]='+='.join(tmp[1:]).strip()
+		else:
+			tmp=x.split('=')
+			tbl[tmp[0].strip()]='='.join(tmp[1:]).strip()
+	return tbl
+class doxygen(Task.Task):
+	vars=['DOXYGEN','DOXYFLAGS']
+	color='BLUE'
+	def runnable_status(self):
+		'''
+		self.pars are populated in runnable_status - because this function is being
+		run *before* both self.pars "consumers" - scan() and run()
+
+		set output_dir (node) for the output
+		'''
+		for x in self.run_after:
+			if not x.hasrun:
+				return Task.ASK_LATER
+		if not getattr(self,'pars',None):
+			txt=self.inputs[0].read()
+			self.pars=parse_doxy(txt)
+			if self.pars.get('OUTPUT_DIRECTORY'):
+				output_node=self.inputs[0].parent.get_bld().make_node(self.pars['OUTPUT_DIRECTORY'])
+			else:
+				output_node=self.inputs[0].parent.get_bld().make_node(self.inputs[0].name+'.doxy')
+			output_node.mkdir()
+			self.pars['OUTPUT_DIRECTORY']=output_node.abspath()
+			if getattr(self.generator,'pars',None):
+				for k,v in self.generator.pars.items():
+					self.pars[k]=v
+			self.doxy_inputs=getattr(self,'doxy_inputs',[])
+			if not self.pars.get('INPUT'):
+				self.doxy_inputs.append(self.inputs[0].parent)
+			else:
+				for i in self.pars.get('INPUT').split():
+					if os.path.isabs(i):
+						node=self.generator.bld.root.find_node(i)
+					else:
+						node=self.inputs[0].parent.find_node(i)
+					if not node:
+						self.generator.bld.fatal('Could not find the doxygen input %r'%i)
+					self.doxy_inputs.append(node)
+		if not getattr(self,'output_dir',None):
+			bld=self.generator.bld
+			self.output_dir=bld.root.find_dir(self.pars['OUTPUT_DIRECTORY'])
+		self.signature()
+		ret=Task.Task.runnable_status(self)
+		if ret==Task.SKIP_ME:
+			self.add_install()
+		return ret
+	def scan(self):
+		exclude_patterns=self.pars.get('EXCLUDE_PATTERNS','').split()
+		exclude_patterns=[pattern.replace('*/','**/')for pattern in exclude_patterns]
+		file_patterns=self.pars.get('FILE_PATTERNS','').split()
+		if not file_patterns:
+			file_patterns=DOXY_FILE_PATTERNS.split()
+		if self.pars.get('RECURSIVE')=='YES':
+			file_patterns=["**/%s"%pattern for pattern in file_patterns]
+		nodes=[]
+		names=[]
+		for node in self.doxy_inputs:
+			if os.path.isdir(node.abspath()):
+				for m in node.ant_glob(incl=file_patterns,excl=exclude_patterns):
+					nodes.append(m)
+			else:
+				nodes.append(node)
+		return(nodes,names)
+	def run(self):
+		dct=self.pars.copy()
+		code='\n'.join(['%s = %s'%(x,dct[x])for x in self.pars])
+		code=code
+		cmd=Utils.subst_vars(DOXY_STR,self.env)
+		env=self.env.env or None
+		proc=Utils.subprocess.Popen(cmd,shell=True,stdin=Utils.subprocess.PIPE,env=env,cwd=self.inputs[0].parent.abspath())
+		proc.communicate(code)
+		return proc.returncode
+	def post_run(self):
+		nodes=self.output_dir.ant_glob('**/*',quiet=True)
+		for x in nodes:
+			x.sig=Utils.h_file(x.abspath())
+		self.add_install()
+		return Task.Task.post_run(self)
+	def add_install(self):
+		nodes=self.output_dir.ant_glob('**/*',quiet=True)
+		self.outputs+=nodes
+		if getattr(self.generator,'install_path',None):
+			if not getattr(self.generator,'doxy_tar',None):
+				self.generator.bld.install_files(self.generator.install_path,self.outputs,postpone=False,cwd=self.output_dir,relative_trick=True)
+class tar(Task.Task):
+	run_str='${TAR} ${TAROPTS} ${TGT} ${SRC}'
+	color='RED'
+	after=['doxygen']
+	def runnable_status(self):
+		for x in getattr(self,'input_tasks',[]):
+			if not x.hasrun:
+				return Task.ASK_LATER
+		if not getattr(self,'tar_done_adding',None):
+			self.tar_done_adding=True
+			for x in getattr(self,'input_tasks',[]):
+				self.set_inputs(x.outputs)
+			if not self.inputs:
+				return Task.SKIP_ME
+		return Task.Task.runnable_status(self)
+	def __str__(self):
+		tgt_str=' '.join([a.path_from(a.ctx.launch_node())for a in self.outputs])
+		return'%s: %s\n'%(self.__class__.__name__,tgt_str)
+@feature('doxygen')
+def process_doxy(self):
+	if not getattr(self,'doxyfile',None):
+		self.generator.bld.fatal('no doxyfile??')
+	node=self.doxyfile
+	if not isinstance(node,Node.Node):
+		node=self.path.find_resource(node)
+	if not node:
+		raise ValueError('doxygen file not found')
+	dsk=self.create_task('doxygen',node)
+	if getattr(self,'doxy_tar',None):
+		tsk=self.create_task('tar')
+		tsk.input_tasks=[dsk]
+		tsk.set_outputs(self.path.find_or_declare(self.doxy_tar))
+		if self.doxy_tar.endswith('bz2'):
+			tsk.env['TAROPTS']=['cjf']
+		elif self.doxy_tar.endswith('gz'):
+			tsk.env['TAROPTS']=['czf']
+		else:
+			tsk.env['TAROPTS']=['cf']
+		if getattr(self,'install_path',None):
+			self.bld.install_files(self.install_path,tsk.outputs)
+def configure(conf):
+	'''
+	Check if doxygen and tar commands are present in the system
+
+	If the commands are present, then conf.env.DOXYGEN and conf.env.TAR
+	variables will be set. Detection can be controlled by setting DOXYGEN and
+	TAR environmental variables.
+	'''
+	conf.find_program('doxygen',var='DOXYGEN',mandatory=False)
+	conf.find_program('tar',var='TAR',mandatory=False)
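The doxygen tool above is driven from a wscript through the 'doxygen' feature: process_doxy() reads the task generator's doxyfile, doxy_tar and install_path attributes, and configure() only sets conf.env.DOXYGEN and conf.env.TAR when the programs are found (mandatory=False). A minimal usage sketch; the doxyfile path and install destination are illustrative assumptions, not values taken from lilv's build:

    # hypothetical wscript fragment
    def configure(conf):
        conf.load('doxygen')  # runs the tool's configure() above

    def build(bld):
        if bld.env.DOXYGEN:  # skip docs quietly if doxygen was not found
            bld(features='doxygen',
                doxyfile='doc/reference.doxygen',  # parsed by parse_doxy()
                install_path='${PREFIX}/share/doc/lilv')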
diff -Nru lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/extras/__init__.py lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/extras/__init__.py
--- lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/extras/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/extras/__init__.py	2019-06-06 20:12:09.000000000 +0000
@@ -0,0 +1,4 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
diff -Nru lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/extras/swig.py lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/extras/swig.py
--- lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/extras/swig.py	1970-01-01 00:00:00.000000000 +0000
+++ lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/extras/swig.py	2019-06-06 20:12:09.000000000 +0000
@@ -0,0 +1,145 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+import re
+from waflib import Task,Logs
+from waflib.TaskGen import extension
+from waflib.Configure import conf
+from waflib.Tools import c_preproc
+SWIG_EXTS=['.swig','.i']
+re_module=re.compile('%module(?:\s*\(.*\))?\s+(.+)',re.M)
+re_1=re.compile(r'^%module.*?\s+([\w]+)\s*?$',re.M)
+re_2=re.compile('[#%]include [<"](.*)[">]',re.M)
+class swig(Task.Task):
+	color='BLUE'
+	run_str='${SWIG} ${SWIGFLAGS} ${SWIGPATH_ST:INCPATHS} ${SWIGDEF_ST:DEFINES} ${SRC}'
+	ext_out=['.h']
+	vars=['SWIG_VERSION','SWIGDEPS']
+	def runnable_status(self):
+		for t in self.run_after:
+			if not t.hasrun:
+				return Task.ASK_LATER
+		if not getattr(self,'init_outputs',None):
+			self.init_outputs=True
+			if not getattr(self,'module',None):
+				txt=self.inputs[0].read()
+				m=re_module.search(txt)
+				if not m:
+					raise ValueError("could not find the swig module name")
+				self.module=m.group(1)
+			swig_c(self)
+			for x in self.env['SWIGFLAGS']:
+				x=x[1:]
+				try:
+					fun=swig_langs[x]
+				except KeyError:
+					pass
+				else:
+					fun(self)
+		return super(swig,self).runnable_status()
+	def scan(self):
+		lst_src=[]
+		seen=[]
+		to_see=[self.inputs[0]]
+		while to_see:
+			node=to_see.pop(0)
+			if node in seen:
+				continue
+			seen.append(node)
+			lst_src.append(node)
+			code=node.read()
+			code=c_preproc.re_nl.sub('',code)
+			code=c_preproc.re_cpp.sub(c_preproc.repl,code)
+			names=re_2.findall(code)
+			for n in names:
+				for d in self.generator.includes_nodes+[node.parent]:
+					u=d.find_resource(n)
+					if u:
+						to_see.append(u)
+						break
+				else:
+					Logs.warn('could not find %r'%n)
+		return(lst_src,[])
+swig_langs={}
+def swigf(fun):
+	swig_langs[fun.__name__.replace('swig_','')]=fun
+swig.swigf=swigf
+def swig_c(self):
+	ext='.swigwrap_%d.c'%self.generator.idx
+	flags=self.env['SWIGFLAGS']
+	if'-c++'in flags:
+		ext+='xx'
+	out_node=self.inputs[0].parent.find_or_declare(self.module+ext)
+	if'-c++'in flags:
+		c_tsk=self.generator.cxx_hook(out_node)
+	else:
+		c_tsk=self.generator.c_hook(out_node)
+	c_tsk.set_run_after(self)
+	ge=self.generator.bld.producer
+	ge.outstanding.insert(0,c_tsk)
+	ge.total+=1
+	try:
+		ltask=self.generator.link_task
+	except AttributeError:
+		pass
+	else:
+		ltask.set_run_after(c_tsk)
+		ltask.inputs.append(c_tsk.outputs[0])
+	self.outputs.append(out_node)
+	if not'-o'in self.env['SWIGFLAGS']:
+		self.env.append_value('SWIGFLAGS',['-o',self.outputs[0].abspath()])
+@swigf
+def swig_python(tsk):
+	node=tsk.inputs[0].parent
+	if tsk.outdir:
+		node=tsk.outdir
+	tsk.set_outputs(node.find_or_declare(tsk.module+'.py'))
+@swigf
+def swig_ocaml(tsk):
+	node=tsk.inputs[0].parent
+	if tsk.outdir:
+		node=tsk.outdir
+	tsk.set_outputs(node.find_or_declare(tsk.module+'.ml'))
+	tsk.set_outputs(node.find_or_declare(tsk.module+'.mli'))
+@extension(*SWIG_EXTS)
+def i_file(self,node):
+	tsk=self.create_task('swig')
+	tsk.set_inputs(node)
+	tsk.module=getattr(self,'swig_module',None)
+	flags=self.to_list(getattr(self,'swig_flags',[]))
+	tsk.env.append_value('SWIGFLAGS',flags)
+	tsk.outdir=None
+	if'-outdir'in flags:
+		outdir=flags[flags.index('-outdir')+1]
+		outdir=tsk.generator.bld.bldnode.make_node(outdir)
+		outdir.mkdir()
+		tsk.outdir=outdir
+@conf
+def check_swig_version(conf,minver=None):
+	assert minver is None or isinstance(minver,tuple)
+	swigbin=conf.env['SWIG']
+	if not swigbin:
+		conf.fatal('could not find the swig executable')
+	cmd=swigbin+['-version']
+	Logs.debug('swig: Running swig command %r',cmd)
+	reg_swig=re.compile(r'SWIG Version\s(.*)',re.M)
+	swig_out=conf.cmd_and_log(cmd)
+	swigver_tuple=tuple([int(s)for s in reg_swig.findall(swig_out)[0].split('.')])
+	result=(minver is None)or(swigver_tuple>=minver)
+	if result:
+		swigver='.'.join([str(x)for x in swigver_tuple[:2]])
+		conf.env['SWIG_VERSION']=swigver
+	swigver_full='.'.join(map(str,swigver_tuple[:3]))
+	if minver is None:
+		conf.msg('Checking for swig version',swigver_full)
+	else:
+		minver_str='.'.join(map(str,minver))
+		conf.msg('Checking for swig version >= %s'%(minver_str,),swigver_full,color=result and'GREEN'or'YELLOW')
+	if not result:
+		conf.fatal('The swig version is too old, expecting %r'%(minver,))
+	return swigver_tuple
+def configure(conf):
+	conf.find_program('swig',var='SWIG')
+	conf.env.SWIGPATH_ST='-I%s'
+	conf.env.SWIGDEF_ST='-D%s'
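The swig tool above splices generated wrappers into an ordinary native build: i_file() creates a swig task per .i source, swig_c() declares the generated .c/.cxx output and hooks it into the compile and link chain, and swig_python() adds the generated .py module (honouring -outdir). A hypothetical wscript fragment in that spirit; the paths, flags and version floor are illustrative only, not lilv's actual bindings setup:

    # hypothetical wscript fragment
    def configure(conf):
        conf.load('python swig')
        conf.check_python_headers()
        conf.check_swig_version((1, 3, 27))  # fatals via the minver branch above

    def build(bld):
        bld(features='cxx cxxshlib pyext',
            source='bindings/lilv.i',  # dispatched to i_file() above
            target='lilv',
            swig_flags='-c++ -python -outdir bindings')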
diff -Nru lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/fixpy2.py lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/fixpy2.py
--- lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/fixpy2.py	1970-01-01 00:00:00.000000000 +0000
+++ lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/fixpy2.py	2019-06-06 20:12:09.000000000 +0000
@@ -0,0 +1,53 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+import os
+all_modifs={}
+def fixdir(dir):
+	global all_modifs
+	for k in all_modifs:
+		for v in all_modifs[k]:
+			modif(os.path.join(dir,'waflib'),k,v)
+def modif(dir,name,fun):
+	if name=='*':
+		lst=[]
+		for y in'. Tools extras'.split():
+			for x in os.listdir(os.path.join(dir,y)):
+				if x.endswith('.py'):
+					lst.append(y+os.sep+x)
+		for x in lst:
+			modif(dir,x,fun)
+		return
+	filename=os.path.join(dir,name)
+	f=open(filename,'r')
+	try:
+		txt=f.read()
+	finally:
+		f.close()
+	txt=fun(txt)
+	f=open(filename,'w')
+	try:
+		f.write(txt)
+	finally:
+		f.close()
+def subst(*k):
+	def do_subst(fun):
+		global all_modifs
+		for x in k:
+			try:
+				all_modifs[x].append(fun)
+			except KeyError:
+				all_modifs[x]=[fun]
+		return fun
+	return do_subst
+@subst('*')
+def r1(code):
+	code=code.replace('as e:',',e:')
+	code=code.replace(".decode(sys.stdout.encoding or'iso8859-1')",'')
+	code=code.replace('.encode()','')
+	return code
+@subst('Runner.py')
+def r4(code):
+	code=code.replace('next(self.biter)','self.biter.next()')
+	return code
diff -Nru lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/__init__.py lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/__init__.py
--- lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/__init__.py	2019-06-06 20:12:09.000000000 +0000
@@ -0,0 +1,4 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
diff -Nru lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Logs.py lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Logs.py
--- lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Logs.py	1970-01-01 00:00:00.000000000 +0000
+++ lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Logs.py	2019-06-06 20:12:09.000000000 +0000
@@ -0,0 +1,199 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit!
https://waf.io/book/index.html#_obtaining_the_waf_file + +import os,re,traceback,sys +from waflib import Utils,ansiterm +if not os.environ.get('NOSYNC',False): + if sys.stdout.isatty()and id(sys.stdout)==id(sys.__stdout__): + sys.stdout=ansiterm.AnsiTerm(sys.stdout) + if sys.stderr.isatty()and id(sys.stderr)==id(sys.__stderr__): + sys.stderr=ansiterm.AnsiTerm(sys.stderr) +import logging +LOG_FORMAT=os.environ.get('WAF_LOG_FORMAT','%(asctime)s %(c1)s%(zone)s%(c2)s %(message)s') +HOUR_FORMAT=os.environ.get('WAF_HOUR_FORMAT','%H:%M:%S') +zones='' +verbose=0 +colors_lst={'USE':True,'BOLD':'\x1b[01;1m','RED':'\x1b[01;31m','GREEN':'\x1b[32m','YELLOW':'\x1b[33m','PINK':'\x1b[35m','BLUE':'\x1b[01;34m','CYAN':'\x1b[36m','GREY':'\x1b[37m','NORMAL':'\x1b[0m','cursor_on':'\x1b[?25h','cursor_off':'\x1b[?25l',} +indicator='\r\x1b[K%s%s%s' +try: + unicode +except NameError: + unicode=None +def enable_colors(use): + if use==1: + if not(sys.stderr.isatty()or sys.stdout.isatty()): + use=0 + if Utils.is_win32 and os.name!='java': + term=os.environ.get('TERM','') + else: + term=os.environ.get('TERM','dumb') + if term in('dumb','emacs'): + use=0 + if use>=1: + os.environ['TERM']='vt100' + colors_lst['USE']=use +try: + get_term_cols=ansiterm.get_term_cols +except AttributeError: + def get_term_cols(): + return 80 +get_term_cols.__doc__=""" + Get the console width in characters. + + :return: the number of characters per line + :rtype: int + """ +def get_color(cl): + if not colors_lst['USE']:return'' + return colors_lst.get(cl,'') +class color_dict(object): + def __getattr__(self,a): + return get_color(a) + def __call__(self,a): + return get_color(a) +colors=color_dict() +re_log=re.compile(r'(\w+): (.*)',re.M) +class log_filter(logging.Filter): + def __init__(self,name=None): + pass + def filter(self,rec): + rec.zone=rec.module + if rec.levelno>=logging.INFO: + return True + m=re_log.match(rec.msg) + if m: + rec.zone=m.group(1) + rec.msg=m.group(2) + if zones: + return getattr(rec,'zone','')in zones or'*'in zones + elif not verbose>2: + return False + return True +class log_handler(logging.StreamHandler): + def emit(self,record): + try: + try: + self.stream=record.stream + except AttributeError: + if record.levelno>=logging.WARNING: + record.stream=self.stream=sys.stderr + else: + record.stream=self.stream=sys.stdout + self.emit_override(record) + self.flush() + except(KeyboardInterrupt,SystemExit): + raise + except: + self.handleError(record) + def emit_override(self,record,**kw): + self.terminator=getattr(record,'terminator','\n') + stream=self.stream + if unicode: + msg=self.formatter.format(record) + fs='%s'+self.terminator + try: + if(isinstance(msg,unicode)and getattr(stream,'encoding',None)): + fs=fs.decode(stream.encoding) + try: + stream.write(fs%msg) + except UnicodeEncodeError: + stream.write((fs%msg).encode(stream.encoding)) + else: + stream.write(fs%msg) + except UnicodeError: + stream.write((fs%msg).encode('utf-8')) + else: + logging.StreamHandler.emit(self,record) +class formatter(logging.Formatter): + def __init__(self): + logging.Formatter.__init__(self,LOG_FORMAT,HOUR_FORMAT) + def format(self,rec): + try: + msg=rec.msg.decode('utf-8') + except Exception: + msg=rec.msg + use=colors_lst['USE'] + if(use==1 and rec.stream.isatty())or use==2: + c1=getattr(rec,'c1',None) + if c1 is None: + c1='' + if rec.levelno>=logging.ERROR: + c1=colors.RED + elif rec.levelno>=logging.WARNING: + c1=colors.YELLOW + elif rec.levelno>=logging.INFO: + c1=colors.GREEN + c2=getattr(rec,'c2',colors.NORMAL) + 
msg='%s%s%s'%(c1,msg,c2) + else: + msg=re.sub(r'\r(?!\n)|\x1B\[(K|.*?(m|h|l))','',msg) + if rec.levelno>=logging.INFO: + return msg + rec.msg=msg + rec.c1=colors.PINK + rec.c2=colors.NORMAL + return logging.Formatter.format(self,rec) +log=None +def debug(*k,**kw): + if verbose: + k=list(k) + k[0]=k[0].replace('\n',' ') + global log + log.debug(*k,**kw) +def error(*k,**kw): + global log + log.error(*k,**kw) + if verbose>2: + st=traceback.extract_stack() + if st: + st=st[:-1] + buf=[] + for filename,lineno,name,line in st: + buf.append(' File "%s", line %d, in %s'%(filename,lineno,name)) + if line: + buf.append(' %s'%line.strip()) + if buf:log.error("\n".join(buf)) +def warn(*k,**kw): + global log + log.warn(*k,**kw) +def info(*k,**kw): + global log + log.info(*k,**kw) +def init_log(): + global log + log=logging.getLogger('waflib') + log.handlers=[] + log.filters=[] + hdlr=log_handler() + hdlr.setFormatter(formatter()) + log.addHandler(hdlr) + log.addFilter(log_filter()) + log.setLevel(logging.DEBUG) +def make_logger(path,name): + logger=logging.getLogger(name) + hdlr=logging.FileHandler(path,'w') + formatter=logging.Formatter('%(message)s') + hdlr.setFormatter(formatter) + logger.addHandler(hdlr) + logger.setLevel(logging.DEBUG) + return logger +def make_mem_logger(name,to_log,size=8192): + from logging.handlers import MemoryHandler + logger=logging.getLogger(name) + hdlr=MemoryHandler(size,target=to_log) + formatter=logging.Formatter('%(message)s') + hdlr.setFormatter(formatter) + logger.addHandler(hdlr) + logger.memhandler=hdlr + logger.setLevel(logging.DEBUG) + return logger +def free_logger(logger): + try: + for x in logger.handlers: + x.close() + logger.removeHandler(x) + except Exception: + pass +def pprint(col,msg,label='',sep='\n'): + info("%s%s%s %s"%(colors(col),msg,colors.NORMAL,label),extra={'terminator':sep}) diff -Nru lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Node.py lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Node.py --- lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Node.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Node.py 2019-06-06 20:12:09.000000000 +0000 @@ -0,0 +1,491 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file + +import os,re,sys,shutil +from waflib import Utils,Errors +exclude_regs=''' +**/*~ +**/#*# +**/.#* +**/%*% +**/._* +**/CVS +**/CVS/** +**/.cvsignore +**/SCCS +**/SCCS/** +**/vssver.scc +**/.svn +**/.svn/** +**/BitKeeper +**/.git +**/.git/** +**/.gitignore +**/.bzr +**/.bzrignore +**/.bzr/** +**/.hg +**/.hg/** +**/_MTN +**/_MTN/** +**/.arch-ids +**/{arch} +**/_darcs +**/_darcs/** +**/.intlcache +**/.DS_Store''' +split_path=Utils.split_path +split_path_unix=Utils.split_path_unix +split_path_cygwin=Utils.split_path_cygwin +split_path_win32=Utils.split_path_win32 +class Node(object): + dict_class=dict + __slots__=('name','sig','children','parent','cache_abspath','cache_isdir','cache_sig') + def __init__(self,name,parent): + self.name=name + self.parent=parent + if parent: + if name in parent.children: + raise Errors.WafError('node %s exists in the parent files %r already'%(name,parent)) + parent.children[name]=self + def __setstate__(self,data): + self.name=data[0] + self.parent=data[1] + if data[2]is not None: + self.children=self.dict_class(data[2]) + if data[3]is not None: + self.sig=data[3] + def __getstate__(self): + return(self.name,self.parent,getattr(self,'children',None),getattr(self,'sig',None)) + def __str__(self): + return self.name + def __repr__(self): + return self.abspath() + def __hash__(self): + return id(self) + def __eq__(self,node): + return id(self)==id(node) + def __copy__(self): + raise Errors.WafError('nodes are not supposed to be copied') + def read(self,flags='r',encoding='ISO8859-1'): + return Utils.readf(self.abspath(),flags,encoding) + def write(self,data,flags='w',encoding='ISO8859-1'): + Utils.writef(self.abspath(),data,flags,encoding) + def read_json(self,convert=True,encoding='utf-8'): + import json + object_pairs_hook=None + if convert and sys.hexversion<0x3000000: + try: + _type=unicode + except NameError: + _type=str + def convert(value): + if isinstance(value,list): + return[convert(element)for element in value] + elif isinstance(value,_type): + return str(value) + else: + return value + def object_pairs(pairs): + return dict((str(pair[0]),convert(pair[1]))for pair in pairs) + object_pairs_hook=object_pairs + return json.loads(self.read(encoding=encoding),object_pairs_hook=object_pairs_hook) + def write_json(self,data,pretty=True): + import json + indent=2 + separators=(',',': ') + sort_keys=pretty + newline=os.linesep + if not pretty: + indent=None + separators=(',',':') + newline='' + output=json.dumps(data,indent=indent,separators=separators,sort_keys=sort_keys)+newline + self.write(output,encoding='utf-8') + def chmod(self,val): + os.chmod(self.abspath(),val) + def delete(self): + try: + try: + if hasattr(self,'children'): + shutil.rmtree(self.abspath()) + else: + os.remove(self.abspath()) + except OSError ,e: + if os.path.exists(self.abspath()): + raise e + finally: + self.evict() + def evict(self): + del self.parent.children[self.name] + def suffix(self): + k=max(0,self.name.rfind('.')) + return self.name[k:] + def height(self): + d=self + val=-1 + while d: + d=d.parent + val+=1 + return val + def listdir(self): + lst=Utils.listdir(self.abspath()) + lst.sort() + return lst + def mkdir(self): + if getattr(self,'cache_isdir',None): + return + try: + self.parent.mkdir() + except OSError: + pass + if self.name: + try: + os.makedirs(self.abspath()) + except OSError: + pass + if not os.path.isdir(self.abspath()): + raise Errors.WafError('Could not create the directory %s'%self.abspath()) + try: + 
self.children + except AttributeError: + self.children=self.dict_class() + self.cache_isdir=True + def find_node(self,lst): + if isinstance(lst,str): + lst=[x for x in split_path(lst)if x and x!='.'] + cur=self + for x in lst: + if x=='..': + cur=cur.parent or cur + continue + try: + ch=cur.children + except AttributeError: + cur.children=self.dict_class() + else: + try: + cur=ch[x] + continue + except KeyError: + pass + cur=self.__class__(x,cur) + try: + os.stat(cur.abspath()) + except OSError: + cur.evict() + return None + ret=cur + try: + os.stat(ret.abspath()) + except OSError: + ret.evict() + return None + try: + while not getattr(cur.parent,'cache_isdir',None): + cur=cur.parent + cur.cache_isdir=True + except AttributeError: + pass + return ret + def make_node(self,lst): + if isinstance(lst,str): + lst=[x for x in split_path(lst)if x and x!='.'] + cur=self + for x in lst: + if x=='..': + cur=cur.parent or cur + continue + if getattr(cur,'children',{}): + if x in cur.children: + cur=cur.children[x] + continue + else: + cur.children=self.dict_class() + cur=self.__class__(x,cur) + return cur + def search_node(self,lst): + if isinstance(lst,str): + lst=[x for x in split_path(lst)if x and x!='.'] + cur=self + for x in lst: + if x=='..': + cur=cur.parent or cur + else: + try: + cur=cur.children[x] + except(AttributeError,KeyError): + return None + return cur + def path_from(self,node): + c1=self + c2=node + c1h=c1.height() + c2h=c2.height() + lst=[] + up=0 + while c1h>c2h: + lst.append(c1.name) + c1=c1.parent + c1h-=1 + while c2h>c1h: + up+=1 + c2=c2.parent + c2h-=1 + while id(c1)!=id(c2): + lst.append(c1.name) + up+=1 + c1=c1.parent + c2=c2.parent + if c1.parent: + for i in range(up): + lst.append('..') + else: + if lst and not Utils.is_win32: + lst.append('') + lst.reverse() + return os.sep.join(lst)or'.' 
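path_from() above computes a relative path structurally: both nodes climb to their first common ancestor, the names collected on the self side become the path, and one '..' is emitted for each level the base node sits below that ancestor. abspath() below then builds and memoizes absolute paths. A standalone sketch of the intended equivalence, using plain os.path instead of waf nodes (hypothetical POSIX paths):

    import os
    # For two nodes in the same tree, node.path_from(base) is meant to agree
    # with a relative path between their absolute paths:
    rel = os.path.relpath('/project/src/lib/foo.c', '/project/build')
    assert rel == os.path.join('..', 'src', 'lib', 'foo.c')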
+ def abspath(self): + try: + return self.cache_abspath + except AttributeError: + pass + if not self.parent: + val=os.sep + elif not self.parent.name: + val=os.sep+self.name + else: + val=self.parent.abspath()+os.sep+self.name + self.cache_abspath=val + return val + if Utils.is_win32: + def abspath(self): + try: + return self.cache_abspath + except AttributeError: + pass + if not self.parent: + val='' + elif not self.parent.name: + val=self.name+os.sep + else: + val=self.parent.abspath().rstrip(os.sep)+os.sep+self.name + self.cache_abspath=val + return val + def is_child_of(self,node): + p=self + diff=self.height()-node.height() + while diff>0: + diff-=1 + p=p.parent + return id(p)==id(node) + def ant_iter(self,accept=None,maxdepth=25,pats=[],dir=False,src=True,remove=True): + dircont=self.listdir() + dircont.sort() + try: + lst=set(self.children.keys()) + except AttributeError: + self.children=self.dict_class() + else: + if remove: + for x in lst-set(dircont): + self.children[x].evict() + for name in dircont: + npats=accept(name,pats) + if npats and npats[0]: + accepted=[]in npats[0] + node=self.make_node([name]) + isdir=os.path.isdir(node.abspath()) + if accepted: + if isdir: + if dir: + yield node + else: + if src: + yield node + if getattr(node,'cache_isdir',None)or isdir: + node.cache_isdir=True + if maxdepth: + for k in node.ant_iter(accept=accept,maxdepth=maxdepth-1,pats=npats,dir=dir,src=src,remove=remove): + yield k + raise StopIteration + def ant_glob(self,*k,**kw): + src=kw.get('src',True) + dir=kw.get('dir',False) + excl=kw.get('excl',exclude_regs) + incl=k and k[0]or kw.get('incl','**') + reflags=kw.get('ignorecase',0)and re.I + def to_pat(s): + lst=Utils.to_list(s) + ret=[] + for x in lst: + x=x.replace('\\','/').replace('//','/') + if x.endswith('/'): + x+='**' + lst2=x.split('/') + accu=[] + for k in lst2: + if k=='**': + accu.append(k) + else: + k=k.replace('.','[.]').replace('*','.*').replace('?','.').replace('+','\\+') + k='^%s$'%k + try: + accu.append(re.compile(k,flags=reflags)) + except Exception ,e: + raise Errors.WafError("Invalid pattern: %s"%k,e) + ret.append(accu) + return ret + def filtre(name,nn): + ret=[] + for lst in nn: + if not lst: + pass + elif lst[0]=='**': + ret.append(lst) + if len(lst)>1: + if lst[1].match(name): + ret.append(lst[2:]) + else: + ret.append([]) + elif lst[0].match(name): + ret.append(lst[1:]) + return ret + def accept(name,pats): + nacc=filtre(name,pats[0]) + nrej=filtre(name,pats[1]) + if[]in nrej: + nacc=[] + return[nacc,nrej] + ret=[x for x in self.ant_iter(accept=accept,pats=[to_pat(incl),to_pat(excl)],maxdepth=kw.get('maxdepth',25),dir=dir,src=src,remove=kw.get('remove',True))] + if kw.get('flat',False): + return' '.join([x.path_from(self)for x in ret]) + return ret + def is_src(self): + cur=self + x=id(self.ctx.srcnode) + y=id(self.ctx.bldnode) + while cur.parent: + if id(cur)==y: + return False + if id(cur)==x: + return True + cur=cur.parent + return False + def is_bld(self): + cur=self + y=id(self.ctx.bldnode) + while cur.parent: + if id(cur)==y: + return True + cur=cur.parent + return False + def get_src(self): + cur=self + x=id(self.ctx.srcnode) + y=id(self.ctx.bldnode) + lst=[] + while cur.parent: + if id(cur)==y: + lst.reverse() + return self.ctx.srcnode.make_node(lst) + if id(cur)==x: + return self + lst.append(cur.name) + cur=cur.parent + return self + def get_bld(self): + cur=self + x=id(self.ctx.srcnode) + y=id(self.ctx.bldnode) + lst=[] + while cur.parent: + if id(cur)==y: + return self + if id(cur)==x: + lst.reverse() 
+ return self.ctx.bldnode.make_node(lst) + lst.append(cur.name) + cur=cur.parent + lst.reverse() + if lst and Utils.is_win32 and len(lst[0])==2 and lst[0].endswith(':'): + lst[0]=lst[0][0] + return self.ctx.bldnode.make_node(['__root__']+lst) + def find_resource(self,lst): + if isinstance(lst,str): + lst=[x for x in split_path(lst)if x and x!='.'] + node=self.get_bld().search_node(lst) + if not node: + self=self.get_src() + node=self.find_node(lst) + if node: + if os.path.isdir(node.abspath()): + return None + return node + def find_or_declare(self,lst): + if isinstance(lst,str): + lst=[x for x in split_path(lst)if x and x!='.'] + node=self.get_bld().search_node(lst) + if node: + if not os.path.isfile(node.abspath()): + node.sig=None + node.parent.mkdir() + return node + self=self.get_src() + node=self.find_node(lst) + if node: + if not os.path.isfile(node.abspath()): + node.sig=None + node.parent.mkdir() + return node + node=self.get_bld().make_node(lst) + node.parent.mkdir() + return node + def find_dir(self,lst): + if isinstance(lst,str): + lst=[x for x in split_path(lst)if x and x!='.'] + node=self.find_node(lst) + try: + if not os.path.isdir(node.abspath()): + return None + except(OSError,AttributeError): + return None + return node + def change_ext(self,ext,ext_in=None): + name=self.name + if ext_in is None: + k=name.rfind('.') + if k>=0: + name=name[:k]+ext + else: + name=name+ext + else: + name=name[:-len(ext_in)]+ext + return self.parent.find_or_declare([name]) + def bldpath(self): + return self.path_from(self.ctx.bldnode) + def srcpath(self): + return self.path_from(self.ctx.srcnode) + def relpath(self): + cur=self + x=id(self.ctx.bldnode) + while cur.parent: + if id(cur)==x: + return self.bldpath() + cur=cur.parent + return self.srcpath() + def bld_dir(self): + return self.parent.bldpath() + def get_bld_sig(self): + try: + return self.cache_sig + except AttributeError: + pass + if not self.is_bld()or self.ctx.bldnode is self.ctx.srcnode: + self.sig=Utils.h_file(self.abspath()) + self.cache_sig=ret=self.sig + return ret +pickle_lock=Utils.threading.Lock() +class Nod3(Node): + pass diff -Nru lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Options.py lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Options.py --- lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Options.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Options.py 2019-06-06 20:12:09.000000000 +0000 @@ -0,0 +1,147 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file + +import os,tempfile,optparse,sys,re +from waflib import Logs,Utils,Context +cmds='distclean configure build install clean uninstall check dist distcheck'.split() +options={} +commands=[] +envvars=[] +lockfile=os.environ.get('WAFLOCK','.lock-waf_%s_build'%sys.platform) +platform=Utils.unversioned_sys_platform() +class opt_parser(optparse.OptionParser): + def __init__(self,ctx): + optparse.OptionParser.__init__(self,conflict_handler="resolve",version='waf %s (%s)'%(Context.WAFVERSION,Context.WAFREVISION)) + self.formatter.width=Logs.get_term_cols() + self.ctx=ctx + def print_usage(self,file=None): + return self.print_help(file) + def get_usage(self): + cmds_str={} + for cls in Context.classes: + if not cls.cmd or cls.cmd=='options'or cls.cmd.startswith('_'): + continue + s=cls.__doc__ or'' + cmds_str[cls.cmd]=s + if Context.g_module: + for(k,v)in Context.g_module.__dict__.items(): + if k in('options','init','shutdown'): + continue + if type(v)is type(Context.create_context): + if v.__doc__ and not k.startswith('_'): + cmds_str[k]=v.__doc__ + just=0 + for k in cmds_str: + just=max(just,len(k)) + lst=[' %s: %s'%(k.ljust(just),v)for(k,v)in cmds_str.items()] + lst.sort() + ret='\n'.join(lst) + return'''waf [commands] [options] + +Main commands (example: ./waf build -j4) +%s +'''%ret +class OptionsContext(Context.Context): + cmd='options' + fun='options' + def __init__(self,**kw): + super(OptionsContext,self).__init__(**kw) + self.parser=opt_parser(self) + self.option_groups={} + jobs=self.jobs() + p=self.add_option + color=os.environ.get('NOCOLOR','')and'no'or'auto' + p('-c','--color',dest='colors',default=color,action='store',help='whether to use colors (yes/no/auto) [default: auto]',choices=('yes','no','auto')) + p('-j','--jobs',dest='jobs',default=jobs,type='int',help='amount of parallel jobs (%r)'%jobs) + p('-k','--keep',dest='keep',default=0,action='count',help='continue despite errors (-kk to try harder)') + p('-v','--verbose',dest='verbose',default=0,action='count',help='verbosity level -v -vv or -vvv [default: 0]') + p('--zones',dest='zones',default='',action='store',help='debugging zones (task_gen, deps, tasks, etc)') + gr=self.add_option_group('Configuration options') + self.option_groups['configure options']=gr + gr.add_option('-o','--out',action='store',default='',help='build dir for the project',dest='out') + gr.add_option('-t','--top',action='store',default='',help='src dir for the project',dest='top') + gr.add_option('--no-lock-in-run',action='store_true',default='',help=optparse.SUPPRESS_HELP,dest='no_lock_in_run') + gr.add_option('--no-lock-in-out',action='store_true',default='',help=optparse.SUPPRESS_HELP,dest='no_lock_in_out') + gr.add_option('--no-lock-in-top',action='store_true',default='',help=optparse.SUPPRESS_HELP,dest='no_lock_in_top') + default_prefix=getattr(Context.g_module,'default_prefix',os.environ.get('PREFIX')) + if not default_prefix: + if platform=='win32': + d=tempfile.gettempdir() + default_prefix=d[0].upper()+d[1:] + else: + default_prefix='/usr/local/' + gr.add_option('--prefix',dest='prefix',default=default_prefix,help='installation prefix [default: %r]'%default_prefix) + gr.add_option('--bindir',dest='bindir',help='bindir') + gr.add_option('--libdir',dest='libdir',help='libdir') + gr=self.add_option_group('Build and installation options') + self.option_groups['build and install options']=gr + gr.add_option('-p','--progress',dest='progress_bar',default=0,action='count',help='-p: progress bar; -pp: 
ide output') + gr.add_option('--targets',dest='targets',default='',action='store',help='task generators, e.g. "target1,target2"') + gr=self.add_option_group('Step options') + self.option_groups['step options']=gr + gr.add_option('--files',dest='files',default='',action='store',help='files to process, by regexp, e.g. "*/main.c,*/test/main.o"') + default_destdir=os.environ.get('DESTDIR','') + gr=self.add_option_group('Installation and uninstallation options') + self.option_groups['install/uninstall options']=gr + gr.add_option('--destdir',help='installation root [default: %r]'%default_destdir,default=default_destdir,dest='destdir') + gr.add_option('-f','--force',dest='force',default=False,action='store_true',help='force file installation') + gr.add_option('--distcheck-args',metavar='ARGS',help='arguments to pass to distcheck',default=None,action='store') + def jobs(self): + count=int(os.environ.get('JOBS',0)) + if count<1: + if'NUMBER_OF_PROCESSORS'in os.environ: + count=int(os.environ.get('NUMBER_OF_PROCESSORS',1)) + else: + if hasattr(os,'sysconf_names'): + if'SC_NPROCESSORS_ONLN'in os.sysconf_names: + count=int(os.sysconf('SC_NPROCESSORS_ONLN')) + elif'SC_NPROCESSORS_CONF'in os.sysconf_names: + count=int(os.sysconf('SC_NPROCESSORS_CONF')) + if not count and os.name not in('nt','java'): + try: + tmp=self.cmd_and_log(['sysctl','-n','hw.ncpu'],quiet=0) + except Exception: + pass + else: + if re.match('^[0-9]+$',tmp): + count=int(tmp) + if count<1: + count=1 + elif count>1024: + count=1024 + return count + def add_option(self,*k,**kw): + return self.parser.add_option(*k,**kw) + def add_option_group(self,*k,**kw): + try: + gr=self.option_groups[k[0]] + except KeyError: + gr=self.parser.add_option_group(*k,**kw) + self.option_groups[k[0]]=gr + return gr + def get_option_group(self,opt_str): + try: + return self.option_groups[opt_str] + except KeyError: + for group in self.parser.option_groups: + if group.title==opt_str: + return group + return None + def parse_args(self,_args=None): + global options,commands,envvars + (options,leftover_args)=self.parser.parse_args(args=_args) + for arg in leftover_args: + if'='in arg: + envvars.append(arg) + else: + commands.append(arg) + if options.destdir: + options.destdir=Utils.sane_path(options.destdir) + if options.verbose>=1: + self.load('errcheck') + colors={'yes':2,'auto':1,'no':0}[options.colors] + Logs.enable_colors(colors) + def execute(self): + super(OptionsContext,self).execute() + self.parse_args() diff -Nru lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Runner.py lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Runner.py --- lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Runner.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Runner.py 2019-06-06 20:12:09.000000000 +0000 @@ -0,0 +1,207 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file + +import random,atexit +try: + from queue import Queue +except ImportError: + from Queue import Queue +from waflib import Utils,Task,Errors,Logs +GAP=10 +class TaskConsumer(Utils.threading.Thread): + def __init__(self): + Utils.threading.Thread.__init__(self) + self.ready=Queue() + self.setDaemon(1) + self.start() + def run(self): + try: + self.loop() + except Exception: + pass + def loop(self): + while 1: + tsk=self.ready.get() + if not isinstance(tsk,Task.TaskBase): + tsk(self) + else: + tsk.process() +pool=Queue() +def get_pool(): + try: + return pool.get(False) + except Exception: + return TaskConsumer() +def put_pool(x): + pool.put(x) +def _free_resources(): + global pool + lst=[] + while pool.qsize(): + lst.append(pool.get()) + for x in lst: + x.ready.put(None) + for x in lst: + x.join() + pool=None +atexit.register(_free_resources) +class Parallel(object): + def __init__(self,bld,j=2): + self.numjobs=j + self.bld=bld + self.outstanding=[] + self.frozen=[] + self.out=Queue(0) + self.count=0 + self.processed=1 + self.stop=False + self.error=[] + self.biter=None + self.dirty=False + def get_next_task(self): + if not self.outstanding: + return None + return self.outstanding.pop(0) + def postpone(self,tsk): + if random.randint(0,1): + self.frozen.insert(0,tsk) + else: + self.frozen.append(tsk) + def refill_task_list(self): + while self.count>self.numjobs*GAP: + self.get_out() + while not self.outstanding: + if self.count: + self.get_out() + elif self.frozen: + try: + cond=self.deadlock==self.processed + except AttributeError: + pass + else: + if cond: + msg='check the build order for the tasks' + for tsk in self.frozen: + if not tsk.run_after: + msg='check the methods runnable_status' + break + lst=[] + for tsk in self.frozen: + lst.append('%s\t-> %r'%(repr(tsk),[id(x)for x in tsk.run_after])) + raise Errors.WafError('Deadlock detected: %s%s'%(msg,''.join(lst))) + self.deadlock=self.processed + if self.frozen: + self.outstanding+=self.frozen + self.frozen=[] + elif not self.count: + self.outstanding.extend(self.biter.next()) + self.total=self.bld.total() + break + def add_more_tasks(self,tsk): + if getattr(tsk,'more_tasks',None): + self.outstanding+=tsk.more_tasks + self.total+=len(tsk.more_tasks) + def get_out(self): + tsk=self.out.get() + if not self.stop: + self.add_more_tasks(tsk) + self.count-=1 + self.dirty=True + return tsk + def add_task(self,tsk): + try: + self.pool + except AttributeError: + self.init_task_pool() + self.ready.put(tsk) + def init_task_pool(self): + pool=self.pool=[get_pool()for i in range(self.numjobs)] + self.ready=Queue(0) + def setq(consumer): + consumer.ready=self.ready + for x in pool: + x.ready.put(setq) + return pool + def free_task_pool(self): + def setq(consumer): + consumer.ready=Queue(0) + self.out.put(self) + try: + pool=self.pool + except AttributeError: + pass + else: + for x in pool: + self.ready.put(setq) + for x in pool: + self.get_out() + for x in pool: + put_pool(x) + self.pool=[] + def skip(self,tsk): + tsk.hasrun=Task.SKIPPED + def error_handler(self,tsk): + if hasattr(tsk,'scan')and hasattr(tsk,'uid'): + key=(tsk.uid(),'imp') + try: + del self.bld.task_sigs[key] + except KeyError: + pass + if not self.bld.keep: + self.stop=True + self.error.append(tsk) + def task_status(self,tsk): + try: + return tsk.runnable_status() + except Exception: + self.processed+=1 + tsk.err_msg=Utils.ex_stack() + if not self.stop and self.bld.keep: + self.skip(tsk) + if self.bld.keep==1: + if Logs.verbose>1 
or not self.error: + self.error.append(tsk) + self.stop=True + else: + if Logs.verbose>1: + self.error.append(tsk) + return Task.EXCEPTION + tsk.hasrun=Task.EXCEPTION + self.error_handler(tsk) + return Task.EXCEPTION + def start(self): + self.total=self.bld.total() + while not self.stop: + self.refill_task_list() + tsk=self.get_next_task() + if not tsk: + if self.count: + continue + else: + break + if tsk.hasrun: + self.processed+=1 + continue + if self.stop: + break + st=self.task_status(tsk) + if st==Task.RUN_ME: + tsk.position=(self.processed,self.total) + self.count+=1 + tsk.master=self + self.processed+=1 + if self.numjobs==1: + tsk.process() + else: + self.add_task(tsk) + if st==Task.ASK_LATER: + self.postpone(tsk) + elif st==Task.SKIP_ME: + self.processed+=1 + self.skip(tsk) + self.add_more_tasks(tsk) + while self.error and self.count: + self.get_out() + assert(self.count==0 or self.stop) + self.free_task_pool() diff -Nru lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Scripting.py lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Scripting.py --- lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Scripting.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Scripting.py 2019-06-06 20:12:09.000000000 +0000 @@ -0,0 +1,418 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file + +import os,shlex,shutil,traceback,errno,sys,stat +from waflib import Utils,Configure,Logs,Options,ConfigSet,Context,Errors,Build,Node +build_dir_override=None +no_climb_commands=['configure'] +default_cmd="build" +def waf_entry_point(current_directory,version,wafdir): + Logs.init_log() + if Context.WAFVERSION!=version: + Logs.error('Waf script %r and library %r do not match (directory %r)'%(version,Context.WAFVERSION,wafdir)) + sys.exit(1) + if'--version'in sys.argv: + Context.run_dir=current_directory + ctx=Context.create_context('options') + ctx.curdir=current_directory + ctx.parse_args() + sys.exit(0) + if len(sys.argv)>1: + potential_wscript=os.path.join(current_directory,sys.argv[1]) + if os.path.basename(potential_wscript)=='wscript'and os.path.isfile(potential_wscript): + current_directory=os.path.normpath(os.path.dirname(potential_wscript)) + sys.argv.pop(1) + Context.waf_dir=wafdir + Context.launch_dir=current_directory + no_climb=os.environ.get('NOCLIMB',None) + if not no_climb: + for k in no_climb_commands: + for y in sys.argv: + if y.startswith(k): + no_climb=True + break + for i,x in enumerate(sys.argv): + if x.startswith('--top='): + Context.run_dir=Context.top_dir=Utils.sane_path(x[6:]) + sys.argv[i]='--top='+Context.run_dir + if x.startswith('--out='): + Context.out_dir=Utils.sane_path(x[6:]) + sys.argv[i]='--out='+Context.out_dir + cur=current_directory + while cur and not Context.top_dir: + try: + lst=os.listdir(cur) + except OSError: + lst=[] + Logs.error('Directory %r is unreadable!'%cur) + if Options.lockfile in lst: + env=ConfigSet.ConfigSet() + try: + env.load(os.path.join(cur,Options.lockfile)) + ino=os.stat(cur)[stat.ST_INO] + except Exception: + pass + else: + for x in(env.run_dir,env.top_dir,env.out_dir): + if Utils.is_win32: + if cur==x: + load=True + break + else: + try: + ino2=os.stat(x)[stat.ST_INO] + except OSError: + pass + else: + if ino==ino2: + load=True + break + else: + Logs.warn('invalid lock file in %s'%cur) + load=False + if load: + Context.run_dir=env.run_dir + Context.top_dir=env.top_dir + 
Context.out_dir=env.out_dir + break + if not Context.run_dir: + if Context.WSCRIPT_FILE in lst: + Context.run_dir=cur + next=os.path.dirname(cur) + if next==cur: + break + cur=next + if no_climb: + break + if not Context.run_dir: + if'-h'in sys.argv or'--help'in sys.argv: + Logs.warn('No wscript file found: the help message may be incomplete') + Context.run_dir=current_directory + ctx=Context.create_context('options') + ctx.curdir=current_directory + ctx.parse_args() + sys.exit(0) + Logs.error('Waf: Run from a directory containing a file named %r'%Context.WSCRIPT_FILE) + sys.exit(1) + try: + os.chdir(Context.run_dir) + except OSError: + Logs.error('Waf: The folder %r is unreadable'%Context.run_dir) + sys.exit(1) + try: + set_main_module(os.path.normpath(os.path.join(Context.run_dir,Context.WSCRIPT_FILE))) + except Errors.WafError ,e: + Logs.pprint('RED',e.verbose_msg) + Logs.error(str(e)) + sys.exit(1) + except Exception ,e: + Logs.error('Waf: The wscript in %r is unreadable'%Context.run_dir,e) + traceback.print_exc(file=sys.stdout) + sys.exit(2) + try: + run_commands() + except Errors.WafError ,e: + if Logs.verbose>1: + Logs.pprint('RED',e.verbose_msg) + Logs.error(e.msg) + sys.exit(1) + except SystemExit: + raise + except Exception ,e: + traceback.print_exc(file=sys.stdout) + sys.exit(2) + except KeyboardInterrupt: + Logs.pprint('RED','Interrupted') + sys.exit(68) +def set_main_module(file_path): + Context.g_module=Context.load_module(file_path) + Context.g_module.root_path=file_path + def set_def(obj): + name=obj.__name__ + if not name in Context.g_module.__dict__: + setattr(Context.g_module,name,obj) + for k in(update,dist,distclean,distcheck): + set_def(k) + if not'init'in Context.g_module.__dict__: + Context.g_module.init=Utils.nada + if not'shutdown'in Context.g_module.__dict__: + Context.g_module.shutdown=Utils.nada + if not'options'in Context.g_module.__dict__: + Context.g_module.options=Utils.nada +def parse_options(): + Context.create_context('options').execute() + for var in Options.envvars: + (name,value)=var.split('=',1) + os.environ[name.strip()]=value + if not Options.commands: + Options.commands=[default_cmd] + Options.commands=[x for x in Options.commands if x!='options'] + Logs.verbose=Options.options.verbose + if Options.options.zones: + Logs.zones=Options.options.zones.split(',') + if not Logs.verbose: + Logs.verbose=1 + elif Logs.verbose>0: + Logs.zones=['runner'] + if Logs.verbose>2: + Logs.zones=['*'] +def run_command(cmd_name): + ctx=Context.create_context(cmd_name) + ctx.log_timer=Utils.Timer() + ctx.options=Options.options + ctx.cmd=cmd_name + try: + ctx.execute() + finally: + ctx.finalize() + return ctx +def run_commands(): + parse_options() + run_command('init') + while Options.commands: + cmd_name=Options.commands.pop(0) + ctx=run_command(cmd_name) + Logs.info('%r finished successfully (%s)'%(cmd_name,str(ctx.log_timer))) + run_command('shutdown') +def _can_distclean(name): + for k in'.o .moc .exe'.split(): + if name.endswith(k): + return True + return False +def distclean_dir(dirname): + for(root,dirs,files)in os.walk(dirname): + for f in files: + if _can_distclean(f): + fname=os.path.join(root,f) + try: + os.remove(fname) + except OSError: + Logs.warn('Could not remove %r'%fname) + for x in(Context.DBFILE,'config.log'): + try: + os.remove(x) + except OSError: + pass + try: + shutil.rmtree('c4che') + except OSError: + pass +def distclean(ctx): + '''removes the build directory''' + lst=os.listdir('.') + for f in lst: + if f==Options.lockfile: + try: + 
proj=ConfigSet.ConfigSet(f) + except IOError: + Logs.warn('Could not read %r'%f) + continue + if proj['out_dir']!=proj['top_dir']: + try: + shutil.rmtree(proj['out_dir']) + except IOError: + pass + except OSError ,e: + if e.errno!=errno.ENOENT: + Logs.warn('Could not remove %r'%proj['out_dir']) + else: + distclean_dir(proj['out_dir']) + for k in(proj['out_dir'],proj['top_dir'],proj['run_dir']): + p=os.path.join(k,Options.lockfile) + try: + os.remove(p) + except OSError ,e: + if e.errno!=errno.ENOENT: + Logs.warn('Could not remove %r'%p) + if not Options.commands: + for x in'.waf-1. waf-1. .waf3-1. waf3-1.'.split(): + if f.startswith(x): + shutil.rmtree(f,ignore_errors=True) +class Dist(Context.Context): + '''creates an archive containing the project source code''' + cmd='dist' + fun='dist' + algo='tar.bz2' + ext_algo={} + def execute(self): + self.recurse([os.path.dirname(Context.g_module.root_path)]) + self.archive() + def archive(self): + import tarfile + arch_name=self.get_arch_name() + try: + self.base_path + except AttributeError: + self.base_path=self.path + node=self.base_path.make_node(arch_name) + try: + node.delete() + except OSError: + pass + files=self.get_files() + if self.algo.startswith('tar.'): + tar=tarfile.open(arch_name,'w:'+self.algo.replace('tar.','')) + for x in files: + self.add_tar_file(x,tar) + tar.close() + elif self.algo=='zip': + import zipfile + zip=zipfile.ZipFile(arch_name,'w',compression=zipfile.ZIP_DEFLATED) + for x in files: + archive_name=self.get_base_name()+'/'+x.path_from(self.base_path) + zip.write(x.abspath(),archive_name,zipfile.ZIP_DEFLATED) + zip.close() + else: + self.fatal('Valid algo types are tar.bz2, tar.gz, tar.xz or zip') + try: + from hashlib import sha1 as sha + except ImportError: + from sha import sha + try: + digest=" (sha=%r)"%sha(node.read()).hexdigest() + except Exception: + digest='' + Logs.info('New archive created: %s%s'%(self.arch_name,digest)) + def get_tar_path(self,node): + return node.abspath() + def add_tar_file(self,x,tar): + p=self.get_tar_path(x) + tinfo=tar.gettarinfo(name=p,arcname=self.get_tar_prefix()+'/'+x.path_from(self.base_path)) + tinfo.uid=0 + tinfo.gid=0 + tinfo.uname='root' + tinfo.gname='root' + fu=None + try: + fu=open(p,'rb') + tar.addfile(tinfo,fileobj=fu) + finally: + if fu: + fu.close() + def get_tar_prefix(self): + try: + return self.tar_prefix + except AttributeError: + return self.get_base_name() + def get_arch_name(self): + try: + self.arch_name + except AttributeError: + self.arch_name=self.get_base_name()+'.'+self.ext_algo.get(self.algo,self.algo) + return self.arch_name + def get_base_name(self): + try: + self.base_name + except AttributeError: + appname=getattr(Context.g_module,Context.APPNAME,'noname') + version=getattr(Context.g_module,Context.VERSION,'1.0') + self.base_name=appname+'-'+version + return self.base_name + def get_excl(self): + try: + return self.excl + except AttributeError: + self.excl=Node.exclude_regs+' **/waf-1.8.* **/.waf-1.8* **/waf3-1.8.* **/.waf3-1.8* **/*~ **/*.rej **/*.orig **/*.pyc **/*.pyo **/*.bak **/*.swp **/.lock-w*' + if Context.out_dir: + nd=self.root.find_node(Context.out_dir) + if nd: + self.excl+=' '+nd.path_from(self.base_path) + return self.excl + def get_files(self): + try: + files=self.files + except AttributeError: + files=self.base_path.ant_glob('**/*',excl=self.get_excl()) + return files +def dist(ctx): + '''makes a tarball for redistributing the sources''' + pass +class DistCheck(Dist): + fun='distcheck' + cmd='distcheck' + def execute(self): + 
self.recurse([os.path.dirname(Context.g_module.root_path)]) + self.archive() + self.check() + def check(self): + import tempfile,tarfile + t=None + try: + t=tarfile.open(self.get_arch_name()) + for x in t: + t.extract(x) + finally: + if t: + t.close() + cfg=[] + if Options.options.distcheck_args: + cfg=shlex.split(Options.options.distcheck_args) + else: + cfg=[x for x in sys.argv if x.startswith('-')] + instdir=tempfile.mkdtemp('.inst',self.get_base_name()) + ret=Utils.subprocess.Popen([sys.executable,sys.argv[0],'configure','install','uninstall','--destdir='+instdir]+cfg,cwd=self.get_base_name()).wait() + if ret: + raise Errors.WafError('distcheck failed with code %i'%ret) + if os.path.exists(instdir): + raise Errors.WafError('distcheck succeeded, but files were left in %s'%instdir) + shutil.rmtree(self.get_base_name()) +def distcheck(ctx): + '''checks if the project compiles (tarball from 'dist')''' + pass +def update(ctx): + lst=Options.options.files + if lst: + lst=lst.split(',') + else: + path=os.path.join(Context.waf_dir,'waflib','extras') + lst=[x for x in Utils.listdir(path)if x.endswith('.py')] + for x in lst: + tool=x.replace('.py','') + if not tool: + continue + try: + dl=Configure.download_tool + except AttributeError: + ctx.fatal('The command "update" is dangerous; include the tool "use_config" in your project!') + try: + dl(tool,force=True,ctx=ctx) + except Errors.WafError: + Logs.error('Could not find the tool %r in the remote repository'%x) + else: + Logs.warn('Updated %r'%tool) +def autoconfigure(execute_method): + def execute(self): + if not Configure.autoconfig: + return execute_method(self) + env=ConfigSet.ConfigSet() + do_config=False + try: + env.load(os.path.join(Context.top_dir,Options.lockfile)) + except Exception: + Logs.warn('Configuring the project') + do_config=True + else: + if env.run_dir!=Context.run_dir: + do_config=True + else: + h=0 + for f in env['files']: + h=Utils.h_list((h,Utils.readf(f,'rb'))) + do_config=h!=env.hash + if do_config: + cmd=env['config_cmd']or'configure' + if Configure.autoconfig=='clobber': + tmp=Options.options.__dict__ + Options.options.__dict__=env.options + try: + run_command(cmd) + finally: + Options.options.__dict__=tmp + else: + run_command(cmd) + run_command(self.cmd) + else: + return execute_method(self) + return execute +Build.BuildContext.execute=autoconfigure(Build.BuildContext.execute) diff -Nru lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/TaskGen.py lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/TaskGen.py --- lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/TaskGen.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/TaskGen.py 2019-06-06 20:12:09.000000000 +0000 @@ -0,0 +1,434 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file
+
+import copy,re,os
+from waflib import Task,Utils,Logs,Errors,ConfigSet,Node
+feats=Utils.defaultdict(set)
+HEADER_EXTS=['.h','.hpp','.hxx','.hh']
+class task_gen(object):
+	mappings=Utils.ordered_iter_dict()
+	prec=Utils.defaultdict(list)
+	def __init__(self,*k,**kw):
+		self.source=''
+		self.target=''
+		self.meths=[]
+		self.prec=Utils.defaultdict(list)
+		self.mappings={}
+		self.features=[]
+		self.tasks=[]
+		if not'bld'in kw:
+			self.env=ConfigSet.ConfigSet()
+			self.idx=0
+			self.path=None
+		else:
+			self.bld=kw['bld']
+			self.env=self.bld.env.derive()
+			self.path=self.bld.path
+			try:
+				self.idx=self.bld.idx[id(self.path)]=self.bld.idx.get(id(self.path),0)+1
+			except AttributeError:
+				self.bld.idx={}
+				self.idx=self.bld.idx[id(self.path)]=1
+		for key,val in kw.items():
+			setattr(self,key,val)
+	def __str__(self):
+		return"<task_gen %r declared in %s>"%(self.name,self.path.abspath())
+	def __repr__(self):
+		lst=[]
+		for x in self.__dict__.keys():
+			if x not in('env','bld','compiled_tasks','tasks'):
+				lst.append("%s=%s"%(x,repr(getattr(self,x))))
+		return"bld(%s) in %s"%(", ".join(lst),self.path.abspath())
+	def get_name(self):
+		try:
+			return self._name
+		except AttributeError:
+			if isinstance(self.target,list):
+				lst=[str(x)for x in self.target]
+				name=self._name=','.join(lst)
+			else:
+				name=self._name=str(self.target)
+			return name
+	def set_name(self,name):
+		self._name=name
+	name=property(get_name,set_name)
+	def to_list(self,val):
+		if isinstance(val,str):return val.split()
+		else:return val
+	def post(self):
+		if getattr(self,'posted',None):
+			return False
+		self.posted=True
+		keys=set(self.meths)
+		self.features=Utils.to_list(self.features)
+		for x in self.features+['*']:
+			st=feats[x]
+			if not st:
+				if not x in Task.classes:
+					Logs.warn('feature %r does not exist - bind at least one method to it'%x)
+			keys.update(list(st))
+		prec={}
+		prec_tbl=self.prec or task_gen.prec
+		for x in prec_tbl:
+			if x in keys:
+				prec[x]=prec_tbl[x]
+		tmp=[]
+		for a in keys:
+			for x in prec.values():
+				if a in x:break
+			else:
+				tmp.append(a)
+		tmp.sort()
+		out=[]
+		while tmp:
+			e=tmp.pop()
+			if e in keys:out.append(e)
+			try:
+				nlst=prec[e]
+			except KeyError:
+				pass
+			else:
+				del prec[e]
+				for x in nlst:
+					for y in prec:
+						if x in prec[y]:
+							break
+					else:
+						tmp.append(x)
+		if prec:
+			txt='\n'.join(['- %s after %s'%(k,repr(v))for k,v in prec.items()])
+			raise Errors.WafError('Cycle detected in the method execution\n%s'%txt)
+		out.reverse()
+		self.meths=out
+		Logs.debug('task_gen: posting %s %d'%(self,id(self)))
+		for x in out:
+			try:
+				v=getattr(self,x)
+			except AttributeError:
+				raise Errors.WafError('%r is not a valid task generator method'%x)
+			Logs.debug('task_gen: -> %s (%d)'%(x,id(self)))
+			v()
+		Logs.debug('task_gen: posted %s'%self.name)
+		return True
+	def get_hook(self,node):
+		name=node.name
+		if self.mappings:
+			for k in self.mappings:
+				if name.endswith(k):
+					return self.mappings[k]
+		for k in task_gen.mappings:
+			if name.endswith(k):
+				return task_gen.mappings[k]
+		raise Errors.WafError("File %r has no mapping in %r (have you forgotten to load a waf tool?)"%(node,task_gen.mappings.keys()))
+	def create_task(self,name,src=None,tgt=None,**kw):
+		task=Task.classes[name](env=self.env.derive(),generator=self)
+		if src:
+			task.set_inputs(src)
+		if tgt:
+			task.set_outputs(tgt)
+		task.__dict__.update(kw)
+		self.tasks.append(task)
+		return task
+	def clone(self,env):
+		newobj=self.bld()
+		for x in self.__dict__:
+			if x in('env','bld'):
+				continue
+			elif x
in('path','features'): + setattr(newobj,x,getattr(self,x)) + else: + setattr(newobj,x,copy.copy(getattr(self,x))) + newobj.posted=False + if isinstance(env,str): + newobj.env=self.bld.all_envs[env].derive() + else: + newobj.env=env.derive() + return newobj +def declare_chain(name='',rule=None,reentrant=None,color='BLUE',ext_in=[],ext_out=[],before=[],after=[],decider=None,scan=None,install_path=None,shell=False): + ext_in=Utils.to_list(ext_in) + ext_out=Utils.to_list(ext_out) + if not name: + name=rule + cls=Task.task_factory(name,rule,color=color,ext_in=ext_in,ext_out=ext_out,before=before,after=after,scan=scan,shell=shell) + def x_file(self,node): + ext=decider and decider(self,node)or cls.ext_out + if ext_in: + _ext_in=ext_in[0] + tsk=self.create_task(name,node) + cnt=0 + keys=set(self.mappings.keys())|set(self.__class__.mappings.keys()) + for x in ext: + k=node.change_ext(x,ext_in=_ext_in) + tsk.outputs.append(k) + if reentrant!=None: + if cnt task in %r failed (exit status %r): %r\n%r'%(name,self.err_code,self,msg) + except AttributeError: + return' -> task in %r failed: %r\n%r'%(name,self,msg) + elif self.hasrun==MISSING: + return' -> missing files in %r: %r\n%r'%(name,self,msg) + else: + return'invalid status for task in %r: %r'%(name,self.hasrun) + def colon(self,var1,var2): + tmp=self.env[var1] + if not tmp: + return[] + if isinstance(var2,str): + it=self.env[var2] + else: + it=var2 + if isinstance(tmp,str): + return[tmp%x for x in it] + else: + lst=[] + for y in it: + lst.extend(tmp) + lst.append(y) + return lst +class Task(TaskBase): + vars=[] + shell=False + def __init__(self,*k,**kw): + TaskBase.__init__(self,*k,**kw) + self.env=kw['env'] + self.inputs=[] + self.outputs=[] + self.dep_nodes=[] + self.run_after=set([]) + def __str__(self): + name=self.__class__.__name__ + if self.outputs: + if(name.endswith('lib')or name.endswith('program'))or not self.inputs: + node=self.outputs[0] + return node.path_from(node.ctx.launch_node()) + if not(self.inputs or self.outputs): + return self.__class__.__name__ + if len(self.inputs)==1: + node=self.inputs[0] + return node.path_from(node.ctx.launch_node()) + src_str=' '.join([a.path_from(a.ctx.launch_node())for a in self.inputs]) + tgt_str=' '.join([a.path_from(a.ctx.launch_node())for a in self.outputs]) + if self.outputs:sep=' -> ' + else:sep='' + return'%s: %s%s%s'%(self.__class__.__name__.replace('_task',''),src_str,sep,tgt_str) + def keyword(self): + name=self.__class__.__name__ + if name.endswith('lib')or name.endswith('program'): + return'Linking' + if len(self.inputs)==1 and len(self.outputs)==1: + return'Compiling' + if not self.inputs: + if self.outputs: + return'Creating' + else: + return'Running' + return'Processing' + def __repr__(self): + try: + ins=",".join([x.name for x in self.inputs]) + outs=",".join([x.name for x in self.outputs]) + except AttributeError: + ins=",".join([str(x)for x in self.inputs]) + outs=",".join([str(x)for x in self.outputs]) + return"".join(['\n\t{task %r: '%id(self),self.__class__.__name__," ",ins," -> ",outs,'}']) + def uid(self): + try: + return self.uid_ + except AttributeError: + m=Utils.md5() + up=m.update + up(self.__class__.__name__) + for x in self.inputs+self.outputs: + up(x.abspath()) + self.uid_=m.digest() + return self.uid_ + def set_inputs(self,inp): + if isinstance(inp,list):self.inputs+=inp + else:self.inputs.append(inp) + def set_outputs(self,out): + if isinstance(out,list):self.outputs+=out + else:self.outputs.append(out) + def set_run_after(self,task): + assert 
isinstance(task,TaskBase) + self.run_after.add(task) + def signature(self): + try:return self.cache_sig + except AttributeError:pass + self.m=Utils.md5() + self.m.update(self.hcode) + self.sig_explicit_deps() + self.sig_vars() + if self.scan: + try: + self.sig_implicit_deps() + except Errors.TaskRescan: + return self.signature() + ret=self.cache_sig=self.m.digest() + return ret + def runnable_status(self): + for t in self.run_after: + if not t.hasrun: + return ASK_LATER + bld=self.generator.bld + try: + new_sig=self.signature() + except Errors.TaskNotReady: + return ASK_LATER + key=self.uid() + try: + prev_sig=bld.task_sigs[key] + except KeyError: + Logs.debug("task: task %r must run as it was never run before or the task code changed"%self) + return RUN_ME + for node in self.outputs: + try: + if node.sig!=new_sig: + return RUN_ME + except AttributeError: + Logs.debug("task: task %r must run as the output nodes do not exist"%self) + return RUN_ME + if new_sig!=prev_sig: + return RUN_ME + return SKIP_ME + def post_run(self): + bld=self.generator.bld + sig=self.signature() + for node in self.outputs: + try: + os.stat(node.abspath()) + except OSError: + self.hasrun=MISSING + self.err_msg='-> missing file: %r'%node.abspath() + raise Errors.WafError(self.err_msg) + node.sig=node.cache_sig=sig + bld.task_sigs[self.uid()]=self.cache_sig + def sig_explicit_deps(self): + bld=self.generator.bld + upd=self.m.update + for x in self.inputs+self.dep_nodes: + try: + upd(x.get_bld_sig()) + except(AttributeError,TypeError): + raise Errors.WafError('Missing node signature for %r (required by %r)'%(x,self)) + if bld.deps_man: + additional_deps=bld.deps_man + for x in self.inputs+self.outputs: + try: + d=additional_deps[id(x)] + except KeyError: + continue + for v in d: + if isinstance(v,bld.root.__class__): + try: + v=v.get_bld_sig() + except AttributeError: + raise Errors.WafError('Missing node signature for %r (required by %r)'%(v,self)) + elif hasattr(v,'__call__'): + v=v() + upd(v) + return self.m.digest() + def sig_vars(self): + bld=self.generator.bld + env=self.env + upd=self.m.update + act_sig=bld.hash_env_vars(env,self.__class__.vars) + upd(act_sig) + dep_vars=getattr(self,'dep_vars',None) + if dep_vars: + upd(bld.hash_env_vars(env,dep_vars)) + return self.m.digest() + scan=None + def sig_implicit_deps(self): + bld=self.generator.bld + key=self.uid() + prev=bld.task_sigs.get((key,'imp'),[]) + if prev: + try: + if prev==self.compute_sig_implicit_deps(): + return prev + except Errors.TaskNotReady: + raise + except EnvironmentError: + for x in bld.node_deps.get(self.uid(),[]): + if not x.is_bld(): + try: + os.stat(x.abspath()) + except OSError: + try: + del x.parent.children[x.name] + except KeyError: + pass + del bld.task_sigs[(key,'imp')] + raise Errors.TaskRescan('rescan') + (nodes,names)=self.scan() + if Logs.verbose: + Logs.debug('deps: scanner for %s returned %s %s'%(str(self),str(nodes),str(names))) + bld.node_deps[key]=nodes + bld.raw_deps[key]=names + self.are_implicit_nodes_ready() + try: + bld.task_sigs[(key,'imp')]=sig=self.compute_sig_implicit_deps() + except Exception: + if Logs.verbose: + for k in bld.node_deps.get(self.uid(),[]): + try: + k.get_bld_sig() + except Exception: + Logs.warn('Missing signature for node %r (may cause rebuilds)'%k) + else: + return sig + def compute_sig_implicit_deps(self): + upd=self.m.update + bld=self.generator.bld + self.are_implicit_nodes_ready() + for k in bld.node_deps.get(self.uid(),[]): + upd(k.get_bld_sig()) + return self.m.digest() + def 
are_implicit_nodes_ready(self): + bld=self.generator.bld + try: + cache=bld.dct_implicit_nodes + except AttributeError: + bld.dct_implicit_nodes=cache={} + try: + dct=cache[bld.cur] + except KeyError: + dct=cache[bld.cur]={} + for tsk in bld.cur_tasks: + for x in tsk.outputs: + dct[x]=tsk + modified=False + for x in bld.node_deps.get(self.uid(),[]): + if x in dct: + self.run_after.add(dct[x]) + modified=True + if modified: + for tsk in self.run_after: + if not tsk.hasrun: + raise Errors.TaskNotReady('not ready') +if sys.hexversion>0x3000000: + def uid(self): + try: + return self.uid_ + except AttributeError: + m=Utils.md5() + up=m.update + up(self.__class__.__name__.encode('iso8859-1','xmlcharrefreplace')) + for x in self.inputs+self.outputs: + up(x.abspath().encode('iso8859-1','xmlcharrefreplace')) + self.uid_=m.digest() + return self.uid_ + uid.__doc__=Task.uid.__doc__ + Task.uid=uid +def is_before(t1,t2): + to_list=Utils.to_list + for k in to_list(t2.ext_in): + if k in to_list(t1.ext_out): + return 1 + if t1.__class__.__name__ in to_list(t2.after): + return 1 + if t2.__class__.__name__ in to_list(t1.before): + return 1 + return 0 +def set_file_constraints(tasks): + ins=Utils.defaultdict(set) + outs=Utils.defaultdict(set) + for x in tasks: + for a in getattr(x,'inputs',[])+getattr(x,'dep_nodes',[]): + ins[id(a)].add(x) + for a in getattr(x,'outputs',[]): + outs[id(a)].add(x) + links=set(ins.keys()).intersection(outs.keys()) + for k in links: + for a in ins[k]: + a.run_after.update(outs[k]) +def set_precedence_constraints(tasks): + cstr_groups=Utils.defaultdict(list) + for x in tasks: + h=x.hash_constraints() + cstr_groups[h].append(x) + keys=list(cstr_groups.keys()) + maxi=len(keys) + for i in range(maxi): + t1=cstr_groups[keys[i]][0] + for j in range(i+1,maxi): + t2=cstr_groups[keys[j]][0] + if is_before(t1,t2): + a=i + b=j + elif is_before(t2,t1): + a=j + b=i + else: + continue + aval=set(cstr_groups[keys[a]]) + for x in cstr_groups[keys[b]]: + x.run_after.update(aval) +def funex(c): + dc={} + exec(c,dc) + return dc['f'] +re_novar=re.compile(r"^(SRC|TGT)\W+.*?$") +reg_act=re.compile(r"(?P<backslash>\\)|(?P<dollar>\$\$)|(?P<subst>\$\{(?P<var>\w+)(?P<code>.*?)\})",re.M) +def compile_fun_shell(line): + extr=[] + def repl(match): + g=match.group + if g('dollar'):return"$" + elif g('backslash'):return'\\\\' + elif g('subst'):extr.append((g('var'),g('code')));return"%s" + return None + line=reg_act.sub(repl,line)or line + parm=[] + dvars=[] + app=parm.append + for(var,meth)in extr: + if var=='SRC': + if meth:app('tsk.inputs%s'%meth) + else:app('" ".join([a.path_from(cwdx) for a in tsk.inputs])') + elif var=='TGT': + if meth:app('tsk.outputs%s'%meth) + else:app('" ".join([a.path_from(cwdx) for a in tsk.outputs])') + elif meth: + if meth.startswith(':'): + if var not in dvars: + dvars.append(var) + m=meth[1:] + if m=='SRC': + m='[a.path_from(cwdx) for a in tsk.inputs]' + elif m=='TGT': + m='[a.path_from(cwdx) for a in tsk.outputs]' + elif re_novar.match(m): + m='[tsk.inputs%s]'%m[3:] + elif re_novar.match(m): + m='[tsk.outputs%s]'%m[3:] + elif m[:3]not in('tsk','gen','bld'): + dvars.append(meth[1:]) + m='%r'%m + app('" ".join(tsk.colon(%r, %s))'%(var,m)) + else: + app('%s%s'%(var,meth)) + else: + if var not in dvars: + dvars.append(var) + app("p('%s')"%var) + if parm:parm="%% (%s) "%(',\n\t\t'.join(parm)) + else:parm='' + c=COMPILE_TEMPLATE_SHELL%(line,parm) + Logs.debug('action: %s'%c.strip().splitlines()) + return(funex(c),dvars) +def compile_fun_noshell(line): + extr=[] + def repl(match): + g=match.group + if
g('dollar'):return"$" + elif g('backslash'):return'\\' + elif g('subst'):extr.append((g('var'),g('code')));return"<<|@|>>" + return None + line2=reg_act.sub(repl,line) + params=line2.split('<<|@|>>') + assert(extr) + buf=[] + dvars=[] + app=buf.append + for x in range(len(extr)): + params[x]=params[x].strip() + if params[x]: + app("lst.extend(%r)"%params[x].split()) + (var,meth)=extr[x] + if var=='SRC': + if meth:app('lst.append(tsk.inputs%s)'%meth) + else:app("lst.extend([a.path_from(cwdx) for a in tsk.inputs])") + elif var=='TGT': + if meth:app('lst.append(tsk.outputs%s)'%meth) + else:app("lst.extend([a.path_from(cwdx) for a in tsk.outputs])") + elif meth: + if meth.startswith(':'): + if not var in dvars: + dvars.append(var) + m=meth[1:] + if m=='SRC': + m='[a.path_from(cwdx) for a in tsk.inputs]' + elif m=='TGT': + m='[a.path_from(cwdx) for a in tsk.outputs]' + elif re_novar.match(m): + m='[tsk.inputs%s]'%m[3:] + elif re_novar.match(m): + m='[tsk.outputs%s]'%m[3:] + elif m[:3]not in('tsk','gen','bld'): + dvars.append(m) + m='%r'%m + app('lst.extend(tsk.colon(%r, %s))'%(var,m)) + else: + app('lst.extend(gen.to_list(%s%s))'%(var,meth)) + else: + app('lst.extend(to_list(env[%r]))'%var) + if not var in dvars: + dvars.append(var) + if extr: + if params[-1]: + app("lst.extend(%r)"%params[-1].split()) + fun=COMPILE_TEMPLATE_NOSHELL%"\n\t".join(buf) + Logs.debug('action: %s'%fun.strip().splitlines()) + return(funex(fun),dvars) +def compile_fun(line,shell=False): + if isinstance(line,str): + if line.find('<')>0 or line.find('>')>0 or line.find('&&')>0: + shell=True + else: + dvars_lst=[] + funs_lst=[] + for x in line: + if isinstance(x,str): + fun,dvars=compile_fun(x,shell) + dvars_lst+=dvars + funs_lst.append(fun) + else: + funs_lst.append(x) + def composed_fun(task): + for x in funs_lst: + ret=x(task) + if ret: + return ret + return None + return composed_fun,dvars + if shell: + return compile_fun_shell(line) + else: + return compile_fun_noshell(line) +def task_factory(name,func=None,vars=None,color='GREEN',ext_in=[],ext_out=[],before=[],after=[],shell=False,scan=None): + params={'vars':vars or[],'color':color,'name':name,'ext_in':Utils.to_list(ext_in),'ext_out':Utils.to_list(ext_out),'before':Utils.to_list(before),'after':Utils.to_list(after),'shell':shell,'scan':scan,} + if isinstance(func,str)or isinstance(func,tuple): + params['run_str']=func + else: + params['run']=func + cls=type(Task)(name,(Task,),params) + global classes + classes[name]=cls + return cls +def always_run(cls): + old=cls.runnable_status + def always(self): + ret=old(self) + if ret==SKIP_ME: + ret=RUN_ME + return ret + cls.runnable_status=always + return cls +def update_outputs(cls): + old_post_run=cls.post_run + def post_run(self): + old_post_run(self) + for node in self.outputs: + node.sig=node.cache_sig=Utils.h_file(node.abspath()) + self.generator.bld.task_sigs[node.abspath()]=self.uid() + cls.post_run=post_run + old_runnable_status=cls.runnable_status + def runnable_status(self): + status=old_runnable_status(self) + if status!=RUN_ME: + return status + try: + bld=self.generator.bld + prev_sig=bld.task_sigs[self.uid()] + if prev_sig==self.signature(): + for x in self.outputs: + if not x.is_child_of(bld.bldnode): + x.sig=Utils.h_file(x.abspath()) + if not x.sig or bld.task_sigs[x.abspath()]!=self.uid(): + return RUN_ME + return SKIP_ME + except OSError: + pass + except IOError: + pass + except KeyError: + pass + except IndexError: + pass + except AttributeError: + pass + return RUN_ME + 
cls.runnable_status=runnable_status + return cls diff -Nru lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/ar.py lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/ar.py --- lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/ar.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/ar.py 2019-06-06 20:12:09.000000000 +0000 @@ -0,0 +1,13 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file + +from waflib.Configure import conf +@conf +def find_ar(conf): + conf.load('ar') +def configure(conf): + conf.find_program('ar',var='AR') + conf.add_os_flags('ARFLAGS') + if not conf.env.ARFLAGS: + conf.env.ARFLAGS=['rcs'] diff -Nru lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/asm.py lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/asm.py --- lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/asm.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/asm.py 2019-06-06 20:12:09.000000000 +0000 @@ -0,0 +1,24 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file + +from waflib import Task +import waflib.Task +from waflib.Tools.ccroot import link_task,stlink_task +from waflib.TaskGen import extension +class asm(Task.Task): + color='BLUE' + run_str='${AS} ${ASFLAGS} ${ASMPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${AS_SRC_F}${SRC} ${AS_TGT_F}${TGT}' +@extension('.s','.S','.asm','.ASM','.spp','.SPP') +def asm_hook(self,node): + return self.create_compiled_task('asm',node) +class asmprogram(link_task): + run_str='${ASLINK} ${ASLINKFLAGS} ${ASLNK_TGT_F}${TGT} ${ASLNK_SRC_F}${SRC}' + ext_out=['.bin'] + inst_to='${BINDIR}' +class asmshlib(asmprogram): + inst_to='${LIBDIR}' +class asmstlib(stlink_task): + pass +def configure(conf): + conf.env['ASMPATH_ST']='-I%s' diff -Nru lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/bison.py lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/bison.py --- lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/bison.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/bison.py 2019-06-06 20:12:09.000000000 +0000 @@ -0,0 +1,28 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file + +from waflib import Task +from waflib.TaskGen import extension +class bison(Task.Task): + color='BLUE' + run_str='${BISON} ${BISONFLAGS} ${SRC[0].abspath()} -o ${TGT[0].name}' + ext_out=['.h'] +@extension('.y','.yc','.yy') +def big_bison(self,node): + has_h='-d'in self.env['BISONFLAGS'] + outs=[] + if node.name.endswith('.yc'): + outs.append(node.change_ext('.tab.cc')) + if has_h: + outs.append(node.change_ext('.tab.hh')) + else: + outs.append(node.change_ext('.tab.c')) + if has_h: + outs.append(node.change_ext('.tab.h')) + tsk=self.create_task('bison',node,outs) + tsk.cwd=node.parent.get_bld().abspath() + self.source.append(outs[0]) +def configure(conf): + conf.find_program('bison',var='BISON') + conf.env.BISONFLAGS=['-d'] diff -Nru lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/c_aliases.py lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/c_aliases.py --- lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/c_aliases.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/c_aliases.py 2019-06-06 20:12:09.000000000 +0000 @@ -0,0 +1,63 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file + +from waflib import Utils,Errors +from waflib.Configure import conf +def get_extensions(lst): + ret=[] + for x in Utils.to_list(lst): + try: + if not isinstance(x,str): + x=x.name + ret.append(x[x.rfind('.')+1:]) + except Exception: + pass + return ret +def sniff_features(**kw): + exts=get_extensions(kw['source']) + type=kw['_type'] + feats=[] + for x in'cxx cpp c++ cc C'.split(): + if x in exts: + feats.append('cxx') + break + if'c'in exts or'vala'in exts or'gs'in exts: + feats.append('c') + for x in'f f90 F F90 for FOR'.split(): + if x in exts: + feats.append('fc') + break + if'd'in exts: + feats.append('d') + if'java'in exts: + feats.append('java') + return'java' + if type in('program','shlib','stlib'): + will_link=False + for x in feats: + if x in('cxx','d','fc','c'): + feats.append(x+type) + will_link=True + if not will_link and not kw.get('features',[]): + raise Errors.WafError('Cannot link from %r, try passing eg: features="c cprogram"?'%kw) + return feats +def set_features(kw,_type): + kw['_type']=_type + kw['features']=Utils.to_list(kw.get('features',[]))+Utils.to_list(sniff_features(**kw)) +@conf +def program(bld,*k,**kw): + set_features(kw,'program') + return bld(*k,**kw) +@conf +def shlib(bld,*k,**kw): + set_features(kw,'shlib') + return bld(*k,**kw) +@conf +def stlib(bld,*k,**kw): + set_features(kw,'stlib') + return bld(*k,**kw) +@conf +def objects(bld,*k,**kw): + set_features(kw,'objects') + return bld(*k,**kw) diff -Nru lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/c_config.py lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/c_config.py --- lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/c_config.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/c_config.py 2019-06-06 20:12:09.000000000 +0000 @@ -0,0 +1,765 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file + +import os,re,shlex +from waflib import Build,Utils,Task,Options,Logs,Errors,Runner +from waflib.TaskGen import after_method,feature +from waflib.Configure import conf +WAF_CONFIG_H='config.h' +DEFKEYS='define_key' +INCKEYS='include_key' +cfg_ver={'atleast-version':'>=','exact-version':'==','max-version':'<=',} +SNIP_FUNCTION=''' +int main(int argc, char **argv) { + void (*p)(); + (void)argc; (void)argv; + p=(void(*)())(%s); + return !p; +} +''' +SNIP_TYPE=''' +int main(int argc, char **argv) { + (void)argc; (void)argv; + if ((%(type_name)s *) 0) return 0; + if (sizeof (%(type_name)s)) return 0; + return 1; +} +''' +SNIP_EMPTY_PROGRAM=''' +int main(int argc, char **argv) { + (void)argc; (void)argv; + return 0; +} +''' +SNIP_FIELD=''' +int main(int argc, char **argv) { + char *off; + (void)argc; (void)argv; + off = (char*) &((%(type_name)s*)0)->%(field_name)s; + return (size_t) off < sizeof(%(type_name)s); +} +''' +MACRO_TO_DESTOS={'__linux__':'linux','__GNU__':'gnu','__FreeBSD__':'freebsd','__NetBSD__':'netbsd','__OpenBSD__':'openbsd','__sun':'sunos','__hpux':'hpux','__sgi':'irix','_AIX':'aix','__CYGWIN__':'cygwin','__MSYS__':'cygwin','_UWIN':'uwin','_WIN64':'win32','_WIN32':'win32','__ENVIRONMENT_MAC_OS_X_VERSION_MIN_REQUIRED__':'darwin','__ENVIRONMENT_IPHONE_OS_VERSION_MIN_REQUIRED__':'darwin','__QNX__':'qnx','__native_client__':'nacl'} +MACRO_TO_DEST_CPU={'__x86_64__':'x86_64','__amd64__':'x86_64','__i386__':'x86','__ia64__':'ia','__mips__':'mips','__sparc__':'sparc','__alpha__':'alpha','__aarch64__':'aarch64','__thumb__':'thumb','__arm__':'arm','__hppa__':'hppa','__powerpc__':'powerpc','__ppc__':'powerpc','__convex__':'convex','__m68k__':'m68k','__s390x__':'s390x','__s390__':'s390','__sh__':'sh','__xtensa__':'xtensa',} +@conf +def parse_flags(self,line,uselib_store,env=None,force_static=False,posix=None): + assert(isinstance(line,str)) + env=env or self.env + if posix is None: + posix=True + if'\\'in line: + posix=('\\ 'in line)or('\\\\'in line) + lex=shlex.shlex(line,posix=posix) + lex.whitespace_split=True + lex.commenters='' + lst=list(lex) + app=env.append_value + appu=env.append_unique + uselib=uselib_store + static=False + while lst: + x=lst.pop(0) + st=x[:2] + ot=x[2:] + if st=='-I'or st=='/I': + if not ot:ot=lst.pop(0) + appu('INCLUDES_'+uselib,[ot]) + elif st=='-i': + tmp=[x,lst.pop(0)] + app('CFLAGS',tmp) + app('CXXFLAGS',tmp) + elif st=='-D'or(env.CXX_NAME=='msvc'and st=='/D'): + if not ot:ot=lst.pop(0) + app('DEFINES_'+uselib,[ot]) + elif st=='-l': + if not ot:ot=lst.pop(0) + prefix=(force_static or static)and'STLIB_'or'LIB_' + appu(prefix+uselib,[ot]) + elif st=='-L': + if not ot:ot=lst.pop(0) + prefix=(force_static or static)and'STLIBPATH_'or'LIBPATH_' + appu(prefix+uselib,[ot]) + elif x.startswith('/LIBPATH:'): + prefix=(force_static or static)and'STLIBPATH_'or'LIBPATH_' + appu(prefix+uselib,[x.replace('/LIBPATH:','')]) + elif x.startswith('-std='): + if'++'in x: + app('CXXFLAGS_'+uselib,[x]) + else: + app('CFLAGS_'+uselib,[x]) + elif x=='-pthread'or x.startswith('+'): + app('CFLAGS_'+uselib,[x]) + app('CXXFLAGS_'+uselib,[x]) + app('LINKFLAGS_'+uselib,[x]) + elif x=='-framework': + appu('FRAMEWORK_'+uselib,[lst.pop(0)]) + elif x.startswith('-F'): + appu('FRAMEWORKPATH_'+uselib,[x[2:]]) + elif x=='-Wl,-rpath'or x=='-Wl,-R': + app('RPATH_'+uselib,lst.pop(0).lstrip('-Wl,')) + elif x.startswith('-Wl,-R,'): + app('RPATH_'+uselib,x[7:]) + elif x.startswith('-Wl,-R'): + app('RPATH_'+uselib,x[6:]) + elif 
x.startswith('-Wl,-rpath,'): + app('RPATH_'+uselib,x[11:]) + elif x=='-Wl,-Bstatic'or x=='-Bstatic': + static=True + elif x=='-Wl,-Bdynamic'or x=='-Bdynamic': + static=False + elif x.startswith('-Wl'): + app('LINKFLAGS_'+uselib,[x]) + elif x.startswith('-m')or x.startswith('-f')or x.startswith('-dynamic'): + app('CFLAGS_'+uselib,[x]) + app('CXXFLAGS_'+uselib,[x]) + elif x.startswith('-bundle'): + app('LINKFLAGS_'+uselib,[x]) + elif x.startswith('-undefined')or x.startswith('-Xlinker'): + arg=lst.pop(0) + app('LINKFLAGS_'+uselib,[x,arg]) + elif x.startswith('-arch')or x.startswith('-isysroot'): + tmp=[x,lst.pop(0)] + app('CFLAGS_'+uselib,tmp) + app('CXXFLAGS_'+uselib,tmp) + app('LINKFLAGS_'+uselib,tmp) + elif x.endswith('.a')or x.endswith('.so')or x.endswith('.dylib')or x.endswith('.lib'): + appu('LINKFLAGS_'+uselib,[x]) +@conf +def validate_cfg(self,kw): + if not'path'in kw: + if not self.env.PKGCONFIG: + self.find_program('pkg-config',var='PKGCONFIG') + kw['path']=self.env.PKGCONFIG + if'atleast_pkgconfig_version'in kw: + if not'msg'in kw: + kw['msg']='Checking for pkg-config version >= %r'%kw['atleast_pkgconfig_version'] + return + if not'okmsg'in kw: + kw['okmsg']='yes' + if not'errmsg'in kw: + kw['errmsg']='not found' + if'modversion'in kw: + if not'msg'in kw: + kw['msg']='Checking for %r version'%kw['modversion'] + return + for x in cfg_ver.keys(): + y=x.replace('-','_') + if y in kw: + if not'package'in kw: + raise ValueError('%s requires a package'%x) + if not'msg'in kw: + kw['msg']='Checking for %r %s %s'%(kw['package'],cfg_ver[x],kw[y]) + return + if not'define_name'in kw: + pkgname=kw.get('uselib_store',kw['package'].upper()) + kw['define_name']=self.have_define(pkgname) + if not'uselib_store'in kw: + self.undefine(kw['define_name']) + if not'msg'in kw: + kw['msg']='Checking for %r'%(kw['package']or kw['path']) +@conf +def exec_cfg(self,kw): + path=Utils.to_list(kw['path']) + env=self.env.env or None + def define_it(): + pkgname=kw.get('uselib_store',kw['package'].upper()) + if kw.get('global_define'): + self.define(self.have_define(kw['package']),1,False) + else: + self.env.append_unique('DEFINES_%s'%pkgname,"%s=1"%self.have_define(pkgname)) + self.env[self.have_define(pkgname)]=1 + if'atleast_pkgconfig_version'in kw: + cmd=path+['--atleast-pkgconfig-version=%s'%kw['atleast_pkgconfig_version']] + self.cmd_and_log(cmd,env=env) + if not'okmsg'in kw: + kw['okmsg']='yes' + return + for x in cfg_ver: + y=x.replace('-','_') + if y in kw: + self.cmd_and_log(path+['--%s=%s'%(x,kw[y]),kw['package']],env=env) + if not'okmsg'in kw: + kw['okmsg']='yes' + define_it() + break + if'modversion'in kw: + version=self.cmd_and_log(path+['--modversion',kw['modversion']],env=env).strip() + self.define('%s_VERSION'%Utils.quote_define_name(kw.get('uselib_store',kw['modversion'])),version) + return version + lst=[]+path + defi=kw.get('define_variable',None) + if not defi: + defi=self.env.PKG_CONFIG_DEFINES or{} + for key,val in defi.items(): + lst.append('--define-variable=%s=%s'%(key,val)) + static=kw.get('force_static',False) + if'args'in kw: + args=Utils.to_list(kw['args']) + if'--static'in args or'--static-libs'in args: + static=True + lst+=args + lst.extend(Utils.to_list(kw['package'])) + if'variables'in kw: + v_env=kw.get('env',self.env) + uselib=kw.get('uselib_store',kw['package'].upper()) + vars=Utils.to_list(kw['variables']) + for v in vars: + val=self.cmd_and_log(lst+['--variable='+v],env=env).strip() + var='%s_%s'%(uselib,v) + v_env[var]=val + if not'okmsg'in kw: + kw['okmsg']='yes' + return 
+ ret=self.cmd_and_log(lst,env=env) + if not'okmsg'in kw: + kw['okmsg']='yes' + define_it() + self.parse_flags(ret,kw.get('uselib_store',kw['package'].upper()),kw.get('env',self.env),force_static=static,posix=kw.get('posix',None)) + return ret +@conf +def check_cfg(self,*k,**kw): + if k: + lst=k[0].split() + kw['package']=lst[0] + kw['args']=' '.join(lst[1:]) + self.validate_cfg(kw) + if'msg'in kw: + self.start_msg(kw['msg'],**kw) + ret=None + try: + ret=self.exec_cfg(kw) + except self.errors.WafError: + if'errmsg'in kw: + self.end_msg(kw['errmsg'],'YELLOW',**kw) + if Logs.verbose>1: + raise + else: + self.fatal('The configuration failed') + else: + if not ret: + ret=True + kw['success']=ret + if'okmsg'in kw: + self.end_msg(self.ret_msg(kw['okmsg'],kw),**kw) + return ret +def build_fun(bld): + if bld.kw['compile_filename']: + node=bld.srcnode.make_node(bld.kw['compile_filename']) + node.write(bld.kw['code']) + o=bld(features=bld.kw['features'],source=bld.kw['compile_filename'],target='testprog') + for k,v in bld.kw.items(): + setattr(o,k,v) + if not bld.kw.get('quiet',None): + bld.conf.to_log("==>\n%s\n<=="%bld.kw['code']) +@conf +def validate_c(self,kw): + if not'build_fun'in kw: + kw['build_fun']=build_fun + if not'env'in kw: + kw['env']=self.env.derive() + env=kw['env'] + if not'compiler'in kw and not'features'in kw: + kw['compiler']='c' + if env['CXX_NAME']and Task.classes.get('cxx',None): + kw['compiler']='cxx' + if not self.env['CXX']: + self.fatal('a c++ compiler is required') + else: + if not self.env['CC']: + self.fatal('a c compiler is required') + if not'compile_mode'in kw: + kw['compile_mode']='c' + if'cxx'in Utils.to_list(kw.get('features',[]))or kw.get('compiler','')=='cxx': + kw['compile_mode']='cxx' + if not'type'in kw: + kw['type']='cprogram' + if not'features'in kw: + if not'header_name'in kw or kw.get('link_header_test',True): + kw['features']=[kw['compile_mode'],kw['type']] + else: + kw['features']=[kw['compile_mode']] + else: + kw['features']=Utils.to_list(kw['features']) + if not'compile_filename'in kw: + kw['compile_filename']='test.c'+((kw['compile_mode']=='cxx')and'pp'or'') + def to_header(dct): + if'header_name'in dct: + dct=Utils.to_list(dct['header_name']) + return''.join(['#include <%s>\n'%x for x in dct]) + return'' + if'framework_name'in kw: + fwkname=kw['framework_name'] + if not'uselib_store'in kw: + kw['uselib_store']=fwkname.upper() + if not kw.get('no_header',False): + if not'header_name'in kw: + kw['header_name']=[] + fwk='%s/%s.h'%(fwkname,fwkname) + if kw.get('remove_dot_h',None): + fwk=fwk[:-2] + kw['header_name']=Utils.to_list(kw['header_name'])+[fwk] + kw['msg']='Checking for framework %s'%fwkname + kw['framework']=fwkname + if'function_name'in kw: + fu=kw['function_name'] + if not'msg'in kw: + kw['msg']='Checking for function %s'%fu + kw['code']=to_header(kw)+SNIP_FUNCTION%fu + if not'uselib_store'in kw: + kw['uselib_store']=fu.upper() + if not'define_name'in kw: + kw['define_name']=self.have_define(fu) + elif'type_name'in kw: + tu=kw['type_name'] + if not'header_name'in kw: + kw['header_name']='stdint.h' + if'field_name'in kw: + field=kw['field_name'] + kw['code']=to_header(kw)+SNIP_FIELD%{'type_name':tu,'field_name':field} + if not'msg'in kw: + kw['msg']='Checking for field %s in %s'%(field,tu) + if not'define_name'in kw: + kw['define_name']=self.have_define((tu+'_'+field).upper()) + else: + kw['code']=to_header(kw)+SNIP_TYPE%{'type_name':tu} + if not'msg'in kw: + kw['msg']='Checking for type %s'%tu + if not'define_name'in kw: + 
kw['define_name']=self.have_define(tu.upper()) + elif'header_name'in kw: + if not'msg'in kw: + kw['msg']='Checking for header %s'%kw['header_name'] + l=Utils.to_list(kw['header_name']) + assert len(l)>0,'list of headers in header_name is empty' + kw['code']=to_header(kw)+SNIP_EMPTY_PROGRAM + if not'uselib_store'in kw: + kw['uselib_store']=l[0].upper() + if not'define_name'in kw: + kw['define_name']=self.have_define(l[0]) + if'lib'in kw: + if not'msg'in kw: + kw['msg']='Checking for library %s'%kw['lib'] + if not'uselib_store'in kw: + kw['uselib_store']=kw['lib'].upper() + if'stlib'in kw: + if not'msg'in kw: + kw['msg']='Checking for static library %s'%kw['stlib'] + if not'uselib_store'in kw: + kw['uselib_store']=kw['stlib'].upper() + if'fragment'in kw: + kw['code']=kw['fragment'] + if not'msg'in kw: + kw['msg']='Checking for code snippet' + if not'errmsg'in kw: + kw['errmsg']='no' + for(flagsname,flagstype)in(('cxxflags','compiler'),('cflags','compiler'),('linkflags','linker')): + if flagsname in kw: + if not'msg'in kw: + kw['msg']='Checking for %s flags %s'%(flagstype,kw[flagsname]) + if not'errmsg'in kw: + kw['errmsg']='no' + if not'execute'in kw: + kw['execute']=False + if kw['execute']: + kw['features'].append('test_exec') + kw['chmod']=493 + if not'errmsg'in kw: + kw['errmsg']='not found' + if not'okmsg'in kw: + kw['okmsg']='yes' + if not'code'in kw: + kw['code']=SNIP_EMPTY_PROGRAM + if self.env[INCKEYS]: + kw['code']='\n'.join(['#include <%s>'%x for x in self.env[INCKEYS]])+'\n'+kw['code'] + if kw.get('merge_config_header',False)or env.merge_config_header: + kw['code']='%s\n\n%s'%(self.get_config_header(),kw['code']) + env.DEFINES=[] + if not kw.get('success'):kw['success']=None + if'define_name'in kw: + self.undefine(kw['define_name']) + if not'msg'in kw: + self.fatal('missing "msg" in conf.check(...)') +@conf +def post_check(self,*k,**kw): + is_success=0 + if kw['execute']: + if kw['success']is not None: + if kw.get('define_ret',False): + is_success=kw['success'] + else: + is_success=(kw['success']==0) + else: + is_success=(kw['success']==0) + if'define_name'in kw: + comment=kw.get('comment','') + define_name=kw['define_name'] + if'header_name'in kw or'function_name'in kw or'type_name'in kw or'fragment'in kw: + if kw['execute']and kw.get('define_ret',None)and isinstance(is_success,str): + self.define(define_name,is_success,quote=kw.get('quote',1),comment=comment) + else: + self.define_cond(define_name,is_success,comment=comment) + else: + self.define_cond(define_name,is_success,comment=comment) + if kw.get('global_define',None): + self.env[kw['define_name']]=is_success + if'header_name'in kw: + if kw.get('auto_add_header_name',False): + self.env.append_value(INCKEYS,Utils.to_list(kw['header_name'])) + if is_success and'uselib_store'in kw: + from waflib.Tools import ccroot + _vars=set([]) + for x in kw['features']: + if x in ccroot.USELIB_VARS: + _vars|=ccroot.USELIB_VARS[x] + for k in _vars: + lk=k.lower() + if lk in kw: + val=kw[lk] + if isinstance(val,str): + val=val.rstrip(os.path.sep) + self.env.append_unique(k+'_'+kw['uselib_store'],Utils.to_list(val)) + return is_success +@conf +def check(self,*k,**kw): + self.validate_c(kw) + self.start_msg(kw['msg'],**kw) + ret=None + try: + ret=self.run_build(*k,**kw) + except self.errors.ConfigurationError: + self.end_msg(kw['errmsg'],'YELLOW',**kw) + if Logs.verbose>1: + raise + else: + self.fatal('The configuration failed') + else: + kw['success']=ret + ret=self.post_check(*k,**kw) + if not ret: + 
self.end_msg(kw['errmsg'],'YELLOW',**kw) + self.fatal('The configuration failed %r'%ret) + else: + self.end_msg(self.ret_msg(kw['okmsg'],kw),**kw) + return ret +class test_exec(Task.Task): + color='PINK' + def run(self): + if getattr(self.generator,'rpath',None): + if getattr(self.generator,'define_ret',False): + self.generator.bld.retval=self.generator.bld.cmd_and_log([self.inputs[0].abspath()]) + else: + self.generator.bld.retval=self.generator.bld.exec_command([self.inputs[0].abspath()]) + else: + env=self.env.env or{} + env.update(dict(os.environ)) + for var in('LD_LIBRARY_PATH','DYLD_LIBRARY_PATH','PATH'): + env[var]=self.inputs[0].parent.abspath()+os.path.pathsep+env.get(var,'') + if getattr(self.generator,'define_ret',False): + self.generator.bld.retval=self.generator.bld.cmd_and_log([self.inputs[0].abspath()],env=env) + else: + self.generator.bld.retval=self.generator.bld.exec_command([self.inputs[0].abspath()],env=env) +@feature('test_exec') +@after_method('apply_link') +def test_exec_fun(self): + self.create_task('test_exec',self.link_task.outputs[0]) +@conf +def check_cxx(self,*k,**kw): + kw['compiler']='cxx' + return self.check(*k,**kw) +@conf +def check_cc(self,*k,**kw): + kw['compiler']='c' + return self.check(*k,**kw) +@conf +def set_define_comment(self,key,comment): + coms=self.env.DEFINE_COMMENTS + if not coms: + coms=self.env.DEFINE_COMMENTS={} + coms[key]=comment or'' +@conf +def get_define_comment(self,key): + coms=self.env.DEFINE_COMMENTS or{} + return coms.get(key,'') +@conf +def define(self,key,val,quote=True,comment=''): + assert key and isinstance(key,str) + if val is True: + val=1 + elif val in(False,None): + val=0 + if isinstance(val,int)or isinstance(val,float): + s='%s=%s' + else: + s=quote and'%s="%s"'or'%s=%s' + app=s%(key,str(val)) + ban=key+'=' + lst=self.env['DEFINES'] + for x in lst: + if x.startswith(ban): + lst[lst.index(x)]=app + break + else: + self.env.append_value('DEFINES',app) + self.env.append_unique(DEFKEYS,key) + self.set_define_comment(key,comment) +@conf +def undefine(self,key,comment=''): + assert key and isinstance(key,str) + ban=key+'=' + lst=[x for x in self.env['DEFINES']if not x.startswith(ban)] + self.env['DEFINES']=lst + self.env.append_unique(DEFKEYS,key) + self.set_define_comment(key,comment) +@conf +def define_cond(self,key,val,comment=''): + assert key and isinstance(key,str) + if val: + self.define(key,1,comment=comment) + else: + self.undefine(key,comment=comment) +@conf +def is_defined(self,key): + assert key and isinstance(key,str) + ban=key+'=' + for x in self.env['DEFINES']: + if x.startswith(ban): + return True + return False +@conf +def get_define(self,key): + assert key and isinstance(key,str) + ban=key+'=' + for x in self.env['DEFINES']: + if x.startswith(ban): + return x[len(ban):] + return None +@conf +def have_define(self,key): + return(self.env.HAVE_PAT or'HAVE_%s')%Utils.quote_define_name(key) +@conf +def write_config_header(self,configfile='',guard='',top=False,defines=True,headers=False,remove=True,define_prefix=''): + if not configfile:configfile=WAF_CONFIG_H + waf_guard=guard or'W_%s_WAF'%Utils.quote_define_name(configfile) + node=top and self.bldnode or self.path.get_bld() + node=node.make_node(configfile) + node.parent.mkdir() + lst=['/* WARNING! All changes made to this file will be lost! 
*/\n'] + lst.append('#ifndef %s\n#define %s\n'%(waf_guard,waf_guard)) + lst.append(self.get_config_header(defines,headers,define_prefix=define_prefix)) + lst.append('\n#endif /* %s */\n'%waf_guard) + node.write('\n'.join(lst)) + self.env.append_unique(Build.CFG_FILES,[node.abspath()]) + if remove: + for key in self.env[DEFKEYS]: + self.undefine(key) + self.env[DEFKEYS]=[] +@conf +def get_config_header(self,defines=True,headers=False,define_prefix=''): + lst=[] + if self.env.WAF_CONFIG_H_PRELUDE: + lst.append(self.env.WAF_CONFIG_H_PRELUDE) + if headers: + for x in self.env[INCKEYS]: + lst.append('#include <%s>'%x) + if defines: + tbl={} + for k in self.env['DEFINES']: + a,_,b=k.partition('=') + tbl[a]=b + for k in self.env[DEFKEYS]: + caption=self.get_define_comment(k) + if caption: + caption=' /* %s */'%caption + try: + txt='#define %s%s %s%s'%(define_prefix,k,tbl[k],caption) + except KeyError: + txt='/* #undef %s%s */%s'%(define_prefix,k,caption) + lst.append(txt) + return"\n".join(lst) +@conf +def cc_add_flags(conf): + conf.add_os_flags('CPPFLAGS',dup=False) + conf.add_os_flags('CFLAGS',dup=False) +@conf +def cxx_add_flags(conf): + conf.add_os_flags('CPPFLAGS',dup=False) + conf.add_os_flags('CXXFLAGS',dup=False) +@conf +def link_add_flags(conf): + conf.add_os_flags('LINKFLAGS',dup=False) + conf.add_os_flags('LDFLAGS',dup=False) +@conf +def cc_load_tools(conf): + if not conf.env.DEST_OS: + conf.env.DEST_OS=Utils.unversioned_sys_platform() + conf.load('c') +@conf +def cxx_load_tools(conf): + if not conf.env.DEST_OS: + conf.env.DEST_OS=Utils.unversioned_sys_platform() + conf.load('cxx') +@conf +def get_cc_version(conf,cc,gcc=False,icc=False,clang=False): + cmd=cc+['-dM','-E','-'] + env=conf.env.env or None + try: + out,err=conf.cmd_and_log(cmd,output=0,input='\n',env=env) + except Exception: + conf.fatal('Could not determine the compiler version %r'%cmd) + if gcc: + if out.find('__INTEL_COMPILER')>=0: + conf.fatal('The intel compiler pretends to be gcc') + if out.find('__GNUC__')<0 and out.find('__clang__')<0: + conf.fatal('Could not determine the compiler type') + if icc and out.find('__INTEL_COMPILER')<0: + conf.fatal('Not icc/icpc') + if clang and out.find('__clang__')<0: + conf.fatal('Not clang/clang++') + if not clang and out.find('__clang__')>=0: + conf.fatal('Could not find gcc/g++ (only Clang), if renamed try eg: CC=gcc48 CXX=g++48 waf configure') + k={} + if icc or gcc or clang: + out=out.splitlines() + for line in out: + lst=shlex.split(line) + if len(lst)>2: + key=lst[1] + val=lst[2] + k[key]=val + def isD(var): + return var in k + if not conf.env.DEST_OS: + conf.env.DEST_OS='' + for i in MACRO_TO_DESTOS: + if isD(i): + conf.env.DEST_OS=MACRO_TO_DESTOS[i] + break + else: + if isD('__APPLE__')and isD('__MACH__'): + conf.env.DEST_OS='darwin' + elif isD('__unix__'): + conf.env.DEST_OS='generic' + if isD('__ELF__'): + conf.env.DEST_BINFMT='elf' + elif isD('__WINNT__')or isD('__CYGWIN__')or isD('_WIN32'): + conf.env.DEST_BINFMT='pe' + conf.env.LIBDIR=conf.env.BINDIR + elif isD('__APPLE__'): + conf.env.DEST_BINFMT='mac-o' + if not conf.env.DEST_BINFMT: + conf.env.DEST_BINFMT=Utils.destos_to_binfmt(conf.env.DEST_OS) + for i in MACRO_TO_DEST_CPU: + if isD(i): + conf.env.DEST_CPU=MACRO_TO_DEST_CPU[i] + break + Logs.debug('ccroot: dest platform: '+' '.join([conf.env[x]or'?'for x in('DEST_OS','DEST_BINFMT','DEST_CPU')])) + if icc: + ver=k['__INTEL_COMPILER'] + conf.env['CC_VERSION']=(ver[:-2],ver[-2],ver[-1]) + else: + if isD('__clang__')and isD('__clang_major__'): + 
conf.env['CC_VERSION']=(k['__clang_major__'],k['__clang_minor__'],k['__clang_patchlevel__']) + else: + conf.env['CC_VERSION']=(k['__GNUC__'],k['__GNUC_MINOR__'],k.get('__GNUC_PATCHLEVEL__','0')) + return k +@conf +def get_xlc_version(conf,cc): + cmd=cc+['-qversion'] + try: + out,err=conf.cmd_and_log(cmd,output=0) + except Errors.WafError: + conf.fatal('Could not find xlc %r'%cmd) + for v in(r"IBM XL C/C\+\+.* V(?P<major>\d*)\.(?P<minor>\d*)",): + version_re=re.compile(v,re.I).search + match=version_re(out or err) + if match: + k=match.groupdict() + conf.env['CC_VERSION']=(k['major'],k['minor']) + break + else: + conf.fatal('Could not determine the XLC version.') +@conf +def get_suncc_version(conf,cc): + cmd=cc+['-V'] + try: + out,err=conf.cmd_and_log(cmd,output=0) + except Errors.WafError as e: + if not(hasattr(e,'returncode')and hasattr(e,'stdout')and hasattr(e,'stderr')): + conf.fatal('Could not find suncc %r'%cmd) + out=e.stdout + err=e.stderr + version=(out or err) + version=version.splitlines()[0] + version_re=re.compile(r'cc: (studio.*?|\s+)?(sun\s+(c\+\+|c)|(WorkShop\s+Compilers))?\s+(?P<major>\d*)\.(?P<minor>\d*)',re.I).search + match=version_re(version) + if match: + k=match.groupdict() + conf.env['CC_VERSION']=(k['major'],k['minor']) + else: + conf.fatal('Could not determine the suncc version.') +@conf +def add_as_needed(self): + if self.env.DEST_BINFMT=='elf'and'gcc'in(self.env.CXX_NAME,self.env.CC_NAME): + self.env.append_unique('LINKFLAGS','-Wl,--as-needed') +class cfgtask(Task.TaskBase): + def display(self): + return'' + def runnable_status(self): + return Task.RUN_ME + def uid(self): + return Utils.SIG_NIL + def run(self): + conf=self.conf + bld=Build.BuildContext(top_dir=conf.srcnode.abspath(),out_dir=conf.bldnode.abspath()) + bld.env=conf.env + bld.init_dirs() + bld.in_msg=1 + bld.logger=self.logger + try: + bld.check(**self.args) + except Exception: + return 1 +@conf +def multicheck(self,*k,**kw): + self.start_msg(kw.get('msg','Executing %d configuration tests'%len(k)),**kw) + class par(object): + def __init__(self): + self.keep=False + self.returned_tasks=[] + self.task_sigs={} + self.progress_bar=0 + def total(self): + return len(tasks) + def to_log(self,*k,**kw): + return + bld=par() + tasks=[] + for dct in k: + x=cfgtask(bld=bld) + tasks.append(x) + x.args=dct + x.bld=bld + x.conf=self + x.args=dct + x.logger=Logs.make_mem_logger(str(id(x)),self.logger) + def it(): + yield tasks + while 1: + yield[] + p=Runner.Parallel(bld,Options.options.jobs) + p.biter=it() + p.start() + for x in tasks: + x.logger.memhandler.flush() + if p.error: + for x in p.error: + if getattr(x,'err_msg',None): + self.to_log(x.err_msg) + self.end_msg('fail',color='RED') + raise Errors.WafError('There is an error in the library, read config.log for more information') + for x in tasks: + if x.hasrun!=Task.SUCCESS: + self.end_msg(kw.get('errmsg','no'),color='YELLOW',**kw) + self.fatal(kw.get('fatalmsg',None)or'One of the tests has failed, read config.log for more information') + self.end_msg('ok',**kw) diff -Nru lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/ccroot.py lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/ccroot.py --- lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/ccroot.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/ccroot.py 2019-06-06 20:12:09.000000000 +0000 @@ -0,0 +1,447 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit!
https://waf.io/book/index.html#_obtaining_the_waf_file + +import os,re +from waflib import Task,Utils,Node,Errors +from waflib.TaskGen import after_method,before_method,feature,taskgen_method,extension +from waflib.Tools import c_aliases,c_preproc,c_config,c_osx,c_tests +from waflib.Configure import conf +SYSTEM_LIB_PATHS=['/usr/lib64','/usr/lib','/usr/local/lib64','/usr/local/lib'] +USELIB_VARS=Utils.defaultdict(set) +USELIB_VARS['c']=set(['INCLUDES','FRAMEWORKPATH','DEFINES','CPPFLAGS','CCDEPS','CFLAGS','ARCH']) +USELIB_VARS['cxx']=set(['INCLUDES','FRAMEWORKPATH','DEFINES','CPPFLAGS','CXXDEPS','CXXFLAGS','ARCH']) +USELIB_VARS['d']=set(['INCLUDES','DFLAGS']) +USELIB_VARS['includes']=set(['INCLUDES','FRAMEWORKPATH','ARCH']) +USELIB_VARS['cprogram']=USELIB_VARS['cxxprogram']=set(['LIB','STLIB','LIBPATH','STLIBPATH','LINKFLAGS','RPATH','LINKDEPS','FRAMEWORK','FRAMEWORKPATH','ARCH','LDFLAGS']) +USELIB_VARS['cshlib']=USELIB_VARS['cxxshlib']=set(['LIB','STLIB','LIBPATH','STLIBPATH','LINKFLAGS','RPATH','LINKDEPS','FRAMEWORK','FRAMEWORKPATH','ARCH','LDFLAGS']) +USELIB_VARS['cstlib']=USELIB_VARS['cxxstlib']=set(['ARFLAGS','LINKDEPS']) +USELIB_VARS['dprogram']=set(['LIB','STLIB','LIBPATH','STLIBPATH','LINKFLAGS','RPATH','LINKDEPS']) +USELIB_VARS['dshlib']=set(['LIB','STLIB','LIBPATH','STLIBPATH','LINKFLAGS','RPATH','LINKDEPS']) +USELIB_VARS['dstlib']=set(['ARFLAGS','LINKDEPS']) +USELIB_VARS['asm']=set(['ASFLAGS']) +@taskgen_method +def create_compiled_task(self,name,node): + out='%s.%d.o'%(node.name,self.idx) + task=self.create_task(name,node,node.parent.find_or_declare(out)) + try: + self.compiled_tasks.append(task) + except AttributeError: + self.compiled_tasks=[task] + return task +@taskgen_method +def to_incnodes(self,inlst): + lst=[] + seen=set([]) + for x in self.to_list(inlst): + if x in seen or not x: + continue + seen.add(x) + if isinstance(x,Node.Node): + lst.append(x) + else: + if os.path.isabs(x): + lst.append(self.bld.root.make_node(x)or x) + else: + if x[0]=='#': + p=self.bld.bldnode.make_node(x[1:]) + v=self.bld.srcnode.make_node(x[1:]) + else: + p=self.path.get_bld().make_node(x) + v=self.path.make_node(x) + if p.is_child_of(self.bld.bldnode): + p.mkdir() + lst.append(p) + lst.append(v) + return lst +@feature('c','cxx','d','asm','fc','includes') +@after_method('propagate_uselib_vars','process_source') +def apply_incpaths(self): + lst=self.to_incnodes(self.to_list(getattr(self,'includes',[]))+self.env['INCLUDES']) + self.includes_nodes=lst + self.env['INCPATHS']=[x.abspath()for x in lst] +class link_task(Task.Task): + color='YELLOW' + inst_to=None + chmod=Utils.O755 + def add_target(self,target): + if isinstance(target,str): + pattern=self.env[self.__class__.__name__+'_PATTERN'] + if not pattern: + pattern='%s' + folder,name=os.path.split(target) + if self.__class__.__name__.find('shlib')>0 and getattr(self.generator,'vnum',None): + nums=self.generator.vnum.split('.') + if self.env.DEST_BINFMT=='pe': + name=name+'-'+nums[0] + elif self.env.DEST_OS=='openbsd': + pattern='%s.%s'%(pattern,nums[0]) + if len(nums)>=2: + pattern+='.%s'%nums[1] + if folder: + tmp=folder+os.sep+pattern%name + else: + tmp=pattern%name + target=self.generator.path.find_or_declare(tmp) + self.set_outputs(target) +class stlink_task(link_task): + run_str='${AR} ${ARFLAGS} ${AR_TGT_F}${TGT} ${AR_SRC_F}${SRC}' + chmod=Utils.O644 +def rm_tgt(cls): + old=cls.run + def wrap(self): + try:os.remove(self.outputs[0].abspath()) + except OSError:pass + return old(self) + setattr(cls,'run',wrap) +rm_tgt(stlink_task) 
+@feature('c','cxx','d','fc','asm') +@after_method('process_source') +def apply_link(self): + for x in self.features: + if x=='cprogram'and'cxx'in self.features: + x='cxxprogram' + elif x=='cshlib'and'cxx'in self.features: + x='cxxshlib' + if x in Task.classes: + if issubclass(Task.classes[x],link_task): + link=x + break + else: + return + objs=[t.outputs[0]for t in getattr(self,'compiled_tasks',[])] + self.link_task=self.create_task(link,objs) + self.link_task.add_target(self.target) + try: + inst_to=self.install_path + except AttributeError: + inst_to=self.link_task.__class__.inst_to + if inst_to: + self.install_task=self.bld.install_files(inst_to,self.link_task.outputs[:],env=self.env,chmod=self.link_task.chmod,task=self.link_task) +@taskgen_method +def use_rec(self,name,**kw): + if name in self.tmp_use_not or name in self.tmp_use_seen: + return + try: + y=self.bld.get_tgen_by_name(name) + except Errors.WafError: + self.uselib.append(name) + self.tmp_use_not.add(name) + return + self.tmp_use_seen.append(name) + y.post() + y.tmp_use_objects=objects=kw.get('objects',True) + y.tmp_use_stlib=stlib=kw.get('stlib',True) + try: + link_task=y.link_task + except AttributeError: + y.tmp_use_var='' + else: + objects=False + if not isinstance(link_task,stlink_task): + stlib=False + y.tmp_use_var='LIB' + else: + y.tmp_use_var='STLIB' + p=self.tmp_use_prec + for x in self.to_list(getattr(y,'use',[])): + if self.env["STLIB_"+x]: + continue + try: + p[x].append(name) + except KeyError: + p[x]=[name] + self.use_rec(x,objects=objects,stlib=stlib) +@feature('c','cxx','d','use','fc') +@before_method('apply_incpaths','propagate_uselib_vars') +@after_method('apply_link','process_source') +def process_use(self): + use_not=self.tmp_use_not=set([]) + self.tmp_use_seen=[] + use_prec=self.tmp_use_prec={} + self.uselib=self.to_list(getattr(self,'uselib',[])) + self.includes=self.to_list(getattr(self,'includes',[])) + names=self.to_list(getattr(self,'use',[])) + for x in names: + self.use_rec(x) + for x in use_not: + if x in use_prec: + del use_prec[x] + out=[] + tmp=[] + for x in self.tmp_use_seen: + for k in use_prec.values(): + if x in k: + break + else: + tmp.append(x) + while tmp: + e=tmp.pop() + out.append(e) + try: + nlst=use_prec[e] + except KeyError: + pass + else: + del use_prec[e] + for x in nlst: + for y in use_prec: + if x in use_prec[y]: + break + else: + tmp.append(x) + if use_prec: + raise Errors.WafError('Cycle detected in the use processing %r'%use_prec) + out.reverse() + link_task=getattr(self,'link_task',None) + for x in out: + y=self.bld.get_tgen_by_name(x) + var=y.tmp_use_var + if var and link_task: + if var=='LIB'or y.tmp_use_stlib or x in names: + self.env.append_value(var,[y.target[y.target.rfind(os.sep)+1:]]) + self.link_task.dep_nodes.extend(y.link_task.outputs) + tmp_path=y.link_task.outputs[0].parent.path_from(self.bld.bldnode) + self.env.append_unique(var+'PATH',[tmp_path]) + else: + if y.tmp_use_objects: + self.add_objects_from_tgen(y) + if getattr(y,'export_includes',None): + self.includes.extend(y.to_incnodes(y.export_includes)) + if getattr(y,'export_defines',None): + self.env.append_value('DEFINES',self.to_list(y.export_defines)) + for x in names: + try: + y=self.bld.get_tgen_by_name(x) + except Errors.WafError: + if not self.env['STLIB_'+x]and not x in self.uselib: + self.uselib.append(x) + else: + for k in self.to_list(getattr(y,'use',[])): + if not self.env['STLIB_'+k]and not k in self.uselib: + self.uselib.append(k) +@taskgen_method +def accept_node_to_link(self,node): + 
return not node.name.endswith('.pdb') +@taskgen_method +def add_objects_from_tgen(self,tg): + try: + link_task=self.link_task + except AttributeError: + pass + else: + for tsk in getattr(tg,'compiled_tasks',[]): + for x in tsk.outputs: + if self.accept_node_to_link(x): + link_task.inputs.append(x) +@taskgen_method +def get_uselib_vars(self): + _vars=set([]) + for x in self.features: + if x in USELIB_VARS: + _vars|=USELIB_VARS[x] + return _vars +@feature('c','cxx','d','fc','javac','cs','uselib','asm') +@after_method('process_use') +def propagate_uselib_vars(self): + _vars=self.get_uselib_vars() + env=self.env + app=env.append_value + feature_uselib=self.features+self.to_list(getattr(self,'uselib',[])) + for var in _vars: + y=var.lower() + val=getattr(self,y,[]) + if val: + app(var,self.to_list(val)) + for x in feature_uselib: + val=env['%s_%s'%(var,x)] + if val: + app(var,val) +@feature('cshlib','cxxshlib','fcshlib') +@after_method('apply_link') +def apply_implib(self): + if not self.env.DEST_BINFMT=='pe': + return + dll=self.link_task.outputs[0] + if isinstance(self.target,Node.Node): + name=self.target.name + else: + name=os.path.split(self.target)[1] + implib=self.env['implib_PATTERN']%name + implib=dll.parent.find_or_declare(implib) + self.env.append_value('LINKFLAGS',self.env['IMPLIB_ST']%implib.bldpath()) + self.link_task.outputs.append(implib) + if getattr(self,'defs',None)and self.env.DEST_BINFMT=='pe': + node=self.path.find_resource(self.defs) + if not node: + raise Errors.WafError('invalid def file %r'%self.defs) + if'msvc'in(self.env.CC_NAME,self.env.CXX_NAME): + self.env.append_value('LINKFLAGS','/def:%s'%node.path_from(self.bld.bldnode)) + self.link_task.dep_nodes.append(node) + else: + self.link_task.inputs.append(node) + if getattr(self,'install_task',None): + try: + inst_to=self.install_path_implib + except AttributeError: + try: + inst_to=self.install_path + except AttributeError: + inst_to='${IMPLIBDIR}' + self.install_task.dest='${BINDIR}' + if not self.env.IMPLIBDIR: + self.env.IMPLIBDIR=self.env.LIBDIR + self.implib_install_task=self.bld.install_files(inst_to,implib,env=self.env,chmod=self.link_task.chmod,task=self.link_task) +re_vnum=re.compile('^([1-9]\\d*|0)([.]([1-9]\\d*|0)){0,2}?$') +@feature('cshlib','cxxshlib','dshlib','fcshlib','vnum') +@after_method('apply_link','propagate_uselib_vars') +def apply_vnum(self): + if not getattr(self,'vnum','')or os.name!='posix'or self.env.DEST_BINFMT not in('elf','mac-o'): + return + link=self.link_task + if not re_vnum.match(self.vnum): + raise Errors.WafError('Invalid vnum %r for target %r'%(self.vnum,getattr(self,'name',self))) + nums=self.vnum.split('.') + node=link.outputs[0] + cnum=getattr(self,'cnum',str(nums[0])) + cnums=cnum.split('.') + if len(cnums)>len(nums)or nums[0:len(cnums)]!=cnums: + raise Errors.WafError('invalid compatibility version %s'%cnum) + libname=node.name + if libname.endswith('.dylib'): + name3=libname.replace('.dylib','.%s.dylib'%self.vnum) + name2=libname.replace('.dylib','.%s.dylib'%cnum) + else: + name3=libname+'.'+self.vnum + name2=libname+'.'+cnum + if self.env.SONAME_ST: + v=self.env.SONAME_ST%name2 + self.env.append_value('LINKFLAGS',v.split()) + if self.env.DEST_OS!='openbsd': + outs=[node.parent.find_or_declare(name3)] + if name2!=name3: + outs.append(node.parent.find_or_declare(name2)) + self.create_task('vnum',node,outs) + if getattr(self,'install_task',None): + self.install_task.hasrun=Task.SKIP_ME + bld=self.bld + path=self.install_task.dest + if self.env.DEST_OS=='openbsd': + 
libname=self.link_task.outputs[0].name + t1=bld.install_as('%s%s%s'%(path,os.sep,libname),node,env=self.env,chmod=self.link_task.chmod) + self.vnum_install_task=(t1,) + else: + t1=bld.install_as(path+os.sep+name3,node,env=self.env,chmod=self.link_task.chmod) + t3=bld.symlink_as(path+os.sep+libname,name3) + if name2!=name3: + t2=bld.symlink_as(path+os.sep+name2,name3) + self.vnum_install_task=(t1,t2,t3) + else: + self.vnum_install_task=(t1,t3) + if'-dynamiclib'in self.env['LINKFLAGS']: + try: + inst_to=self.install_path + except AttributeError: + inst_to=self.link_task.__class__.inst_to + if inst_to: + p=Utils.subst_vars(inst_to,self.env) + path=os.path.join(p,name2) + self.env.append_value('LINKFLAGS',['-install_name',path]) + self.env.append_value('LINKFLAGS','-Wl,-compatibility_version,%s'%cnum) + self.env.append_value('LINKFLAGS','-Wl,-current_version,%s'%self.vnum) +class vnum(Task.Task): + color='CYAN' + quient=True + ext_in=['.bin'] + def keyword(self): + return'Symlinking' + def run(self): + for x in self.outputs: + path=x.abspath() + try: + os.remove(path) + except OSError: + pass + try: + os.symlink(self.inputs[0].name,path) + except OSError: + return 1 +class fake_shlib(link_task): + def runnable_status(self): + for t in self.run_after: + if not t.hasrun: + return Task.ASK_LATER + for x in self.outputs: + x.sig=Utils.h_file(x.abspath()) + return Task.SKIP_ME +class fake_stlib(stlink_task): + def runnable_status(self): + for t in self.run_after: + if not t.hasrun: + return Task.ASK_LATER + for x in self.outputs: + x.sig=Utils.h_file(x.abspath()) + return Task.SKIP_ME +@conf +def read_shlib(self,name,paths=[],export_includes=[],export_defines=[]): + return self(name=name,features='fake_lib',lib_paths=paths,lib_type='shlib',export_includes=export_includes,export_defines=export_defines) +@conf +def read_stlib(self,name,paths=[],export_includes=[],export_defines=[]): + return self(name=name,features='fake_lib',lib_paths=paths,lib_type='stlib',export_includes=export_includes,export_defines=export_defines) +lib_patterns={'shlib':['lib%s.so','%s.so','lib%s.dylib','lib%s.dll','%s.dll'],'stlib':['lib%s.a','%s.a','lib%s.dll','%s.dll','lib%s.lib','%s.lib'],} +@feature('fake_lib') +def process_lib(self): + node=None + names=[x%self.name for x in lib_patterns[self.lib_type]] + for x in self.lib_paths+[self.path]+SYSTEM_LIB_PATHS: + if not isinstance(x,Node.Node): + x=self.bld.root.find_node(x)or self.path.find_node(x) + if not x: + continue + for y in names: + node=x.find_node(y) + if node: + node.sig=Utils.h_file(node.abspath()) + break + else: + continue + break + else: + raise Errors.WafError('could not find library %r'%self.name) + self.link_task=self.create_task('fake_%s'%self.lib_type,[],[node]) + self.target=self.name +class fake_o(Task.Task): + def runnable_status(self): + return Task.SKIP_ME +@extension('.o','.obj') +def add_those_o_files(self,node): + tsk=self.create_task('fake_o',[],node) + try: + self.compiled_tasks.append(tsk) + except AttributeError: + self.compiled_tasks=[tsk] +@feature('fake_obj') +@before_method('process_source') +def process_objs(self): + for node in self.to_nodes(self.source): + self.add_those_o_files(node) + self.source=[] +@conf +def read_object(self,obj): + if not isinstance(obj,self.path.__class__): + obj=self.path.find_resource(obj) + return self(features='fake_obj',source=obj,name=obj.name) +@feature('cxxprogram','cprogram') +@after_method('apply_link','process_use') +def set_full_paths_hpux(self): + if self.env.DEST_OS!='hp-ux': + return + 
base=self.bld.bldnode.abspath() + for var in['LIBPATH','STLIBPATH']: + lst=[] + for x in self.env[var]: + if x.startswith('/'): + lst.append(x) + else: + lst.append(os.path.normpath(os.path.join(base,x))) + self.env[var]=lst diff -Nru lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/clang.py lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/clang.py --- lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/clang.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/clang.py 2019-06-06 20:12:09.000000000 +0000 @@ -0,0 +1,20 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file + +from waflib.Tools import ccroot,ar,gcc +from waflib.Configure import conf +@conf +def find_clang(conf): + cc=conf.find_program('clang',var='CC') + conf.get_cc_version(cc,clang=True) + conf.env.CC_NAME='clang' +def configure(conf): + conf.find_clang() + conf.find_program(['llvm-ar','ar'],var='AR') + conf.find_ar() + conf.gcc_common_flags() + conf.gcc_modifier_platform() + conf.cc_load_tools() + conf.cc_add_flags() + conf.link_add_flags() diff -Nru lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/clangxx.py lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/clangxx.py --- lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/clangxx.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/clangxx.py 2019-06-06 20:12:09.000000000 +0000 @@ -0,0 +1,20 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file + +from waflib.Tools import ccroot,ar,gxx +from waflib.Configure import conf +@conf +def find_clangxx(conf): + cxx=conf.find_program('clang++',var='CXX') + conf.get_cc_version(cxx,clang=True) + conf.env.CXX_NAME='clang' +def configure(conf): + conf.find_clangxx() + conf.find_program(['llvm-ar','ar'],var='AR') + conf.find_ar() + conf.gxx_common_flags() + conf.gxx_modifier_platform() + conf.cxx_load_tools() + conf.cxx_add_flags() + conf.link_add_flags() diff -Nru lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/compiler_c.py lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/compiler_c.py --- lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/compiler_c.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/compiler_c.py 2019-06-06 20:12:09.000000000 +0000 @@ -0,0 +1,40 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file + +import re +from waflib.Tools import ccroot +from waflib import Utils +from waflib.Logs import debug +c_compiler={'win32':['msvc','gcc','clang'],'cygwin':['gcc'],'darwin':['clang','gcc'],'aix':['xlc','gcc','clang'],'linux':['gcc','clang','icc'],'sunos':['suncc','gcc'],'irix':['gcc','irixcc'],'hpux':['gcc'],'osf1V':['gcc'],'gnu':['gcc','clang'],'java':['gcc','msvc','clang','icc'],'default':['gcc','clang'],} +def default_compilers(): + build_platform=Utils.unversioned_sys_platform() + possible_compiler_list=c_compiler.get(build_platform,c_compiler['default']) + return' '.join(possible_compiler_list) +def configure(conf): + try:test_for_compiler=conf.options.check_c_compiler or default_compilers() + except AttributeError:conf.fatal("Add options(opt): opt.load('compiler_c')") + for compiler in re.split('[ ,]+',test_for_compiler): + conf.env.stash() + conf.start_msg('Checking for %r (C compiler)'%compiler) + try: + conf.load(compiler) + except conf.errors.ConfigurationError ,e: + conf.env.revert() + conf.end_msg(False) + debug('compiler_c: %r'%e) + else: + if conf.env['CC']: + conf.end_msg(conf.env.get_flat('CC')) + conf.env['COMPILER_CC']=compiler + break + conf.end_msg(False) + else: + conf.fatal('could not configure a C compiler!') +def options(opt): + test_for_compiler=default_compilers() + opt.load_special_tools('c_*.py',ban=['c_dumbpreproc.py']) + cc_compiler_opts=opt.add_option_group('Configuration options') + cc_compiler_opts.add_option('--check-c-compiler',default=None,help='list of C compilers to try [%s]'%test_for_compiler,dest="check_c_compiler") + for x in test_for_compiler.split(): + opt.load('%s'%x) diff -Nru lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/compiler_cxx.py lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/compiler_cxx.py --- lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/compiler_cxx.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/compiler_cxx.py 2019-06-06 20:12:09.000000000 +0000 @@ -0,0 +1,40 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file + +import re +from waflib.Tools import ccroot +from waflib import Utils +from waflib.Logs import debug +cxx_compiler={'win32':['msvc','g++','clang++'],'cygwin':['g++'],'darwin':['clang++','g++'],'aix':['xlc++','g++','clang++'],'linux':['g++','clang++','icpc'],'sunos':['sunc++','g++'],'irix':['g++'],'hpux':['g++'],'osf1V':['g++'],'gnu':['g++','clang++'],'java':['g++','msvc','clang++','icpc'],'default':['g++','clang++']} +def default_compilers(): + build_platform=Utils.unversioned_sys_platform() + possible_compiler_list=cxx_compiler.get(build_platform,cxx_compiler['default']) + return' '.join(possible_compiler_list) +def configure(conf): + try:test_for_compiler=conf.options.check_cxx_compiler or default_compilers() + except AttributeError:conf.fatal("Add options(opt): opt.load('compiler_cxx')") + for compiler in re.split('[ ,]+',test_for_compiler): + conf.env.stash() + conf.start_msg('Checking for %r (C++ compiler)'%compiler) + try: + conf.load(compiler) + except conf.errors.ConfigurationError ,e: + conf.env.revert() + conf.end_msg(False) + debug('compiler_cxx: %r'%e) + else: + if conf.env['CXX']: + conf.end_msg(conf.env.get_flat('CXX')) + conf.env['COMPILER_CXX']=compiler + break + conf.end_msg(False) + else: + conf.fatal('could not configure a C++ compiler!') +def options(opt): + test_for_compiler=default_compilers() + opt.load_special_tools('cxx_*.py') + cxx_compiler_opts=opt.add_option_group('Configuration options') + cxx_compiler_opts.add_option('--check-cxx-compiler',default=None,help='list of C++ compilers to try [%s]'%test_for_compiler,dest="check_cxx_compiler") + for x in test_for_compiler.split(): + opt.load('%s'%x) diff -Nru lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/compiler_d.py lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/compiler_d.py --- lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/compiler_d.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/compiler_d.py 2019-06-06 20:12:09.000000000 +0000 @@ -0,0 +1,37 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file + +import re +from waflib import Utils,Logs +d_compiler={'default':['gdc','dmd','ldc2']} +def default_compilers(): + build_platform=Utils.unversioned_sys_platform() + possible_compiler_list=d_compiler.get(build_platform,d_compiler['default']) + return' '.join(possible_compiler_list) +def configure(conf): + try:test_for_compiler=conf.options.check_d_compiler or default_compilers() + except AttributeError:conf.fatal("Add options(opt): opt.load('compiler_d')") + for compiler in re.split('[ ,]+',test_for_compiler): + conf.env.stash() + conf.start_msg('Checking for %r (D compiler)'%compiler) + try: + conf.load(compiler) + except conf.errors.ConfigurationError ,e: + conf.env.revert() + conf.end_msg(False) + Logs.debug('compiler_d: %r'%e) + else: + if conf.env.D: + conf.end_msg(conf.env.get_flat('D')) + conf.env['COMPILER_D']=compiler + break + conf.end_msg(False) + else: + conf.fatal('could not configure a D compiler!') +def options(opt): + test_for_compiler=default_compilers() + d_compiler_opts=opt.add_option_group('Configuration options') + d_compiler_opts.add_option('--check-d-compiler',default=None,help='list of D compilers to try [%s]'%test_for_compiler,dest='check_d_compiler') + for x in test_for_compiler.split(): + opt.load('%s'%x) diff -Nru lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/compiler_fc.py lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/compiler_fc.py --- lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/compiler_fc.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/compiler_fc.py 2019-06-06 20:12:09.000000000 +0000 @@ -0,0 +1,39 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file + +import re +from waflib import Utils,Logs +from waflib.Tools import fc +fc_compiler={'win32':['gfortran','ifort'],'darwin':['gfortran','g95','ifort'],'linux':['gfortran','g95','ifort'],'java':['gfortran','g95','ifort'],'default':['gfortran'],'aix':['gfortran']} +def default_compilers(): + build_platform=Utils.unversioned_sys_platform() + possible_compiler_list=fc_compiler.get(build_platform,fc_compiler['default']) + return' '.join(possible_compiler_list) +def configure(conf): + try:test_for_compiler=conf.options.check_fortran_compiler or default_compilers() + except AttributeError:conf.fatal("Add options(opt): opt.load('compiler_fc')") + for compiler in re.split('[ ,]+',test_for_compiler): + conf.env.stash() + conf.start_msg('Checking for %r (Fortran compiler)'%compiler) + try: + conf.load(compiler) + except conf.errors.ConfigurationError ,e: + conf.env.revert() + conf.end_msg(False) + Logs.debug('compiler_fortran: %r'%e) + else: + if conf.env['FC']: + conf.end_msg(conf.env.get_flat('FC')) + conf.env.COMPILER_FORTRAN=compiler + break + conf.end_msg(False) + else: + conf.fatal('could not configure a Fortran compiler!') +def options(opt): + test_for_compiler=default_compilers() + opt.load_special_tools('fc_*.py') + fortran_compiler_opts=opt.add_option_group('Configuration options') + fortran_compiler_opts.add_option('--check-fortran-compiler',default=None,help='list of Fortran compiler to try [%s]'%test_for_compiler,dest="check_fortran_compiler") + for x in test_for_compiler.split(): + opt.load('%s'%x) diff -Nru lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/c_osx.py lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/c_osx.py --- lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/c_osx.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/c_osx.py 2019-06-06 20:12:09.000000000 +0000 @@ -0,0 +1,137 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file + +import os,shutil,platform +from waflib import Task,Utils,Errors +from waflib.TaskGen import taskgen_method,feature,after_method,before_method +app_info=''' +<?xml version="1.0" encoding="UTF-8"?> +<!DOCTYPE plist SYSTEM "file://localhost/System/Library/DTDs/PropertyList.dtd"> +<plist version="0.9"> +<dict> + <key>CFBundlePackageType</key> + <string>APPL</string> + <key>CFBundleGetInfoString</key> + <string>Created by Waf</string> + <key>CFBundleSignature</key> + <string>????</string>
+ <key>NOTE</key> + <string>THIS IS A GENERATED FILE, DO NOT MODIFY</string> + <key>CFBundleExecutable</key> + <string>{app_name}</string> +</dict> +</plist> +''' +@feature('c','cxx') +def set_macosx_deployment_target(self): + if self.env['MACOSX_DEPLOYMENT_TARGET']: + os.environ['MACOSX_DEPLOYMENT_TARGET']=self.env['MACOSX_DEPLOYMENT_TARGET'] + elif'MACOSX_DEPLOYMENT_TARGET'not in os.environ: + if Utils.unversioned_sys_platform()=='darwin': + os.environ['MACOSX_DEPLOYMENT_TARGET']='.'.join(platform.mac_ver()[0].split('.')[:2]) +@taskgen_method +def create_bundle_dirs(self,name,out): + dir=out.parent.find_or_declare(name) + dir.mkdir() + macos=dir.find_or_declare(['Contents','MacOS']) + macos.mkdir() + return dir +def bundle_name_for_output(out): + name=out.name + k=name.rfind('.') + if k>=0: + name=name[:k]+'.app' + else: + name=name+'.app' + return name +@feature('cprogram','cxxprogram') +@after_method('apply_link') +def create_task_macapp(self): + if self.env['MACAPP']or getattr(self,'mac_app',False): + out=self.link_task.outputs[0] + name=bundle_name_for_output(out) + dir=self.create_bundle_dirs(name,out) + n1=dir.find_or_declare(['Contents','MacOS',out.name]) + self.apptask=self.create_task('macapp',self.link_task.outputs,n1) + inst_to=getattr(self,'install_path','/Applications')+'/%s/Contents/MacOS/'%name + self.bld.install_files(inst_to,n1,chmod=Utils.O755) + if getattr(self,'mac_files',None): + mac_files_root=getattr(self,'mac_files_root',None) + if isinstance(mac_files_root,str): + mac_files_root=self.path.find_node(mac_files_root) + if not mac_files_root: + self.bld.fatal('Invalid mac_files_root %r'%self.mac_files_root) + res_dir=n1.parent.parent.make_node('Resources') + inst_to=getattr(self,'install_path','/Applications')+'/%s/Resources'%name + for node in self.to_nodes(self.mac_files): + relpath=node.path_from(mac_files_root or node.parent) + self.create_task('macapp',node,res_dir.make_node(relpath)) + self.bld.install_as(os.path.join(inst_to,relpath),node) + if getattr(self,'mac_resources',None): + res_dir=n1.parent.parent.make_node('Resources') + inst_to=getattr(self,'install_path','/Applications')+'/%s/Resources'%name + for x in self.to_list(self.mac_resources): + node=self.path.find_node(x) + if not node: + raise Errors.WafError('Missing mac_resource %r in %r'%(x,self)) + parent=node.parent + if os.path.isdir(node.abspath()): + nodes=node.ant_glob('**') + else: + nodes=[node] + for node in nodes: + rel=node.path_from(parent) + self.create_task('macapp',node,res_dir.make_node(rel)) + self.bld.install_as(inst_to+'/%s'%rel,node) + if getattr(self.bld,'is_install',None): + self.install_task.hasrun=Task.SKIP_ME +@feature('cprogram','cxxprogram') +@after_method('apply_link') +def create_task_macplist(self): + if self.env['MACAPP']or getattr(self,'mac_app',False): + out=self.link_task.outputs[0] + name=bundle_name_for_output(out) + dir=self.create_bundle_dirs(name,out) + n1=dir.find_or_declare(['Contents','Info.plist']) + self.plisttask=plisttask=self.create_task('macplist',[],n1) + plisttask.context={'app_name':self.link_task.outputs[0].name,'env':self.env} + plist_ctx=getattr(self,'plist_context',None) + if(plist_ctx): + plisttask.context.update(plist_ctx) + if getattr(self,'mac_plist',False): + node=self.path.find_resource(self.mac_plist) + if node: + plisttask.inputs.append(node) + else: + plisttask.code=self.mac_plist + else: + plisttask.code=app_info + inst_to=getattr(self,'install_path','/Applications')+'/%s/Contents/'%name + self.bld.install_files(inst_to,n1) +@feature('cshlib','cxxshlib') +@before_method('apply_link','propagate_uselib_vars') 
+def apply_bundle(self): + if self.env['MACBUNDLE']or getattr(self,'mac_bundle',False): + self.env['LINKFLAGS_cshlib']=self.env['LINKFLAGS_cxxshlib']=[] + self.env['cshlib_PATTERN']=self.env['cxxshlib_PATTERN']=self.env['macbundle_PATTERN'] + use=self.use=self.to_list(getattr(self,'use',[])) + if not'MACBUNDLE'in use: + use.append('MACBUNDLE') +app_dirs=['Contents','Contents/MacOS','Contents/Resources'] +class macapp(Task.Task): + color='PINK' + def run(self): + self.outputs[0].parent.mkdir() + shutil.copy2(self.inputs[0].srcpath(),self.outputs[0].abspath()) +class macplist(Task.Task): + color='PINK' + ext_in=['.bin'] + def run(self): + if getattr(self,'code',None): + txt=self.code + else: + txt=self.inputs[0].read() + context=getattr(self,'context',{}) + txt=txt.format(**context) + self.outputs[0].write(txt) diff -Nru lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/c_preproc.py lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/c_preproc.py --- lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/c_preproc.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/c_preproc.py 2019-06-06 20:12:09.000000000 +0000 @@ -0,0 +1,611 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file + +import re,string,traceback +from waflib import Logs,Utils,Errors +from waflib.Logs import debug,error +class PreprocError(Errors.WafError): + pass +POPFILE='-' +recursion_limit=150 +go_absolute=False +standard_includes=['/usr/include'] +if Utils.is_win32: + standard_includes=[] +use_trigraphs=0 +strict_quotes=0 +g_optrans={'not':'!','not_eq':'!','and':'&&','and_eq':'&=','or':'||','or_eq':'|=','xor':'^','xor_eq':'^=','bitand':'&','bitor':'|','compl':'~',} +re_lines=re.compile('^[ \t]*(#|%:)[ \t]*(ifdef|ifndef|if|else|elif|endif|include|import|define|undef|pragma)[ \t]*(.*)\r*$',re.IGNORECASE|re.MULTILINE) +re_mac=re.compile("^[a-zA-Z_]\w*") +re_fun=re.compile('^[a-zA-Z_][a-zA-Z0-9_]*[(]') +re_pragma_once=re.compile('^\s*once\s*',re.IGNORECASE) +re_nl=re.compile('\\\\\r*\n',re.MULTILINE) +re_cpp=re.compile(r'//.*?$|/\*.*?\*/|\'(?:\\.|[^\\\'])*\'|"(?:\\.|[^\\"])*"',re.DOTALL|re.MULTILINE) +trig_def=[('??'+a,b)for a,b in zip("=-/!'()<>",r'#~\|^[]{}')] +chr_esc={'0':0,'a':7,'b':8,'t':9,'n':10,'f':11,'v':12,'r':13,'\\':92,"'":39} +NUM='i' +OP='O' +IDENT='T' +STR='s' +CHAR='c' +tok_types=[NUM,STR,IDENT,OP] +exp_types=[r"""0[xX](?P<hex>[a-fA-F0-9]+)(?P<qual1>[uUlL]*)|L*?'(?P<char>(\\.|[^\\'])+)'|(?P<n1>\d+)[Ee](?P<exp0>[+-]*?\d+)(?P<float0>[fFlL]*)|(?P<n2>\d*\.\d+)([Ee](?P<exp1>[+-]*?\d+))?(?P<float1>[fFlL]*)|(?P<n4>\d+\.\d*)([Ee](?P<exp2>[+-]*?\d+))?(?P<float2>[fFlL]*)|(?P<oct>0*)(?P<n0>\d+)(?P<qual2>[uUlL]*)""",r'L?"([^"\\]|\\.)*"',r'[a-zA-Z_]\w*',r'%:%:|<<=|>>=|\.\.\.|<<|<%|<:|<=|>>|>=|\+\+|\+=|--|->|-=|\*=|/=|%:|%=|%>|==|&&|&=|\|\||\|=|\^=|:>|!=|##|[\(\)\{\}\[\]<>\?\|\^\*\+&=:!#;,%/\-\?\~\.]',] +re_clexer=re.compile('|'.join(["(?P<%s>%s)"%(name,part)for name,part in zip(tok_types,exp_types)]),re.M) +accepted='a' +ignored='i' +undefined='u' +skipped='s' +def repl(m): + s=m.group(0) + if s.startswith('/'): + return' ' + return s +def filter_comments(filename): + code=Utils.readf(filename) + if use_trigraphs: + for(a,b)in trig_def:code=code.split(a).join(b) + code=re_nl.sub('',code) + code=re_cpp.sub(repl,code) + return[(m.group(2),m.group(3))for m in re.finditer(re_lines,code)] +prec={} +ops=['* / %','+ -','<< >>','< <= >= >','== !=','& | ^','&& ||',','] +for x in range(len(ops)): + syms=ops[x] + for u in 
syms.split(): + prec[u]=x +def trimquotes(s): + if not s:return'' + s=s.rstrip() + if s[0]=="'"and s[-1]=="'":return s[1:-1] + return s +def reduce_nums(val_1,val_2,val_op): + try:a=0+val_1 + except TypeError:a=int(val_1) + try:b=0+val_2 + except TypeError:b=int(val_2) + d=val_op + if d=='%':c=a%b + elif d=='+':c=a+b + elif d=='-':c=a-b + elif d=='*':c=a*b + elif d=='/':c=a/b + elif d=='^':c=a^b + elif d=='==':c=int(a==b) + elif d=='|'or d=='bitor':c=a|b + elif d=='||'or d=='or':c=int(a or b) + elif d=='&'or d=='bitand':c=a&b + elif d=='&&'or d=='and':c=int(a and b) + elif d=='!='or d=='not_eq':c=int(a!=b) + elif d=='^'or d=='xor':c=int(a^b) + elif d=='<=':c=int(a<=b) + elif d=='<':c=int(a<b) + elif d=='>':c=int(a>b) + elif d=='>=':c=int(a>=b) + elif d=='<<':c=a<<b + elif d=='>>':c=a>>b + else:c=0 + return c +def get_num(lst): + if not lst:raise PreprocError("empty list for get_num") + (p,v)=lst[0] + if p==OP: + if v=='(': + count_par=1 + i=1 + while i=prec[v]: + num2=reduce_nums(num,num2,v) + return get_term([(NUM,num2)]+lst) + else: + num3,lst=get_num(lst[1:]) + num3=reduce_nums(num2,num3,v2) + return get_term([(NUM,num),(p,v),(NUM,num3)]+lst) + raise PreprocError("cannot reduce %r"%lst) +def reduce_eval(lst): + num,lst=get_term(lst) + return(NUM,num) +def stringize(lst): + lst=[str(v2)for(p2,v2)in lst] + return"".join(lst) +def paste_tokens(t1,t2): + p1=None + if t1[0]==OP and t2[0]==OP: + p1=OP + elif t1[0]==IDENT and(t2[0]==IDENT or t2[0]==NUM): + p1=IDENT + elif t1[0]==NUM and t2[0]==NUM: + p1=NUM + if not p1: + raise PreprocError('tokens do not make a valid paste %r and %r'%(t1,t2)) + return(p1,t1[1]+t2[1]) +def reduce_tokens(lst,defs,ban=[]): + i=0 + while i=len(lst): + raise PreprocError("expected '(' after %r (got nothing)"%v) + (p2,v2)=lst[i] + if p2!=OP or v2!='(': + raise PreprocError("expected '(' after %r"%v) + del lst[i] + one_param=[] + count_paren=0 + while i1: + (p3,v3)=accu[-1] + (p4,v4)=accu[-2] + if v3=='##': + accu.pop() + if v4==','and pt1: + return(v,[[],t[1:]]) + else: + return(v,[[],[('T','')]]) +re_include=re.compile('^\s*(<(?P<a>.*)>|"(?P<b>.*)")') +def extract_include(txt,defs): + m=re_include.search(txt) + if m: + if m.group('a'):return'<',m.group('a') + if m.group('b'):return'"',m.group('b') + toks=tokenize(txt) + reduce_tokens(toks,defs,['waf_include']) + if not toks: + raise PreprocError("could not parse include %s"%txt) + if len(toks)==1: + if toks[0][0]==STR: + return'"',toks[0][1] + else: + if toks[0][1]=='<'and toks[-1][1]=='>': + ret='<',stringize(toks).lstrip('<').rstrip('>') + return ret + raise PreprocError("could not parse include %s."%txt) +def parse_char(txt): + if not txt:raise PreprocError("attempted to parse a null char") + if txt[0]!='\\': + return ord(txt) + c=txt[1] + if c=='x': + if len(txt)==4 and txt[3]in string.hexdigits:return int(txt[2:],16) + return int(txt[2:],16) + elif c.isdigit(): + if c=='0'and len(txt)==2:return 0 + for i in 3,2,1: + if len(txt)>i and txt[1:1+i].isdigit(): + return(1+i,int(txt[1:1+i],8)) + else: + try:return chr_esc[c] + except KeyError:raise PreprocError("could not parse char literal '%s'"%txt) +def tokenize(s): + return tokenize_private(s)[:] +@Utils.run_once +def tokenize_private(s): + ret=[] + for match in re_clexer.finditer(s): + m=match.group + for name in tok_types: + v=m(name) + if v: + if name==IDENT: + try: + g_optrans[v] + name=OP + except KeyError: + if v.lower()=="true": + v=1 + name=NUM + elif v.lower()=="false": + v=0 + name=NUM + elif name==NUM: + if m('oct'):v=int(v,8) + elif m('hex'):v=int(m('hex'),16) + elif m('n0'):v=m('n0') + 
else: + v=m('char') + if v:v=parse_char(v) + else:v=m('n2')or m('n4') + elif name==OP: + if v=='%:':v='#' + elif v=='%:%:':v='##' + elif name==STR: + v=v[1:-1] + ret.append((name,v)) + break + return ret +@Utils.run_once +def define_name(line): + return re_mac.match(line).group(0) +class c_parser(object): + def __init__(self,nodepaths=None,defines=None): + self.lines=[] + if defines is None: + self.defs={} + else: + self.defs=dict(defines) + self.state=[] + self.count_files=0 + self.currentnode_stack=[] + self.nodepaths=nodepaths or[] + self.nodes=[] + self.names=[] + self.curfile='' + self.ban_includes=set([]) + def cached_find_resource(self,node,filename): + try: + nd=node.ctx.cache_nd + except AttributeError: + nd=node.ctx.cache_nd={} + tup=(node,filename) + try: + return nd[tup] + except KeyError: + ret=node.find_resource(filename) + if ret: + if getattr(ret,'children',None): + ret=None + elif ret.is_child_of(node.ctx.bldnode): + tmp=node.ctx.srcnode.search_node(ret.path_from(node.ctx.bldnode)) + if tmp and getattr(tmp,'children',None): + ret=None + nd[tup]=ret + return ret + def tryfind(self,filename): + if filename.endswith('.moc'): + self.names.append(filename) + return None + self.curfile=filename + found=self.cached_find_resource(self.currentnode_stack[-1],filename) + for n in self.nodepaths: + if found: + break + found=self.cached_find_resource(n,filename) + if found and not found in self.ban_includes: + self.nodes.append(found) + self.addlines(found) + else: + if not filename in self.names: + self.names.append(filename) + return found + def addlines(self,node): + self.currentnode_stack.append(node.parent) + filepath=node.abspath() + self.count_files+=1 + if self.count_files>recursion_limit: + raise PreprocError("recursion limit exceeded") + pc=self.parse_cache + debug('preproc: reading file %r',filepath) + try: + lns=pc[filepath] + except KeyError: + pass + else: + self.lines.extend(lns) + return + try: + lines=filter_comments(filepath) + lines.append((POPFILE,'')) + lines.reverse() + pc[filepath]=lines + self.lines.extend(lines) + except IOError: + raise PreprocError("could not read the file %s"%filepath) + except Exception: + if Logs.verbose>0: + error("parsing %s failed"%filepath) + traceback.print_exc() + def start(self,node,env): + debug('preproc: scanning %s (in %s)',node.name,node.parent.name) + bld=node.ctx + try: + self.parse_cache=bld.parse_cache + except AttributeError: + self.parse_cache=bld.parse_cache={} + self.current_file=node + self.addlines(node) + if env['DEFINES']: + try: + lst=['%s %s'%(x[0],trimquotes('='.join(x[1:])))for x in[y.split('=')for y in env['DEFINES']]] + lst.reverse() + self.lines.extend([('define',x)for x in lst]) + except AttributeError: + pass + while self.lines: + (token,line)=self.lines.pop() + if token==POPFILE: + self.count_files-=1 + self.currentnode_stack.pop() + continue + try: + ve=Logs.verbose + if ve:debug('preproc: line is %s - %s state is %s',token,line,self.state) + state=self.state + if token[:2]=='if': + state.append(undefined) + elif token=='endif': + state.pop() + if token[0]!='e': + if skipped in self.state or ignored in self.state: + continue + if token=='if': + ret=eval_macro(tokenize(line),self.defs) + if ret:state[-1]=accepted + else:state[-1]=ignored + elif token=='ifdef': + m=re_mac.match(line) + if m and m.group(0)in self.defs:state[-1]=accepted + else:state[-1]=ignored + elif token=='ifndef': + m=re_mac.match(line) + if m and m.group(0)in self.defs:state[-1]=ignored + else:state[-1]=accepted + elif token=='include'or 
token=='import': + (kind,inc)=extract_include(line,self.defs) + if ve:debug('preproc: include found %s (%s) ',inc,kind) + if kind=='"'or not strict_quotes: + self.current_file=self.tryfind(inc) + if token=='import': + self.ban_includes.add(self.current_file) + elif token=='elif': + if state[-1]==accepted: + state[-1]=skipped + elif state[-1]==ignored: + if eval_macro(tokenize(line),self.defs): + state[-1]=accepted + elif token=='else': + if state[-1]==accepted:state[-1]=skipped + elif state[-1]==ignored:state[-1]=accepted + elif token=='define': + try: + self.defs[define_name(line)]=line + except Exception: + raise PreprocError("Invalid define line %s"%line) + elif token=='undef': + m=re_mac.match(line) + if m and m.group(0)in self.defs: + self.defs.__delitem__(m.group(0)) + elif token=='pragma': + if re_pragma_once.match(line.lower()): + self.ban_includes.add(self.current_file) + except Exception ,e: + if Logs.verbose: + debug('preproc: line parsing failed (%s): %s %s',e,line,Utils.ex_stack()) +def scan(task): + global go_absolute + try: + incn=task.generator.includes_nodes + except AttributeError: + raise Errors.WafError('%r is missing a feature such as "c", "cxx" or "includes": '%task.generator) + if go_absolute: + nodepaths=incn+[task.generator.bld.root.find_dir(x)for x in standard_includes] + else: + nodepaths=[x for x in incn if x.is_child_of(x.ctx.srcnode)or x.is_child_of(x.ctx.bldnode)] + tmp=c_parser(nodepaths) + tmp.start(task.inputs[0],task.env) + if Logs.verbose: + debug('deps: deps for %r: %r; unresolved %r'%(task.inputs,tmp.nodes,tmp.names)) + return(tmp.nodes,tmp.names) diff -Nru lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/c.py lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/c.py --- lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/c.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/c.py 2019-06-06 20:12:09.000000000 +0000 @@ -0,0 +1,26 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file + +from waflib import TaskGen,Task +from waflib.Tools import c_preproc +from waflib.Tools.ccroot import link_task,stlink_task +@TaskGen.extension('.c') +def c_hook(self,node): + if not self.env.CC and self.env.CXX: + return self.create_compiled_task('cxx',node) + return self.create_compiled_task('c',node) +class c(Task.Task): + run_str='${CC} ${ARCH_ST:ARCH} ${CFLAGS} ${CPPFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${CC_SRC_F}${SRC} ${CC_TGT_F}${TGT[0].abspath()}' + vars=['CCDEPS'] + ext_in=['.h'] + scan=c_preproc.scan +class cprogram(link_task): + run_str='${LINK_CC} ${LINKFLAGS} ${CCLNK_SRC_F}${SRC} ${CCLNK_TGT_F}${TGT[0].abspath()} ${RPATH_ST:RPATH} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${FRAMEWORK_ST:FRAMEWORK} ${ARCH_ST:ARCH} ${STLIB_MARKER} ${STLIBPATH_ST:STLIBPATH} ${STLIB_ST:STLIB} ${SHLIB_MARKER} ${LIBPATH_ST:LIBPATH} ${LIB_ST:LIB} ${LDFLAGS}' + ext_out=['.bin'] + vars=['LINKDEPS'] + inst_to='${BINDIR}' +class cshlib(cprogram): + inst_to='${LIBDIR}' +class cstlib(stlink_task): + pass diff -Nru lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/cs.py lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/cs.py --- lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/cs.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/cs.py 2019-06-06 20:12:09.000000000 +0000 @@ -0,0 +1,132 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file + +from waflib import Utils,Task,Options,Errors +from waflib.TaskGen import before_method,after_method,feature +from waflib.Tools import ccroot +from waflib.Configure import conf +import os,tempfile +ccroot.USELIB_VARS['cs']=set(['CSFLAGS','ASSEMBLIES','RESOURCES']) +ccroot.lib_patterns['csshlib']=['%s'] +@feature('cs') +@before_method('process_source') +def apply_cs(self): + cs_nodes=[] + no_nodes=[] + for x in self.to_nodes(self.source): + if x.name.endswith('.cs'): + cs_nodes.append(x) + else: + no_nodes.append(x) + self.source=no_nodes + bintype=getattr(self,'bintype',self.gen.endswith('.dll')and'library'or'exe') + self.cs_task=tsk=self.create_task('mcs',cs_nodes,self.path.find_or_declare(self.gen)) + tsk.env.CSTYPE='/target:%s'%bintype + tsk.env.OUT='/out:%s'%tsk.outputs[0].abspath() + self.env.append_value('CSFLAGS','/platform:%s'%getattr(self,'platform','anycpu')) + inst_to=getattr(self,'install_path',bintype=='exe'and'${BINDIR}'or'${LIBDIR}') + if inst_to: + mod=getattr(self,'chmod',bintype=='exe'and Utils.O755 or Utils.O644) + self.install_task=self.bld.install_files(inst_to,self.cs_task.outputs[:],env=self.env,chmod=mod) +@feature('cs') +@after_method('apply_cs') +def use_cs(self): + names=self.to_list(getattr(self,'use',[])) + get=self.bld.get_tgen_by_name + for x in names: + try: + y=get(x) + except Errors.WafError: + self.env.append_value('CSFLAGS','/reference:%s'%x) + continue + y.post() + tsk=getattr(y,'cs_task',None)or getattr(y,'link_task',None) + if not tsk: + self.bld.fatal('cs task has no link task for use %r'%self) + self.cs_task.dep_nodes.extend(tsk.outputs) + self.cs_task.set_run_after(tsk) + self.env.append_value('CSFLAGS','/reference:%s'%tsk.outputs[0].abspath()) +@feature('cs') +@after_method('apply_cs','use_cs') +def debug_cs(self): + csdebug=getattr(self,'csdebug',self.env.CSDEBUG) + if not csdebug: + return + node=self.cs_task.outputs[0] + if 
self.env.CS_NAME=='mono': + out=node.parent.find_or_declare(node.name+'.mdb') + else: + out=node.change_ext('.pdb') + self.cs_task.outputs.append(out) + try: + self.install_task.source.append(out) + except AttributeError: + pass + if csdebug=='pdbonly': + val=['/debug+','/debug:pdbonly'] + elif csdebug=='full': + val=['/debug+','/debug:full'] + else: + val=['/debug-'] + self.env.append_value('CSFLAGS',val) +class mcs(Task.Task): + color='YELLOW' + run_str='${MCS} ${CSTYPE} ${CSFLAGS} ${ASS_ST:ASSEMBLIES} ${RES_ST:RESOURCES} ${OUT} ${SRC}' + def exec_command(self,cmd,**kw): + bld=self.generator.bld + try: + if not kw.get('cwd',None): + kw['cwd']=bld.cwd + except AttributeError: + bld.cwd=kw['cwd']=bld.variant_dir + try: + tmp=None + if isinstance(cmd,list)and len(' '.join(cmd))>=8192: + program=cmd[0] + cmd=[self.quote_response_command(x)for x in cmd] + (fd,tmp)=tempfile.mkstemp() + os.write(fd,'\r\n'.join(i.replace('\\','\\\\')for i in cmd[1:])) + os.close(fd) + cmd=[program,'@'+tmp] + ret=self.generator.bld.exec_command(cmd,**kw) + finally: + if tmp: + try: + os.remove(tmp) + except OSError: + pass + return ret + def quote_response_command(self,flag): + if flag.lower()=='/noconfig': + return'' + if flag.find(' ')>-1: + for x in('/r:','/reference:','/resource:','/lib:','/out:'): + if flag.startswith(x): + flag='%s"%s"'%(x,'","'.join(flag[len(x):].split(','))) + break + else: + flag='"%s"'%flag + return flag +def configure(conf): + csc=getattr(Options.options,'cscbinary',None) + if csc: + conf.env.MCS=csc + conf.find_program(['csc','mcs','gmcs'],var='MCS') + conf.env.ASS_ST='/r:%s' + conf.env.RES_ST='/resource:%s' + conf.env.CS_NAME='csc' + if str(conf.env.MCS).lower().find('mcs')>-1: + conf.env.CS_NAME='mono' +def options(opt): + opt.add_option('--with-csc-binary',type='string',dest='cscbinary') +class fake_csshlib(Task.Task): + color='YELLOW' + inst_to=None + def runnable_status(self): + for x in self.outputs: + x.sig=Utils.h_file(x.abspath()) + return Task.SKIP_ME +@conf +def read_csshlib(self,name,paths=[]): + return self(name=name,features='fake_lib',lib_paths=paths,lib_type='csshlib') diff -Nru lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/c_tests.py lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/c_tests.py --- lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/c_tests.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/c_tests.py 2019-06-06 20:12:09.000000000 +0000 @@ -0,0 +1,152 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file + +from waflib import Task +from waflib.Configure import conf +from waflib.TaskGen import feature,before_method,after_method +LIB_CODE=''' +#ifdef _MSC_VER +#define testEXPORT __declspec(dllexport) +#else +#define testEXPORT +#endif +testEXPORT int lib_func(void) { return 9; } +''' +MAIN_CODE=''' +#ifdef _MSC_VER +#define testEXPORT __declspec(dllimport) +#else +#define testEXPORT +#endif +testEXPORT int lib_func(void); +int main(int argc, char **argv) { + (void)argc; (void)argv; + return !(lib_func() == 9); +} +''' +@feature('link_lib_test') +@before_method('process_source') +def link_lib_test_fun(self): + def write_test_file(task): + task.outputs[0].write(task.generator.code) + rpath=[] + if getattr(self,'add_rpath',False): + rpath=[self.bld.path.get_bld().abspath()] + mode=self.mode + m='%s %s'%(mode,mode) + ex=self.test_exec and'test_exec'or'' + bld=self.bld + bld(rule=write_test_file,target='test.'+mode,code=LIB_CODE) + bld(rule=write_test_file,target='main.'+mode,code=MAIN_CODE) + bld(features='%sshlib'%m,source='test.'+mode,target='test') + bld(features='%sprogram %s'%(m,ex),source='main.'+mode,target='app',use='test',rpath=rpath) +@conf +def check_library(self,mode=None,test_exec=True): + if not mode: + mode='c' + if self.env.CXX: + mode='cxx' + self.check(compile_filename=[],features='link_lib_test',msg='Checking for libraries',mode=mode,test_exec=test_exec,) +INLINE_CODE=''' +typedef int foo_t; +static %s foo_t static_foo () {return 0; } +%s foo_t foo () { + return 0; +} +''' +INLINE_VALUES=['inline','__inline__','__inline'] +@conf +def check_inline(self,**kw): + self.start_msg('Checking for inline') + if not'define_name'in kw: + kw['define_name']='INLINE_MACRO' + if not'features'in kw: + if self.env.CXX: + kw['features']=['cxx'] + else: + kw['features']=['c'] + for x in INLINE_VALUES: + kw['fragment']=INLINE_CODE%(x,x) + try: + self.check(**kw) + except self.errors.ConfigurationError: + continue + else: + self.end_msg(x) + if x!='inline': + self.define('inline',x,quote=False) + return x + self.fatal('could not use inline functions') +LARGE_FRAGMENT='''#include <unistd.h> +int main(int argc, char **argv) { + (void)argc; (void)argv; + return !(sizeof(off_t) >= 8); +} +''' +@conf +def check_large_file(self,**kw): + if not'define_name'in kw: + kw['define_name']='HAVE_LARGEFILE' + if not'execute'in kw: + kw['execute']=True + if not'features'in kw: + if self.env.CXX: + kw['features']=['cxx','cxxprogram'] + else: + kw['features']=['c','cprogram'] + kw['fragment']=LARGE_FRAGMENT + kw['msg']='Checking for large file support' + ret=True + try: + if self.env.DEST_BINFMT!='pe': + ret=self.check(**kw) + except self.errors.ConfigurationError: + pass + else: + if ret: + return True + kw['msg']='Checking for -D_FILE_OFFSET_BITS=64' + kw['defines']=['_FILE_OFFSET_BITS=64'] + try: + ret=self.check(**kw) + except self.errors.ConfigurationError: + pass + else: + self.define('_FILE_OFFSET_BITS',64) + return ret + self.fatal('There is no support for large files') +ENDIAN_FRAGMENT=''' +short int ascii_mm[] = { 0x4249, 0x4765, 0x6E44, 0x6961, 0x6E53, 0x7953, 0 }; +short int ascii_ii[] = { 0x694C, 0x5454, 0x656C, 0x6E45, 0x6944, 0x6E61, 0 }; +int use_ascii (int i) { + return ascii_mm[i] + ascii_ii[i]; +} +short int ebcdic_ii[] = { 0x89D3, 0xE3E3, 0x8593, 0x95C5, 0x89C4, 0x9581, 0 }; +short int ebcdic_mm[] = { 0xC2C9, 0xC785, 0x95C4, 0x8981, 0x95E2, 0xA8E2, 0 }; +int use_ebcdic (int i) { + return ebcdic_mm[i] + ebcdic_ii[i]; +} +extern int foo; +''' +class 
grep_for_endianness(Task.Task): + color='PINK' + def run(self): + txt=self.inputs[0].read(flags='rb').decode('iso8859-1') + if txt.find('LiTTleEnDian')>-1: + self.generator.tmp.append('little') + elif txt.find('BIGenDianSyS')>-1: + self.generator.tmp.append('big') + else: + return-1 +@feature('grep_for_endianness') +@after_method('process_source') +def grep_for_endianness_fun(self): + self.create_task('grep_for_endianness',self.compiled_tasks[0].outputs[0]) +@conf +def check_endianness(self): + tmp=[] + def check_msg(self): + return tmp[0] + self.check(fragment=ENDIAN_FRAGMENT,features='c grep_for_endianness',msg="Checking for endianness",define='ENDIANNESS',tmp=tmp,okmsg=check_msg) + return tmp[0] diff -Nru lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/cxx.py lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/cxx.py --- lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/cxx.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/cxx.py 2019-06-06 20:12:09.000000000 +0000 @@ -0,0 +1,26 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file + +from waflib import TaskGen,Task +from waflib.Tools import c_preproc +from waflib.Tools.ccroot import link_task,stlink_task +@TaskGen.extension('.cpp','.cc','.cxx','.C','.c++') +def cxx_hook(self,node): + return self.create_compiled_task('cxx',node) +if not'.c'in TaskGen.task_gen.mappings: + TaskGen.task_gen.mappings['.c']=TaskGen.task_gen.mappings['.cpp'] +class cxx(Task.Task): + run_str='${CXX} ${ARCH_ST:ARCH} ${CXXFLAGS} ${CPPFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${CXX_SRC_F}${SRC} ${CXX_TGT_F}${TGT[0].abspath()}' + vars=['CXXDEPS'] + ext_in=['.h'] + scan=c_preproc.scan +class cxxprogram(link_task): + run_str='${LINK_CXX} ${LINKFLAGS} ${CXXLNK_SRC_F}${SRC} ${CXXLNK_TGT_F}${TGT[0].abspath()} ${RPATH_ST:RPATH} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${FRAMEWORK_ST:FRAMEWORK} ${ARCH_ST:ARCH} ${STLIB_MARKER} ${STLIBPATH_ST:STLIBPATH} ${STLIB_ST:STLIB} ${SHLIB_MARKER} ${LIBPATH_ST:LIBPATH} ${LIB_ST:LIB} ${LDFLAGS}' + vars=['LINKDEPS'] + ext_out=['.bin'] + inst_to='${BINDIR}' +class cxxshlib(cxxprogram): + inst_to='${LIBDIR}' +class cxxstlib(stlink_task): + pass diff -Nru lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/dbus.py lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/dbus.py --- lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/dbus.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/dbus.py 2019-06-06 20:12:09.000000000 +0000 @@ -0,0 +1,29 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file + +from waflib import Task,Errors +from waflib.TaskGen import taskgen_method,before_method +@taskgen_method +def add_dbus_file(self,filename,prefix,mode): + if not hasattr(self,'dbus_lst'): + self.dbus_lst=[] + if not'process_dbus'in self.meths: + self.meths.append('process_dbus') + self.dbus_lst.append([filename,prefix,mode]) +@before_method('apply_core') +def process_dbus(self): + for filename,prefix,mode in getattr(self,'dbus_lst',[]): + node=self.path.find_resource(filename) + if not node: + raise Errors.WafError('file not found '+filename) + tsk=self.create_task('dbus_binding_tool',node,node.change_ext('.h')) + tsk.env.DBUS_BINDING_TOOL_PREFIX=prefix + tsk.env.DBUS_BINDING_TOOL_MODE=mode +class dbus_binding_tool(Task.Task): + color='BLUE' + ext_out=['.h'] + run_str='${DBUS_BINDING_TOOL} --prefix=${DBUS_BINDING_TOOL_PREFIX} --mode=${DBUS_BINDING_TOOL_MODE} --output=${TGT} ${SRC}' + shell=True +def configure(conf): + conf.find_program('dbus-binding-tool',var='DBUS_BINDING_TOOL') diff -Nru lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/d_config.py lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/d_config.py --- lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/d_config.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/d_config.py 2019-06-06 20:12:09.000000000 +0000 @@ -0,0 +1,52 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file + +from waflib import Utils +from waflib.Configure import conf +@conf +def d_platform_flags(self): + v=self.env + if not v.DEST_OS: + v.DEST_OS=Utils.unversioned_sys_platform() + binfmt=Utils.destos_to_binfmt(self.env.DEST_OS) + if binfmt=='pe': + v['dprogram_PATTERN']='%s.exe' + v['dshlib_PATTERN']='lib%s.dll' + v['dstlib_PATTERN']='lib%s.a' + elif binfmt=='mac-o': + v['dprogram_PATTERN']='%s' + v['dshlib_PATTERN']='lib%s.dylib' + v['dstlib_PATTERN']='lib%s.a' + else: + v['dprogram_PATTERN']='%s' + v['dshlib_PATTERN']='lib%s.so' + v['dstlib_PATTERN']='lib%s.a' +DLIB=''' +version(D_Version2) { + import std.stdio; + int main() { + writefln("phobos2"); + return 0; + } +} else { + version(Tango) { + import tango.stdc.stdio; + int main() { + printf("tango"); + return 0; + } + } else { + import std.stdio; + int main() { + writefln("phobos1"); + return 0; + } + } +} +''' +@conf +def check_dlibrary(self,execute=True): + ret=self.check_cc(features='d dprogram',fragment=DLIB,compile_filename='test.d',execute=execute,define_ret=True) + if execute: + self.env.DLIBRARY=ret.strip() diff -Nru lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/dmd.py lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/dmd.py --- lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/dmd.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/dmd.py 2019-06-06 20:12:09.000000000 +0000 @@ -0,0 +1,51 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file + +import sys +from waflib.Tools import ar,d +from waflib.Configure import conf +@conf +def find_dmd(conf): + conf.find_program(['dmd','dmd2','ldc'],var='D') + out=conf.cmd_and_log(conf.env.D+['--help']) + if out.find("D Compiler v")==-1: + out=conf.cmd_and_log(conf.env.D+['-version']) + if out.find("based on DMD v1.")==-1: + conf.fatal("detected compiler is not dmd/ldc") +@conf +def common_flags_ldc(conf): + v=conf.env + v['DFLAGS']=['-d-version=Posix'] + v['LINKFLAGS']=[] + v['DFLAGS_dshlib']=['-relocation-model=pic'] +@conf +def common_flags_dmd(conf): + v=conf.env + v['D_SRC_F']=['-c'] + v['D_TGT_F']='-of%s' + v['D_LINKER']=v['D'] + v['DLNK_SRC_F']='' + v['DLNK_TGT_F']='-of%s' + v['DINC_ST']='-I%s' + v['DSHLIB_MARKER']=v['DSTLIB_MARKER']='' + v['DSTLIB_ST']=v['DSHLIB_ST']='-L-l%s' + v['DSTLIBPATH_ST']=v['DLIBPATH_ST']='-L-L%s' + v['LINKFLAGS_dprogram']=['-quiet'] + v['DFLAGS_dshlib']=['-fPIC'] + v['LINKFLAGS_dshlib']=['-L-shared'] + v['DHEADER_ext']='.di' + v.DFLAGS_d_with_header=['-H','-Hf'] + v['D_HDR_F']='%s' +def configure(conf): + conf.find_dmd() + if sys.platform=='win32': + out=conf.cmd_and_log(conf.env.D+['--help']) + if out.find("D Compiler v2.")>-1: + conf.fatal('dmd2 on Windows is not supported, use gdc or ldc2 instead') + conf.load('ar') + conf.load('d') + conf.common_flags_dmd() + conf.d_platform_flags() + if str(conf.env.D).find('ldc')>-1: + conf.common_flags_ldc() diff -Nru lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/d.py lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/d.py --- lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/d.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/d.py 2019-06-06 20:12:09.000000000 +0000 @@ -0,0 +1,54 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file + +from waflib import Utils,Task,Errors +from waflib.TaskGen import taskgen_method,feature,extension +from waflib.Tools import d_scan,d_config +from waflib.Tools.ccroot import link_task,stlink_task +class d(Task.Task): + color='GREEN' + run_str='${D} ${DFLAGS} ${DINC_ST:INCPATHS} ${D_SRC_F:SRC} ${D_TGT_F:TGT}' + scan=d_scan.scan +class d_with_header(d): + run_str='${D} ${DFLAGS} ${DINC_ST:INCPATHS} ${D_HDR_F:tgt.outputs[1].bldpath()} ${D_SRC_F:SRC} ${D_TGT_F:tgt.outputs[0].bldpath()}' +class d_header(Task.Task): + color='BLUE' + run_str='${D} ${D_HEADER} ${SRC}' +class dprogram(link_task): + run_str='${D_LINKER} ${LINKFLAGS} ${DLNK_SRC_F}${SRC} ${DLNK_TGT_F:TGT} ${RPATH_ST:RPATH} ${DSTLIB_MARKER} ${DSTLIBPATH_ST:STLIBPATH} ${DSTLIB_ST:STLIB} ${DSHLIB_MARKER} ${DLIBPATH_ST:LIBPATH} ${DSHLIB_ST:LIB}' + inst_to='${BINDIR}' +class dshlib(dprogram): + inst_to='${LIBDIR}' +class dstlib(stlink_task): + pass +@extension('.d','.di','.D') +def d_hook(self,node): + ext=Utils.destos_to_binfmt(self.env.DEST_OS)=='pe'and'obj'or'o' + out='%s.%d.%s'%(node.name,self.idx,ext) + def create_compiled_task(self,name,node): + task=self.create_task(name,node,node.parent.find_or_declare(out)) + try: + self.compiled_tasks.append(task) + except AttributeError: + self.compiled_tasks=[task] + return task + if getattr(self,'generate_headers',None): + tsk=create_compiled_task(self,'d_with_header',node) + tsk.outputs.append(node.change_ext(self.env['DHEADER_ext'])) + else: + tsk=create_compiled_task(self,'d',node) + return tsk +@taskgen_method +def generate_header(self,filename): + try: + self.header_lst.append([filename,self.install_path]) + except AttributeError: + self.header_lst=[[filename,self.install_path]] +@feature('d') +def process_header(self): + for i in getattr(self,'header_lst',[]): + node=self.path.find_resource(i[0]) + if not node: + raise Errors.WafError('file %r not found on d obj'%i[0]) + self.create_task('d_header',node,node.change_ext('.di')) diff -Nru lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/d_scan.py lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/d_scan.py --- lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/d_scan.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/d_scan.py 2019-06-06 20:12:09.000000000 +0000 @@ -0,0 +1,133 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file + +import re +from waflib import Utils,Logs +def filter_comments(filename): + txt=Utils.readf(filename) + i=0 + buf=[] + max=len(txt) + begin=0 + while i1: + dupe=True + msg='* Node %r is created more than once%s. The task generators are:'%(k,Logs.verbose==1 and" (full message on 'waf -v -v')"or"") + Logs.error(msg) + for x in v: + if Logs.verbose>1: + Logs.error(' %d. %r'%(1+v.index(x),x.generator)) + else: + Logs.error(' %d. %r in %r'%(1+v.index(x),x.generator.name,getattr(x.generator,'path',None))) + if not dupe: + for(k,v)in uids.items(): + if len(v)>1: + Logs.error('* Several tasks use the same identifier. 
Please check the information on\n https://waf.io/apidocs/Task.html?highlight=uid#waflib.Task.Task.uid') + for tsk in v: + Logs.error(' - object %r (%r) defined in %r'%(tsk.__class__.__name__,tsk,tsk.generator)) +def check_invalid_constraints(self): + feat=set([]) + for x in list(TaskGen.feats.values()): + feat.union(set(x)) + for(x,y)in TaskGen.task_gen.prec.items(): + feat.add(x) + feat.union(set(y)) + ext=set([]) + for x in TaskGen.task_gen.mappings.values(): + ext.add(x.__name__) + invalid=ext&feat + if invalid: + Logs.error('The methods %r have invalid annotations: @extension <-> @feature/@before_method/@after_method'%list(invalid)) + for cls in list(Task.classes.values()): + if sys.hexversion>0x3000000 and issubclass(cls,Task.Task)and isinstance(cls.hcode,str): + raise Errors.WafError('Class %r has hcode value %r of type <str>, expecting <bytes> (use Utils.h_cmd() ?)'%(cls,cls.hcode)) + for x in('before','after'): + for y in Utils.to_list(getattr(cls,x,[])): + if not Task.classes.get(y,None): + Logs.error('Erroneous order constraint %r=%r on task class %r'%(x,y,cls.__name__)) + if getattr(cls,'rule',None): + Logs.error('Erroneous attribute "rule" on task class %r (rename to "run_str")'%cls.__name__) +def replace(m): + oldcall=getattr(Build.BuildContext,m) + def call(self,*k,**kw): + ret=oldcall(self,*k,**kw) + for x in typos: + if x in kw: + if x=='iscopy'and'subst'in getattr(self,'features',''): + continue + Logs.error('Fix the typo %r -> %r on %r'%(x,typos[x],ret)) + return ret + setattr(Build.BuildContext,m,call) +def enhance_lib(): + for m in meths_typos: + replace(m) + def ant_glob(self,*k,**kw): + if k: + lst=Utils.to_list(k[0]) + for pat in lst: + if'..'in pat.split('/'): + Logs.error("In ant_glob pattern %r: '..' means 'two dots', not 'parent directory'"%k[0]) + if kw.get('remove',True): + try: + if self.is_child_of(self.ctx.bldnode)and not kw.get('quiet',False): + Logs.error('Using ant_glob on the build folder (%r) is dangerous (quiet=True to disable this warning)'%self) + except AttributeError: + pass + return self.old_ant_glob(*k,**kw) + Node.Node.old_ant_glob=Node.Node.ant_glob + Node.Node.ant_glob=ant_glob + old=Task.is_before + def is_before(t1,t2): + ret=old(t1,t2) + if ret and old(t2,t1): + Logs.error('Contradictory order constraints in classes %r %r'%(t1,t2)) + return ret + Task.is_before=is_before + def check_err_features(self): + lst=self.to_list(self.features) + if'shlib'in lst: + Logs.error('feature shlib -> cshlib, dshlib or cxxshlib') + for x in('c','cxx','d','fc'): + if not x in lst and lst and lst[0]in[x+y for y in('program','shlib','stlib')]: + Logs.error('%r features is probably missing %r'%(self,x)) + TaskGen.feature('*')(check_err_features) + def check_err_order(self): + if not hasattr(self,'rule')and not'subst'in Utils.to_list(self.features): + for x in('before','after','ext_in','ext_out'): + if hasattr(self,x): + Logs.warn('Erroneous order constraint %r on non-rule based task generator %r'%(x,self)) + else: + for x in('before','after'): + for y in self.to_list(getattr(self,x,[])): + if not Task.classes.get(y,None): + Logs.error('Erroneous order constraint %s=%r on %r (no such class)'%(x,y,self)) + TaskGen.feature('*')(check_err_order) + def check_compile(self): + check_invalid_constraints(self) + try: + ret=self.orig_compile() + finally: + check_same_targets(self) + return ret + Build.BuildContext.orig_compile=Build.BuildContext.compile + Build.BuildContext.compile=check_compile + def use_rec(self,name,**kw): + try: + y=self.bld.get_tgen_by_name(name) + except 
Errors.WafError: + pass + else: + idx=self.bld.get_group_idx(self) + odx=self.bld.get_group_idx(y) + if odx>idx: + msg="Invalid 'use' across build groups:" + if Logs.verbose>1: + msg+='\n target %r\n uses:\n %r'%(self,y) + else: + msg+=" %r uses %r (try 'waf -v -v' for the full error)"%(self.name,name) + raise Errors.WafError(msg) + self.orig_use_rec(name,**kw) + TaskGen.task_gen.orig_use_rec=TaskGen.task_gen.use_rec + TaskGen.task_gen.use_rec=use_rec + def getattri(self,name,default=None): + if name=='append'or name=='add': + raise Errors.WafError('env.append and env.add do not exist: use env.append_value/env.append_unique') + elif name=='prepend': + raise Errors.WafError('env.prepend does not exist: use env.prepend_value') + if name in self.__slots__: + return object.__getattr__(self,name,default) + else: + return self[name] + ConfigSet.ConfigSet.__getattr__=getattri +def options(opt): + enhance_lib() +def configure(conf): + pass diff -Nru lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/fc_config.py lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/fc_config.py --- lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/fc_config.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/fc_config.py 2019-06-06 20:12:09.000000000 +0000 @@ -0,0 +1,286 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file + +import re,os,sys,shlex +from waflib.Configure import conf +from waflib.TaskGen import feature,before_method +FC_FRAGMENT=' program main\n end program main\n' +FC_FRAGMENT2=' PROGRAM MAIN\n END\n' +@conf +def fc_flags(conf): + v=conf.env + v['FC_SRC_F']=[] + v['FC_TGT_F']=['-c','-o'] + v['FCINCPATH_ST']='-I%s' + v['FCDEFINES_ST']='-D%s' + if not v['LINK_FC']:v['LINK_FC']=v['FC'] + v['FCLNK_SRC_F']=[] + v['FCLNK_TGT_F']=['-o'] + v['FCFLAGS_fcshlib']=['-fpic'] + v['LINKFLAGS_fcshlib']=['-shared'] + v['fcshlib_PATTERN']='lib%s.so' + v['fcstlib_PATTERN']='lib%s.a' + v['FCLIB_ST']='-l%s' + v['FCLIBPATH_ST']='-L%s' + v['FCSTLIB_ST']='-l%s' + v['FCSTLIBPATH_ST']='-L%s' + v['FCSTLIB_MARKER']='-Wl,-Bstatic' + v['FCSHLIB_MARKER']='-Wl,-Bdynamic' + v['SONAME_ST']='-Wl,-h,%s' +@conf +def fc_add_flags(conf): + conf.add_os_flags('FCFLAGS',dup=False) + conf.add_os_flags('LINKFLAGS',dup=False) + conf.add_os_flags('LDFLAGS',dup=False) +@conf +def check_fortran(self,*k,**kw): + self.check_cc(fragment=FC_FRAGMENT,compile_filename='test.f',features='fc fcprogram',msg='Compiling a simple fortran app') +@conf +def check_fc(self,*k,**kw): + kw['compiler']='fc' + if not'compile_mode'in kw: + kw['compile_mode']='fc' + if not'type'in kw: + kw['type']='fcprogram' + if not'compile_filename'in kw: + kw['compile_filename']='test.f90' + if not'code'in kw: + kw['code']=FC_FRAGMENT + return self.check(*k,**kw) +@conf +def fortran_modifier_darwin(conf): + v=conf.env + v['FCFLAGS_fcshlib']=['-fPIC'] + v['LINKFLAGS_fcshlib']=['-dynamiclib'] + v['fcshlib_PATTERN']='lib%s.dylib' + v['FRAMEWORKPATH_ST']='-F%s' + v['FRAMEWORK_ST']='-framework %s' + v['LINKFLAGS_fcstlib']=[] + v['FCSHLIB_MARKER']='' + v['FCSTLIB_MARKER']='' + v['SONAME_ST']='' +@conf +def fortran_modifier_win32(conf): + v=conf.env + v['fcprogram_PATTERN']=v['fcprogram_test_PATTERN']='%s.exe' + v['fcshlib_PATTERN']='%s.dll' + v['implib_PATTERN']='lib%s.dll.a' + v['IMPLIB_ST']='-Wl,--out-implib,%s' + v['FCFLAGS_fcshlib']=[] + 
v.append_value('FCFLAGS_fcshlib',['-DDLL_EXPORT']) + v.append_value('LINKFLAGS',['-Wl,--enable-auto-import']) +@conf +def fortran_modifier_cygwin(conf): + fortran_modifier_win32(conf) + v=conf.env + v['fcshlib_PATTERN']='cyg%s.dll' + v.append_value('LINKFLAGS_fcshlib',['-Wl,--enable-auto-image-base']) + v['FCFLAGS_fcshlib']=[] +@conf +def check_fortran_dummy_main(self,*k,**kw): + if not self.env.CC: + self.fatal('A c compiler is required for check_fortran_dummy_main') + lst=['MAIN__','__MAIN','_MAIN','MAIN_','MAIN'] + lst.extend([m.lower()for m in lst]) + lst.append('') + self.start_msg('Detecting whether we need a dummy main') + for main in lst: + kw['fortran_main']=main + try: + self.check_cc(fragment='int %s() { return 0; }\n'%(main or'test'),features='c fcprogram',mandatory=True) + if not main: + self.env.FC_MAIN=-1 + self.end_msg('no') + else: + self.env.FC_MAIN=main + self.end_msg('yes %s'%main) + break + except self.errors.ConfigurationError: + pass + else: + self.end_msg('not found') + self.fatal('could not detect whether fortran requires a dummy main, see the config.log') +GCC_DRIVER_LINE=re.compile('^Driving:') +POSIX_STATIC_EXT=re.compile('\S+\.a') +POSIX_LIB_FLAGS=re.compile('-l\S+') +@conf +def is_link_verbose(self,txt): + assert isinstance(txt,str) + for line in txt.splitlines(): + if not GCC_DRIVER_LINE.search(line): + if POSIX_STATIC_EXT.search(line)or POSIX_LIB_FLAGS.search(line): + return True + return False +@conf +def check_fortran_verbose_flag(self,*k,**kw): + self.start_msg('fortran link verbose flag') + for x in('-v','--verbose','-verbose','-V'): + try: + self.check_cc(features='fc fcprogram_test',fragment=FC_FRAGMENT2,compile_filename='test.f',linkflags=[x],mandatory=True) + except self.errors.ConfigurationError: + pass + else: + if self.is_link_verbose(self.test_bld.err)or self.is_link_verbose(self.test_bld.out): + self.end_msg(x) + break + else: + self.end_msg('failure') + self.fatal('Could not obtain the fortran link verbose flag (see config.log)') + self.env.FC_VERBOSE_FLAG=x + return x +LINKFLAGS_IGNORED=[r'-lang*',r'-lcrt[a-zA-Z0-9\.]*\.o',r'-lc$',r'-lSystem',r'-libmil',r'-LIST:*',r'-LNO:*'] +if os.name=='nt': + LINKFLAGS_IGNORED.extend([r'-lfrt*',r'-luser32',r'-lkernel32',r'-ladvapi32',r'-lmsvcrt',r'-lshell32',r'-lmingw',r'-lmoldname']) +else: + LINKFLAGS_IGNORED.append(r'-lgcc*') +RLINKFLAGS_IGNORED=[re.compile(f)for f in LINKFLAGS_IGNORED] +def _match_ignore(line): + for i in RLINKFLAGS_IGNORED: + if i.match(line): + return True + return False +def parse_fortran_link(lines): + final_flags=[] + for line in lines: + if not GCC_DRIVER_LINE.match(line): + _parse_flink_line(line,final_flags) + return final_flags +SPACE_OPTS=re.compile('^-[LRuYz]$') +NOSPACE_OPTS=re.compile('^-[RL]') +def _parse_flink_token(lexer,token,tmp_flags): + if _match_ignore(token): + pass + elif token.startswith('-lkernel32')and sys.platform=='cygwin': + tmp_flags.append(token) + elif SPACE_OPTS.match(token): + t=lexer.get_token() + if t.startswith('P,'): + t=t[2:] + for opt in t.split(os.pathsep): + tmp_flags.append('-L%s'%opt) + elif NOSPACE_OPTS.match(token): + tmp_flags.append(token) + elif POSIX_LIB_FLAGS.match(token): + tmp_flags.append(token) + else: + pass + t=lexer.get_token() + return t +def _parse_flink_line(line,final_flags): + lexer=shlex.shlex(line,posix=True) + lexer.whitespace_split=True + t=lexer.get_token() + tmp_flags=[] + while t: + t=_parse_flink_token(lexer,t,tmp_flags) + final_flags.extend(tmp_flags) + return final_flags +@conf +def 
check_fortran_clib(self,autoadd=True,*k,**kw): + if not self.env.FC_VERBOSE_FLAG: + self.fatal('env.FC_VERBOSE_FLAG is not set: execute check_fortran_verbose_flag?') + self.start_msg('Getting fortran runtime link flags') + try: + self.check_cc(fragment=FC_FRAGMENT2,compile_filename='test.f',features='fc fcprogram_test',linkflags=[self.env.FC_VERBOSE_FLAG]) + except Exception: + self.end_msg(False) + if kw.get('mandatory',True): + conf.fatal('Could not find the c library flags') + else: + out=self.test_bld.err + flags=parse_fortran_link(out.splitlines()) + self.end_msg('ok (%s)'%' '.join(flags)) + self.env.LINKFLAGS_CLIB=flags + return flags + return[] +def getoutput(conf,cmd,stdin=False): + from waflib import Errors + if conf.env.env: + env=conf.env.env + else: + env=dict(os.environ) + env['LANG']='C' + input=stdin and'\n'or None + try: + out,err=conf.cmd_and_log(cmd,env=env,output=0,input=input) + except Errors.WafError ,e: + if not(hasattr(e,'stderr')and hasattr(e,'stdout')): + raise e + else: + out=e.stdout + err=e.stderr + except Exception: + conf.fatal('could not determine the compiler version %r'%cmd) + return(out,err) +ROUTINES_CODE="""\ + subroutine foobar() + return + end + subroutine foo_bar() + return + end +""" +MAIN_CODE=""" +void %(dummy_func_nounder)s(void); +void %(dummy_func_under)s(void); +int %(main_func_name)s() { + %(dummy_func_nounder)s(); + %(dummy_func_under)s(); + return 0; +} +""" +@feature('link_main_routines_func') +@before_method('process_source') +def link_main_routines_tg_method(self): + def write_test_file(task): + task.outputs[0].write(task.generator.code) + bld=self.bld + bld(rule=write_test_file,target='main.c',code=MAIN_CODE%self.__dict__) + bld(rule=write_test_file,target='test.f',code=ROUTINES_CODE) + bld(features='fc fcstlib',source='test.f',target='test') + bld(features='c fcprogram',source='main.c',target='app',use='test') +def mangling_schemes(): + for u in('_',''): + for du in('','_'): + for c in("lower","upper"): + yield(u,du,c) +def mangle_name(u,du,c,name): + return getattr(name,c)()+u+(name.find('_')!=-1 and du or'') +@conf +def check_fortran_mangling(self,*k,**kw): + if not self.env.CC: + self.fatal('A c compiler is required for link_main_routines') + if not self.env.FC: + self.fatal('A fortran compiler is required for link_main_routines') + if not self.env.FC_MAIN: + self.fatal('Checking for mangling requires self.env.FC_MAIN (execute "check_fortran_dummy_main" first?)') + self.start_msg('Getting fortran mangling scheme') + for(u,du,c)in mangling_schemes(): + try: + self.check_cc(compile_filename=[],features='link_main_routines_func',msg='nomsg',errmsg='nomsg',mandatory=True,dummy_func_nounder=mangle_name(u,du,c,"foobar"),dummy_func_under=mangle_name(u,du,c,"foo_bar"),main_func_name=self.env.FC_MAIN) + except self.errors.ConfigurationError: + pass + else: + self.end_msg("ok ('%s', '%s', '%s-case')"%(u,du,c)) + self.env.FORTRAN_MANGLING=(u,du,c) + break + else: + self.end_msg(False) + self.fatal('mangler not found') + return(u,du,c) +@feature('pyext') +@before_method('propagate_uselib_vars','apply_link') +def set_lib_pat(self): + self.env['fcshlib_PATTERN']=self.env['pyext_PATTERN'] +@conf +def detect_openmp(self): + for x in('-fopenmp','-openmp','-mp','-xopenmp','-omp','-qsmp=omp'): + try: + self.check_fc(msg='Checking for OpenMP flag %s'%x,fragment='program main\n call omp_get_num_threads()\nend program main',fcflags=x,linkflags=x,uselib_store='OPENMP') + except self.errors.ConfigurationError: + pass + else: + break + else: + 
self.fatal('Could not find OpenMP') diff -Nru lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/fc.py lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/fc.py --- lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/fc.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/fc.py 2019-06-06 20:12:09.000000000 +0000 @@ -0,0 +1,115 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file + +from waflib import Utils,Task,Logs +from waflib.Tools import ccroot,fc_config,fc_scan +from waflib.TaskGen import feature,extension +from waflib.Configure import conf +ccroot.USELIB_VARS['fc']=set(['FCFLAGS','DEFINES','INCLUDES']) +ccroot.USELIB_VARS['fcprogram_test']=ccroot.USELIB_VARS['fcprogram']=set(['LIB','STLIB','LIBPATH','STLIBPATH','LINKFLAGS','RPATH','LINKDEPS']) +ccroot.USELIB_VARS['fcshlib']=set(['LIB','STLIB','LIBPATH','STLIBPATH','LINKFLAGS','RPATH','LINKDEPS']) +ccroot.USELIB_VARS['fcstlib']=set(['ARFLAGS','LINKDEPS']) +@feature('fcprogram','fcshlib','fcstlib','fcprogram_test') +def dummy(self): + pass +@extension('.f','.f90','.F','.F90','.for','.FOR') +def fc_hook(self,node): + return self.create_compiled_task('fc',node) +@conf +def modfile(conf,name): + return{'lower':name.lower()+'.mod','lower.MOD':name.upper()+'.MOD','UPPER.mod':name.upper()+'.mod','UPPER':name.upper()+'.MOD'}[conf.env.FC_MOD_CAPITALIZATION or'lower'] +def get_fortran_tasks(tsk): + bld=tsk.generator.bld + tasks=bld.get_tasks_group(bld.get_group_idx(tsk.generator)) + return[x for x in tasks if isinstance(x,fc)and not getattr(x,'nomod',None)and not getattr(x,'mod_fortran_done',None)] +class fc(Task.Task): + color='GREEN' + run_str='${FC} ${FCFLAGS} ${FCINCPATH_ST:INCPATHS} ${FCDEFINES_ST:DEFINES} ${_FCMODOUTFLAGS} ${FC_TGT_F}${TGT[0].abspath()} ${FC_SRC_F}${SRC[0].abspath()}' + vars=["FORTRANMODPATHFLAG"] + def scan(self): + tmp=fc_scan.fortran_parser(self.generator.includes_nodes) + tmp.task=self + tmp.start(self.inputs[0]) + if Logs.verbose: + Logs.debug('deps: deps for %r: %r; unresolved %r'%(self.inputs,tmp.nodes,tmp.names)) + return(tmp.nodes,tmp.names) + def runnable_status(self): + if getattr(self,'mod_fortran_done',None): + return super(fc,self).runnable_status() + bld=self.generator.bld + lst=get_fortran_tasks(self) + for tsk in lst: + tsk.mod_fortran_done=True + for tsk in lst: + ret=tsk.runnable_status() + if ret==Task.ASK_LATER: + for x in lst: + x.mod_fortran_done=None + return Task.ASK_LATER + ins=Utils.defaultdict(set) + outs=Utils.defaultdict(set) + for tsk in lst: + key=tsk.uid() + for x in bld.raw_deps[key]: + if x.startswith('MOD@'): + name=bld.modfile(x.replace('MOD@','')) + node=bld.srcnode.find_or_declare(name) + if not getattr(node,'sig',None): + node.sig=Utils.SIG_NIL + tsk.set_outputs(node) + outs[id(node)].add(tsk) + for tsk in lst: + key=tsk.uid() + for x in bld.raw_deps[key]: + if x.startswith('USE@'): + name=bld.modfile(x.replace('USE@','')) + node=bld.srcnode.find_resource(name) + if node and node not in tsk.outputs: + if not node in bld.node_deps[key]: + bld.node_deps[key].append(node) + ins[id(node)].add(tsk) + for k in ins.keys(): + for a in ins[k]: + a.run_after.update(outs[k]) + tmp=[] + for t in outs[k]: + tmp.extend(t.outputs) + a.dep_nodes.extend(tmp) + a.dep_nodes.sort(key=lambda x:x.abspath()) + for tsk in lst: + try: + delattr(tsk,'cache_sig') + except AttributeError: + pass + return 
super(fc,self).runnable_status() +class fcprogram(ccroot.link_task): + color='YELLOW' + run_str='${FC} ${LINKFLAGS} ${FCLNK_SRC_F}${SRC} ${FCLNK_TGT_F}${TGT[0].abspath()} ${RPATH_ST:RPATH} ${FCSTLIB_MARKER} ${FCSTLIBPATH_ST:STLIBPATH} ${FCSTLIB_ST:STLIB} ${FCSHLIB_MARKER} ${FCLIBPATH_ST:LIBPATH} ${FCLIB_ST:LIB} ${LDFLAGS}' + inst_to='${BINDIR}' +class fcshlib(fcprogram): + inst_to='${LIBDIR}' +class fcprogram_test(fcprogram): + def runnable_status(self): + ret=super(fcprogram_test,self).runnable_status() + if ret==Task.SKIP_ME: + ret=Task.RUN_ME + return ret + def exec_command(self,cmd,**kw): + bld=self.generator.bld + kw['shell']=isinstance(cmd,str) + kw['stdout']=kw['stderr']=Utils.subprocess.PIPE + kw['cwd']=bld.variant_dir + bld.out=bld.err='' + bld.to_log('command: %s\n'%cmd) + kw['output']=0 + try: + (bld.out,bld.err)=bld.cmd_and_log(cmd,**kw) + except Exception: + return-1 + if bld.out: + bld.to_log("out: %s\n"%bld.out) + if bld.err: + bld.to_log("err: %s\n"%bld.err) +class fcstlib(ccroot.stlink_task): + pass diff -Nru lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/fc_scan.py lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/fc_scan.py --- lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/fc_scan.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/fc_scan.py 2019-06-06 20:12:09.000000000 +0000 @@ -0,0 +1,64 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file + +import re +INC_REGEX="""(?:^|['">]\s*;)\s*(?:|#\s*)INCLUDE\s+(?:\w+_)?[<"'](.+?)(?=["'>])""" +USE_REGEX="""(?:^|;)\s*USE(?:\s+|(?:(?:\s*,\s*(?:NON_)?INTRINSIC)?\s*::))\s*(\w+)""" +MOD_REGEX="""(?:^|;)\s*MODULE(?!\s*PROCEDURE)(?:\s+|(?:(?:\s*,\s*(?:NON_)?INTRINSIC)?\s*::))\s*(\w+)""" +re_inc=re.compile(INC_REGEX,re.I) +re_use=re.compile(USE_REGEX,re.I) +re_mod=re.compile(MOD_REGEX,re.I) +class fortran_parser(object): + def __init__(self,incpaths): + self.seen=[] + self.nodes=[] + self.names=[] + self.incpaths=incpaths + def find_deps(self,node): + txt=node.read() + incs=[] + uses=[] + mods=[] + for line in txt.splitlines(): + m=re_inc.search(line) + if m: + incs.append(m.group(1)) + m=re_use.search(line) + if m: + uses.append(m.group(1)) + m=re_mod.search(line) + if m: + mods.append(m.group(1)) + return(incs,uses,mods) + def start(self,node): + self.waiting=[node] + while self.waiting: + nd=self.waiting.pop(0) + self.iter(nd) + def iter(self,node): + incs,uses,mods=self.find_deps(node) + for x in incs: + if x in self.seen: + continue + self.seen.append(x) + self.tryfind_header(x) + for x in uses: + name="USE@%s"%x + if not name in self.names: + self.names.append(name) + for x in mods: + name="MOD@%s"%x + if not name in self.names: + self.names.append(name) + def tryfind_header(self,filename): + found=None + for n in self.incpaths: + found=n.find_resource(filename) + if found: + self.nodes.append(found) + self.waiting.append(found) + break + if not found: + if not filename in self.names: + self.names.append(filename) diff -Nru lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/flex.py lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/flex.py --- lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/flex.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/flex.py 2019-06-06 20:12:09.000000000 
+0000 @@ -0,0 +1,32 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file + +import waflib.TaskGen,os,re +def decide_ext(self,node): + if'cxx'in self.features: + return['.lex.cc'] + return['.lex.c'] +def flexfun(tsk): + env=tsk.env + bld=tsk.generator.bld + wd=bld.variant_dir + def to_list(xx): + if isinstance(xx,str):return[xx] + return xx + tsk.last_cmd=lst=[] + lst.extend(to_list(env['FLEX'])) + lst.extend(to_list(env['FLEXFLAGS'])) + inputs=[a.path_from(bld.bldnode)for a in tsk.inputs] + if env.FLEX_MSYS: + inputs=[x.replace(os.sep,'/')for x in inputs] + lst.extend(inputs) + lst=[x for x in lst if x] + txt=bld.cmd_and_log(lst,cwd=wd,env=env.env or None,quiet=0) + tsk.outputs[0].write(txt.replace('\r\n','\n').replace('\r','\n')) +waflib.TaskGen.declare_chain(name='flex',rule=flexfun,ext_in='.l',decider=decide_ext,) +def configure(conf): + conf.find_program('flex',var='FLEX') + conf.env.FLEXFLAGS=['-t'] + if re.search(r"\\msys\\[0-9.]+\\bin\\flex.exe$",conf.env.FLEX[0]): + conf.env.FLEX_MSYS=True diff -Nru lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/g95.py lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/g95.py --- lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/g95.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/g95.py 2019-06-06 20:12:09.000000000 +0000 @@ -0,0 +1,54 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file + +import re +from waflib import Utils +from waflib.Tools import fc,fc_config,fc_scan,ar +from waflib.Configure import conf +@conf +def find_g95(conf): + fc=conf.find_program('g95',var='FC') + conf.get_g95_version(fc) + conf.env.FC_NAME='G95' +@conf +def g95_flags(conf): + v=conf.env + v['FCFLAGS_fcshlib']=['-fPIC'] + v['FORTRANMODFLAG']=['-fmod=',''] + v['FCFLAGS_DEBUG']=['-Werror'] +@conf +def g95_modifier_win32(conf): + fc_config.fortran_modifier_win32(conf) +@conf +def g95_modifier_cygwin(conf): + fc_config.fortran_modifier_cygwin(conf) +@conf +def g95_modifier_darwin(conf): + fc_config.fortran_modifier_darwin(conf) +@conf +def g95_modifier_platform(conf): + dest_os=conf.env['DEST_OS']or Utils.unversioned_sys_platform() + g95_modifier_func=getattr(conf,'g95_modifier_'+dest_os,None) + if g95_modifier_func: + g95_modifier_func() +@conf +def get_g95_version(conf,fc): + version_re=re.compile(r"g95\s*(?P<major>\d*)\.(?P<minor>\d*)").search + cmd=fc+['--version'] + out,err=fc_config.getoutput(conf,cmd,stdin=False) + if out: + match=version_re(out) + else: + match=version_re(err) + if not match: + conf.fatal('cannot determine g95 version') + k=match.groupdict() + conf.env['FC_VERSION']=(k['major'],k['minor']) +def configure(conf): + conf.find_g95() + conf.find_ar() + conf.fc_flags() + conf.fc_add_flags() + conf.g95_flags() + conf.g95_modifier_platform() diff -Nru lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/gas.py lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/gas.py --- lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/gas.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/gas.py 2019-06-06 20:12:09.000000000 +0000 @@ -0,0 +1,12 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit!
https://waf.io/book/index.html#_obtaining_the_waf_file + +import waflib.Tools.asm +from waflib.Tools import ar +def configure(conf): + conf.find_program(['gas','gcc'],var='AS') + conf.env.AS_TGT_F=['-c','-o'] + conf.env.ASLNK_TGT_F=['-o'] + conf.find_ar() + conf.load('asm') diff -Nru lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/gcc.py lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/gcc.py --- lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/gcc.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/gcc.py 2019-06-06 20:12:09.000000000 +0000 @@ -0,0 +1,102 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file + +from waflib.Tools import ccroot,ar +from waflib.Configure import conf +@conf +def find_gcc(conf): + cc=conf.find_program(['gcc','cc'],var='CC') + conf.get_cc_version(cc,gcc=True) + conf.env.CC_NAME='gcc' +@conf +def gcc_common_flags(conf): + v=conf.env + v['CC_SRC_F']=[] + v['CC_TGT_F']=['-c','-o'] + if not v['LINK_CC']:v['LINK_CC']=v['CC'] + v['CCLNK_SRC_F']=[] + v['CCLNK_TGT_F']=['-o'] + v['CPPPATH_ST']='-I%s' + v['DEFINES_ST']='-D%s' + v['LIB_ST']='-l%s' + v['LIBPATH_ST']='-L%s' + v['STLIB_ST']='-l%s' + v['STLIBPATH_ST']='-L%s' + v['RPATH_ST']='-Wl,-rpath,%s' + v['SONAME_ST']='-Wl,-h,%s' + v['SHLIB_MARKER']='-Wl,-Bdynamic' + v['STLIB_MARKER']='-Wl,-Bstatic' + v['cprogram_PATTERN']='%s' + v['CFLAGS_cshlib']=['-fPIC'] + v['LINKFLAGS_cshlib']=['-shared'] + v['cshlib_PATTERN']='lib%s.so' + v['LINKFLAGS_cstlib']=['-Wl,-Bstatic'] + v['cstlib_PATTERN']='lib%s.a' + v['LINKFLAGS_MACBUNDLE']=['-bundle','-undefined','dynamic_lookup'] + v['CFLAGS_MACBUNDLE']=['-fPIC'] + v['macbundle_PATTERN']='%s.bundle' +@conf +def gcc_modifier_win32(conf): + v=conf.env + v['cprogram_PATTERN']='%s.exe' + v['cshlib_PATTERN']='%s.dll' + v['implib_PATTERN']='lib%s.dll.a' + v['IMPLIB_ST']='-Wl,--out-implib,%s' + v['CFLAGS_cshlib']=[] + v.append_value('LINKFLAGS',['-Wl,--enable-auto-import']) +@conf +def gcc_modifier_cygwin(conf): + gcc_modifier_win32(conf) + v=conf.env + v['cshlib_PATTERN']='cyg%s.dll' + v.append_value('LINKFLAGS_cshlib',['-Wl,--enable-auto-image-base']) + v['CFLAGS_cshlib']=[] +@conf +def gcc_modifier_darwin(conf): + v=conf.env + v['CFLAGS_cshlib']=['-fPIC'] + v['LINKFLAGS_cshlib']=['-dynamiclib'] + v['cshlib_PATTERN']='lib%s.dylib' + v['FRAMEWORKPATH_ST']='-F%s' + v['FRAMEWORK_ST']=['-framework'] + v['ARCH_ST']=['-arch'] + v['LINKFLAGS_cstlib']=[] + v['SHLIB_MARKER']=[] + v['STLIB_MARKER']=[] + v['SONAME_ST']=[] +@conf +def gcc_modifier_aix(conf): + v=conf.env + v['LINKFLAGS_cprogram']=['-Wl,-brtl'] + v['LINKFLAGS_cshlib']=['-shared','-Wl,-brtl,-bexpfull'] + v['SHLIB_MARKER']=[] +@conf +def gcc_modifier_hpux(conf): + v=conf.env + v['SHLIB_MARKER']=[] + v['STLIB_MARKER']=[] + v['CFLAGS_cshlib']=['-fPIC','-DPIC'] + v['cshlib_PATTERN']='lib%s.sl' +@conf +def gcc_modifier_openbsd(conf): + conf.env.SONAME_ST=[] +@conf +def gcc_modifier_osf1V(conf): + v=conf.env + v['SHLIB_MARKER']=[] + v['STLIB_MARKER']=[] + v['SONAME_ST']=[] +@conf +def gcc_modifier_platform(conf): + gcc_modifier_func=getattr(conf,'gcc_modifier_'+conf.env.DEST_OS,None) + if gcc_modifier_func: + gcc_modifier_func() +def configure(conf): + conf.find_gcc() + conf.find_ar() + conf.gcc_common_flags() + conf.gcc_modifier_platform() + conf.cc_load_tools() + conf.cc_add_flags() + conf.link_add_flags() diff -Nru 
lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/gdc.py lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/gdc.py --- lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/gdc.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/gdc.py 2019-06-06 20:12:09.000000000 +0000 @@ -0,0 +1,35 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file + +from waflib.Tools import ar,d +from waflib.Configure import conf +@conf +def find_gdc(conf): + conf.find_program('gdc',var='D') + out=conf.cmd_and_log(conf.env.D+['--version']) + if out.find("gdc")==-1: + conf.fatal("detected compiler is not gdc") +@conf +def common_flags_gdc(conf): + v=conf.env + v['DFLAGS']=[] + v['D_SRC_F']=['-c'] + v['D_TGT_F']='-o%s' + v['D_LINKER']=v['D'] + v['DLNK_SRC_F']='' + v['DLNK_TGT_F']='-o%s' + v['DINC_ST']='-I%s' + v['DSHLIB_MARKER']=v['DSTLIB_MARKER']='' + v['DSTLIB_ST']=v['DSHLIB_ST']='-l%s' + v['DSTLIBPATH_ST']=v['DLIBPATH_ST']='-L%s' + v['LINKFLAGS_dshlib']=['-shared'] + v['DHEADER_ext']='.di' + v.DFLAGS_d_with_header='-fintfc' + v['D_HDR_F']='-fintfc-file=%s' +def configure(conf): + conf.find_gdc() + conf.load('ar') + conf.load('d') + conf.common_flags_gdc() + conf.d_platform_flags() diff -Nru lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/gfortran.py lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/gfortran.py --- lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/gfortran.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/gfortran.py 2019-06-06 20:12:09.000000000 +0000 @@ -0,0 +1,68 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file + +import re +from waflib import Utils +from waflib.Tools import fc,fc_config,fc_scan,ar +from waflib.Configure import conf +@conf +def find_gfortran(conf): + fc=conf.find_program(['gfortran','g77'],var='FC') + conf.get_gfortran_version(fc) + conf.env.FC_NAME='GFORTRAN' +@conf +def gfortran_flags(conf): + v=conf.env + v['FCFLAGS_fcshlib']=['-fPIC'] + v['FORTRANMODFLAG']=['-J',''] + v['FCFLAGS_DEBUG']=['-Werror'] +@conf +def gfortran_modifier_win32(conf): + fc_config.fortran_modifier_win32(conf) +@conf +def gfortran_modifier_cygwin(conf): + fc_config.fortran_modifier_cygwin(conf) +@conf +def gfortran_modifier_darwin(conf): + fc_config.fortran_modifier_darwin(conf) +@conf +def gfortran_modifier_platform(conf): + dest_os=conf.env['DEST_OS']or Utils.unversioned_sys_platform() + gfortran_modifier_func=getattr(conf,'gfortran_modifier_'+dest_os,None) + if gfortran_modifier_func: + gfortran_modifier_func() +@conf +def get_gfortran_version(conf,fc): + version_re=re.compile(r"GNU\s*Fortran",re.I).search + cmd=fc+['--version'] + out,err=fc_config.getoutput(conf,cmd,stdin=False) + if out:match=version_re(out) + else:match=version_re(err) + if not match: + conf.fatal('Could not determine the compiler type') + cmd=fc+['-dM','-E','-'] + out,err=fc_config.getoutput(conf,cmd,stdin=True) + if out.find('__GNUC__')<0: + conf.fatal('Could not determine the compiler type') + k={} + out=out.splitlines() + import shlex + for line in out: + lst=shlex.split(line) + if len(lst)>2: + key=lst[1] + val=lst[2] + k[key]=val + def isD(var): + return var in k + def isT(var): + return var in k and k[var]!='0' + conf.env['FC_VERSION']=(k['__GNUC__'],k['__GNUC_MINOR__'],k['__GNUC_PATCHLEVEL__']) +def configure(conf): + conf.find_gfortran() + conf.find_ar() + conf.fc_flags() + conf.fc_add_flags() + conf.gfortran_flags() + conf.gfortran_modifier_platform() diff -Nru lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/glib2.py lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/glib2.py --- lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/glib2.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/glib2.py 2019-06-06 20:12:09.000000000 +0000 @@ -0,0 +1,234 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file + +import os +from waflib import Context,Task,Utils,Options,Errors,Logs +from waflib.TaskGen import taskgen_method,before_method,feature,extension +from waflib.Configure import conf +@taskgen_method +def add_marshal_file(self,filename,prefix): + if not hasattr(self,'marshal_list'): + self.marshal_list=[] + self.meths.append('process_marshal') + self.marshal_list.append((filename,prefix)) +@before_method('process_source') +def process_marshal(self): + for f,prefix in getattr(self,'marshal_list',[]): + node=self.path.find_resource(f) + if not node: + raise Errors.WafError('file not found %r'%f) + h_node=node.change_ext('.h') + c_node=node.change_ext('.c') + task=self.create_task('glib_genmarshal',node,[h_node,c_node]) + task.env.GLIB_GENMARSHAL_PREFIX=prefix + self.source=self.to_nodes(getattr(self,'source',[])) + self.source.append(c_node) +class glib_genmarshal(Task.Task): + def run(self): + bld=self.inputs[0].__class__.ctx + get=self.env.get_flat + cmd1="%s %s --prefix=%s --header > %s"%(get('GLIB_GENMARSHAL'),self.inputs[0].srcpath(),get('GLIB_GENMARSHAL_PREFIX'),self.outputs[0].abspath()) + ret=bld.exec_command(cmd1) + if ret:return ret + c='''#include "%s"\n'''%self.outputs[0].name + self.outputs[1].write(c) + cmd2="%s %s --prefix=%s --body >> %s"%(get('GLIB_GENMARSHAL'),self.inputs[0].srcpath(),get('GLIB_GENMARSHAL_PREFIX'),self.outputs[1].abspath()) + return bld.exec_command(cmd2) + vars=['GLIB_GENMARSHAL_PREFIX','GLIB_GENMARSHAL'] + color='BLUE' + ext_out=['.h'] +@taskgen_method +def add_enums_from_template(self,source='',target='',template='',comments=''): + if not hasattr(self,'enums_list'): + self.enums_list=[] + self.meths.append('process_enums') + self.enums_list.append({'source':source,'target':target,'template':template,'file-head':'','file-prod':'','file-tail':'','enum-prod':'','value-head':'','value-prod':'','value-tail':'','comments':comments}) +@taskgen_method +def add_enums(self,source='',target='',file_head='',file_prod='',file_tail='',enum_prod='',value_head='',value_prod='',value_tail='',comments=''): + if not hasattr(self,'enums_list'): + self.enums_list=[] + self.meths.append('process_enums') + self.enums_list.append({'source':source,'template':'','target':target,'file-head':file_head,'file-prod':file_prod,'file-tail':file_tail,'enum-prod':enum_prod,'value-head':value_head,'value-prod':value_prod,'value-tail':value_tail,'comments':comments}) +@before_method('process_source') +def process_enums(self): + for enum in getattr(self,'enums_list',[]): + task=self.create_task('glib_mkenums') + env=task.env + inputs=[] + source_list=self.to_list(enum['source']) + if not source_list: + raise Errors.WafError('missing source '+str(enum)) + source_list=[self.path.find_resource(k)for k in source_list] + inputs+=source_list + env['GLIB_MKENUMS_SOURCE']=[k.abspath()for k in source_list] + if not enum['target']: + raise Errors.WafError('missing target '+str(enum)) + tgt_node=self.path.find_or_declare(enum['target']) + if tgt_node.name.endswith('.c'): + self.source.append(tgt_node) + env['GLIB_MKENUMS_TARGET']=tgt_node.abspath() + options=[] + if enum['template']: + template_node=self.path.find_resource(enum['template']) + options.append('--template %s'%(template_node.abspath())) + inputs.append(template_node) + params={'file-head':'--fhead','file-prod':'--fprod','file-tail':'--ftail','enum-prod':'--eprod','value-head':'--vhead','value-prod':'--vprod','value-tail':'--vtail','comments':'--comments'} + for param,option in 
params.items(): + if enum[param]: + options.append('%s %r'%(option,enum[param])) + env['GLIB_MKENUMS_OPTIONS']=' '.join(options) + task.set_inputs(inputs) + task.set_outputs(tgt_node) +class glib_mkenums(Task.Task): + run_str='${GLIB_MKENUMS} ${GLIB_MKENUMS_OPTIONS} ${GLIB_MKENUMS_SOURCE} > ${GLIB_MKENUMS_TARGET}' + color='PINK' + ext_out=['.h'] +@taskgen_method +def add_settings_schemas(self,filename_list): + if not hasattr(self,'settings_schema_files'): + self.settings_schema_files=[] + if not isinstance(filename_list,list): + filename_list=[filename_list] + self.settings_schema_files.extend(filename_list) +@taskgen_method +def add_settings_enums(self,namespace,filename_list): + if hasattr(self,'settings_enum_namespace'): + raise Errors.WafError("Tried to add gsettings enums to '%s' more than once"%self.name) + self.settings_enum_namespace=namespace + if type(filename_list)!='list': + filename_list=[filename_list] + self.settings_enum_files=filename_list +@feature('glib2') +def process_settings(self): + enums_tgt_node=[] + install_files=[] + settings_schema_files=getattr(self,'settings_schema_files',[]) + if settings_schema_files and not self.env['GLIB_COMPILE_SCHEMAS']: + raise Errors.WafError("Unable to process GSettings schemas - glib-compile-schemas was not found during configure") + if hasattr(self,'settings_enum_files'): + enums_task=self.create_task('glib_mkenums') + source_list=self.settings_enum_files + source_list=[self.path.find_resource(k)for k in source_list] + enums_task.set_inputs(source_list) + enums_task.env['GLIB_MKENUMS_SOURCE']=[k.abspath()for k in source_list] + target=self.settings_enum_namespace+'.enums.xml' + tgt_node=self.path.find_or_declare(target) + enums_task.set_outputs(tgt_node) + enums_task.env['GLIB_MKENUMS_TARGET']=tgt_node.abspath() + enums_tgt_node=[tgt_node] + install_files.append(tgt_node) + options='--comments "<!-- @comment@ -->" --fhead "<schemalist>" --vhead " <@type@ id=\\"%s.@EnumName@\\">" --vprod " <value nick=\\"@valuenick@\\" value=\\"@valuenum@\\"/>" --vtail " </@type@>" --ftail "</schemalist>" '%(self.settings_enum_namespace) + enums_task.env['GLIB_MKENUMS_OPTIONS']=options + for schema in settings_schema_files: + schema_task=self.create_task('glib_validate_schema') + schema_node=self.path.find_resource(schema) + if not schema_node: + raise Errors.WafError("Cannot find the schema file '%s'"%schema) + install_files.append(schema_node) + source_list=enums_tgt_node+[schema_node] + schema_task.set_inputs(source_list) + schema_task.env['GLIB_COMPILE_SCHEMAS_OPTIONS']=[("--schema-file="+k.abspath())for k in source_list] + target_node=schema_node.change_ext('.xml.valid') + schema_task.set_outputs(target_node) + schema_task.env['GLIB_VALIDATE_SCHEMA_OUTPUT']=target_node.abspath() + def compile_schemas_callback(bld): + if not bld.is_install:return + Logs.pprint('YELLOW','Updating GSettings schema cache') + command=Utils.subst_vars("${GLIB_COMPILE_SCHEMAS} ${GSETTINGSSCHEMADIR}",bld.env) + self.bld.exec_command(command) + if self.bld.is_install: + if not self.env['GSETTINGSSCHEMADIR']: + raise Errors.WafError('GSETTINGSSCHEMADIR not defined (should have been set up automatically during configure)') + if install_files: + self.bld.install_files(self.env['GSETTINGSSCHEMADIR'],install_files) + if not hasattr(self.bld,'_compile_schemas_registered'): + self.bld.add_post_fun(compile_schemas_callback) + self.bld._compile_schemas_registered=True +class glib_validate_schema(Task.Task): + run_str='rm -f ${GLIB_VALIDATE_SCHEMA_OUTPUT} && ${GLIB_COMPILE_SCHEMAS} --dry-run ${GLIB_COMPILE_SCHEMAS_OPTIONS} && touch ${GLIB_VALIDATE_SCHEMA_OUTPUT}' + color='PINK'
+@extension('.gresource.xml') +def process_gresource_source(self,node): + if not self.env['GLIB_COMPILE_RESOURCES']: + raise Errors.WafError("Unable to process GResource file - glib-compile-resources was not found during configure") + if'gresource'in self.features: + return + h_node=node.change_ext('_xml.h') + c_node=node.change_ext('_xml.c') + self.create_task('glib_gresource_source',node,[h_node,c_node]) + self.source.append(c_node) +@feature('gresource') +def process_gresource_bundle(self): + for i in self.to_list(self.source): + node=self.path.find_resource(i) + task=self.create_task('glib_gresource_bundle',node,node.change_ext('')) + inst_to=getattr(self,'install_path',None) + if inst_to: + self.bld.install_files(inst_to,task.outputs) +class glib_gresource_base(Task.Task): + color='BLUE' + base_cmd='${GLIB_COMPILE_RESOURCES} --sourcedir=${SRC[0].parent.srcpath()} --sourcedir=${SRC[0].bld_dir()}' + def scan(self): + bld=self.generator.bld + kw={} + try: + if not kw.get('cwd',None): + kw['cwd']=bld.cwd + except AttributeError: + bld.cwd=kw['cwd']=bld.variant_dir + kw['quiet']=Context.BOTH + cmd=Utils.subst_vars('${GLIB_COMPILE_RESOURCES} --sourcedir=%s --sourcedir=%s --generate-dependencies %s'%(self.inputs[0].parent.srcpath(),self.inputs[0].bld_dir(),self.inputs[0].bldpath()),self.env) + output=bld.cmd_and_log(cmd,**kw) + nodes=[] + names=[] + for dep in output.splitlines(): + if dep: + node=bld.bldnode.find_node(dep) + if node: + nodes.append(node) + else: + names.append(dep) + return(nodes,names) +class glib_gresource_source(glib_gresource_base): + vars=['GLIB_COMPILE_RESOURCES'] + fun_h=Task.compile_fun_shell(glib_gresource_base.base_cmd+' --target=${TGT[0].abspath()} --generate-header ${SRC}') + fun_c=Task.compile_fun_shell(glib_gresource_base.base_cmd+' --target=${TGT[1].abspath()} --generate-source ${SRC}') + ext_out=['.h'] + def run(self): + return self.fun_h[0](self)or self.fun_c[0](self) +class glib_gresource_bundle(glib_gresource_base): + run_str=glib_gresource_base.base_cmd+' --target=${TGT} ${SRC}' + shell=True +@conf +def find_glib_genmarshal(conf): + conf.find_program('glib-genmarshal',var='GLIB_GENMARSHAL') +@conf +def find_glib_mkenums(conf): + if not conf.env.PERL: + conf.find_program('perl',var='PERL') + conf.find_program('glib-mkenums',interpreter='PERL',var='GLIB_MKENUMS') +@conf +def find_glib_compile_schemas(conf): + conf.find_program('glib-compile-schemas',var='GLIB_COMPILE_SCHEMAS') + def getstr(varname): + return getattr(Options.options,varname,getattr(conf.env,varname,'')) + gsettingsschemadir=getstr('GSETTINGSSCHEMADIR') + if not gsettingsschemadir: + datadir=getstr('DATADIR') + if not datadir: + prefix=conf.env['PREFIX'] + datadir=os.path.join(prefix,'share') + gsettingsschemadir=os.path.join(datadir,'glib-2.0','schemas') + conf.env['GSETTINGSSCHEMADIR']=gsettingsschemadir +@conf +def find_glib_compile_resources(conf): + conf.find_program('glib-compile-resources',var='GLIB_COMPILE_RESOURCES') +def configure(conf): + conf.find_glib_genmarshal() + conf.find_glib_mkenums() + conf.find_glib_compile_schemas(mandatory=False) + conf.find_glib_compile_resources(mandatory=False) +def options(opt): + gr=opt.add_option_group('Installation directories') + gr.add_option('--gsettingsschemadir',help='GSettings schema location [DATADIR/glib-2.0/schemas]',default='',dest='GSETTINGSSCHEMADIR') diff -Nru lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/gnu_dirs.py lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/gnu_dirs.py --- 
lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/gnu_dirs.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/gnu_dirs.py 2019-06-06 20:12:09.000000000 +0000 @@ -0,0 +1,66 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file + +import os,re +from waflib import Utils,Options,Context +gnuopts=''' +bindir, user commands, ${EXEC_PREFIX}/bin +sbindir, system binaries, ${EXEC_PREFIX}/sbin +libexecdir, program-specific binaries, ${EXEC_PREFIX}/libexec +sysconfdir, host-specific configuration, ${PREFIX}/etc +sharedstatedir, architecture-independent variable data, ${PREFIX}/com +localstatedir, variable data, ${PREFIX}/var +libdir, object code libraries, ${EXEC_PREFIX}/lib%s +includedir, header files, ${PREFIX}/include +oldincludedir, header files for non-GCC compilers, /usr/include +datarootdir, architecture-independent data root, ${PREFIX}/share +datadir, architecture-independent data, ${DATAROOTDIR} +infodir, GNU "info" documentation, ${DATAROOTDIR}/info +localedir, locale-dependent data, ${DATAROOTDIR}/locale +mandir, manual pages, ${DATAROOTDIR}/man +docdir, documentation root, ${DATAROOTDIR}/doc/${PACKAGE} +htmldir, HTML documentation, ${DOCDIR} +dvidir, DVI documentation, ${DOCDIR} +pdfdir, PDF documentation, ${DOCDIR} +psdir, PostScript documentation, ${DOCDIR} +'''%Utils.lib64() +_options=[x.split(', ')for x in gnuopts.splitlines()if x] +def configure(conf): + def get_param(varname,default): + return getattr(Options.options,varname,'')or default + env=conf.env + env.LIBDIR=env.BINDIR=[] + env.EXEC_PREFIX=get_param('EXEC_PREFIX',env.PREFIX) + env.PACKAGE=getattr(Context.g_module,'APPNAME',None)or env.PACKAGE + complete=False + iter=0 + while not complete and iter<len(_options)+1: [...] (?P<major>\d*)\.(?P<minor>\d*)",re.I).search + if Utils.is_win32: + cmd=fc + else: + cmd=fc+['-logo'] + out,err=fc_config.getoutput(conf,cmd,stdin=False) + match=version_re(out)or version_re(err) + if not match: + conf.fatal('cannot determine ifort version.') + k=match.groupdict() + conf.env['FC_VERSION']=(k['major'],k['minor']) +def configure(conf): + if Utils.is_win32: + compiler,version,path,includes,libdirs,arch=conf.detect_ifort(True) + v=conf.env + v.DEST_CPU=arch + v.PATH=path + v.INCLUDES=includes + v.LIBPATH=libdirs + v.MSVC_COMPILER=compiler + try: + v.MSVC_VERSION=float(version) + except Exception: + raise + v.MSVC_VERSION=float(version[:-3]) + conf.find_ifort_win32() + conf.ifort_modifier_win32() + else: + conf.find_ifort() + conf.find_program('xiar',var='AR') + conf.find_ar() + conf.fc_flags() + conf.fc_add_flags() + conf.ifort_modifier_platform() +import os,sys,re,tempfile +from waflib import Task,Logs,Options,Errors +from waflib.Logs import debug,warn +from waflib.TaskGen import after_method,feature +from waflib.Configure import conf +from waflib.Tools import ccroot,ar,winres +all_ifort_platforms=[('intel64','amd64'),('em64t','amd64'),('ia32','x86'),('Itanium','ia64')] +@conf +def gather_ifort_versions(conf,versions): + version_pattern=re.compile('^...?.?\....?.?') + try: + all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Wow6432node\\Intel\\Compilers\\Fortran') + except WindowsError: + try: + all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Intel\\Compilers\\Fortran') + except WindowsError: + return + index=0 + while 1: + try: + version=Utils.winreg.EnumKey(all_versions,index) + except WindowsError: + break +
index=index+1 + if not version_pattern.match(version): + continue + targets=[] + for target,arch in all_ifort_platforms: + try: + if target=='intel64':targetDir='EM64T_NATIVE' + else:targetDir=target + Utils.winreg.OpenKey(all_versions,version+'\\'+targetDir) + icl_version=Utils.winreg.OpenKey(all_versions,version) + path,type=Utils.winreg.QueryValueEx(icl_version,'ProductDir') + batch_file=os.path.join(path,'bin','iclvars.bat') + if os.path.isfile(batch_file): + try: + targets.append((target,(arch,get_compiler_env(conf,'intel',version,target,batch_file)))) + except conf.errors.ConfigurationError: + pass + except WindowsError: + pass + for target,arch in all_ifort_platforms: + try: + icl_version=Utils.winreg.OpenKey(all_versions,version+'\\'+target) + path,type=Utils.winreg.QueryValueEx(icl_version,'ProductDir') + batch_file=os.path.join(path,'bin','iclvars.bat') + if os.path.isfile(batch_file): + try: + targets.append((target,(arch,get_compiler_env(conf,'intel',version,target,batch_file)))) + except conf.errors.ConfigurationError: + pass + except WindowsError: + continue + major=version[0:2] + versions.append(('intel '+major,targets)) +def setup_ifort(conf,versions,arch=False): + platforms=Utils.to_list(conf.env['MSVC_TARGETS'])or[i for i,j in all_ifort_platforms] + desired_versions=conf.env['MSVC_VERSIONS']or[v for v,_ in versions][::-1] + versiondict=dict(versions) + for version in desired_versions: + try: + targets=dict(versiondict[version]) + for target in platforms: + try: + try: + realtarget,(p1,p2,p3)=targets[target] + except conf.errors.ConfigurationError: + del(targets[target]) + else: + compiler,revision=version.rsplit(' ',1) + if arch: + return compiler,revision,p1,p2,p3,realtarget + else: + return compiler,revision,p1,p2,p3 + except KeyError: + continue + except KeyError: + continue + conf.fatal('msvc: Impossible to find a valid architecture for building (in setup_ifort)') +@conf +def get_ifort_version_win32(conf,compiler,version,target,vcvars): + try: + conf.msvc_cnt+=1 + except AttributeError: + conf.msvc_cnt=1 + batfile=conf.bldnode.make_node('waf-print-msvc-%d.bat'%conf.msvc_cnt) + batfile.write("""@echo off +set INCLUDE= +set LIB= +call "%s" %s +echo PATH=%%PATH%% +echo INCLUDE=%%INCLUDE%% +echo LIB=%%LIB%%;%%LIBPATH%% +"""%(vcvars,target)) + sout=conf.cmd_and_log(['cmd.exe','/E:on','/V:on','/C',batfile.abspath()]) + batfile.delete() + lines=sout.splitlines() + if not lines[0]: + lines.pop(0) + MSVC_PATH=MSVC_INCDIR=MSVC_LIBDIR=None + for line in lines: + if line.startswith('PATH='): + path=line[5:] + MSVC_PATH=path.split(';') + elif line.startswith('INCLUDE='): + MSVC_INCDIR=[i for i in line[8:].split(';')if i] + elif line.startswith('LIB='): + MSVC_LIBDIR=[i for i in line[4:].split(';')if i] + if None in(MSVC_PATH,MSVC_INCDIR,MSVC_LIBDIR): + conf.fatal('msvc: Could not find a valid architecture for building (get_ifort_version_win32)') + env=dict(os.environ) + env.update(PATH=path) + compiler_name,linker_name,lib_name=_get_prog_names(conf,compiler) + fc=conf.find_program(compiler_name,path_list=MSVC_PATH) + if'CL'in env: + del(env['CL']) + try: + try: + conf.cmd_and_log(fc+['/help'],env=env) + except UnicodeError: + st=Utils.ex_stack() + if conf.logger: + conf.logger.error(st) + conf.fatal('msvc: Unicode error - check the code page?') + except Exception ,e: + debug('msvc: get_ifort_version: %r %r %r -> failure %s'%(compiler,version,target,str(e))) + conf.fatal('msvc: cannot run the compiler in get_ifort_version (run with -v to display errors)') + else: + debug('msvc: 
get_ifort_version: %r %r %r -> OK',compiler,version,target) + finally: + conf.env[compiler_name]='' + return(MSVC_PATH,MSVC_INCDIR,MSVC_LIBDIR) +def get_compiler_env(conf,compiler,version,bat_target,bat,select=None): + lazy=getattr(Options.options,'msvc_lazy',True) + if conf.env.MSVC_LAZY_AUTODETECT is False: + lazy=False + def msvc_thunk(): + vs=conf.get_ifort_version_win32(compiler,version,bat_target,bat) + if select: + return select(vs) + else: + return vs + return lazytup(msvc_thunk,lazy,([],[],[])) +class lazytup(object): + def __init__(self,fn,lazy=True,default=None): + self.fn=fn + self.default=default + if not lazy: + self.evaluate() + def __len__(self): + self.evaluate() + return len(self.value) + def __iter__(self): + self.evaluate() + for i,v in enumerate(self.value): + yield v + def __getitem__(self,i): + self.evaluate() + return self.value[i] + def __repr__(self): + if hasattr(self,'value'): + return repr(self.value) + elif self.default: + return repr(self.default) + else: + self.evaluate() + return repr(self.value) + def evaluate(self): + if hasattr(self,'value'): + return + self.value=self.fn() +@conf +def get_ifort_versions(conf,eval_and_save=True): + if conf.env['IFORT_INSTALLED_VERSIONS']: + return conf.env['IFORT_INSTALLED_VERSIONS'] + lst=[] + conf.gather_ifort_versions(lst) + if eval_and_save: + def checked_target(t): + target,(arch,paths)=t + try: + paths.evaluate() + except conf.errors.ConfigurationError: + return None + else: + return t + lst=[(version,list(filter(checked_target,targets)))for version,targets in lst] + conf.env['IFORT_INSTALLED_VERSIONS']=lst + return lst +@conf +def detect_ifort(conf,arch=False): + versions=get_ifort_versions(conf,False) + return setup_ifort(conf,versions,arch) +def _get_prog_names(conf,compiler): + if compiler=='intel': + compiler_name='ifort' + linker_name='XILINK' + lib_name='XILIB' + else: + compiler_name='CL' + linker_name='LINK' + lib_name='LIB' + return compiler_name,linker_name,lib_name +@conf +def find_ifort_win32(conf): + v=conf.env + path=v['PATH'] + compiler=v['MSVC_COMPILER'] + version=v['MSVC_VERSION'] + compiler_name,linker_name,lib_name=_get_prog_names(conf,compiler) + v.IFORT_MANIFEST=(compiler=='intel'and version>=11) + fc=conf.find_program(compiler_name,var='FC',path_list=path) + env=dict(conf.environ) + if path:env.update(PATH=';'.join(path)) + if not conf.cmd_and_log(fc+['/nologo','/help'],env=env): + conf.fatal('not intel fortran compiler could not be identified') + v['FC_NAME']='IFORT' + if not v['LINK_FC']: + conf.find_program(linker_name,var='LINK_FC',path_list=path,mandatory=True) + if not v['AR']: + conf.find_program(lib_name,path_list=path,var='AR',mandatory=True) + v['ARFLAGS']=['/NOLOGO'] + if v.IFORT_MANIFEST: + conf.find_program('MT',path_list=path,var='MT') + v['MTFLAGS']=['/NOLOGO'] + try: + conf.load('winres') + except Errors.WafError: + warn('Resource compiler not found. 
Compiling resource file is disabled') +@after_method('apply_link') +@feature('fc') +def apply_flags_ifort(self): + if not self.env.IFORT_WIN32 or not getattr(self,'link_task',None): + return + is_static=isinstance(self.link_task,ccroot.stlink_task) + subsystem=getattr(self,'subsystem','') + if subsystem: + subsystem='/subsystem:%s'%subsystem + flags=is_static and'ARFLAGS'or'LINKFLAGS' + self.env.append_value(flags,subsystem) + if not is_static: + for f in self.env.LINKFLAGS: + d=f.lower() + if d[1:]=='debug': + pdbnode=self.link_task.outputs[0].change_ext('.pdb') + self.link_task.outputs.append(pdbnode) + if getattr(self,'install_task',None): + self.pdb_install_task=self.bld.install_files(self.install_task.dest,pdbnode,env=self.env) + break +@feature('fcprogram','fcshlib','fcprogram_test') +@after_method('apply_link') +def apply_manifest_ifort(self): + if self.env.IFORT_WIN32 and getattr(self,'link_task',None): + self.link_task.env.FC=self.env.LINK_FC + if self.env.IFORT_WIN32 and self.env.IFORT_MANIFEST and getattr(self,'link_task',None): + out_node=self.link_task.outputs[0] + man_node=out_node.parent.find_or_declare(out_node.name+'.manifest') + self.link_task.outputs.append(man_node) + self.link_task.do_manifest=True +def exec_mf(self): + env=self.env + mtool=env['MT'] + if not mtool: + return 0 + self.do_manifest=False + outfile=self.outputs[0].abspath() + manifest=None + for out_node in self.outputs: + if out_node.name.endswith('.manifest'): + manifest=out_node.abspath() + break + if manifest is None: + return 0 + mode='' + if'fcprogram'in self.generator.features or'fcprogram_test'in self.generator.features: + mode='1' + elif'fcshlib'in self.generator.features: + mode='2' + debug('msvc: embedding manifest in mode %r'%mode) + lst=[]+mtool + lst.extend(Utils.to_list(env['MTFLAGS'])) + lst.extend(['-manifest',manifest]) + lst.append('-outputresource:%s;%s'%(outfile,mode)) + return self.exec_command(lst) +def quote_response_command(self,flag): + if flag.find(' ')>-1: + for x in('/LIBPATH:','/IMPLIB:','/OUT:','/I'): + if flag.startswith(x): + flag='%s"%s"'%(x,flag[len(x):]) + break + else: + flag='"%s"'%flag + return flag +def exec_response_command(self,cmd,**kw): + try: + tmp=None + if sys.platform.startswith('win')and isinstance(cmd,list)and len(' '.join(cmd))>=8192: + program=cmd[0] + cmd=[self.quote_response_command(x)for x in cmd] + (fd,tmp)=tempfile.mkstemp() + os.write(fd,'\r\n'.join(i.replace('\\','\\\\')for i in cmd[1:])) + os.close(fd) + cmd=[program,'@'+tmp] + ret=super(self.__class__,self).exec_command(cmd,**kw) + finally: + if tmp: + try: + os.remove(tmp) + except OSError: + pass + return ret +def exec_command_ifort(self,*k,**kw): + if isinstance(k[0],list): + lst=[] + carry='' + for a in k[0]: + if a=='/Fo'or a=='/doc'or a[-1]==':': + carry=a + else: + lst.append(carry+a) + carry='' + k=[lst] + if self.env['PATH']: + env=dict(self.env.env or os.environ) + env.update(PATH=';'.join(self.env['PATH'])) + kw['env']=env + if not'cwd'in kw: + kw['cwd']=self.generator.bld.variant_dir + ret=self.exec_response_command(k[0],**kw) + if not ret and getattr(self,'do_manifest',None): + ret=self.exec_mf() + return ret +def wrap_class(class_name): + cls=Task.classes.get(class_name,None) + if not cls: + return None + derived_class=type(class_name,(cls,),{}) + def exec_command(self,*k,**kw): + if self.env.IFORT_WIN32: + return self.exec_command_ifort(*k,**kw) + else: + return super(derived_class,self).exec_command(*k,**kw) + derived_class.exec_command=exec_command + 
derived_class.exec_response_command=exec_response_command + derived_class.quote_response_command=quote_response_command + derived_class.exec_command_ifort=exec_command_ifort + derived_class.exec_mf=exec_mf + if hasattr(cls,'hcode'): + derived_class.hcode=cls.hcode + return derived_class +for k in'fc fcprogram fcprogram_test fcshlib fcstlib'.split(): + wrap_class(k) diff -Nru lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/__init__.py lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/__init__.py --- lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/__init__.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/__init__.py 2019-06-06 20:12:09.000000000 +0000 @@ -0,0 +1,4 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file + diff -Nru lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/intltool.py lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/intltool.py --- lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/intltool.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/intltool.py 2019-06-06 20:12:09.000000000 +0000 @@ -0,0 +1,97 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file + +import os,re +from waflib import Context,Task,Utils,Logs +import waflib.Tools.ccroot +from waflib.TaskGen import feature,before_method,taskgen_method +from waflib.Logs import error +from waflib.Configure import conf +_style_flags={'ba':'-b','desktop':'-d','keys':'-k','quoted':'--quoted-style','quotedxml':'--quotedxml-style','rfc822deb':'-r','schemas':'-s','xml':'-x',} +@taskgen_method +def ensure_localedir(self): + if not self.env.LOCALEDIR: + if self.env.DATAROOTDIR: + self.env.LOCALEDIR=os.path.join(self.env.DATAROOTDIR,'locale') + else: + self.env.LOCALEDIR=os.path.join(self.env.PREFIX,'share','locale') +@before_method('process_source') +@feature('intltool_in') +def apply_intltool_in_f(self): + try:self.meths.remove('process_source') + except ValueError:pass + self.ensure_localedir() + podir=getattr(self,'podir','.') + podirnode=self.path.find_dir(podir) + if not podirnode: + error("could not find the podir %r"%podir) + return + cache=getattr(self,'intlcache','.intlcache') + self.env.INTLCACHE=[os.path.join(str(self.path.get_bld()),podir,cache)] + self.env.INTLPODIR=podirnode.bldpath() + self.env.append_value('INTLFLAGS',getattr(self,'flags',self.env.INTLFLAGS_DEFAULT)) + if'-c'in self.env.INTLFLAGS: + self.bld.fatal('Redundant -c flag in intltool task %r'%self) + style=getattr(self,'style',None) + if style: + try: + style_flag=_style_flags[style] + except KeyError: + self.bld.fatal('intltool_in style "%s" is not valid'%style) + self.env.append_unique('INTLFLAGS',[style_flag]) + for i in self.to_list(self.source): + node=self.path.find_resource(i) + task=self.create_task('intltool',node,node.change_ext('')) + inst=getattr(self,'install_path',None) + if inst: + self.bld.install_files(inst,task.outputs) +@feature('intltool_po') +def apply_intltool_po(self): + try:self.meths.remove('process_source') + except ValueError:pass + self.ensure_localedir() + appname=getattr(self,'appname',getattr(Context.g_module,Context.APPNAME,'set_your_app_name')) + podir=getattr(self,'podir','.') + 
inst=getattr(self,'install_path','${LOCALEDIR}') + linguas=self.path.find_node(os.path.join(podir,'LINGUAS')) + if linguas: + file=open(linguas.abspath()) + langs=[] + for line in file.readlines(): + if not line.startswith('#'): + langs+=line.split() + file.close() + re_linguas=re.compile('[-a-zA-Z_@.]+') + for lang in langs: + if re_linguas.match(lang): + node=self.path.find_resource(os.path.join(podir,re_linguas.match(lang).group()+'.po')) + task=self.create_task('po',node,node.change_ext('.mo')) + if inst: + filename=task.outputs[0].name + (langname,ext)=os.path.splitext(filename) + inst_file=inst+os.sep+langname+os.sep+'LC_MESSAGES'+os.sep+appname+'.mo' + self.bld.install_as(inst_file,task.outputs[0],chmod=getattr(self,'chmod',Utils.O644),env=task.env) + else: + Logs.pprint('RED',"Error no LINGUAS file found in po directory") +class po(Task.Task): + run_str='${MSGFMT} -o ${TGT} ${SRC}' + color='BLUE' +class intltool(Task.Task): + run_str='${INTLTOOL} ${INTLFLAGS} ${INTLCACHE_ST:INTLCACHE} ${INTLPODIR} ${SRC} ${TGT}' + color='BLUE' +@conf +def find_msgfmt(conf): + conf.find_program('msgfmt',var='MSGFMT') +@conf +def find_intltool_merge(conf): + if not conf.env.PERL: + conf.find_program('perl',var='PERL') + conf.env.INTLCACHE_ST='--cache=%s' + conf.env.INTLFLAGS_DEFAULT=['-q','-u'] + conf.find_program('intltool-merge',interpreter='PERL',var='INTLTOOL') +def configure(conf): + conf.find_msgfmt() + conf.find_intltool_merge() + if conf.env.CC or conf.env.CXX: + conf.check(header_name='locale.h') diff -Nru lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/irixcc.py lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/irixcc.py --- lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/irixcc.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/irixcc.py 2019-06-06 20:12:09.000000000 +0000 @@ -0,0 +1,45 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file + +from waflib.Tools import ccroot,ar +from waflib.Configure import conf +@conf +def find_irixcc(conf): + v=conf.env + cc=None + if v['CC']:cc=v['CC'] + elif'CC'in conf.environ:cc=conf.environ['CC'] + if not cc:cc=conf.find_program('cc',var='CC') + if not cc:conf.fatal('irixcc was not found') + try: + conf.cmd_and_log(cc+['-version']) + except Exception: + conf.fatal('%r -version could not be executed'%cc) + v['CC']=cc + v['CC_NAME']='irix' +@conf +def irixcc_common_flags(conf): + v=conf.env + v['CC_SRC_F']='' + v['CC_TGT_F']=['-c','-o'] + v['CPPPATH_ST']='-I%s' + v['DEFINES_ST']='-D%s' + if not v['LINK_CC']:v['LINK_CC']=v['CC'] + v['CCLNK_SRC_F']='' + v['CCLNK_TGT_F']=['-o'] + v['LIB_ST']='-l%s' + v['LIBPATH_ST']='-L%s' + v['STLIB_ST']='-l%s' + v['STLIBPATH_ST']='-L%s' + v['cprogram_PATTERN']='%s' + v['cshlib_PATTERN']='lib%s.so' + v['cstlib_PATTERN']='lib%s.a' +def configure(conf): + conf.find_irixcc() + conf.find_cpp() + conf.find_ar() + conf.irixcc_common_flags() + conf.cc_load_tools() + conf.cc_add_flags() + conf.link_add_flags() diff -Nru lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/javaw.py lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/javaw.py --- lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/javaw.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/javaw.py 2019-06-06 20:12:09.000000000 +0000 @@ -0,0 +1,319 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file + +import os,tempfile,shutil +from waflib import Task,Utils,Errors,Node,Logs +from waflib.Configure import conf +from waflib.TaskGen import feature,before_method,after_method +from waflib.Tools import ccroot +ccroot.USELIB_VARS['javac']=set(['CLASSPATH','JAVACFLAGS']) +SOURCE_RE='**/*.java' +JAR_RE='**/*' +class_check_source=''' +public class Test { + public static void main(String[] argv) { + Class lib; + if (argv.length < 1) { + System.err.println("Missing argument"); + System.exit(77); + } + try { + lib = Class.forName(argv[0]); + } catch (ClassNotFoundException e) { + System.err.println("ClassNotFoundException"); + System.exit(1); + } + lib = null; + System.exit(0); + } +} +''' +@feature('javac') +@before_method('process_source') +def apply_java(self): + Utils.def_attrs(self,jarname='',classpath='',sourcepath='.',srcdir='.',jar_mf_attributes={},jar_mf_classpath=[]) + outdir=getattr(self,'outdir',None) + if outdir: + if not isinstance(outdir,Node.Node): + outdir=self.path.get_bld().make_node(self.outdir) + else: + outdir=self.path.get_bld() + outdir.mkdir() + self.outdir=outdir + self.env['OUTDIR']=outdir.abspath() + self.javac_task=tsk=self.create_task('javac') + tmp=[] + srcdir=getattr(self,'srcdir','') + if isinstance(srcdir,Node.Node): + srcdir=[srcdir] + for x in Utils.to_list(srcdir): + if isinstance(x,Node.Node): + y=x + else: + y=self.path.find_dir(x) + if not y: + self.bld.fatal('Could not find the folder %s from %s'%(x,self.path)) + tmp.append(y) + tsk.srcdir=tmp + if getattr(self,'compat',None): + tsk.env.append_value('JAVACFLAGS',['-source',self.compat]) + if hasattr(self,'sourcepath'): + fold=[isinstance(x,Node.Node)and x or self.path.find_dir(x)for x in self.to_list(self.sourcepath)] + names=os.pathsep.join([x.srcpath()for x in fold]) + else: + names=[x.srcpath()for x in tsk.srcdir] + if names: + 
tsk.env.append_value('JAVACFLAGS',['-sourcepath',names]) +@feature('javac') +@after_method('apply_java') +def use_javac_files(self): + lst=[] + self.uselib=self.to_list(getattr(self,'uselib',[])) + names=self.to_list(getattr(self,'use',[])) + get=self.bld.get_tgen_by_name + for x in names: + try: + y=get(x) + except Exception: + self.uselib.append(x) + else: + y.post() + if hasattr(y,'jar_task'): + lst.append(y.jar_task.outputs[0].abspath()) + self.javac_task.set_run_after(y.jar_task) + else: + for tsk in y.tasks: + self.javac_task.set_run_after(tsk) + if lst: + self.env.append_value('CLASSPATH',lst) +@feature('javac') +@after_method('apply_java','propagate_uselib_vars','use_javac_files') +def set_classpath(self): + self.env.append_value('CLASSPATH',getattr(self,'classpath',[])) + for x in self.tasks: + x.env.CLASSPATH=os.pathsep.join(self.env.CLASSPATH)+os.pathsep +@feature('jar') +@after_method('apply_java','use_javac_files') +@before_method('process_source') +def jar_files(self): + destfile=getattr(self,'destfile','test.jar') + jaropts=getattr(self,'jaropts',[]) + manifest=getattr(self,'manifest',None) + basedir=getattr(self,'basedir',None) + if basedir: + if not isinstance(self.basedir,Node.Node): + basedir=self.path.get_bld().make_node(basedir) + else: + basedir=self.path.get_bld() + if not basedir: + self.bld.fatal('Could not find the basedir %r for %r'%(self.basedir,self)) + self.jar_task=tsk=self.create_task('jar_create') + if manifest: + jarcreate=getattr(self,'jarcreate','cfm') + if not isinstance(manifest,Node.Node): + node=self.path.find_or_declare(manifest) + else: + node=manifest + tsk.dep_nodes.append(node) + jaropts.insert(0,node.abspath()) + else: + jarcreate=getattr(self,'jarcreate','cf') + if not isinstance(destfile,Node.Node): + destfile=self.path.find_or_declare(destfile) + if not destfile: + self.bld.fatal('invalid destfile %r for %r'%(destfile,self)) + tsk.set_outputs(destfile) + tsk.basedir=basedir + jaropts.append('-C') + jaropts.append(basedir.bldpath()) + jaropts.append('.') + tsk.env['JAROPTS']=jaropts + tsk.env['JARCREATE']=jarcreate + if getattr(self,'javac_task',None): + tsk.set_run_after(self.javac_task) +@feature('jar') +@after_method('jar_files') +def use_jar_files(self): + self.uselib=self.to_list(getattr(self,'uselib',[])) + names=self.to_list(getattr(self,'use',[])) + get=self.bld.get_tgen_by_name + for x in names: + try: + y=get(x) + except Exception: + self.uselib.append(x) + else: + y.post() + self.jar_task.run_after.update(y.tasks) +class jar_create(Task.Task): + color='GREEN' + run_str='${JAR} ${JARCREATE} ${TGT} ${JAROPTS}' + def runnable_status(self): + for t in self.run_after: + if not t.hasrun: + return Task.ASK_LATER + if not self.inputs: + global JAR_RE + try: + self.inputs=[x for x in self.basedir.ant_glob(JAR_RE,remove=False)if id(x)!=id(self.outputs[0])] + except Exception: + raise Errors.WafError('Could not find the basedir %r for %r'%(self.basedir,self)) + return super(jar_create,self).runnable_status() +class javac(Task.Task): + color='BLUE' + vars=['CLASSPATH','JAVACFLAGS','JAVAC','OUTDIR'] + def uid(self): + lst=[self.__class__.__name__,self.generator.outdir.abspath()] + for x in self.srcdir: + lst.append(x.abspath()) + return Utils.h_list(lst) + def runnable_status(self): + for t in self.run_after: + if not t.hasrun: + return Task.ASK_LATER + if not self.inputs: + global SOURCE_RE + self.inputs=[] + for x in self.srcdir: + self.inputs.extend(x.ant_glob(SOURCE_RE,remove=False)) + return super(javac,self).runnable_status() + def 
run(self): + env=self.env + gen=self.generator + bld=gen.bld + wd=bld.bldnode.abspath() + def to_list(xx): + if isinstance(xx,str):return[xx] + return xx + cmd=[] + cmd.extend(to_list(env['JAVAC'])) + cmd.extend(['-classpath']) + cmd.extend(to_list(env['CLASSPATH'])) + cmd.extend(['-d']) + cmd.extend(to_list(env['OUTDIR'])) + cmd.extend(to_list(env['JAVACFLAGS'])) + files=[a.path_from(bld.bldnode)for a in self.inputs] + tmp=None + try: + if len(str(files))+len(str(cmd))>8192: + (fd,tmp)=tempfile.mkstemp(dir=bld.bldnode.abspath()) + try: + os.write(fd,'\n'.join(files)) + finally: + if tmp: + os.close(fd) + if Logs.verbose: + Logs.debug('runner: %r'%(cmd+files)) + cmd.append('@'+tmp) + else: + cmd+=files + ret=self.exec_command(cmd,cwd=wd,env=env.env or None) + finally: + if tmp: + os.remove(tmp) + return ret + def post_run(self): + for n in self.generator.outdir.ant_glob('**/*.class'): + n.sig=Utils.h_file(n.abspath()) + self.generator.bld.task_sigs[self.uid()]=self.cache_sig +@feature('javadoc') +@after_method('process_rule') +def create_javadoc(self): + tsk=self.create_task('javadoc') + tsk.classpath=getattr(self,'classpath',[]) + self.javadoc_package=Utils.to_list(self.javadoc_package) + if not isinstance(self.javadoc_output,Node.Node): + self.javadoc_output=self.bld.path.find_or_declare(self.javadoc_output) +class javadoc(Task.Task): + color='BLUE' + def __str__(self): + return'%s: %s -> %s\n'%(self.__class__.__name__,self.generator.srcdir,self.generator.javadoc_output) + def run(self): + env=self.env + bld=self.generator.bld + wd=bld.bldnode.abspath() + srcpath=self.generator.path.abspath()+os.sep+self.generator.srcdir + srcpath+=os.pathsep + srcpath+=self.generator.path.get_bld().abspath()+os.sep+self.generator.srcdir + classpath=env.CLASSPATH + classpath+=os.pathsep + classpath+=os.pathsep.join(self.classpath) + classpath="".join(classpath) + self.last_cmd=lst=[] + lst.extend(Utils.to_list(env['JAVADOC'])) + lst.extend(['-d',self.generator.javadoc_output.abspath()]) + lst.extend(['-sourcepath',srcpath]) + lst.extend(['-classpath',classpath]) + lst.extend(['-subpackages']) + lst.extend(self.generator.javadoc_package) + lst=[x for x in lst if x] + self.generator.bld.cmd_and_log(lst,cwd=wd,env=env.env or None,quiet=0) + def post_run(self): + nodes=self.generator.javadoc_output.ant_glob('**') + for x in nodes: + x.sig=Utils.h_file(x.abspath()) + self.generator.bld.task_sigs[self.uid()]=self.cache_sig +def configure(self): + java_path=self.environ['PATH'].split(os.pathsep) + v=self.env + if'JAVA_HOME'in self.environ: + java_path=[os.path.join(self.environ['JAVA_HOME'],'bin')]+java_path + self.env['JAVA_HOME']=[self.environ['JAVA_HOME']] + for x in'javac java jar javadoc'.split(): + self.find_program(x,var=x.upper(),path_list=java_path) + if'CLASSPATH'in self.environ: + v['CLASSPATH']=self.environ['CLASSPATH'] + if not v['JAR']:self.fatal('jar is required for making java packages') + if not v['JAVAC']:self.fatal('javac is required for compiling java classes') + v['JARCREATE']='cf' + v['JAVACFLAGS']=[] +@conf +def check_java_class(self,classname,with_classpath=None): + javatestdir='.waf-javatest' + classpath=javatestdir + if self.env['CLASSPATH']: + classpath+=os.pathsep+self.env['CLASSPATH'] + if isinstance(with_classpath,str): + classpath+=os.pathsep+with_classpath + shutil.rmtree(javatestdir,True) + os.mkdir(javatestdir) + Utils.writef(os.path.join(javatestdir,'Test.java'),class_check_source) + self.exec_command(self.env['JAVAC']+[os.path.join(javatestdir,'Test.java')],shell=False) + 
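# (Editor's annotation, not part of the upstream file.) At this point
# check_java_class has written the Test.java probe shown earlier and compiled
# it into the throwaway '.waf-javatest' directory; the lines that follow run
# it through ${JAVA} with the requested class name appended to the classpath,
# so a zero exit status means the class could be loaded. Typical use from a
# project wscript (the class name and jar path are only illustrations):
#
#     def configure(conf):
#         conf.load('javaw')
#         if conf.check_java_class('java.io.DataOutputStream'):
#             conf.fatal('java.io.DataOutputStream is missing')
#         conf.check_java_class('org.example.Widget', with_classpath='libs/widget.jar')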
cmd=self.env['JAVA']+['-cp',classpath,'Test',classname] + self.to_log("%s\n"%str(cmd)) + found=self.exec_command(cmd,shell=False) + self.msg('Checking for java class %s'%classname,not found) + shutil.rmtree(javatestdir,True) + return found +@conf +def check_jni_headers(conf): + if not conf.env.CC_NAME and not conf.env.CXX_NAME: + conf.fatal('load a compiler first (gcc, g++, ..)') + if not conf.env.JAVA_HOME: + conf.fatal('set JAVA_HOME in the system environment') + javaHome=conf.env['JAVA_HOME'][0] + dir=conf.root.find_dir(conf.env.JAVA_HOME[0]+'/include') + if dir is None: + dir=conf.root.find_dir(conf.env.JAVA_HOME[0]+'/../Headers') + if dir is None: + conf.fatal('JAVA_HOME does not seem to be set properly') + f=dir.ant_glob('**/(jni|jni_md).h') + incDirs=[x.parent.abspath()for x in f] + dir=conf.root.find_dir(conf.env.JAVA_HOME[0]) + f=dir.ant_glob('**/*jvm.(so|dll|dylib)') + libDirs=[x.parent.abspath()for x in f]or[javaHome] + f=dir.ant_glob('**/*jvm.(lib)') + if f: + libDirs=[[x,y.parent.abspath()]for x in libDirs for y in f] + if conf.env.DEST_OS=='freebsd': + conf.env.append_unique('LINKFLAGS_JAVA','-pthread') + for d in libDirs: + try: + conf.check(header_name='jni.h',define_name='HAVE_JNI_H',lib='jvm',libpath=d,includes=incDirs,uselib_store='JAVA',uselib='JAVA') + except Exception: + pass + else: + break + else: + conf.fatal('could not find lib jvm in %r (see config.log)'%libDirs) diff -Nru lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/kde4.py lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/kde4.py --- lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/kde4.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/kde4.py 2019-06-06 20:12:09.000000000 +0000 @@ -0,0 +1,48 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
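For reference, the javaw tool added above drives the whole javac-then-jar pipeline from task generator attributes (apply_java, jar_files and friends), while check_jni_headers locates jni.h and the jvm library under JAVA_HOME. A minimal wscript sketch, with purely illustrative directory and jar names:

    def configure(conf):
        conf.load('javaw')
        conf.check_jni_headers()         # only needed when building JNI code

    def build(bld):
        bld(features='javac jar',
            srcdir='src',                # scanned with SOURCE_RE (**/*.java)
            outdir='classes',            # compiled class tree, exported as ${OUTDIR}
            basedir='classes',           # jar_create collects JAR_RE (**/*) from here
            destfile='myapp.jar',        # the default would be 'test.jar'
            manifest='manifest.mf',      # switches JARCREATE from 'cf' to 'cfm'
            use='dep-jar')               # a jar generator whose output joins CLASSPATH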
https://waf.io/book/index.html#_obtaining_the_waf_file + +import os,re +from waflib import Task,Utils +from waflib.TaskGen import feature +@feature('msgfmt') +def apply_msgfmt(self): + for lang in self.to_list(self.langs): + node=self.path.find_resource(lang+'.po') + task=self.create_task('msgfmt',node,node.change_ext('.mo')) + langname=lang.split('/') + langname=langname[-1] + inst=getattr(self,'install_path','${KDE4_LOCALE_INSTALL_DIR}') + self.bld.install_as(inst+os.sep+langname+os.sep+'LC_MESSAGES'+os.sep+getattr(self,'appname','set_your_appname')+'.mo',task.outputs[0],chmod=getattr(self,'chmod',Utils.O644)) +class msgfmt(Task.Task): + color='BLUE' + run_str='${MSGFMT} ${SRC} -o ${TGT}' +def configure(self): + kdeconfig=self.find_program('kde4-config') + prefix=self.cmd_and_log(kdeconfig+['--prefix']).strip() + fname='%s/share/apps/cmake/modules/KDELibsDependencies.cmake'%prefix + try:os.stat(fname) + except OSError: + fname='%s/share/kde4/apps/cmake/modules/KDELibsDependencies.cmake'%prefix + try:os.stat(fname) + except OSError:self.fatal('could not open %s'%fname) + try: + txt=Utils.readf(fname) + except EnvironmentError: + self.fatal('could not read %s'%fname) + txt=txt.replace('\\\n','\n') + fu=re.compile('#(.*)\n') + txt=fu.sub('',txt) + setregexp=re.compile('([sS][eE][tT]\s*\()\s*([^\s]+)\s+\"([^"]+)\"\)') + found=setregexp.findall(txt) + for(_,key,val)in found: + self.env[key]=val + self.env['LIB_KDECORE']=['kdecore'] + self.env['LIB_KDEUI']=['kdeui'] + self.env['LIB_KIO']=['kio'] + self.env['LIB_KHTML']=['khtml'] + self.env['LIB_KPARTS']=['kparts'] + self.env['LIBPATH_KDECORE']=[os.path.join(self.env.KDE4_LIB_INSTALL_DIR,'kde4','devel'),self.env.KDE4_LIB_INSTALL_DIR] + self.env['INCLUDES_KDECORE']=[self.env['KDE4_INCLUDE_INSTALL_DIR']] + self.env.append_value('INCLUDES_KDECORE',[self.env['KDE4_INCLUDE_INSTALL_DIR']+os.sep+'KDE']) + self.find_program('msgfmt',var='MSGFMT') diff -Nru lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/ldc2.py lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/ldc2.py --- lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/ldc2.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/ldc2.py 2019-06-06 20:12:09.000000000 +0000 @@ -0,0 +1,36 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
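The kde4 tool above scrapes KDELibsDependencies.cmake into conf.env (the KDE4_* variables, LIB_KDECORE and so on) and provides the msgfmt feature for compiling and installing translations. A usage sketch, assuming hypothetical po files and application name:

    def configure(conf):
        conf.load('kde4')                # requires kde4-config on PATH

    def build(bld):
        bld(features='msgfmt',
            langs='po/fr po/de',         # resolved as po/fr.po and po/de.po
            appname='myapp',             # installs <lang>/LC_MESSAGES/myapp.mo
            install_path='${KDE4_LOCALE_INSTALL_DIR}')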
https://waf.io/book/index.html#_obtaining_the_waf_file + +from waflib.Tools import ar,d +from waflib.Configure import conf +@conf +def find_ldc2(conf): + conf.find_program(['ldc2'],var='D') + out=conf.cmd_and_log(conf.env.D+['-version']) + if out.find("based on DMD v2.")==-1: + conf.fatal("detected compiler is not ldc2") +@conf +def common_flags_ldc2(conf): + v=conf.env + v['D_SRC_F']=['-c'] + v['D_TGT_F']='-of%s' + v['D_LINKER']=v['D'] + v['DLNK_SRC_F']='' + v['DLNK_TGT_F']='-of%s' + v['DINC_ST']='-I%s' + v['DSHLIB_MARKER']=v['DSTLIB_MARKER']='' + v['DSTLIB_ST']=v['DSHLIB_ST']='-L-l%s' + v['DSTLIBPATH_ST']=v['DLIBPATH_ST']='-L-L%s' + v['LINKFLAGS_dshlib']=['-L-shared'] + v['DHEADER_ext']='.di' + v['DFLAGS_d_with_header']=['-H','-Hf'] + v['D_HDR_F']='%s' + v['LINKFLAGS']=[] + v['DFLAGS_dshlib']=['-relocation-model=pic'] +def configure(conf): + conf.find_ldc2() + conf.load('ar') + conf.load('d') + conf.common_flags_ldc2() + conf.d_platform_flags() diff -Nru lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/lua.py lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/lua.py --- lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/lua.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/lua.py 2019-06-06 20:12:09.000000000 +0000 @@ -0,0 +1,18 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file + +from waflib.TaskGen import extension +from waflib import Task +@extension('.lua') +def add_lua(self,node): + tsk=self.create_task('luac',node,node.change_ext('.luac')) + inst_to=getattr(self,'install_path',self.env.LUADIR and'${LUADIR}'or None) + if inst_to: + self.bld.install_files(inst_to,tsk.outputs) + return tsk +class luac(Task.Task): + run_str='${LUAC} -s -o ${TGT} ${SRC}' + color='PINK' +def configure(conf): + conf.find_program('luac',var='LUAC') diff -Nru lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/msvc.py lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/msvc.py --- lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/msvc.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/msvc.py 2019-06-06 20:12:09.000000000 +0000 @@ -0,0 +1,809 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
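Both tools above follow the same pattern: ldc2 locates the compiler and fills in the D flag variables, while lua registers an extension handler so that any task generator listing .lua sources gets a luac task. Minimal lua usage could look like this (paths illustrative):

    def configure(conf):
        conf.load('lua')                 # finds luac and sets ${LUAC}

    def build(bld):
        bld(source='scripts/main.lua')   # runs: luac -s -o main.luac main.lua
        # the result is installed to ${LUADIR} when env.LUADIR is set, per add_lua()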
https://waf.io/book/index.html#_obtaining_the_waf_file + +import os,sys,re,tempfile +from waflib import Utils,Task,Logs,Options,Errors +from waflib.Logs import debug,warn +from waflib.TaskGen import after_method,feature +from waflib.Configure import conf +from waflib.Tools import ccroot,c,cxx,ar,winres +g_msvc_systemlibs=''' +aclui activeds ad1 adptif adsiid advapi32 asycfilt authz bhsupp bits bufferoverflowu cabinet +cap certadm certidl ciuuid clusapi comctl32 comdlg32 comsupp comsuppd comsuppw comsuppwd comsvcs +credui crypt32 cryptnet cryptui d3d8thk daouuid dbgeng dbghelp dciman32 ddao35 ddao35d +ddao35u ddao35ud delayimp dhcpcsvc dhcpsapi dlcapi dnsapi dsprop dsuiext dtchelp +faultrep fcachdll fci fdi framedyd framedyn gdi32 gdiplus glauxglu32 gpedit gpmuuid +gtrts32w gtrtst32hlink htmlhelp httpapi icm32 icmui imagehlp imm32 iphlpapi iprop +kernel32 ksguid ksproxy ksuser libcmt libcmtd libcpmt libcpmtd loadperf lz32 mapi +mapi32 mgmtapi minidump mmc mobsync mpr mprapi mqoa mqrt msacm32 mscms mscoree +msdasc msimg32 msrating mstask msvcmrt msvcurt msvcurtd mswsock msxml2 mtx mtxdm +netapi32 nmapinmsupp npptools ntdsapi ntdsbcli ntmsapi ntquery odbc32 odbcbcp +odbccp32 oldnames ole32 oleacc oleaut32 oledb oledlgolepro32 opends60 opengl32 +osptk parser pdh penter pgobootrun pgort powrprof psapi ptrustm ptrustmd ptrustu +ptrustud qosname rasapi32 rasdlg rassapi resutils riched20 rpcndr rpcns4 rpcrt4 rtm +rtutils runtmchk scarddlg scrnsave scrnsavw secur32 sensapi setupapi sfc shell32 +shfolder shlwapi sisbkup snmpapi sporder srclient sti strsafe svcguid tapi32 thunk32 +traffic unicows url urlmon user32 userenv usp10 uuid uxtheme vcomp vcompd vdmdbg +version vfw32 wbemuuid webpost wiaguid wininet winmm winscard winspool winstrm +wintrust wldap32 wmiutils wow32 ws2_32 wsnmp32 wsock32 wst wtsapi32 xaswitch xolehlp +'''.split() +all_msvc_platforms=[('x64','amd64'),('x86','x86'),('ia64','ia64'),('x86_amd64','amd64'),('x86_ia64','ia64'),('x86_arm','arm'),('amd64_x86','x86'),('amd64_arm','arm')] +all_wince_platforms=[('armv4','arm'),('armv4i','arm'),('mipsii','mips'),('mipsii_fp','mips'),('mipsiv','mips'),('mipsiv_fp','mips'),('sh4','sh'),('x86','cex86')] +all_icl_platforms=[('intel64','amd64'),('em64t','amd64'),('ia32','x86'),('Itanium','ia64')] +def options(opt): + opt.add_option('--msvc_version',type='string',help='msvc version, eg: "msvc 10.0,msvc 9.0"',default='') + opt.add_option('--msvc_targets',type='string',help='msvc targets, eg: "x64,arm"',default='') + opt.add_option('--msvc_lazy_autodetect',action='store_true',help='lazily check msvc target environments') +def setup_msvc(conf,versions,arch=False): + platforms=getattr(Options.options,'msvc_targets','').split(',') + if platforms==['']: + platforms=Utils.to_list(conf.env['MSVC_TARGETS'])or[i for i,j in all_msvc_platforms+all_icl_platforms+all_wince_platforms] + desired_versions=getattr(Options.options,'msvc_version','').split(',') + if desired_versions==['']: + desired_versions=conf.env['MSVC_VERSIONS']or[v for v,_ in versions][::-1] + versiondict=dict(versions) + for version in desired_versions: + try: + targets=dict(versiondict[version]) + for target in platforms: + try: + try: + realtarget,(p1,p2,p3)=targets[target] + except conf.errors.ConfigurationError: + del(targets[target]) + else: + compiler,revision=version.rsplit(' ',1) + if arch: + return compiler,revision,p1,p2,p3,realtarget + else: + return compiler,revision,p1,p2,p3 + except KeyError:continue + except KeyError:continue + conf.fatal('msvc: Impossible to find a valid 
architecture for building (in setup_msvc)') +@conf +def get_msvc_version(conf,compiler,version,target,vcvars): + debug('msvc: get_msvc_version: %r %r %r',compiler,version,target) + try: + conf.msvc_cnt+=1 + except AttributeError: + conf.msvc_cnt=1 + batfile=conf.bldnode.make_node('waf-print-msvc-%d.bat'%conf.msvc_cnt) + batfile.write("""@echo off +set INCLUDE= +set LIB= +call "%s" %s +echo PATH=%%PATH%% +echo INCLUDE=%%INCLUDE%% +echo LIB=%%LIB%%;%%LIBPATH%% +"""%(vcvars,target)) + sout=conf.cmd_and_log(['cmd.exe','/E:on','/V:on','/C',batfile.abspath()]) + lines=sout.splitlines() + if not lines[0]: + lines.pop(0) + MSVC_PATH=MSVC_INCDIR=MSVC_LIBDIR=None + for line in lines: + if line.startswith('PATH='): + path=line[5:] + MSVC_PATH=path.split(';') + elif line.startswith('INCLUDE='): + MSVC_INCDIR=[i for i in line[8:].split(';')if i] + elif line.startswith('LIB='): + MSVC_LIBDIR=[i for i in line[4:].split(';')if i] + if None in(MSVC_PATH,MSVC_INCDIR,MSVC_LIBDIR): + conf.fatal('msvc: Could not find a valid architecture for building (get_msvc_version_3)') + env=dict(os.environ) + env.update(PATH=path) + compiler_name,linker_name,lib_name=_get_prog_names(conf,compiler) + cxx=conf.find_program(compiler_name,path_list=MSVC_PATH) + if'CL'in env: + del(env['CL']) + try: + try: + conf.cmd_and_log(cxx+['/help'],env=env) + except UnicodeError: + st=Utils.ex_stack() + if conf.logger: + conf.logger.error(st) + conf.fatal('msvc: Unicode error - check the code page?') + except Exception as e: + debug('msvc: get_msvc_version: %r %r %r -> failure %s'%(compiler,version,target,str(e))) + conf.fatal('msvc: cannot run the compiler in get_msvc_version (run with -v to display errors)') + else: + debug('msvc: get_msvc_version: %r %r %r -> OK',compiler,version,target) + finally: + conf.env[compiler_name]='' + return(MSVC_PATH,MSVC_INCDIR,MSVC_LIBDIR) +@conf +def gather_wsdk_versions(conf,versions): + version_pattern=re.compile('^v..?.?\...?.?') + try: + all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Wow6432node\\Microsoft\\Microsoft SDKs\\Windows') + except WindowsError: + try: + all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Microsoft\\Microsoft SDKs\\Windows') + except WindowsError: + return + index=0 + while 1: + try: + version=Utils.winreg.EnumKey(all_versions,index) + except WindowsError: + break + index=index+1 + if not version_pattern.match(version): + continue + try: + msvc_version=Utils.winreg.OpenKey(all_versions,version) + path,type=Utils.winreg.QueryValueEx(msvc_version,'InstallationFolder') + except WindowsError: + continue + if path and os.path.isfile(os.path.join(path,'bin','SetEnv.cmd')): + targets=[] + for target,arch in all_msvc_platforms: + try: + targets.append((target,(arch,get_compiler_env(conf,'wsdk',version,'/'+target,os.path.join(path,'bin','SetEnv.cmd'))))) + except conf.errors.ConfigurationError: + pass + versions.append(('wsdk '+version[1:],targets)) +def gather_wince_supported_platforms(): + supported_wince_platforms=[] + try: + ce_sdk=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Wow6432node\\Microsoft\\Windows CE Tools\\SDKs') + except WindowsError: + try: + ce_sdk=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Microsoft\\Windows CE Tools\\SDKs') + except WindowsError: + ce_sdk='' + if not ce_sdk: + return supported_wince_platforms + ce_index=0 + while 1: + try: + sdk_device=Utils.winreg.EnumKey(ce_sdk,ce_index) + except WindowsError: + break + ce_index=ce_index+1 +
sdk=Utils.winreg.OpenKey(ce_sdk,sdk_device) + try: + path,type=Utils.winreg.QueryValueEx(sdk,'SDKRootDir') + except WindowsError: + try: + path,type=Utils.winreg.QueryValueEx(sdk,'SDKInformation') + path,xml=os.path.split(path) + except WindowsError: + continue + path=str(path) + path,device=os.path.split(path) + if not device: + path,device=os.path.split(path) + platforms=[] + for arch,compiler in all_wince_platforms: + if os.path.isdir(os.path.join(path,device,'Lib',arch)): + platforms.append((arch,compiler,os.path.join(path,device,'Include',arch),os.path.join(path,device,'Lib',arch))) + if platforms: + supported_wince_platforms.append((device,platforms)) + return supported_wince_platforms +def gather_msvc_detected_versions(): + version_pattern=re.compile('^(\d\d?\.\d\d?)(Exp)?$') + detected_versions=[] + for vcver,vcvar in(('VCExpress','Exp'),('VisualStudio','')): + try: + prefix='SOFTWARE\\Wow6432node\\Microsoft\\'+vcver + all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,prefix) + except WindowsError: + try: + prefix='SOFTWARE\\Microsoft\\'+vcver + all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,prefix) + except WindowsError: + continue + index=0 + while 1: + try: + version=Utils.winreg.EnumKey(all_versions,index) + except WindowsError: + break + index=index+1 + match=version_pattern.match(version) + if not match: + continue + else: + versionnumber=float(match.group(1)) + detected_versions.append((versionnumber,version+vcvar,prefix+"\\"+version)) + def fun(tup): + return tup[0] + detected_versions.sort(key=fun) + return detected_versions +def get_compiler_env(conf,compiler,version,bat_target,bat,select=None): + lazy=getattr(Options.options,'msvc_lazy_autodetect',False)or conf.env['MSVC_LAZY_AUTODETECT'] + def msvc_thunk(): + vs=conf.get_msvc_version(compiler,version,bat_target,bat) + if select: + return select(vs) + else: + return vs + return lazytup(msvc_thunk,lazy,([],[],[])) +class lazytup(object): + def __init__(self,fn,lazy=True,default=None): + self.fn=fn + self.default=default + if not lazy: + self.evaluate() + def __len__(self): + self.evaluate() + return len(self.value) + def __iter__(self): + self.evaluate() + for i,v in enumerate(self.value): + yield v + def __getitem__(self,i): + self.evaluate() + return self.value[i] + def __repr__(self): + if hasattr(self,'value'): + return repr(self.value) + elif self.default: + return repr(self.default) + else: + self.evaluate() + return repr(self.value) + def evaluate(self): + if hasattr(self,'value'): + return + self.value=self.fn() +@conf +def gather_msvc_targets(conf,versions,version,vc_path): + targets=[] + if os.path.isfile(os.path.join(vc_path,'vcvarsall.bat')): + for target,realtarget in all_msvc_platforms[::-1]: + try: + targets.append((target,(realtarget,get_compiler_env(conf,'msvc',version,target,os.path.join(vc_path,'vcvarsall.bat'))))) + except conf.errors.ConfigurationError: + pass + elif os.path.isfile(os.path.join(vc_path,'Common7','Tools','vsvars32.bat')): + try: + targets.append(('x86',('x86',get_compiler_env(conf,'msvc',version,'x86',os.path.join(vc_path,'Common7','Tools','vsvars32.bat'))))) + except conf.errors.ConfigurationError: + pass + elif os.path.isfile(os.path.join(vc_path,'Bin','vcvars32.bat')): + try: + targets.append(('x86',('x86',get_compiler_env(conf,'msvc',version,'',os.path.join(vc_path,'Bin','vcvars32.bat'))))) + except conf.errors.ConfigurationError: + pass + if targets: + versions.append(('msvc '+version,targets)) +@conf +def 
gather_wince_targets(conf,versions,version,vc_path,vsvars,supported_platforms): + for device,platforms in supported_platforms: + cetargets=[] + for platform,compiler,include,lib in platforms: + winCEpath=os.path.join(vc_path,'ce') + if not os.path.isdir(winCEpath): + continue + if os.path.isdir(os.path.join(winCEpath,'lib',platform)): + bindirs=[os.path.join(winCEpath,'bin',compiler),os.path.join(winCEpath,'bin','x86_'+compiler)] + incdirs=[os.path.join(winCEpath,'include'),os.path.join(winCEpath,'atlmfc','include'),include] + libdirs=[os.path.join(winCEpath,'lib',platform),os.path.join(winCEpath,'atlmfc','lib',platform),lib] + def combine_common(compiler_env): + (common_bindirs,_1,_2)=compiler_env + return(bindirs+common_bindirs,incdirs,libdirs) + try: + cetargets.append((platform,(platform,get_compiler_env(conf,'msvc',version,'x86',vsvars,combine_common)))) + except conf.errors.ConfigurationError: + continue + if cetargets: + versions.append((device+' '+version,cetargets)) +@conf +def gather_winphone_targets(conf,versions,version,vc_path,vsvars): + targets=[] + for target,realtarget in all_msvc_platforms[::-1]: + try: + targets.append((target,(realtarget,get_compiler_env(conf,'winphone',version,target,vsvars)))) + except conf.errors.ConfigurationError: + pass + if targets: + versions.append(('winphone '+version,targets)) +@conf +def gather_msvc_versions(conf,versions): + vc_paths=[] + for(v,version,reg)in gather_msvc_detected_versions(): + try: + try: + msvc_version=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,reg+"\\Setup\\VC") + except WindowsError: + msvc_version=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,reg+"\\Setup\\Microsoft Visual C++") + path,type=Utils.winreg.QueryValueEx(msvc_version,'ProductDir') + vc_paths.append((version,os.path.abspath(str(path)))) + except WindowsError: + continue + wince_supported_platforms=gather_wince_supported_platforms() + for version,vc_path in vc_paths: + vs_path=os.path.dirname(vc_path) + vsvars=os.path.join(vs_path,'Common7','Tools','vsvars32.bat') + if wince_supported_platforms and os.path.isfile(vsvars): + conf.gather_wince_targets(versions,version,vc_path,vsvars,wince_supported_platforms) + for version,vc_path in vc_paths: + vs_path=os.path.dirname(vc_path) + vsvars=os.path.join(vs_path,'VC','WPSDK','WP80','vcvarsphoneall.bat') + if os.path.isfile(vsvars): + conf.gather_winphone_targets(versions,'8.0',vc_path,vsvars) + break + for version,vc_path in vc_paths: + vs_path=os.path.dirname(vc_path) + conf.gather_msvc_targets(versions,version,vc_path) +@conf +def gather_icl_versions(conf,versions): + version_pattern=re.compile('^...?.?\....?.?') + try: + all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Wow6432node\\Intel\\Compilers\\C++') + except WindowsError: + try: + all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Intel\\Compilers\\C++') + except WindowsError: + return + index=0 + while 1: + try: + version=Utils.winreg.EnumKey(all_versions,index) + except WindowsError: + break + index=index+1 + if not version_pattern.match(version): + continue + targets=[] + for target,arch in all_icl_platforms: + try: + if target=='intel64':targetDir='EM64T_NATIVE' + else:targetDir=target + Utils.winreg.OpenKey(all_versions,version+'\\'+targetDir) + icl_version=Utils.winreg.OpenKey(all_versions,version) + path,type=Utils.winreg.QueryValueEx(icl_version,'ProductDir') + batch_file=os.path.join(path,'bin','iclvars.bat') + if os.path.isfile(batch_file): + try: + 
targets.append((target,(arch,get_compiler_env(conf,'intel',version,target,batch_file)))) + except conf.errors.ConfigurationError: + pass + except WindowsError: + pass + for target,arch in all_icl_platforms: + try: + icl_version=Utils.winreg.OpenKey(all_versions,version+'\\'+target) + path,type=Utils.winreg.QueryValueEx(icl_version,'ProductDir') + batch_file=os.path.join(path,'bin','iclvars.bat') + if os.path.isfile(batch_file): + try: + targets.append((target,(arch,get_compiler_env(conf,'intel',version,target,batch_file)))) + except conf.errors.ConfigurationError: + pass + except WindowsError: + continue + major=version[0:2] + versions.append(('intel '+major,targets)) +@conf +def gather_intel_composer_versions(conf,versions): + version_pattern=re.compile('^...?.?\...?.?.?') + try: + all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Wow6432node\\Intel\\Suites') + except WindowsError: + try: + all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Intel\\Suites') + except WindowsError: + return + index=0 + while 1: + try: + version=Utils.winreg.EnumKey(all_versions,index) + except WindowsError: + break + index=index+1 + if not version_pattern.match(version): + continue + targets=[] + for target,arch in all_icl_platforms: + try: + if target=='intel64':targetDir='EM64T_NATIVE' + else:targetDir=target + try: + defaults=Utils.winreg.OpenKey(all_versions,version+'\\Defaults\\C++\\'+targetDir) + except WindowsError: + if targetDir=='EM64T_NATIVE': + defaults=Utils.winreg.OpenKey(all_versions,version+'\\Defaults\\C++\\EM64T') + else: + raise WindowsError + uid,type=Utils.winreg.QueryValueEx(defaults,'SubKey') + Utils.winreg.OpenKey(all_versions,version+'\\'+uid+'\\C++\\'+targetDir) + icl_version=Utils.winreg.OpenKey(all_versions,version+'\\'+uid+'\\C++') + path,type=Utils.winreg.QueryValueEx(icl_version,'ProductDir') + batch_file=os.path.join(path,'bin','iclvars.bat') + if os.path.isfile(batch_file): + try: + targets.append((target,(arch,get_compiler_env(conf,'intel',version,target,batch_file)))) + except conf.errors.ConfigurationError: + pass + compilervars_warning_attr='_compilervars_warning_key' + if version[0:2]=='13'and getattr(conf,compilervars_warning_attr,True): + setattr(conf,compilervars_warning_attr,False) + patch_url='http://software.intel.com/en-us/forums/topic/328487' + compilervars_arch=os.path.join(path,'bin','compilervars_arch.bat') + for vscomntool in('VS110COMNTOOLS','VS100COMNTOOLS'): + if vscomntool in os.environ: + vs_express_path=os.environ[vscomntool]+r'..\IDE\VSWinExpress.exe' + dev_env_path=os.environ[vscomntool]+r'..\IDE\devenv.exe' + if(r'if exist "%VS110COMNTOOLS%..\IDE\VSWinExpress.exe"'in Utils.readf(compilervars_arch)and not os.path.exists(vs_express_path)and not os.path.exists(dev_env_path)): + Logs.warn(('The Intel compilervar_arch.bat only checks for one Visual Studio SKU ''(VSWinExpress.exe) but it does not seem to be installed at %r. ''The intel command line set up will fail to configure unless the file %r''is patched. 
See: %s')%(vs_express_path,compilervars_arch,patch_url)) + except WindowsError: + pass + major=version[0:2] + versions.append(('intel '+major,targets)) +@conf +def get_msvc_versions(conf,eval_and_save=True): + if conf.env['MSVC_INSTALLED_VERSIONS']: + return conf.env['MSVC_INSTALLED_VERSIONS'] + lst=[] + conf.gather_icl_versions(lst) + conf.gather_intel_composer_versions(lst) + conf.gather_wsdk_versions(lst) + conf.gather_msvc_versions(lst) + if eval_and_save: + def checked_target(t): + target,(arch,paths)=t + try: + paths.evaluate() + except conf.errors.ConfigurationError: + return None + else: + return t + lst=[(version,list(filter(checked_target,targets)))for version,targets in lst] + conf.env['MSVC_INSTALLED_VERSIONS']=lst + return lst +@conf +def print_all_msvc_detected(conf): + for version,targets in conf.env['MSVC_INSTALLED_VERSIONS']: + Logs.info(version) + for target,l in targets: + Logs.info("\t"+target) +@conf +def detect_msvc(conf,arch=False): + lazy_detect=getattr(Options.options,'msvc_lazy_autodetect',False)or conf.env['MSVC_LAZY_AUTODETECT'] + versions=get_msvc_versions(conf,not lazy_detect) + return setup_msvc(conf,versions,arch) +@conf +def find_lt_names_msvc(self,libname,is_static=False): + lt_names=['lib%s.la'%libname,'%s.la'%libname,] + for path in self.env['LIBPATH']: + for la in lt_names: + laf=os.path.join(path,la) + dll=None + if os.path.exists(laf): + ltdict=Utils.read_la_file(laf) + lt_libdir=None + if ltdict.get('libdir',''): + lt_libdir=ltdict['libdir'] + if not is_static and ltdict.get('library_names',''): + dllnames=ltdict['library_names'].split() + dll=dllnames[0].lower() + dll=re.sub('\.dll$','',dll) + return(lt_libdir,dll,False) + elif ltdict.get('old_library',''): + olib=ltdict['old_library'] + if os.path.exists(os.path.join(path,olib)): + return(path,olib,True) + elif lt_libdir!=''and os.path.exists(os.path.join(lt_libdir,olib)): + return(lt_libdir,olib,True) + else: + return(None,olib,True) + else: + raise self.errors.WafError('invalid libtool object file: %s'%laf) + return(None,None,None) +@conf +def libname_msvc(self,libname,is_static=False): + lib=libname.lower() + lib=re.sub('\.lib$','',lib) + if lib in g_msvc_systemlibs: + return lib + lib=re.sub('^lib','',lib) + if lib=='m': + return None + (lt_path,lt_libname,lt_static)=self.find_lt_names_msvc(lib,is_static) + if lt_path!=None and lt_libname!=None: + if lt_static==True: + return os.path.join(lt_path,lt_libname) + if lt_path!=None: + _libpaths=[lt_path]+self.env['LIBPATH'] + else: + _libpaths=self.env['LIBPATH'] + static_libs=['lib%ss.lib'%lib,'lib%s.lib'%lib,'%ss.lib'%lib,'%s.lib'%lib,] + dynamic_libs=['lib%s.dll.lib'%lib,'lib%s.dll.a'%lib,'%s.dll.lib'%lib,'%s.dll.a'%lib,'lib%s_d.lib'%lib,'%s_d.lib'%lib,'%s.lib'%lib,] + libnames=static_libs + if not is_static: + libnames=dynamic_libs+static_libs + for path in _libpaths: + for libn in libnames: + if os.path.exists(os.path.join(path,libn)): + debug('msvc: lib found: %s'%os.path.join(path,libn)) + return re.sub('\.lib$','',libn) + self.fatal("The library %r could not be found"%libname) + return re.sub('\.lib$','',libname) +@conf +def check_lib_msvc(self,libname,is_static=False,uselib_store=None): + libn=self.libname_msvc(libname,is_static) + if not uselib_store: + uselib_store=libname.upper() + if False and is_static: + self.env['STLIB_'+uselib_store]=[libn] + else: + self.env['LIB_'+uselib_store]=[libn] +@conf +def check_libs_msvc(self,libnames,is_static=False): + for libname in Utils.to_list(libnames): + self.check_lib_msvc(libname,is_static) +def 
configure(conf): + conf.autodetect(True) + conf.find_msvc() + conf.msvc_common_flags() + conf.cc_load_tools() + conf.cxx_load_tools() + conf.cc_add_flags() + conf.cxx_add_flags() + conf.link_add_flags() + conf.visual_studio_add_flags() +@conf +def no_autodetect(conf): + conf.env.NO_MSVC_DETECT=1 + configure(conf) +@conf +def autodetect(conf,arch=False): + v=conf.env + if v.NO_MSVC_DETECT: + return + if arch: + compiler,version,path,includes,libdirs,arch=conf.detect_msvc(True) + v['DEST_CPU']=arch + else: + compiler,version,path,includes,libdirs=conf.detect_msvc() + v['PATH']=path + v['INCLUDES']=includes + v['LIBPATH']=libdirs + v['MSVC_COMPILER']=compiler + try: + v['MSVC_VERSION']=float(version) + except Exception: + v['MSVC_VERSION']=float(version[:-3]) +def _get_prog_names(conf,compiler): + if compiler=='intel': + compiler_name='ICL' + linker_name='XILINK' + lib_name='XILIB' + else: + compiler_name='CL' + linker_name='LINK' + lib_name='LIB' + return compiler_name,linker_name,lib_name +@conf +def find_msvc(conf): + if sys.platform=='cygwin': + conf.fatal('MSVC module does not work under cygwin Python!') + v=conf.env + path=v['PATH'] + compiler=v['MSVC_COMPILER'] + version=v['MSVC_VERSION'] + compiler_name,linker_name,lib_name=_get_prog_names(conf,compiler) + v.MSVC_MANIFEST=(compiler=='msvc'and version>=8)or(compiler=='wsdk'and version>=6)or(compiler=='intel'and version>=11) + cxx=conf.find_program(compiler_name,var='CXX',path_list=path) + env=dict(conf.environ) + if path:env.update(PATH=';'.join(path)) + if not conf.cmd_and_log(cxx+['/nologo','/help'],env=env): + conf.fatal('the msvc compiler could not be identified') + v['CC']=v['CXX']=cxx + v['CC_NAME']=v['CXX_NAME']='msvc' + if not v['LINK_CXX']: + link=conf.find_program(linker_name,path_list=path) + if link:v['LINK_CXX']=link + else:conf.fatal('%s was not found (linker)'%linker_name) + v['LINK']=link + if not v['LINK_CC']: + v['LINK_CC']=v['LINK_CXX'] + if not v['AR']: + stliblink=conf.find_program(lib_name,path_list=path,var='AR') + if not stliblink:return + v['ARFLAGS']=['/NOLOGO'] + if v.MSVC_MANIFEST: + conf.find_program('MT',path_list=path,var='MT') + v['MTFLAGS']=['/NOLOGO'] + try: + conf.load('winres') + except Errors.WafError: + warn('Resource compiler not found. 
Compiling resource file is disabled') +@conf +def visual_studio_add_flags(self): + v=self.env + try:v.prepend_value('INCLUDES',[x for x in self.environ['INCLUDE'].split(';')if x]) + except Exception:pass + try:v.prepend_value('LIBPATH',[x for x in self.environ['LIB'].split(';')if x]) + except Exception:pass +@conf +def msvc_common_flags(conf): + v=conf.env + v['DEST_BINFMT']='pe' + v.append_value('CFLAGS',['/nologo']) + v.append_value('CXXFLAGS',['/nologo']) + v['DEFINES_ST']='/D%s' + v['CC_SRC_F']='' + v['CC_TGT_F']=['/c','/Fo'] + v['CXX_SRC_F']='' + v['CXX_TGT_F']=['/c','/Fo'] + if(v.MSVC_COMPILER=='msvc'and v.MSVC_VERSION>=8)or(v.MSVC_COMPILER=='wsdk'and v.MSVC_VERSION>=6): + v['CC_TGT_F']=['/FC']+v['CC_TGT_F'] + v['CXX_TGT_F']=['/FC']+v['CXX_TGT_F'] + v['CPPPATH_ST']='/I%s' + v['AR_TGT_F']=v['CCLNK_TGT_F']=v['CXXLNK_TGT_F']='/OUT:' + v['CFLAGS_CONSOLE']=v['CXXFLAGS_CONSOLE']=['/SUBSYSTEM:CONSOLE'] + v['CFLAGS_NATIVE']=v['CXXFLAGS_NATIVE']=['/SUBSYSTEM:NATIVE'] + v['CFLAGS_POSIX']=v['CXXFLAGS_POSIX']=['/SUBSYSTEM:POSIX'] + v['CFLAGS_WINDOWS']=v['CXXFLAGS_WINDOWS']=['/SUBSYSTEM:WINDOWS'] + v['CFLAGS_WINDOWSCE']=v['CXXFLAGS_WINDOWSCE']=['/SUBSYSTEM:WINDOWSCE'] + v['CFLAGS_CRT_MULTITHREADED']=v['CXXFLAGS_CRT_MULTITHREADED']=['/MT'] + v['CFLAGS_CRT_MULTITHREADED_DLL']=v['CXXFLAGS_CRT_MULTITHREADED_DLL']=['/MD'] + v['CFLAGS_CRT_MULTITHREADED_DBG']=v['CXXFLAGS_CRT_MULTITHREADED_DBG']=['/MTd'] + v['CFLAGS_CRT_MULTITHREADED_DLL_DBG']=v['CXXFLAGS_CRT_MULTITHREADED_DLL_DBG']=['/MDd'] + v['LIB_ST']='%s.lib' + v['LIBPATH_ST']='/LIBPATH:%s' + v['STLIB_ST']='%s.lib' + v['STLIBPATH_ST']='/LIBPATH:%s' + v.append_value('LINKFLAGS',['/NOLOGO']) + if v['MSVC_MANIFEST']: + v.append_value('LINKFLAGS',['/MANIFEST']) + v['CFLAGS_cshlib']=[] + v['CXXFLAGS_cxxshlib']=[] + v['LINKFLAGS_cshlib']=v['LINKFLAGS_cxxshlib']=['/DLL'] + v['cshlib_PATTERN']=v['cxxshlib_PATTERN']='%s.dll' + v['implib_PATTERN']='%s.lib' + v['IMPLIB_ST']='/IMPLIB:%s' + v['LINKFLAGS_cstlib']=[] + v['cstlib_PATTERN']=v['cxxstlib_PATTERN']='%s.lib' + v['cprogram_PATTERN']=v['cxxprogram_PATTERN']='%s.exe' +@after_method('apply_link') +@feature('c','cxx') +def apply_flags_msvc(self): + if self.env.CC_NAME!='msvc'or not getattr(self,'link_task',None): + return + is_static=isinstance(self.link_task,ccroot.stlink_task) + subsystem=getattr(self,'subsystem','') + if subsystem: + subsystem='/subsystem:%s'%subsystem + flags=is_static and'ARFLAGS'or'LINKFLAGS' + self.env.append_value(flags,subsystem) + if not is_static: + for f in self.env.LINKFLAGS: + d=f.lower() + if d[1:]=='debug': + pdbnode=self.link_task.outputs[0].change_ext('.pdb') + self.link_task.outputs.append(pdbnode) + if getattr(self,'install_task',None): + self.pdb_install_task=self.bld.install_files(self.install_task.dest,pdbnode,env=self.env) + break +@feature('cprogram','cshlib','cxxprogram','cxxshlib') +@after_method('apply_link') +def apply_manifest(self): + if self.env.CC_NAME=='msvc'and self.env.MSVC_MANIFEST and getattr(self,'link_task',None): + out_node=self.link_task.outputs[0] + man_node=out_node.parent.find_or_declare(out_node.name+'.manifest') + self.link_task.outputs.append(man_node) + self.link_task.do_manifest=True +def exec_mf(self): + env=self.env + mtool=env['MT'] + if not mtool: + return 0 + self.do_manifest=False + outfile=self.outputs[0].abspath() + manifest=None + for out_node in self.outputs: + if out_node.name.endswith('.manifest'): + manifest=out_node.abspath() + break + if manifest is None: + return 0 + mode='' + if'cprogram'in self.generator.features 
or'cxxprogram'in self.generator.features: + mode='1' + elif'cshlib'in self.generator.features or'cxxshlib'in self.generator.features: + mode='2' + debug('msvc: embedding manifest in mode %r'%mode) + lst=[]+mtool + lst.extend(Utils.to_list(env['MTFLAGS'])) + lst.extend(['-manifest',manifest]) + lst.append('-outputresource:%s;%s'%(outfile,mode)) + return self.exec_command(lst) +def quote_response_command(self,flag): + if flag.find(' ')>-1: + for x in('/LIBPATH:','/IMPLIB:','/OUT:','/I'): + if flag.startswith(x): + flag='%s"%s"'%(x,flag[len(x):]) + break + else: + flag='"%s"'%flag + return flag +def exec_response_command(self,cmd,**kw): + try: + tmp=None + if sys.platform.startswith('win')and isinstance(cmd,list)and len(' '.join(cmd))>=8192: + program=cmd[0] + cmd=[self.quote_response_command(x)for x in cmd] + (fd,tmp)=tempfile.mkstemp() + os.write(fd,'\r\n'.join(i.replace('\\','\\\\')for i in cmd[1:])) + os.close(fd) + cmd=[program,'@'+tmp] + ret=self.generator.bld.exec_command(cmd,**kw) + finally: + if tmp: + try: + os.remove(tmp) + except OSError: + pass + return ret +def exec_command_msvc(self,*k,**kw): + if isinstance(k[0],list): + lst=[] + carry='' + for a in k[0]: + if a=='/Fo'or a=='/doc'or a[-1]==':': + carry=a + else: + lst.append(carry+a) + carry='' + k=[lst] + if self.env['PATH']: + env=dict(self.env.env or os.environ) + env.update(PATH=';'.join(self.env['PATH'])) + kw['env']=env + bld=self.generator.bld + try: + if not kw.get('cwd',None): + kw['cwd']=bld.cwd + except AttributeError: + bld.cwd=kw['cwd']=bld.variant_dir + ret=self.exec_response_command(k[0],**kw) + if not ret and getattr(self,'do_manifest',None): + ret=self.exec_mf() + return ret +def wrap_class(class_name): + cls=Task.classes.get(class_name,None) + if not cls: + return None + derived_class=type(class_name,(cls,),{}) + def exec_command(self,*k,**kw): + if self.env['CC_NAME']=='msvc': + return self.exec_command_msvc(*k,**kw) + else: + return super(derived_class,self).exec_command(*k,**kw) + derived_class.exec_command=exec_command + derived_class.exec_response_command=exec_response_command + derived_class.quote_response_command=quote_response_command + derived_class.exec_command_msvc=exec_command_msvc + derived_class.exec_mf=exec_mf + if hasattr(cls,'hcode'): + derived_class.hcode=cls.hcode + return derived_class +for k in'c cxx cprogram cxxprogram cshlib cxxshlib cstlib cxxstlib'.split(): + wrap_class(k) +def make_winapp(self,family): + append=self.env.append_unique + append('DEFINES','WINAPI_FAMILY=%s'%family) + append('CXXFLAGS','/ZW') + append('CXXFLAGS','/TP') + for lib_path in self.env.LIBPATH: + append('CXXFLAGS','/AI%s'%lib_path) +@feature('winphoneapp') +@after_method('process_use') +@after_method('propagate_uselib_vars') +def make_winphone_app(self): + make_winapp(self,'WINAPI_FAMILY_PHONE_APP') + self.env.append_unique('LINKFLAGS','/NODEFAULTLIB:ole32.lib') + self.env.append_unique('LINKFLAGS','PhoneAppModelHost.lib') +@feature('winapp') +@after_method('process_use') +@after_method('propagate_uselib_vars') +def make_windows_app(self): + make_winapp(self,'WINAPI_FAMILY_DESKTOP_APP') diff -Nru lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/nasm.py lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/nasm.py --- lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/nasm.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/nasm.py 2019-06-06 20:12:09.000000000 +0000 @@ -0,0 +1,16 @@
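The msvc tool above auto-detects compilers through the registry and the vcvars batch files; the MSVC_* keys consulted in setup_msvc/detect_msvc let a project pin the detection from its wscript. A configuration sketch (the version and target strings are examples, not a recommendation):

    def options(opt):
        opt.load('msvc')                       # adds --msvc_version, --msvc_targets

    def configure(conf):
        conf.env.MSVC_VERSIONS = ['msvc 14.0', 'wsdk 8.1']
        conf.env.MSVC_TARGETS = ['x64']
        conf.env.MSVC_LAZY_AUTODETECT = True   # defer running vcvars until a target is picked
        conf.load('msvc')
        conf.check_lib_msvc('gdi32')           # stores the result as LIB_GDI32
        conf.check_libs_msvc('kernel32 user32')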
+#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file + +import os +import waflib.Tools.asm +from waflib.TaskGen import feature +@feature('asm') +def apply_nasm_vars(self): + self.env.append_value('ASFLAGS',self.to_list(getattr(self,'nasm_flags',[]))) +def configure(conf): + conf.find_program(['nasm','yasm'],var='AS') + conf.env.AS_TGT_F=['-o'] + conf.env.ASLNK_TGT_F=['-o'] + conf.load('asm') + conf.env.ASMPATH_ST='-I%s'+os.sep diff -Nru lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/perl.py lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/perl.py --- lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/perl.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/perl.py 2019-06-06 20:12:09.000000000 +0000 @@ -0,0 +1,90 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file + +import os +from waflib import Task,Options,Utils +from waflib.Configure import conf +from waflib.TaskGen import extension,feature,before_method +@before_method('apply_incpaths','apply_link','propagate_uselib_vars') +@feature('perlext') +def init_perlext(self): + self.uselib=self.to_list(getattr(self,'uselib',[])) + if not'PERLEXT'in self.uselib:self.uselib.append('PERLEXT') + self.env['cshlib_PATTERN']=self.env['cxxshlib_PATTERN']=self.env['perlext_PATTERN'] +@extension('.xs') +def xsubpp_file(self,node): + outnode=node.change_ext('.c') + self.create_task('xsubpp',node,outnode) + self.source.append(outnode) +class xsubpp(Task.Task): + run_str='${PERL} ${XSUBPP} -noprototypes -typemap ${EXTUTILS_TYPEMAP} ${SRC} > ${TGT}' + color='BLUE' + ext_out=['.h'] +@conf +def check_perl_version(self,minver=None): + res=True + if minver: + cver='.'.join(map(str,minver)) + else: + cver='' + self.start_msg('Checking for minimum perl version %s'%cver) + perl=Utils.to_list(getattr(Options.options,'perlbinary',None)) + if not perl: + perl=self.find_program('perl',var='PERL') + if not perl: + self.end_msg("Perl not found",color="YELLOW") + return False + self.env['PERL']=perl + version=self.cmd_and_log(self.env.PERL+["-e",'printf \"%vd\", $^V']) + if not version: + res=False + version="Unknown" + elif not minver is None: + ver=tuple(map(int,version.split("."))) + if ver +#ifdef __cplusplus +extern "C" { +#endif + void Py_Initialize(void); + void Py_Finalize(void); +#ifdef __cplusplus +} +#endif +int main(int argc, char **argv) +{ + (void)argc; (void)argv; + Py_Initialize(); + Py_Finalize(); + return 0; +} +''' +INST=''' +import sys, py_compile +py_compile.compile(sys.argv[1], sys.argv[2], sys.argv[3], True) +''' +DISTUTILS_IMP=['from distutils.sysconfig import get_config_var, get_python_lib'] +@before_method('process_source') +@feature('py') +def feature_py(self): + self.install_path=getattr(self,'install_path','${PYTHONDIR}') + install_from=getattr(self,'install_from',None) + if install_from and not isinstance(install_from,Node.Node): + install_from=self.path.find_dir(install_from) + self.install_from=install_from + ver=self.env.PYTHON_VERSION + if not ver: + self.bld.fatal('Installing python files requires PYTHON_VERSION, try conf.check_python_version') + if int(ver.replace('.',''))>31: + self.install_32=True +@extension('.py') +def process_py(self,node): + assert(getattr(self,'install_path')),'add features="py"' + if self.install_path: + if self.install_from: + 
self.bld.install_files(self.install_path,[node],cwd=self.install_from,relative_trick=True) + else: + self.bld.install_files(self.install_path,[node],relative_trick=True) + lst=[] + if self.env.PYC: + lst.append('pyc') + if self.env.PYO: + lst.append('pyo') + if self.install_path: + if self.install_from: + pyd=Utils.subst_vars("%s/%s"%(self.install_path,node.path_from(self.install_from)),self.env) + else: + pyd=Utils.subst_vars("%s/%s"%(self.install_path,node.path_from(self.path)),self.env) + else: + pyd=node.abspath() + for ext in lst: + if self.env.PYTAG: + name=node.name[:-3] + pyobj=node.parent.get_bld().make_node('__pycache__').make_node("%s.%s.%s"%(name,self.env.PYTAG,ext)) + pyobj.parent.mkdir() + else: + pyobj=node.change_ext(".%s"%ext) + tsk=self.create_task(ext,node,pyobj) + tsk.pyd=pyd + if self.install_path: + self.bld.install_files(os.path.dirname(pyd),pyobj,cwd=node.parent.get_bld(),relative_trick=True) +class pyc(Task.Task): + color='PINK' + def run(self): + cmd=[Utils.subst_vars('${PYTHON}',self.env),'-c',INST,self.inputs[0].abspath(),self.outputs[0].abspath(),self.pyd] + ret=self.generator.bld.exec_command(cmd) + return ret +class pyo(Task.Task): + color='PINK' + def run(self): + cmd=[Utils.subst_vars('${PYTHON}',self.env),Utils.subst_vars('${PYFLAGS_OPT}',self.env),'-c',INST,self.inputs[0].abspath(),self.outputs[0].abspath(),self.pyd] + ret=self.generator.bld.exec_command(cmd) + return ret +@feature('pyext') +@before_method('propagate_uselib_vars','apply_link') +@after_method('apply_bundle') +def init_pyext(self): + self.uselib=self.to_list(getattr(self,'uselib',[])) + if not'PYEXT'in self.uselib: + self.uselib.append('PYEXT') + self.env.cshlib_PATTERN=self.env.cxxshlib_PATTERN=self.env.macbundle_PATTERN=self.env.pyext_PATTERN + self.env.fcshlib_PATTERN=self.env.dshlib_PATTERN=self.env.pyext_PATTERN + try: + if not self.install_path: + return + except AttributeError: + self.install_path='${PYTHONARCHDIR}' +@feature('pyext') +@before_method('apply_link','apply_bundle') +def set_bundle(self): + if Utils.unversioned_sys_platform()=='darwin': + self.mac_bundle=True +@before_method('propagate_uselib_vars') +@feature('pyembed') +def init_pyembed(self): + self.uselib=self.to_list(getattr(self,'uselib',[])) + if not'PYEMBED'in self.uselib: + self.uselib.append('PYEMBED') +@conf +def get_python_variables(self,variables,imports=None): + if not imports: + try: + imports=self.python_imports + except AttributeError: + imports=DISTUTILS_IMP + program=list(imports) + program.append('') + for v in variables: + program.append("print(repr(%s))"%v) + os_env=dict(os.environ) + try: + del os_env['MACOSX_DEPLOYMENT_TARGET'] + except KeyError: + pass + try: + out=self.cmd_and_log(self.env.PYTHON+['-c','\n'.join(program)],env=os_env) + except Errors.WafError: + self.fatal('The distutils module is unusable: install "python-devel"?') + self.to_log(out) + return_values=[] + for s in out.splitlines(): + s=s.strip() + if not s: + continue + if s=='None': + return_values.append(None) + elif(s[0]=="'"and s[-1]=="'")or(s[0]=='"'and s[-1]=='"'): + return_values.append(eval(s)) + elif s[0].isdigit(): + return_values.append(int(s)) + else:break + return return_values +@conf +def test_pyembed(self,mode,msg='Testing pyembed configuration'): + self.check(header_name='Python.h',define_name='HAVE_PYEMBED',msg=msg,fragment=FRAG,errmsg='Could not build a python embedded interpreter',features='%s %sprogram pyembed'%(mode,mode)) +@conf +def test_pyext(self,mode,msg='Testing pyext configuration'): + 
self.check(header_name='Python.h',define_name='HAVE_PYEXT',msg=msg,fragment=FRAG,errmsg='Could not build python extensions',features='%s %sshlib pyext'%(mode,mode)) +@conf +def python_cross_compile(self,features='pyembed pyext'): + features=Utils.to_list(features) + if not('PYTHON_LDFLAGS'in self.environ or'PYTHON_PYEXT_LDFLAGS'in self.environ or'PYTHON_PYEMBED_LDFLAGS'in self.environ): + return False + for x in'PYTHON_VERSION PYTAG pyext_PATTERN'.split(): + if not x in self.environ: + self.fatal('Please set %s in the os environment'%x) + else: + self.env[x]=self.environ[x] + xx=self.env.CXX_NAME and'cxx'or'c' + if'pyext'in features: + flags=self.environ.get('PYTHON_PYEXT_LDFLAGS',self.environ.get('PYTHON_LDFLAGS',None)) + if flags is None: + self.fatal('No flags provided through PYTHON_PYEXT_LDFLAGS as required') + else: + self.parse_flags(flags,'PYEXT') + self.test_pyext(xx) + if'pyembed'in features: + flags=self.environ.get('PYTHON_PYEMBED_LDFLAGS',self.environ.get('PYTHON_LDFLAGS',None)) + if flags is None: + self.fatal('No flags provided through PYTHON_PYEMBED_LDFLAGS as required') + else: + self.parse_flags(flags,'PYEMBED') + self.test_pyembed(xx) + return True +@conf +def check_python_headers(conf,features='pyembed pyext'): + features=Utils.to_list(features) + assert('pyembed'in features)or('pyext'in features),"check_python_headers features must include 'pyembed' and/or 'pyext'" + env=conf.env + if not env['CC_NAME']and not env['CXX_NAME']: + conf.fatal('load a compiler first (gcc, g++, ..)') + if conf.python_cross_compile(features): + return + if not env['PYTHON_VERSION']: + conf.check_python_version() + pybin=env.PYTHON + if not pybin: + conf.fatal('Could not find the python executable') + v='prefix SO LDFLAGS LIBDIR LIBPL INCLUDEPY Py_ENABLE_SHARED MACOSX_DEPLOYMENT_TARGET LDSHARED CFLAGS LDVERSION'.split() + try: + lst=conf.get_python_variables(["get_config_var('%s') or ''"%x for x in v]) + except RuntimeError: + conf.fatal("Python development headers not found (-v for details).") + vals=['%s = %r'%(x,y)for(x,y)in zip(v,lst)] + conf.to_log("Configuration returned from %r:\n%s\n"%(pybin,'\n'.join(vals))) + dct=dict(zip(v,lst)) + x='MACOSX_DEPLOYMENT_TARGET' + if dct[x]: + env[x]=conf.environ[x]=dct[x] + env['pyext_PATTERN']='%s'+dct['SO'] + num='.'.join(env['PYTHON_VERSION'].split('.')[:2]) + conf.find_program([''.join(pybin)+'-config','python%s-config'%num,'python-config-%s'%num,'python%sm-config'%num],var='PYTHON_CONFIG',msg="python-config",mandatory=False) + if env.PYTHON_CONFIG: + all_flags=[['--cflags','--libs','--ldflags']] + if sys.hexversion<0x2070000: + all_flags=[[k]for k in all_flags[0]] + xx=env.CXX_NAME and'cxx'or'c' + if'pyembed'in features: + for flags in all_flags: + conf.check_cfg(msg='Asking python-config for pyembed %r flags'%' '.join(flags),path=env.PYTHON_CONFIG,package='',uselib_store='PYEMBED',args=flags) + try: + conf.test_pyembed(xx) + except conf.errors.ConfigurationError: + if dct['Py_ENABLE_SHARED']and dct['LIBDIR']: + env.append_unique('LIBPATH_PYEMBED',[dct['LIBDIR']]) + conf.test_pyembed(xx) + else: + raise + if'pyext'in features: + for flags in all_flags: + conf.check_cfg(msg='Asking python-config for pyext %r flags'%' '.join(flags),path=env.PYTHON_CONFIG,package='',uselib_store='PYEXT',args=flags) + try: + conf.test_pyext(xx) + except conf.errors.ConfigurationError: + if dct['Py_ENABLE_SHARED']and dct['LIBDIR']: + env.append_unique('LIBPATH_PYEXT',[dct['LIBDIR']]) + conf.test_pyext(xx) + else: + raise + conf.define('HAVE_PYTHON_H',1) + return + 
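# (Editor's annotation, not part of the upstream file.) The 'return' above
# ends the python-config path of check_python_headers. The fallback below
# rebuilds the PYEMBED/PYEXT flags from the distutils variables fetched
# earlier (LDFLAGS, LDSHARED, CFLAGS) and then hunts for the python library
# itself, trying in order LIBPATH_PYEMBED, $LIBDIR, $LIBPL and $prefix/libs
# with several name spellings (python<LDVERSION>, pythonX.Ym, pythonXY).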
all_flags=dct['LDFLAGS']+' '+dct['CFLAGS'] + conf.parse_flags(all_flags,'PYEMBED') + all_flags=dct['LDFLAGS']+' '+dct['LDSHARED']+' '+dct['CFLAGS'] + conf.parse_flags(all_flags,'PYEXT') + result=None + if not dct["LDVERSION"]: + dct["LDVERSION"]=env['PYTHON_VERSION'] + for name in('python'+dct['LDVERSION'],'python'+env['PYTHON_VERSION']+'m','python'+env['PYTHON_VERSION'].replace('.','')): + if not result and env['LIBPATH_PYEMBED']: + path=env['LIBPATH_PYEMBED'] + conf.to_log("\n\n# Trying default LIBPATH_PYEMBED: %r\n"%path) + result=conf.check(lib=name,uselib='PYEMBED',libpath=path,mandatory=False,msg='Checking for library %s in LIBPATH_PYEMBED'%name) + if not result and dct['LIBDIR']: + path=[dct['LIBDIR']] + conf.to_log("\n\n# try again with -L$python_LIBDIR: %r\n"%path) + result=conf.check(lib=name,uselib='PYEMBED',libpath=path,mandatory=False,msg='Checking for library %s in LIBDIR'%name) + if not result and dct['LIBPL']: + path=[dct['LIBPL']] + conf.to_log("\n\n# try again with -L$python_LIBPL (some systems don't install the python library in $prefix/lib)\n") + result=conf.check(lib=name,uselib='PYEMBED',libpath=path,mandatory=False,msg='Checking for library %s in python_LIBPL'%name) + if not result: + path=[os.path.join(dct['prefix'],"libs")] + conf.to_log("\n\n# try again with -L$prefix/libs, and pythonXY name rather than pythonX.Y (win32)\n") + result=conf.check(lib=name,uselib='PYEMBED',libpath=path,mandatory=False,msg='Checking for library %s in $prefix/libs'%name) + if result: + break + if result: + env['LIBPATH_PYEMBED']=path + env.append_value('LIB_PYEMBED',[name]) + else: + conf.to_log("\n\n### LIB NOT FOUND\n") + if Utils.is_win32 or dct['Py_ENABLE_SHARED']: + env['LIBPATH_PYEXT']=env['LIBPATH_PYEMBED'] + env['LIB_PYEXT']=env['LIB_PYEMBED'] + conf.to_log("Include path for Python extensions (found via distutils module): %r\n"%(dct['INCLUDEPY'],)) + env['INCLUDES_PYEXT']=[dct['INCLUDEPY']] + env['INCLUDES_PYEMBED']=[dct['INCLUDEPY']] + if env['CC_NAME']=='gcc': + env.append_value('CFLAGS_PYEMBED',['-fno-strict-aliasing']) + env.append_value('CFLAGS_PYEXT',['-fno-strict-aliasing']) + if env['CXX_NAME']=='gcc': + env.append_value('CXXFLAGS_PYEMBED',['-fno-strict-aliasing']) + env.append_value('CXXFLAGS_PYEXT',['-fno-strict-aliasing']) + if env.CC_NAME=="msvc": + from distutils.msvccompiler import MSVCCompiler + dist_compiler=MSVCCompiler() + dist_compiler.initialize() + env.append_value('CFLAGS_PYEXT',dist_compiler.compile_options) + env.append_value('CXXFLAGS_PYEXT',dist_compiler.compile_options) + env.append_value('LINKFLAGS_PYEXT',dist_compiler.ldflags_shared) + conf.check(header_name='Python.h',define_name='HAVE_PYTHON_H',uselib='PYEMBED',fragment=FRAG,errmsg='Distutils not installed? Broken python installation? 
Get python-config now!') +@conf +def check_python_version(conf,minver=None): + assert minver is None or isinstance(minver,tuple) + pybin=conf.env['PYTHON'] + if not pybin: + conf.fatal('could not find the python executable') + cmd=pybin+['-c','import sys\nfor x in sys.version_info: print(str(x))'] + Logs.debug('python: Running python command %r'%cmd) + lines=conf.cmd_and_log(cmd).split() + assert len(lines)==5,"found %i lines, expected 5: %r"%(len(lines),lines) + pyver_tuple=(int(lines[0]),int(lines[1]),int(lines[2]),lines[3],int(lines[4])) + result=(minver is None)or(pyver_tuple>=minver) + if result: + pyver='.'.join([str(x)for x in pyver_tuple[:2]]) + conf.env['PYTHON_VERSION']=pyver + if'PYTHONDIR'in conf.env: + pydir=conf.env['PYTHONDIR'] + elif'PYTHONDIR'in conf.environ: + pydir=conf.environ['PYTHONDIR'] + else: + if Utils.is_win32: + (python_LIBDEST,pydir)=conf.get_python_variables(["get_config_var('LIBDEST') or ''","get_python_lib(standard_lib=0) or ''"]) + else: + python_LIBDEST=None + (pydir,)=conf.get_python_variables(["get_python_lib(standard_lib=0, prefix=%r) or ''"%conf.env.PREFIX]) + if python_LIBDEST is None: + if conf.env['LIBDIR']: + python_LIBDEST=os.path.join(conf.env['LIBDIR'],"python"+pyver) + else: + python_LIBDEST=os.path.join(conf.env['PREFIX'],"lib","python"+pyver) + if'PYTHONARCHDIR'in conf.env: + pyarchdir=conf.env['PYTHONARCHDIR'] + elif'PYTHONARCHDIR'in conf.environ: + pyarchdir=conf.environ['PYTHONARCHDIR'] + else: + (pyarchdir,)=conf.get_python_variables(["get_python_lib(plat_specific=1, standard_lib=0, prefix=%r) or ''"%conf.env.PREFIX]) + if not pyarchdir: + pyarchdir=pydir + if hasattr(conf,'define'): + conf.define('PYTHONDIR',pydir) + conf.define('PYTHONARCHDIR',pyarchdir) + conf.env['PYTHONDIR']=pydir + conf.env['PYTHONARCHDIR']=pyarchdir + pyver_full='.'.join(map(str,pyver_tuple[:3])) + if minver is None: + conf.msg('Checking for python version',pyver_full) + else: + minver_str='.'.join(map(str,minver)) + conf.msg('Checking for python version >= %s'%(minver_str,),pyver_full,color=result and'GREEN'or'YELLOW') + if not result: + conf.fatal('The python version is too old, expecting %r'%(minver,)) +PYTHON_MODULE_TEMPLATE=''' +import %s as current_module +version = getattr(current_module, '__version__', None) +if version is not None: + print(str(version)) +else: + print('unknown version') +''' +@conf +def check_python_module(conf,module_name,condition=''): + msg="Checking for python module '%s'"%module_name + if condition: + msg='%s (%s)'%(msg,condition) + conf.start_msg(msg) + try: + ret=conf.cmd_and_log(conf.env['PYTHON']+['-c',PYTHON_MODULE_TEMPLATE%module_name]) + except Exception: + conf.end_msg(False) + conf.fatal('Could not find the python module %r'%module_name) + ret=ret.strip() + if condition: + conf.end_msg(ret) + if ret=='unknown version': + conf.fatal('Could not check the %s version'%module_name) + from distutils.version import LooseVersion + def num(*k): + if isinstance(k[0],int): + return LooseVersion('.'.join([str(x)for x in k])) + else: + return LooseVersion(k[0]) + d={'num':num,'ver':LooseVersion(ret)} + ev=eval(condition,{},d) + if not ev: + conf.fatal('The %s version does not satisfy the requirements'%module_name) + else: + if ret=='unknown version': + conf.end_msg(True) + else: + conf.end_msg(ret) +def configure(conf): + v=conf.env + v['PYTHON']=Options.options.python or os.environ.get('PYTHON',sys.executable) + if Options.options.pythondir: + v['PYTHONDIR']=Options.options.pythondir + if Options.options.pythonarchdir: + 
v['PYTHONARCHDIR']=Options.options.pythonarchdir + conf.find_program('python',var='PYTHON') + v['PYFLAGS']='' + v['PYFLAGS_OPT']='-O' + v['PYC']=getattr(Options.options,'pyc',1) + v['PYO']=getattr(Options.options,'pyo',1) + try: + v.PYTAG=conf.cmd_and_log(conf.env.PYTHON+['-c',"import imp;print(imp.get_tag())"]).strip() + except Errors.WafError: + pass +def options(opt): + pyopt=opt.add_option_group("Python Options") + pyopt.add_option('--nopyc',dest='pyc',action='store_false',default=1,help='Do not install bytecode compiled .pyc files (configuration) [Default:install]') + pyopt.add_option('--nopyo',dest='pyo',action='store_false',default=1,help='Do not install optimised compiled .pyo files (configuration) [Default:install]') + pyopt.add_option('--python',dest="python",help='python binary to be used [Default: %s]'%sys.executable) + pyopt.add_option('--pythondir',dest='pythondir',help='Installation path for python modules (py, platform-independent .py and .pyc files)') + pyopt.add_option('--pythonarchdir',dest='pythonarchdir',help='Installation path for python extension (pyext, platform-dependent .so or .dylib files)') diff -Nru lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/qt4.py lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/qt4.py --- lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/qt4.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/qt4.py 2019-06-06 20:12:09.000000000 +0000 @@ -0,0 +1,442 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file + +try: + from xml.sax import make_parser + from xml.sax.handler import ContentHandler +except ImportError: + has_xml=False + ContentHandler=object +else: + has_xml=True +import os,sys +from waflib.Tools import cxx +from waflib import Task,Utils,Options,Errors,Context +from waflib.TaskGen import feature,after_method,extension +from waflib.Configure import conf +from waflib import Logs +MOC_H=['.h','.hpp','.hxx','.hh'] +EXT_RCC=['.qrc'] +EXT_UI=['.ui'] +EXT_QT4=['.cpp','.cc','.cxx','.C'] +QT4_LIBS="QtCore QtGui QtUiTools QtNetwork QtOpenGL QtSql QtSvg QtTest QtXml QtXmlPatterns QtWebKit Qt3Support QtHelp QtScript QtDeclarative QtDesigner" +class qxx(Task.classes['cxx']): + def __init__(self,*k,**kw): + Task.Task.__init__(self,*k,**kw) + self.moc_done=0 + def runnable_status(self): + if self.moc_done: + return Task.Task.runnable_status(self) + else: + for t in self.run_after: + if not t.hasrun: + return Task.ASK_LATER + self.add_moc_tasks() + return Task.Task.runnable_status(self) + def create_moc_task(self,h_node,m_node): + try: + moc_cache=self.generator.bld.moc_cache + except AttributeError: + moc_cache=self.generator.bld.moc_cache={} + try: + return moc_cache[h_node] + except KeyError: + tsk=moc_cache[h_node]=Task.classes['moc'](env=self.env,generator=self.generator) + tsk.set_inputs(h_node) + tsk.set_outputs(m_node) + if self.generator: + self.generator.tasks.append(tsk) + gen=self.generator.bld.producer + gen.outstanding.insert(0,tsk) + gen.total+=1 + return tsk + def moc_h_ext(self): + ext=[] + try: + ext=Options.options.qt_header_ext.split() + except AttributeError: + pass + if not ext: + ext=MOC_H + return ext + def add_moc_tasks(self): + node=self.inputs[0] + bld=self.generator.bld + try: + self.signature() + except KeyError: + pass + else: + delattr(self,'cache_sig') + include_nodes=[node.parent]+self.generator.includes_nodes 
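+ # The loop below maps each '.moc' file recorded in the raw dependencies back to a header found under include_nodes (trying every extension in MOC_H), or to a source file named like the .moc, and schedules one moc task per header.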
+ moctasks=[] + mocfiles=set([]) + for d in bld.raw_deps.get(self.uid(),[]): + if not d.endswith('.moc'): + continue + if d in mocfiles: + continue + mocfiles.add(d) + h_node=None + base2=d[:-4] + for x in include_nodes: + for e in self.moc_h_ext(): + h_node=x.find_node(base2+e) + if h_node: + break + if h_node: + m_node=h_node.change_ext('.moc') + break + else: + for k in EXT_QT4: + if base2.endswith(k): + for x in include_nodes: + h_node=x.find_node(base2) + if h_node: + break + if h_node: + m_node=h_node.change_ext(k+'.moc') + break + if not h_node: + raise Errors.WafError('No source found for %r which is a moc file'%d) + task=self.create_moc_task(h_node,m_node) + moctasks.append(task) + self.run_after.update(set(moctasks)) + self.moc_done=1 +class trans_update(Task.Task): + run_str='${QT_LUPDATE} ${SRC} -ts ${TGT}' + color='BLUE' +Task.update_outputs(trans_update) +class XMLHandler(ContentHandler): + def __init__(self): + self.buf=[] + self.files=[] + def startElement(self,name,attrs): + if name=='file': + self.buf=[] + def endElement(self,name): + if name=='file': + self.files.append(str(''.join(self.buf))) + def characters(self,cars): + self.buf.append(cars) +@extension(*EXT_RCC) +def create_rcc_task(self,node): + rcnode=node.change_ext('_rc.cpp') + self.create_task('rcc',node,rcnode) + cpptask=self.create_task('cxx',rcnode,rcnode.change_ext('.o')) + try: + self.compiled_tasks.append(cpptask) + except AttributeError: + self.compiled_tasks=[cpptask] + return cpptask +@extension(*EXT_UI) +def create_uic_task(self,node): + uictask=self.create_task('ui4',node) + uictask.outputs=[self.path.find_or_declare(self.env['ui_PATTERN']%node.name[:-3])] +@extension('.ts') +def add_lang(self,node): + self.lang=self.to_list(getattr(self,'lang',[]))+[node] +@feature('qt4') +@after_method('apply_link') +def apply_qt4(self): + if getattr(self,'lang',None): + qmtasks=[] + for x in self.to_list(self.lang): + if isinstance(x,str): + x=self.path.find_resource(x+'.ts') + qmtasks.append(self.create_task('ts2qm',x,x.change_ext('.qm'))) + if getattr(self,'update',None)and Options.options.trans_qt4: + cxxnodes=[a.inputs[0]for a in self.compiled_tasks]+[a.inputs[0]for a in self.tasks if getattr(a,'inputs',None)and a.inputs[0].name.endswith('.ui')] + for x in qmtasks: + self.create_task('trans_update',cxxnodes,x.inputs) + if getattr(self,'langname',None): + qmnodes=[x.outputs[0]for x in qmtasks] + rcnode=self.langname + if isinstance(rcnode,str): + rcnode=self.path.find_or_declare(rcnode+'.qrc') + t=self.create_task('qm2rcc',qmnodes,rcnode) + k=create_rcc_task(self,t.outputs[0]) + self.link_task.inputs.append(k.outputs[0]) + lst=[] + for flag in self.to_list(self.env['CXXFLAGS']): + if len(flag)<2:continue + f=flag[0:2] + if f in('-D','-I','/D','/I'): + if(f[0]=='/'): + lst.append('-'+flag[1:]) + else: + lst.append(flag) + self.env.append_value('MOC_FLAGS',lst) +@extension(*EXT_QT4) +def cxx_hook(self,node): + return self.create_compiled_task('qxx',node) +class rcc(Task.Task): + color='BLUE' + run_str='${QT_RCC} -name ${tsk.rcname()} ${SRC[0].abspath()} ${RCC_ST} -o ${TGT}' + ext_out=['.h'] + def rcname(self): + return os.path.splitext(self.inputs[0].name)[0] + def scan(self): + if not has_xml: + Logs.error('no xml support was found, the rcc dependencies will be incomplete!') + return([],[]) + parser=make_parser() + curHandler=XMLHandler() + parser.setContentHandler(curHandler) + fi=open(self.inputs[0].abspath(),'r') + try: + parser.parse(fi) + finally: + fi.close() + nodes=[] + names=[] + 
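# The .qrc entries collected by the SAX handler above become tracked dependency nodes when the resource file exists, and plain names otherwise. + 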
root=self.inputs[0].parent + for x in curHandler.files: + nd=root.find_resource(x) + if nd:nodes.append(nd) + else:names.append(x) + return(nodes,names) +class moc(Task.Task): + color='BLUE' + run_str='${QT_MOC} ${MOC_FLAGS} ${MOCCPPPATH_ST:INCPATHS} ${MOCDEFINES_ST:DEFINES} ${SRC} ${MOC_ST} ${TGT}' + def keyword(self): + return"Creating" + def __str__(self): + return self.outputs[0].path_from(self.generator.bld.launch_node()) +class ui4(Task.Task): + color='BLUE' + run_str='${QT_UIC} ${SRC} -o ${TGT}' + ext_out=['.h'] +class ts2qm(Task.Task): + color='BLUE' + run_str='${QT_LRELEASE} ${QT_LRELEASE_FLAGS} ${SRC} -qm ${TGT}' +class qm2rcc(Task.Task): + color='BLUE' + after='ts2qm' + def run(self): + txt='\n'.join(['<file>%s</file>'%k.path_from(self.outputs[0].parent)for k in self.inputs]) + code='<!DOCTYPE RCC><RCC version="1.0">\n<qresource>\n%s\n</qresource>\n</RCC>'%txt + self.outputs[0].write(code) +def configure(self): + self.find_qt4_binaries() + self.set_qt4_libs_to_check() + self.set_qt4_defines() + self.find_qt4_libraries() + self.add_qt4_rpath() + self.simplify_qt4_libs() +@conf +def find_qt4_binaries(self): + env=self.env + opt=Options.options + qtdir=getattr(opt,'qtdir','') + qtbin=getattr(opt,'qtbin','') + paths=[] + if qtdir: + qtbin=os.path.join(qtdir,'bin') + if not qtdir: + qtdir=os.environ.get('QT4_ROOT','') + qtbin=os.environ.get('QT4_BIN',None)or os.path.join(qtdir,'bin') + if qtbin: + paths=[qtbin] + if not qtdir: + paths=os.environ.get('PATH','').split(os.pathsep) + paths.append('/usr/share/qt4/bin/') + try: + lst=Utils.listdir('/usr/local/Trolltech/') + except OSError: + pass + else: + if lst: + lst.sort() + lst.reverse() + qtdir='/usr/local/Trolltech/%s/'%lst[0] + qtbin=os.path.join(qtdir,'bin') + paths.append(qtbin) + cand=None + prev_ver=['4','0','0'] + for qmk in('qmake-qt4','qmake4','qmake'): + try: + qmake=self.find_program(qmk,path_list=paths) + except self.errors.ConfigurationError: + pass + else: + try: + version=self.cmd_and_log(qmake+['-query','QT_VERSION']).strip() + except self.errors.WafError: + pass + else: + if version: + new_ver=version.split('.') + if new_ver>prev_ver: + cand=qmake + prev_ver=new_ver + if cand: + self.env.QMAKE=cand + else: + self.fatal('Could not find qmake for qt4') + qtbin=self.cmd_and_log(self.env.QMAKE+['-query','QT_INSTALL_BINS']).strip()+os.sep + def find_bin(lst,var): + if var in env: + return + for f in lst: + try: + ret=self.find_program(f,path_list=paths) + except self.errors.ConfigurationError: + pass + else: + env[var]=ret + break + find_bin(['uic-qt3','uic3'],'QT_UIC3') + find_bin(['uic-qt4','uic'],'QT_UIC') + if not env.QT_UIC: + self.fatal('cannot find the uic compiler for qt4') + self.start_msg('Checking for uic version') + uicver=self.cmd_and_log(env.QT_UIC+["-version"],output=Context.BOTH) + uicver=''.join(uicver).strip() + uicver=uicver.replace('Qt User Interface Compiler ','').replace('User Interface Compiler for Qt','') + self.end_msg(uicver) + if uicver.find(' 3.')!=-1: + self.fatal('this uic compiler is for qt3, add uic for qt4 to your path') + find_bin(['moc-qt4','moc'],'QT_MOC') + find_bin(['rcc-qt4','rcc'],'QT_RCC') + find_bin(['lrelease-qt4','lrelease'],'QT_LRELEASE') + find_bin(['lupdate-qt4','lupdate'],'QT_LUPDATE') + env['UIC3_ST']='%s -o %s' + env['UIC_ST']='%s -o %s' + env['MOC_ST']='-o' + env['ui_PATTERN']='ui_%s.h' + env['QT_LRELEASE_FLAGS']=['-silent'] + env.MOCCPPPATH_ST='-I%s' + env.MOCDEFINES_ST='-D%s' +@conf +def find_qt4_libraries(self): + qtlibs=getattr(Options.options,'qtlibs',None)or os.environ.get("QT4_LIBDIR",None) + if not qtlibs: + try: + 
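# No QT4_LIBDIR/--qtlibs given, so fall back to asking qmake for the library path. + 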
qtlibs=self.cmd_and_log(self.env.QMAKE+['-query','QT_INSTALL_LIBS']).strip() + except Errors.WafError: + qtdir=self.cmd_and_log(self.env.QMAKE+['-query','QT_INSTALL_PREFIX']).strip()+os.sep + qtlibs=os.path.join(qtdir,'lib') + self.msg('Found the Qt4 libraries in',qtlibs) + qtincludes=os.environ.get("QT4_INCLUDES",None)or self.cmd_and_log(self.env.QMAKE+['-query','QT_INSTALL_HEADERS']).strip() + env=self.env + if not'PKG_CONFIG_PATH'in os.environ: + os.environ['PKG_CONFIG_PATH']='%s:%s/pkgconfig:/usr/lib/qt4/lib/pkgconfig:/opt/qt4/lib/pkgconfig:/usr/lib/qt4/lib:/opt/qt4/lib'%(qtlibs,qtlibs) + try: + if os.environ.get("QT4_XCOMPILE",None): + raise self.errors.ConfigurationError() + self.check_cfg(atleast_pkgconfig_version='0.1') + except self.errors.ConfigurationError: + for i in self.qt4_vars: + uselib=i.upper() + if Utils.unversioned_sys_platform()=="darwin": + frameworkName=i+".framework" + qtDynamicLib=os.path.join(qtlibs,frameworkName,i) + if os.path.exists(qtDynamicLib): + env.append_unique('FRAMEWORK_'+uselib,i) + self.msg('Checking for %s'%i,qtDynamicLib,'GREEN') + else: + self.msg('Checking for %s'%i,False,'YELLOW') + env.append_unique('INCLUDES_'+uselib,os.path.join(qtlibs,frameworkName,'Headers')) + elif env.DEST_OS!="win32": + qtDynamicLib=os.path.join(qtlibs,"lib"+i+".so") + qtStaticLib=os.path.join(qtlibs,"lib"+i+".a") + if os.path.exists(qtDynamicLib): + env.append_unique('LIB_'+uselib,i) + self.msg('Checking for %s'%i,qtDynamicLib,'GREEN') + elif os.path.exists(qtStaticLib): + env.append_unique('LIB_'+uselib,i) + self.msg('Checking for %s'%i,qtStaticLib,'GREEN') + else: + self.msg('Checking for %s'%i,False,'YELLOW') + env.append_unique('LIBPATH_'+uselib,qtlibs) + env.append_unique('INCLUDES_'+uselib,qtincludes) + env.append_unique('INCLUDES_'+uselib,os.path.join(qtincludes,i)) + else: + for k in("lib%s.a","lib%s4.a","%s.lib","%s4.lib"): + lib=os.path.join(qtlibs,k%i) + if os.path.exists(lib): + env.append_unique('LIB_'+uselib,i+k[k.find("%s")+2:k.find('.')]) + self.msg('Checking for %s'%i,lib,'GREEN') + break + else: + self.msg('Checking for %s'%i,False,'YELLOW') + env.append_unique('LIBPATH_'+uselib,qtlibs) + env.append_unique('INCLUDES_'+uselib,qtincludes) + env.append_unique('INCLUDES_'+uselib,os.path.join(qtincludes,i)) + uselib=i.upper()+"_debug" + for k in("lib%sd.a","lib%sd4.a","%sd.lib","%sd4.lib"): + lib=os.path.join(qtlibs,k%i) + if os.path.exists(lib): + env.append_unique('LIB_'+uselib,i+k[k.find("%s")+2:k.find('.')]) + self.msg('Checking for %s'%i,lib,'GREEN') + break + else: + self.msg('Checking for %s'%i,False,'YELLOW') + env.append_unique('LIBPATH_'+uselib,qtlibs) + env.append_unique('INCLUDES_'+uselib,qtincludes) + env.append_unique('INCLUDES_'+uselib,os.path.join(qtincludes,i)) + else: + for i in self.qt4_vars_debug+self.qt4_vars: + self.check_cfg(package=i,args='--cflags --libs',mandatory=False) +@conf +def simplify_qt4_libs(self): + env=self.env + def process_lib(vars_,coreval): + for d in vars_: + var=d.upper() + if var=='QTCORE': + continue + value=env['LIBPATH_'+var] + if value: + core=env[coreval] + accu=[] + for lib in value: + if lib in core: + continue + accu.append(lib) + env['LIBPATH_'+var]=accu + process_lib(self.qt4_vars,'LIBPATH_QTCORE') + process_lib(self.qt4_vars_debug,'LIBPATH_QTCORE_DEBUG') +@conf +def add_qt4_rpath(self): + env=self.env + if getattr(Options.options,'want_rpath',False): + def process_rpath(vars_,coreval): + for d in vars_: + var=d.upper() + value=env['LIBPATH_'+var] + if value: + core=env[coreval] + accu=[] + for lib in 
value: + if var!='QTCORE': + if lib in core: + continue + accu.append('-Wl,--rpath='+lib) + env['RPATH_'+var]=accu + process_rpath(self.qt4_vars,'LIBPATH_QTCORE') + process_rpath(self.qt4_vars_debug,'LIBPATH_QTCORE_DEBUG') +@conf +def set_qt4_libs_to_check(self): + if not hasattr(self,'qt4_vars'): + self.qt4_vars=QT4_LIBS + self.qt4_vars=Utils.to_list(self.qt4_vars) + if not hasattr(self,'qt4_vars_debug'): + self.qt4_vars_debug=[a+'_debug'for a in self.qt4_vars] + self.qt4_vars_debug=Utils.to_list(self.qt4_vars_debug) +@conf +def set_qt4_defines(self): + if sys.platform!='win32': + return + for x in self.qt4_vars: + y=x[2:].upper() + self.env.append_unique('DEFINES_%s'%x.upper(),'QT_%s_LIB'%y) + self.env.append_unique('DEFINES_%s_DEBUG'%x.upper(),'QT_%s_LIB'%y) +def options(opt): + opt.add_option('--want-rpath',action='store_true',default=False,dest='want_rpath',help='enable the rpath for qt libraries') + opt.add_option('--header-ext',type='string',default='',help='header extension for moc files',dest='qt_header_ext') + for i in'qtdir qtbin qtlibs'.split(): + opt.add_option('--'+i,type='string',default='',dest=i) + opt.add_option('--translate',action="store_true",help="collect translation strings",dest="trans_qt4",default=False) diff -Nru lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/qt5.py lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/qt5.py --- lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/qt5.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/qt5.py 2019-06-06 20:12:09.000000000 +0000 @@ -0,0 +1,489 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file + +try: + from xml.sax import make_parser + from xml.sax.handler import ContentHandler +except ImportError: + has_xml=False + ContentHandler=object +else: + has_xml=True +import os,sys +from waflib.Tools import cxx +from waflib import Task,Utils,Options,Errors,Context +from waflib.TaskGen import feature,after_method,extension +from waflib.Configure import conf +from waflib import Logs +MOC_H=['.h','.hpp','.hxx','.hh'] +EXT_RCC=['.qrc'] +EXT_UI=['.ui'] +EXT_QT5=['.cpp','.cc','.cxx','.C'] +QT5_LIBS=''' +qtmain +Qt5Bluetooth +Qt5CLucene +Qt5Concurrent +Qt5Core +Qt5DBus +Qt5Declarative +Qt5DesignerComponents +Qt5Designer +Qt5Gui +Qt5Help +Qt5MultimediaQuick_p +Qt5Multimedia +Qt5MultimediaWidgets +Qt5Network +Qt5Nfc +Qt5OpenGL +Qt5Positioning +Qt5PrintSupport +Qt5Qml +Qt5QuickParticles +Qt5Quick +Qt5QuickTest +Qt5Script +Qt5ScriptTools +Qt5Sensors +Qt5SerialPort +Qt5Sql +Qt5Svg +Qt5Test +Qt5WebKit +Qt5WebKitWidgets +Qt5Widgets +Qt5WinExtras +Qt5X11Extras +Qt5XmlPatterns +Qt5Xml''' +class qxx(Task.classes['cxx']): + def __init__(self,*k,**kw): + Task.Task.__init__(self,*k,**kw) + self.moc_done=0 + def runnable_status(self): + if self.moc_done: + return Task.Task.runnable_status(self) + else: + for t in self.run_after: + if not t.hasrun: + return Task.ASK_LATER + self.add_moc_tasks() + return Task.Task.runnable_status(self) + def create_moc_task(self,h_node,m_node): + try: + moc_cache=self.generator.bld.moc_cache + except AttributeError: + moc_cache=self.generator.bld.moc_cache={} + try: + return moc_cache[h_node] + except KeyError: + tsk=moc_cache[h_node]=Task.classes['moc'](env=self.env,generator=self.generator) + tsk.set_inputs(h_node) + tsk.set_outputs(m_node) + if self.generator: + self.generator.tasks.append(tsk) + 
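# The fresh moc task is cached per header node and pushed to the front of the producer's outstanding queue, so the scheduler picks it up before the queued compilations that depend on its output: + 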
gen=self.generator.bld.producer + gen.outstanding.insert(0,tsk) + gen.total+=1 + return tsk + else: + delattr(self,'cache_sig') + def moc_h_ext(self): + ext=[] + try: + ext=Options.options.qt_header_ext.split() + except AttributeError: + pass + if not ext: + ext=MOC_H + return ext + def add_moc_tasks(self): + node=self.inputs[0] + bld=self.generator.bld + try: + self.signature() + except KeyError: + pass + else: + delattr(self,'cache_sig') + include_nodes=[node.parent]+self.generator.includes_nodes + moctasks=[] + mocfiles=set([]) + for d in bld.raw_deps.get(self.uid(),[]): + if not d.endswith('.moc'): + continue + if d in mocfiles: + continue + mocfiles.add(d) + h_node=None + base2=d[:-4] + for x in include_nodes: + for e in self.moc_h_ext(): + h_node=x.find_node(base2+e) + if h_node: + break + if h_node: + m_node=h_node.change_ext('.moc') + break + else: + for k in EXT_QT5: + if base2.endswith(k): + for x in include_nodes: + h_node=x.find_node(base2) + if h_node: + break + if h_node: + m_node=h_node.change_ext(k+'.moc') + break + if not h_node: + raise Errors.WafError('No source found for %r which is a moc file'%d) + task=self.create_moc_task(h_node,m_node) + moctasks.append(task) + self.run_after.update(set(moctasks)) + self.moc_done=1 +class trans_update(Task.Task): + run_str='${QT_LUPDATE} ${SRC} -ts ${TGT}' + color='BLUE' +Task.update_outputs(trans_update) +class XMLHandler(ContentHandler): + def __init__(self): + self.buf=[] + self.files=[] + def startElement(self,name,attrs): + if name=='file': + self.buf=[] + def endElement(self,name): + if name=='file': + self.files.append(str(''.join(self.buf))) + def characters(self,cars): + self.buf.append(cars) +@extension(*EXT_RCC) +def create_rcc_task(self,node): + rcnode=node.change_ext('_rc.cpp') + self.create_task('rcc',node,rcnode) + cpptask=self.create_task('cxx',rcnode,rcnode.change_ext('.o')) + try: + self.compiled_tasks.append(cpptask) + except AttributeError: + self.compiled_tasks=[cpptask] + return cpptask +@extension(*EXT_UI) +def create_uic_task(self,node): + uictask=self.create_task('ui5',node) + uictask.outputs=[self.path.find_or_declare(self.env['ui_PATTERN']%node.name[:-3])] +@extension('.ts') +def add_lang(self,node): + self.lang=self.to_list(getattr(self,'lang',[]))+[node] +@feature('qt5') +@after_method('apply_link') +def apply_qt5(self): + if getattr(self,'lang',None): + qmtasks=[] + for x in self.to_list(self.lang): + if isinstance(x,str): + x=self.path.find_resource(x+'.ts') + qmtasks.append(self.create_task('ts2qm',x,x.change_ext('.qm'))) + if getattr(self,'update',None)and Options.options.trans_qt5: + cxxnodes=[a.inputs[0]for a in self.compiled_tasks]+[a.inputs[0]for a in self.tasks if getattr(a,'inputs',None)and a.inputs[0].name.endswith('.ui')] + for x in qmtasks: + self.create_task('trans_update',cxxnodes,x.inputs) + if getattr(self,'langname',None): + qmnodes=[x.outputs[0]for x in qmtasks] + rcnode=self.langname + if isinstance(rcnode,str): + rcnode=self.path.find_or_declare(rcnode+'.qrc') + t=self.create_task('qm2rcc',qmnodes,rcnode) + k=create_rcc_task(self,t.outputs[0]) + self.link_task.inputs.append(k.outputs[0]) + lst=[] + for flag in self.to_list(self.env['CXXFLAGS']): + if len(flag)<2:continue + f=flag[0:2] + if f in('-D','-I','/D','/I'): + if(f[0]=='/'): + lst.append('-'+flag[1:]) + else: + lst.append(flag) + self.env.append_value('MOC_FLAGS',lst) +@extension(*EXT_QT5) +def cxx_hook(self,node): + return self.create_compiled_task('qxx',node) +class rcc(Task.Task): + color='BLUE' + run_str='${QT_RCC} -name 
${tsk.rcname()} ${SRC[0].abspath()} ${RCC_ST} -o ${TGT}' + ext_out=['.h'] + def rcname(self): + return os.path.splitext(self.inputs[0].name)[0] + def scan(self): + if not has_xml: + Logs.error('no xml support was found, the rcc dependencies will be incomplete!') + return([],[]) + parser=make_parser() + curHandler=XMLHandler() + parser.setContentHandler(curHandler) + fi=open(self.inputs[0].abspath(),'r') + try: + parser.parse(fi) + finally: + fi.close() + nodes=[] + names=[] + root=self.inputs[0].parent + for x in curHandler.files: + nd=root.find_resource(x) + if nd:nodes.append(nd) + else:names.append(x) + return(nodes,names) +class moc(Task.Task): + color='BLUE' + run_str='${QT_MOC} ${MOC_FLAGS} ${MOCCPPPATH_ST:INCPATHS} ${MOCDEFINES_ST:DEFINES} ${SRC} ${MOC_ST} ${TGT}' +class ui5(Task.Task): + color='BLUE' + run_str='${QT_UIC} ${SRC} -o ${TGT}' + ext_out=['.h'] +class ts2qm(Task.Task): + color='BLUE' + run_str='${QT_LRELEASE} ${QT_LRELEASE_FLAGS} ${SRC} -qm ${TGT}' +class qm2rcc(Task.Task): + color='BLUE' + after='ts2qm' + def run(self): + txt='\n'.join(['<file>%s</file>'%k.path_from(self.outputs[0].parent)for k in self.inputs]) + code='<!DOCTYPE RCC><RCC version="1.0">\n<qresource>\n%s\n</qresource>\n</RCC>'%txt + self.outputs[0].write(code) +def configure(self): + self.find_qt5_binaries() + self.set_qt5_libs_to_check() + self.set_qt5_defines() + self.find_qt5_libraries() + self.add_qt5_rpath() + self.simplify_qt5_libs() +@conf +def find_qt5_binaries(self): + env=self.env + opt=Options.options + qtdir=getattr(opt,'qtdir','') + qtbin=getattr(opt,'qtbin','') + paths=[] + if qtdir: + qtbin=os.path.join(qtdir,'bin') + if not qtdir: + qtdir=os.environ.get('QT5_ROOT','') + qtbin=os.environ.get('QT5_BIN',None)or os.path.join(qtdir,'bin') + if qtbin: + paths=[qtbin] + if not qtdir: + paths=os.environ.get('PATH','').split(os.pathsep) + paths.append('/usr/share/qt5/bin/') + try: + lst=Utils.listdir('/usr/local/Trolltech/') + except OSError: + pass + else: + if lst: + lst.sort() + lst.reverse() + qtdir='/usr/local/Trolltech/%s/'%lst[0] + qtbin=os.path.join(qtdir,'bin') + paths.append(qtbin) + cand=None + prev_ver=['5','0','0'] + for qmk in('qmake-qt5','qmake5','qmake'): + try: + qmake=self.find_program(qmk,path_list=paths) + except self.errors.ConfigurationError: + pass + else: + try: + version=self.cmd_and_log(qmake+['-query','QT_VERSION']).strip() + except self.errors.WafError: + pass + else: + if version: + new_ver=version.split('.') + if new_ver>prev_ver: + cand=qmake + prev_ver=new_ver + if not cand: + try: + self.find_program('qtchooser') + except self.errors.ConfigurationError: + pass + else: + cmd=self.env.QTCHOOSER+['-qt=5','-run-tool=qmake'] + try: + version=self.cmd_and_log(cmd+['-query','QT_VERSION']) + except self.errors.WafError: + pass + else: + cand=cmd + if cand: + self.env.QMAKE=cand + else: + self.fatal('Could not find qmake for qt5') + self.env.QT_INSTALL_BINS=qtbin=self.cmd_and_log(self.env.QMAKE+['-query','QT_INSTALL_BINS']).strip()+os.sep + paths.insert(0,qtbin) + def find_bin(lst,var): + if var in env: + return + for f in lst: + try: + ret=self.find_program(f,path_list=paths) + except self.errors.ConfigurationError: + pass + else: + env[var]=ret + break + find_bin(['uic-qt5','uic'],'QT_UIC') + if not env.QT_UIC: + self.fatal('cannot find the uic compiler for qt5') + self.start_msg('Checking for uic version') + uicver=self.cmd_and_log(env.QT_UIC+['-version'],output=Context.BOTH) + uicver=''.join(uicver).strip() + uicver=uicver.replace('Qt User Interface Compiler ','').replace('User Interface Compiler for Qt','') + self.end_msg(uicver) + if 
uicver.find(' 3.')!=-1 or uicver.find(' 4.')!=-1: + self.fatal('this uic compiler is for qt3 or qt4, add uic for qt5 to your path') + find_bin(['moc-qt5','moc'],'QT_MOC') + find_bin(['rcc-qt5','rcc'],'QT_RCC') + find_bin(['lrelease-qt5','lrelease'],'QT_LRELEASE') + find_bin(['lupdate-qt5','lupdate'],'QT_LUPDATE') + env['UIC_ST']='%s -o %s' + env['MOC_ST']='-o' + env['ui_PATTERN']='ui_%s.h' + env['QT_LRELEASE_FLAGS']=['-silent'] + env.MOCCPPPATH_ST='-I%s' + env.MOCDEFINES_ST='-D%s' +@conf +def find_qt5_libraries(self): + qtlibs=getattr(Options.options,'qtlibs',None)or os.environ.get("QT5_LIBDIR",None) + if not qtlibs: + try: + qtlibs=self.cmd_and_log(self.env.QMAKE+['-query','QT_INSTALL_LIBS']).strip() + except Errors.WafError: + qtdir=self.cmd_and_log(self.env.QMAKE+['-query','QT_INSTALL_PREFIX']).strip()+os.sep + qtlibs=os.path.join(qtdir,'lib') + self.msg('Found the Qt5 libraries in',qtlibs) + qtincludes=os.environ.get("QT5_INCLUDES",None)or self.cmd_and_log(self.env.QMAKE+['-query','QT_INSTALL_HEADERS']).strip() + env=self.env + if not'PKG_CONFIG_PATH'in os.environ: + os.environ['PKG_CONFIG_PATH']='%s:%s/pkgconfig:/usr/lib/qt5/lib/pkgconfig:/opt/qt5/lib/pkgconfig:/usr/lib/qt5/lib:/opt/qt5/lib'%(qtlibs,qtlibs) + try: + if os.environ.get("QT5_XCOMPILE",None): + raise self.errors.ConfigurationError() + self.check_cfg(atleast_pkgconfig_version='0.1') + except self.errors.ConfigurationError: + for i in self.qt5_vars: + uselib=i.upper() + if Utils.unversioned_sys_platform()=="darwin": + frameworkName=i+".framework" + qtDynamicLib=os.path.join(qtlibs,frameworkName,i) + if os.path.exists(qtDynamicLib): + env.append_unique('FRAMEWORK_'+uselib,i) + self.msg('Checking for %s'%i,qtDynamicLib,'GREEN') + else: + self.msg('Checking for %s'%i,False,'YELLOW') + env.append_unique('INCLUDES_'+uselib,os.path.join(qtlibs,frameworkName,'Headers')) + elif env.DEST_OS!="win32": + qtDynamicLib=os.path.join(qtlibs,"lib"+i+".so") + qtStaticLib=os.path.join(qtlibs,"lib"+i+".a") + if os.path.exists(qtDynamicLib): + env.append_unique('LIB_'+uselib,i) + self.msg('Checking for %s'%i,qtDynamicLib,'GREEN') + elif os.path.exists(qtStaticLib): + env.append_unique('LIB_'+uselib,i) + self.msg('Checking for %s'%i,qtStaticLib,'GREEN') + else: + self.msg('Checking for %s'%i,False,'YELLOW') + env.append_unique('LIBPATH_'+uselib,qtlibs) + env.append_unique('INCLUDES_'+uselib,qtincludes) + env.append_unique('INCLUDES_'+uselib,os.path.join(qtincludes,i)) + else: + for k in("lib%s.a","lib%s5.a","%s.lib","%s5.lib"): + lib=os.path.join(qtlibs,k%i) + if os.path.exists(lib): + env.append_unique('LIB_'+uselib,i+k[k.find("%s")+2:k.find('.')]) + self.msg('Checking for %s'%i,lib,'GREEN') + break + else: + self.msg('Checking for %s'%i,False,'YELLOW') + env.append_unique('LIBPATH_'+uselib,qtlibs) + env.append_unique('INCLUDES_'+uselib,qtincludes) + env.append_unique('INCLUDES_'+uselib,os.path.join(qtincludes,i.replace('Qt5','Qt'))) + uselib=i.upper()+"_debug" + for k in("lib%sd.a","lib%sd5.a","%sd.lib","%sd5.lib"): + lib=os.path.join(qtlibs,k%i) + if os.path.exists(lib): + env.append_unique('LIB_'+uselib,i+k[k.find("%s")+2:k.find('.')]) + self.msg('Checking for %s'%i,lib,'GREEN') + break + else: + self.msg('Checking for %s'%i,False,'YELLOW') + env.append_unique('LIBPATH_'+uselib,qtlibs) + env.append_unique('INCLUDES_'+uselib,qtincludes) + env.append_unique('INCLUDES_'+uselib,os.path.join(qtincludes,i.replace('Qt5','Qt'))) + else: + for i in self.qt5_vars_debug+self.qt5_vars: + self.check_cfg(package=i,args='--cflags --libs',mandatory=False) 
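+ # Usage sketch for this tool (hypothetical file and target names; the uselib names QT5CORE, QT5GUI, QT5WIDGETS are the upper-cased entries of QT5_LIBS): + # + #   def configure(conf): + #       conf.load('compiler_cxx qt5') + #   def build(bld): + #       bld.program(features='qt5', source='main.cpp res.qrc forms/main.ui', target='app', use='QT5CORE QT5GUI QT5WIDGETS')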
+@conf +def simplify_qt5_libs(self): + env=self.env + def process_lib(vars_,coreval): + for d in vars_: + var=d.upper() + if var=='QTCORE': + continue + value=env['LIBPATH_'+var] + if value: + core=env[coreval] + accu=[] + for lib in value: + if lib in core: + continue + accu.append(lib) + env['LIBPATH_'+var]=accu + process_lib(self.qt5_vars,'LIBPATH_QTCORE') + process_lib(self.qt5_vars_debug,'LIBPATH_QTCORE_DEBUG') +@conf +def add_qt5_rpath(self): + env=self.env + if getattr(Options.options,'want_rpath',False): + def process_rpath(vars_,coreval): + for d in vars_: + var=d.upper() + value=env['LIBPATH_'+var] + if value: + core=env[coreval] + accu=[] + for lib in value: + if var!='QTCORE': + if lib in core: + continue + accu.append('-Wl,--rpath='+lib) + env['RPATH_'+var]=accu + process_rpath(self.qt5_vars,'LIBPATH_QTCORE') + process_rpath(self.qt5_vars_debug,'LIBPATH_QTCORE_DEBUG') +@conf +def set_qt5_libs_to_check(self): + if not hasattr(self,'qt5_vars'): + self.qt5_vars=QT5_LIBS + self.qt5_vars=Utils.to_list(self.qt5_vars) + if not hasattr(self,'qt5_vars_debug'): + self.qt5_vars_debug=[a+'_debug'for a in self.qt5_vars] + self.qt5_vars_debug=Utils.to_list(self.qt5_vars_debug) +@conf +def set_qt5_defines(self): + if sys.platform!='win32': + return + for x in self.qt5_vars: + y=x.replace('Qt5','Qt')[2:].upper() + self.env.append_unique('DEFINES_%s'%x.upper(),'QT_%s_LIB'%y) + self.env.append_unique('DEFINES_%s_DEBUG'%x.upper(),'QT_%s_LIB'%y) +def options(opt): + opt.add_option('--want-rpath',action='store_true',default=False,dest='want_rpath',help='enable the rpath for qt libraries') + opt.add_option('--header-ext',type='string',default='',help='header extension for moc files',dest='qt_header_ext') + for i in'qtdir qtbin qtlibs'.split(): + opt.add_option('--'+i,type='string',default='',dest=i) + opt.add_option('--translate',action="store_true",help="collect translation strings",dest="trans_qt5",default=False) diff -Nru lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/ruby.py lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/ruby.py --- lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/ruby.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/ruby.py 2019-06-06 20:12:09.000000000 +0000 @@ -0,0 +1,101 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file + +import os +from waflib import Options,Utils,Task +from waflib.TaskGen import before_method,feature,extension +from waflib.Configure import conf +@feature('rubyext') +@before_method('apply_incpaths','apply_lib_vars','apply_bundle','apply_link') +def init_rubyext(self): + self.install_path='${ARCHDIR_RUBY}' + self.uselib=self.to_list(getattr(self,'uselib','')) + if not'RUBY'in self.uselib: + self.uselib.append('RUBY') + if not'RUBYEXT'in self.uselib: + self.uselib.append('RUBYEXT') +@feature('rubyext') +@before_method('apply_link','propagate_uselib') +def apply_ruby_so_name(self): + self.env['cshlib_PATTERN']=self.env['cxxshlib_PATTERN']=self.env['rubyext_PATTERN'] +@conf +def check_ruby_version(self,minver=()): + if Options.options.rubybinary: + self.env.RUBY=Options.options.rubybinary + else: + self.find_program('ruby',var='RUBY') + ruby=self.env.RUBY + try: + version=self.cmd_and_log(ruby+['-e','puts defined?(VERSION) ? 
VERSION : RUBY_VERSION']).strip() + except Exception: + self.fatal('could not determine ruby version') + self.env.RUBY_VERSION=version + try: + ver=tuple(map(int,version.split("."))) + except Exception: + self.fatal('unsupported ruby version %r'%version) + cver='' + if minver: + cver='> '+'.'.join(str(x)for x in minver) + if ver<minver: + self.fatal('ruby is too old %r'%version) + self.msg('Checking for ruby version %s'%cver,version) +@conf +def check_ruby_ext_devel(self): + if not self.env.RUBY: + self.fatal('ruby detection is required first') + if not self.env.CC_NAME and not self.env.CXX_NAME: + self.fatal('load a c/c++ compiler first') + version=tuple(map(int,self.env.RUBY_VERSION.split("."))) + def read_out(cmd): + return Utils.to_list(self.cmd_and_log(self.env.RUBY+['-rrbconfig','-e',cmd])) + def read_config(key): + return read_out('puts RbConfig::CONFIG[%r]'%key) + cpppath=archdir=read_config('archdir') + if version>=(1,9,0): + ruby_hdrdir=read_config('rubyhdrdir') + cpppath+=ruby_hdrdir + if version>=(2,0,0): + cpppath+=read_config('rubyarchhdrdir') + cpppath+=[os.path.join(ruby_hdrdir[0],read_config('arch')[0])] + self.check(header_name='ruby.h',includes=cpppath,errmsg='could not find ruby header file',link_header_test=False) + self.env.LIBPATH_RUBYEXT=read_config('libdir') + self.env.LIBPATH_RUBYEXT+=archdir + self.env.INCLUDES_RUBYEXT=cpppath + self.env.CFLAGS_RUBYEXT=read_config('CCDLFLAGS') + self.env.rubyext_PATTERN='%s.'+read_config('DLEXT')[0] + flags=read_config('LDSHARED') + while flags and flags[0][0]!='-': + flags=flags[1:] + if len(flags)>1 and flags[1]=="ppc": + flags=flags[2:] + self.env.LINKFLAGS_RUBYEXT=flags + self.env.LINKFLAGS_RUBYEXT+=read_config('LIBS') + self.env.LINKFLAGS_RUBYEXT+=read_config('LIBRUBYARG_SHARED') + if Options.options.rubyarchdir: + self.env.ARCHDIR_RUBY=Options.options.rubyarchdir + else: + self.env.ARCHDIR_RUBY=read_config('sitearchdir')[0] + if Options.options.rubylibdir: + self.env.LIBDIR_RUBY=Options.options.rubylibdir + else: + self.env.LIBDIR_RUBY=read_config('sitelibdir')[0] +@conf +def check_ruby_module(self,module_name): + self.start_msg('Ruby module %s'%module_name) + try: + self.cmd_and_log(self.env.RUBY+['-e','require \'%s\';puts 1'%module_name]) + except Exception: + self.end_msg(False) + self.fatal('Could not find the ruby module %r'%module_name) + self.end_msg(True) +@extension('.rb') +def process(self,node): + return self.create_task('run_ruby',node) +class run_ruby(Task.Task): + run_str='${RUBY} ${RBFLAGS} -I ${SRC[0].parent.abspath()} ${SRC}' +def options(opt): + opt.add_option('--with-ruby-archdir',type='string',dest='rubyarchdir',help='Specify directory where to install arch specific files') + opt.add_option('--with-ruby-libdir',type='string',dest='rubylibdir',help='Specify alternate ruby library path') + opt.add_option('--with-ruby-binary',type='string',dest='rubybinary',help='Specify alternate ruby binary') diff -Nru lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/suncc.py lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/suncc.py --- lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/suncc.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/suncc.py 2019-06-06 20:12:09.000000000 +0000 @@ -0,0 +1,46 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file + +from waflib.Tools import ccroot,ar +from waflib.Configure import conf +@conf +def find_scc(conf): + v=conf.env + cc=conf.find_program('cc',var='CC') + try: + conf.cmd_and_log(cc+['-flags']) + except Exception: + conf.fatal('%r is not a Sun compiler'%cc) + v.CC_NAME='sun' + conf.get_suncc_version(cc) +@conf +def scc_common_flags(conf): + v=conf.env + v['CC_SRC_F']=[] + v['CC_TGT_F']=['-c','-o'] + if not v['LINK_CC']:v['LINK_CC']=v['CC'] + v['CCLNK_SRC_F']='' + v['CCLNK_TGT_F']=['-o'] + v['CPPPATH_ST']='-I%s' + v['DEFINES_ST']='-D%s' + v['LIB_ST']='-l%s' + v['LIBPATH_ST']='-L%s' + v['STLIB_ST']='-l%s' + v['STLIBPATH_ST']='-L%s' + v['SONAME_ST']='-Wl,-h,%s' + v['SHLIB_MARKER']='-Bdynamic' + v['STLIB_MARKER']='-Bstatic' + v['cprogram_PATTERN']='%s' + v['CFLAGS_cshlib']=['-xcode=pic32','-DPIC'] + v['LINKFLAGS_cshlib']=['-G'] + v['cshlib_PATTERN']='lib%s.so' + v['LINKFLAGS_cstlib']=['-Bstatic'] + v['cstlib_PATTERN']='lib%s.a' +def configure(conf): + conf.find_scc() + conf.find_ar() + conf.scc_common_flags() + conf.cc_load_tools() + conf.cc_add_flags() + conf.link_add_flags() diff -Nru lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/suncxx.py lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/suncxx.py --- lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/suncxx.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/suncxx.py 2019-06-06 20:12:09.000000000 +0000 @@ -0,0 +1,46 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file + +from waflib.Tools import ccroot,ar +from waflib.Configure import conf +@conf +def find_sxx(conf): + v=conf.env + cc=conf.find_program(['CC','c++'],var='CXX') + try: + conf.cmd_and_log(cc+['-flags']) + except Exception: + conf.fatal('%r is not a Sun compiler'%cc) + v.CXX_NAME='sun' + conf.get_suncc_version(cc) +@conf +def sxx_common_flags(conf): + v=conf.env + v['CXX_SRC_F']=[] + v['CXX_TGT_F']=['-c','-o'] + if not v['LINK_CXX']:v['LINK_CXX']=v['CXX'] + v['CXXLNK_SRC_F']=[] + v['CXXLNK_TGT_F']=['-o'] + v['CPPPATH_ST']='-I%s' + v['DEFINES_ST']='-D%s' + v['LIB_ST']='-l%s' + v['LIBPATH_ST']='-L%s' + v['STLIB_ST']='-l%s' + v['STLIBPATH_ST']='-L%s' + v['SONAME_ST']='-Wl,-h,%s' + v['SHLIB_MARKER']='-Bdynamic' + v['STLIB_MARKER']='-Bstatic' + v['cxxprogram_PATTERN']='%s' + v['CXXFLAGS_cxxshlib']=['-xcode=pic32','-DPIC'] + v['LINKFLAGS_cxxshlib']=['-G'] + v['cxxshlib_PATTERN']='lib%s.so' + v['LINKFLAGS_cxxstlib']=['-Bstatic'] + v['cxxstlib_PATTERN']='lib%s.a' +def configure(conf): + conf.find_sxx() + conf.find_ar() + conf.sxx_common_flags() + conf.cxx_load_tools() + conf.cxx_add_flags() + conf.link_add_flags() diff -Nru lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/tex.py lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/tex.py --- lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/tex.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/tex.py 2019-06-06 20:12:09.000000000 +0000 @@ -0,0 +1,317 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file + +import os,re +from waflib import Utils,Task,Errors,Logs,Node +from waflib.TaskGen import feature,before_method +re_bibunit=re.compile(r'\\(?P<type>putbib)\[(?P<file>[^\[\]]*)\]',re.M) +def bibunitscan(self): + node=self.inputs[0] + nodes=[] + if not node:return nodes + code=node.read() + for match in re_bibunit.finditer(code): + path=match.group('file') + if path: + for k in('','.bib'): + Logs.debug('tex: trying %s%s'%(path,k)) + fi=node.parent.find_resource(path+k) + if fi: + nodes.append(fi) + else: + Logs.debug('tex: could not find %s'%path) + Logs.debug("tex: found the following bibunit files: %s"%nodes) + return nodes +exts_deps_tex=['','.ltx','.tex','.bib','.pdf','.png','.eps','.ps','.sty'] +exts_tex=['.ltx','.tex'] +re_tex=re.compile(r'\\(?P<type>usepackage|RequirePackage|include|bibliography([^\[\]{}]*)|putbib|includegraphics|input|import|bringin|lstinputlisting)(\[[^\[\]]*\])?{(?P<file>[^{}]*)}',re.M) +g_bibtex_re=re.compile('bibdata',re.M) +g_glossaries_re=re.compile('\\@newglossary',re.M) +class tex(Task.Task): + bibtex_fun,_=Task.compile_fun('${BIBTEX} ${BIBTEXFLAGS} ${SRCFILE}',shell=False) + bibtex_fun.__doc__=""" + Execute the program **bibtex** + """ + makeindex_fun,_=Task.compile_fun('${MAKEINDEX} ${MAKEINDEXFLAGS} ${SRCFILE}',shell=False) + makeindex_fun.__doc__=""" + Execute the program **makeindex** + """ + makeglossaries_fun,_=Task.compile_fun('${MAKEGLOSSARIES} ${SRCFILE}',shell=False) + makeglossaries_fun.__doc__=""" + Execute the program **makeglossaries** + """ + def exec_command(self,cmd,**kw): + bld=self.generator.bld + Logs.info('runner: %r'%cmd) + try: + if not kw.get('cwd',None): + kw['cwd']=bld.cwd + except AttributeError: + bld.cwd=kw['cwd']=bld.variant_dir + return Utils.subprocess.Popen(cmd,**kw).wait() + def scan_aux(self,node): + nodes=[node] + re_aux=re.compile(r'\\@input{(?P<file>[^{}]*)}',re.M) + def parse_node(node): + code=node.read() + for match in re_aux.finditer(code): + path=match.group('file') + found=node.parent.find_or_declare(path) + if found and found not in nodes: + Logs.debug('tex: found aux node '+found.abspath()) + nodes.append(found) + parse_node(found) + parse_node(node) + return nodes + def scan(self): + node=self.inputs[0] + nodes=[] + names=[] + seen=[] + if not node:return(nodes,names) + def parse_node(node): + if node in seen: + return + seen.append(node) + code=node.read() + global re_tex + for match in re_tex.finditer(code): + multibib=match.group('type') + if multibib and multibib.startswith('bibliography'): + multibib=multibib[len('bibliography'):] + if multibib.startswith('style'): + continue + else: + multibib=None + for path in match.group('file').split(','): + if path: + add_name=True + found=None + for k in exts_deps_tex: + for up in self.texinputs_nodes: + Logs.debug('tex: trying %s%s'%(path,k)) + found=up.find_resource(path+k) + if found: + break + for tsk in self.generator.tasks: + if not found or found in tsk.outputs: + break + else: + nodes.append(found) + add_name=False + for ext in exts_tex: + if found.name.endswith(ext): + parse_node(found) + break + if found and multibib and found.name.endswith('.bib'): + try: + self.multibibs.append(found) + except AttributeError: + self.multibibs=[found] + if add_name: + names.append(path) + parse_node(node) + for x in nodes: + x.parent.get_bld().mkdir() + Logs.debug("tex: found the following : %s and names %s"%(nodes,names)) + return(nodes,names) + def check_status(self,msg,retcode): + if retcode!=0: + raise Errors.WafError("%r command exit status 
%r"%(msg,retcode)) + def bibfile(self): + for aux_node in self.aux_nodes: + try: + ct=aux_node.read() + except EnvironmentError: + Logs.error('Error reading %s: %r'%aux_node.abspath()) + continue + if g_bibtex_re.findall(ct): + Logs.info('calling bibtex') + self.env.env={} + self.env.env.update(os.environ) + self.env.env.update({'BIBINPUTS':self.texinputs(),'BSTINPUTS':self.texinputs()}) + self.env.SRCFILE=aux_node.name[:-4] + self.check_status('error when calling bibtex',self.bibtex_fun()) + for node in getattr(self,'multibibs',[]): + self.env.env={} + self.env.env.update(os.environ) + self.env.env.update({'BIBINPUTS':self.texinputs(),'BSTINPUTS':self.texinputs()}) + self.env.SRCFILE=node.name[:-4] + self.check_status('error when calling bibtex',self.bibtex_fun()) + def bibunits(self): + try: + bibunits=bibunitscan(self) + except OSError: + Logs.error('error bibunitscan') + else: + if bibunits: + fn=['bu'+str(i)for i in range(1,len(bibunits)+1)] + if fn: + Logs.info('calling bibtex on bibunits') + for f in fn: + self.env.env={'BIBINPUTS':self.texinputs(),'BSTINPUTS':self.texinputs()} + self.env.SRCFILE=f + self.check_status('error when calling bibtex',self.bibtex_fun()) + def makeindex(self): + self.idx_node=self.inputs[0].change_ext('.idx') + try: + idx_path=self.idx_node.abspath() + os.stat(idx_path) + except OSError: + Logs.info('index file %s absent, not calling makeindex'%idx_path) + else: + Logs.info('calling makeindex') + self.env.SRCFILE=self.idx_node.name + self.env.env={} + self.check_status('error when calling makeindex %s'%idx_path,self.makeindex_fun()) + def bibtopic(self): + p=self.inputs[0].parent.get_bld() + if os.path.exists(os.path.join(p.abspath(),'btaux.aux')): + self.aux_nodes+=p.ant_glob('*[0-9].aux') + def makeglossaries(self): + src_file=self.inputs[0].abspath() + base_file=os.path.basename(src_file) + base,_=os.path.splitext(base_file) + for aux_node in self.aux_nodes: + try: + ct=aux_node.read() + except EnvironmentError: + Logs.error('Error reading %s: %r'%aux_node.abspath()) + continue + if g_glossaries_re.findall(ct): + if not self.env.MAKEGLOSSARIES: + raise Errors.WafError("The program 'makeglossaries' is missing!") + Logs.warn('calling makeglossaries') + self.env.SRCFILE=base + self.check_status('error when calling makeglossaries %s'%base,self.makeglossaries_fun()) + return + def texinputs(self): + return os.pathsep.join([k.abspath()for k in self.texinputs_nodes])+os.pathsep + def run(self): + env=self.env + if not env['PROMPT_LATEX']: + env.append_value('LATEXFLAGS','-interaction=batchmode') + env.append_value('PDFLATEXFLAGS','-interaction=batchmode') + env.append_value('XELATEXFLAGS','-interaction=batchmode') + self.cwd=self.inputs[0].parent.get_bld().abspath() + Logs.info('first pass on %s'%self.__class__.__name__) + cur_hash=self.hash_aux_nodes() + self.call_latex() + self.hash_aux_nodes() + self.bibtopic() + self.bibfile() + self.bibunits() + self.makeindex() + self.makeglossaries() + for i in range(10): + prev_hash=cur_hash + cur_hash=self.hash_aux_nodes() + if not cur_hash: + Logs.error('No aux.h to process') + if cur_hash and cur_hash==prev_hash: + break + Logs.info('calling %s'%self.__class__.__name__) + self.call_latex() + def hash_aux_nodes(self): + try: + self.aux_nodes + except AttributeError: + try: + self.aux_nodes=self.scan_aux(self.inputs[0].change_ext('.aux')) + except IOError: + return None + return Utils.h_list([Utils.h_file(x.abspath())for x in self.aux_nodes]) + def call_latex(self): + self.env.env={} + self.env.env.update(os.environ) 
+ self.env.env.update({'TEXINPUTS':self.texinputs()}) + self.env.SRCFILE=self.inputs[0].abspath() + self.check_status('error when calling latex',self.texfun()) +class latex(tex): + texfun,vars=Task.compile_fun('${LATEX} ${LATEXFLAGS} ${SRCFILE}',shell=False) +class pdflatex(tex): + texfun,vars=Task.compile_fun('${PDFLATEX} ${PDFLATEXFLAGS} ${SRCFILE}',shell=False) +class xelatex(tex): + texfun,vars=Task.compile_fun('${XELATEX} ${XELATEXFLAGS} ${SRCFILE}',shell=False) +class dvips(Task.Task): + run_str='${DVIPS} ${DVIPSFLAGS} ${SRC} -o ${TGT}' + color='BLUE' + after=['latex','pdflatex','xelatex'] +class dvipdf(Task.Task): + run_str='${DVIPDF} ${DVIPDFFLAGS} ${SRC} ${TGT}' + color='BLUE' + after=['latex','pdflatex','xelatex'] +class pdf2ps(Task.Task): + run_str='${PDF2PS} ${PDF2PSFLAGS} ${SRC} ${TGT}' + color='BLUE' + after=['latex','pdflatex','xelatex'] +@feature('tex') +@before_method('process_source') +def apply_tex(self): + if not getattr(self,'type',None)in('latex','pdflatex','xelatex'): + self.type='pdflatex' + outs=Utils.to_list(getattr(self,'outs',[])) + self.env['PROMPT_LATEX']=getattr(self,'prompt',1) + deps_lst=[] + if getattr(self,'deps',None): + deps=self.to_list(self.deps) + for dep in deps: + if isinstance(dep,str): + n=self.path.find_resource(dep) + if not n: + self.bld.fatal('Could not find %r for %r'%(dep,self)) + if not n in deps_lst: + deps_lst.append(n) + elif isinstance(dep,Node.Node): + deps_lst.append(dep) + for node in self.to_nodes(self.source): + if self.type=='latex': + task=self.create_task('latex',node,node.change_ext('.dvi')) + elif self.type=='pdflatex': + task=self.create_task('pdflatex',node,node.change_ext('.pdf')) + elif self.type=='xelatex': + task=self.create_task('xelatex',node,node.change_ext('.pdf')) + task.env=self.env + if deps_lst: + for n in deps_lst: + if not n in task.dep_nodes: + task.dep_nodes.append(n) + if hasattr(self,'texinputs_nodes'): + task.texinputs_nodes=self.texinputs_nodes + else: + task.texinputs_nodes=[node.parent,node.parent.get_bld(),self.path,self.path.get_bld()] + lst=os.environ.get('TEXINPUTS','') + if self.env.TEXINPUTS: + lst+=os.pathsep+self.env.TEXINPUTS + if lst: + lst=lst.split(os.pathsep) + for x in lst: + if x: + if os.path.isabs(x): + p=self.bld.root.find_node(x) + if p: + task.texinputs_nodes.append(p) + else: + Logs.error('Invalid TEXINPUTS folder %s'%x) + else: + Logs.error('Cannot resolve relative paths in TEXINPUTS %s'%x) + if self.type=='latex': + if'ps'in outs: + tsk=self.create_task('dvips',task.outputs,node.change_ext('.ps')) + tsk.env.env=dict(os.environ) + if'pdf'in outs: + tsk=self.create_task('dvipdf',task.outputs,node.change_ext('.pdf')) + tsk.env.env=dict(os.environ) + elif self.type=='pdflatex': + if'ps'in outs: + self.create_task('pdf2ps',task.outputs,node.change_ext('.ps')) + self.source=[] +def configure(self): + v=self.env + for p in'tex latex pdflatex xelatex bibtex dvips dvipdf ps2pdf makeindex pdf2ps makeglossaries'.split(): + try: + self.find_program(p,var=p.upper()) + except self.errors.ConfigurationError: + pass + v['DVIPSFLAGS']='-Ppdf' diff -Nru lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/vala.py lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/vala.py --- lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/vala.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/vala.py 2019-06-06 20:12:09.000000000 +0000 @@ -0,0 +1,212 @@ +#! 
/usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file + +import re +from waflib import Context,Task,Utils,Logs,Options,Errors,Node +from waflib.TaskGen import extension,taskgen_method +from waflib.Configure import conf +class valac(Task.Task): + vars=["VALAC","VALAC_VERSION","VALAFLAGS"] + ext_out=['.h'] + def run(self): + cmd=self.env.VALAC+self.env.VALAFLAGS + resources=getattr(self,'vala_exclude',[]) + cmd.extend([a.abspath()for a in self.inputs if a not in resources]) + ret=self.exec_command(cmd,cwd=self.vala_dir_node.abspath()) + if ret: + return ret + if self.generator.dump_deps_node: + self.generator.dump_deps_node.write('\n'.join(self.generator.packages)) + return ret +valac=Task.update_outputs(valac) +@taskgen_method +def init_vala_task(self): + self.profile=getattr(self,'profile','gobject') + if self.profile=='gobject': + self.uselib=Utils.to_list(getattr(self,'uselib',[])) + if not'GOBJECT'in self.uselib: + self.uselib.append('GOBJECT') + def addflags(flags): + self.env.append_value('VALAFLAGS',flags) + if self.profile: + addflags('--profile=%s'%self.profile) + valatask=self.valatask + if hasattr(self,'vala_dir'): + if isinstance(self.vala_dir,str): + valatask.vala_dir_node=self.path.get_bld().make_node(self.vala_dir) + try: + valatask.vala_dir_node.mkdir() + except OSError: + raise self.bld.fatal('Cannot create the vala dir %r'%valatask.vala_dir_node) + else: + valatask.vala_dir_node=self.vala_dir + else: + valatask.vala_dir_node=self.path.get_bld() + addflags('--directory=%s'%valatask.vala_dir_node.abspath()) + if hasattr(self,'thread'): + if self.profile=='gobject': + if not'GTHREAD'in self.uselib: + self.uselib.append('GTHREAD') + else: + Logs.warn("Profile %s means no threading support"%self.profile) + self.thread=False + if self.thread: + addflags('--thread') + self.is_lib='cprogram'not in self.features + if self.is_lib: + addflags('--library=%s'%self.target) + h_node=valatask.vala_dir_node.find_or_declare('%s.h'%self.target) + valatask.outputs.append(h_node) + addflags('--header=%s'%h_node.name) + valatask.outputs.append(valatask.vala_dir_node.find_or_declare('%s.vapi'%self.target)) + if getattr(self,'gir',None): + gir_node=valatask.vala_dir_node.find_or_declare('%s.gir'%self.gir) + addflags('--gir=%s'%gir_node.name) + valatask.outputs.append(gir_node) + self.vala_target_glib=getattr(self,'vala_target_glib',getattr(Options.options,'vala_target_glib',None)) + if self.vala_target_glib: + addflags('--target-glib=%s'%self.vala_target_glib) + addflags(['--define=%s'%x for x in Utils.to_list(getattr(self,'vala_defines',[]))]) + packages_private=Utils.to_list(getattr(self,'packages_private',[])) + addflags(['--pkg=%s'%x for x in packages_private]) + def _get_api_version(): + api_version='1.0' + if hasattr(Context.g_module,'API_VERSION'): + version=Context.g_module.API_VERSION.split(".") + if version[0]=="0": + api_version="0."+version[1] + else: + api_version=version[0]+".0" + return api_version + self.includes=Utils.to_list(getattr(self,'includes',[])) + self.uselib=self.to_list(getattr(self,'uselib',[])) + valatask.install_path=getattr(self,'install_path','') + valatask.vapi_path=getattr(self,'vapi_path','${DATAROOTDIR}/vala/vapi') + valatask.pkg_name=getattr(self,'pkg_name',self.env['PACKAGE']) + valatask.header_path=getattr(self,'header_path','${INCLUDEDIR}/%s-%s'%(valatask.pkg_name,_get_api_version())) + valatask.install_binding=getattr(self,'install_binding',True) + 
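# The 'use' handling below walks dependent task generators recursively, turning their .vapi outputs into --pkg/--vapidir flags and ordering this valac run after the tasks that produce those bindings. + 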
self.packages=packages=Utils.to_list(getattr(self,'packages',[])) + self.vapi_dirs=vapi_dirs=Utils.to_list(getattr(self,'vapi_dirs',[])) + if hasattr(self,'use'): + local_packages=Utils.to_list(self.use)[:] + seen=[] + while len(local_packages)>0: + package=local_packages.pop() + if package in seen: + continue + seen.append(package) + try: + package_obj=self.bld.get_tgen_by_name(package) + except Errors.WafError: + continue + package_name=package_obj.target + for task in package_obj.tasks: + for output in task.outputs: + if output.name==package_name+".vapi": + valatask.set_run_after(task) + if package_name not in packages: + packages.append(package_name) + if output.parent not in vapi_dirs: + vapi_dirs.append(output.parent) + if output.parent not in self.includes: + self.includes.append(output.parent) + if hasattr(package_obj,'use'): + lst=self.to_list(package_obj.use) + lst.reverse() + local_packages=[pkg for pkg in lst if pkg not in seen]+local_packages + addflags(['--pkg=%s'%p for p in packages]) + for vapi_dir in vapi_dirs: + if isinstance(vapi_dir,Node.Node): + v_node=vapi_dir + else: + v_node=self.path.find_dir(vapi_dir) + if not v_node: + Logs.warn('Unable to locate Vala API directory: %r'%vapi_dir) + else: + addflags('--vapidir=%s'%v_node.abspath()) + self.dump_deps_node=None + if self.is_lib and self.packages: + self.dump_deps_node=valatask.vala_dir_node.find_or_declare('%s.deps'%self.target) + valatask.outputs.append(self.dump_deps_node) + self.includes.append(self.bld.srcnode.abspath()) + self.includes.append(self.bld.bldnode.abspath()) + if self.is_lib and valatask.install_binding: + headers_list=[o for o in valatask.outputs if o.suffix()==".h"] + try: + self.install_vheader.source=headers_list + except AttributeError: + self.install_vheader=self.bld.install_files(valatask.header_path,headers_list,self.env) + vapi_list=[o for o in valatask.outputs if(o.suffix()in(".vapi",".deps"))] + try: + self.install_vapi.source=vapi_list + except AttributeError: + self.install_vapi=self.bld.install_files(valatask.vapi_path,vapi_list,self.env) + gir_list=[o for o in valatask.outputs if o.suffix()=='.gir'] + try: + self.install_gir.source=gir_list + except AttributeError: + self.install_gir=self.bld.install_files(getattr(self,'gir_path','${DATAROOTDIR}/gir-1.0'),gir_list,self.env) + if hasattr(self,'vala_resources'): + nodes=self.to_nodes(self.vala_resources) + valatask.vala_exclude=getattr(valatask,'vala_exclude',[])+nodes + valatask.inputs.extend(nodes) + for x in nodes: + addflags(['--gresources',x.abspath()]) +@extension('.vala','.gs') +def vala_file(self,node): + try: + valatask=self.valatask + except AttributeError: + valatask=self.valatask=self.create_task('valac') + self.init_vala_task() + valatask.inputs.append(node) + name=node.name[:node.name.rfind('.')]+'.c' + c_node=valatask.vala_dir_node.find_or_declare(name) + valatask.outputs.append(c_node) + self.source.append(c_node) +@conf +def find_valac(self,valac_name,min_version): + valac=self.find_program(valac_name,var='VALAC') + try: + output=self.cmd_and_log(valac+['--version']) + except Exception: + valac_version=None + else: + ver=re.search(r'\d+.\d+.\d+',output).group(0).split('.') + valac_version=tuple([int(x)for x in ver]) + self.msg('Checking for %s version >= %r'%(valac_name,min_version),valac_version,valac_version and valac_version>=min_version) + if valac and valac_version<min_version: + self.fatal("%s version %r is too old, need >= %r"%(valac_name,valac_version,min_version)) + self.env['VALAC_VERSION']=valac_version + return valac +@conf +def 
check_vala(self,min_version=(0,8,0),branch=None): + if not branch: + branch=min_version[:2] + try: + find_valac(self,'valac-%d.%d'%(branch[0],branch[1]),min_version) + except self.errors.ConfigurationError: + find_valac(self,'valac',min_version) +@conf +def check_vala_deps(self): + if not self.env['HAVE_GOBJECT']: + pkg_args={'package':'gobject-2.0','uselib_store':'GOBJECT','args':'--cflags --libs'} + if getattr(Options.options,'vala_target_glib',None): + pkg_args['atleast_version']=Options.options.vala_target_glib + self.check_cfg(**pkg_args) + if not self.env['HAVE_GTHREAD']: + pkg_args={'package':'gthread-2.0','uselib_store':'GTHREAD','args':'--cflags --libs'} + if getattr(Options.options,'vala_target_glib',None): + pkg_args['atleast_version']=Options.options.vala_target_glib + self.check_cfg(**pkg_args) +def configure(self): + self.load('gnu_dirs') + self.check_vala_deps() + self.check_vala() + self.add_os_flags('VALAFLAGS') + self.env.append_unique('VALAFLAGS',['-C']) +def options(opt): + opt.load('gnu_dirs') + valaopts=opt.add_option_group('Vala Compiler Options') + valaopts.add_option('--vala-target-glib',default=None,dest='vala_target_glib',metavar='MAJOR.MINOR',help='Target version of glib for Vala GObject code generation') diff -Nru lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/waf_unit_test.py lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/waf_unit_test.py --- lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/waf_unit_test.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/waf_unit_test.py 2019-06-06 20:12:09.000000000 +0000 @@ -0,0 +1,106 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file + +import os +from waflib.TaskGen import feature,after_method,taskgen_method +from waflib import Utils,Task,Logs,Options +testlock=Utils.threading.Lock() +@feature('test') +@after_method('apply_link') +def make_test(self): + if getattr(self,'link_task',None): + self.create_task('utest',self.link_task.outputs) +@taskgen_method +def add_test_results(self,tup): + Logs.debug("ut: %r",tup) + self.utest_result=tup + try: + self.bld.utest_results.append(tup) + except AttributeError: + self.bld.utest_results=[tup] +class utest(Task.Task): + color='PINK' + after=['vnum','inst'] + vars=[] + def runnable_status(self): + if getattr(Options.options,'no_tests',False): + return Task.SKIP_ME + ret=super(utest,self).runnable_status() + if ret==Task.SKIP_ME: + if getattr(Options.options,'all_tests',False): + return Task.RUN_ME + return ret + def add_path(self,dct,path,var): + dct[var]=os.pathsep.join(Utils.to_list(path)+[os.environ.get(var,'')]) + def get_test_env(self): + try: + fu=getattr(self.generator.bld,'all_test_paths') + except AttributeError: + fu=os.environ.copy() + lst=[] + for g in self.generator.bld.groups: + for tg in g: + if getattr(tg,'link_task',None): + s=tg.link_task.outputs[0].parent.abspath() + if s not in lst: + lst.append(s) + if Utils.is_win32: + self.add_path(fu,lst,'PATH') + elif Utils.unversioned_sys_platform()=='darwin': + self.add_path(fu,lst,'DYLD_LIBRARY_PATH') + self.add_path(fu,lst,'LD_LIBRARY_PATH') + else: + self.add_path(fu,lst,'LD_LIBRARY_PATH') + self.generator.bld.all_test_paths=fu + return fu + def run(self): + filename=self.inputs[0].abspath() + self.ut_exec=getattr(self.generator,'ut_exec',[filename]) + if getattr(self.generator,'ut_fun',None): + self.generator.ut_fun(self) + cwd=getattr(self.generator,'ut_cwd','')or self.inputs[0].parent.abspath() + testcmd=getattr(self.generator,'ut_cmd',False)or getattr(Options.options,'testcmd',False) + if testcmd: + self.ut_exec=(testcmd%" ".join(self.ut_exec)).split(' ') + proc=Utils.subprocess.Popen(self.ut_exec,cwd=cwd,env=self.get_test_env(),stderr=Utils.subprocess.PIPE,stdout=Utils.subprocess.PIPE) + (stdout,stderr)=proc.communicate() + self.waf_unit_test_results=tup=(filename,proc.returncode,stdout,stderr) + testlock.acquire() + try: + return self.generator.add_test_results(tup) + finally: + testlock.release() + def post_run(self): + super(utest,self).post_run() + if getattr(Options.options,'clear_failed_tests',False)and self.waf_unit_test_results[1]: + self.generator.bld.task_sigs[self.uid()]=None +def summary(bld): + lst=getattr(bld,'utest_results',[]) + if lst: + Logs.pprint('CYAN','execution summary') + total=len(lst) + tfail=len([x for x in lst if x[1]]) + Logs.pprint('CYAN',' tests that pass %d/%d'%(total-tfail,total)) + for(f,code,out,err)in lst: + if not code: + Logs.pprint('CYAN',' %s'%f) + Logs.pprint('CYAN',' tests that fail %d/%d'%(tfail,total)) + for(f,code,out,err)in lst: + if code: + Logs.pprint('CYAN',' %s'%f) +def set_exit_code(bld): + lst=getattr(bld,'utest_results',[]) + for(f,code,out,err)in lst: + if code: + msg=[] + if out: + msg.append('stdout:%s%s'%(os.linesep,out.decode('utf-8'))) + if err: + msg.append('stderr:%s%s'%(os.linesep,err.decode('utf-8'))) + bld.fatal(os.linesep.join(msg)) +def options(opt): + opt.add_option('--notests',action='store_true',default=False,help='Exec no unit tests',dest='no_tests') + opt.add_option('--alltests',action='store_true',default=False,help='Exec all unit tests',dest='all_tests') + 
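Before the remaining options, a hedged sketch of how this harness is normally wired into a wscript; the test source and target names are hypothetical, while summary and set_exit_code are the helpers defined above:

from waflib.Tools import waf_unit_test

def configure(conf):
	conf.load('compiler_c waf_unit_test')

def build(bld):
	# the 'test' feature makes make_test() above attach a utest task to the binary
	bld.program(features='test', source='test_foo.c', target='test_foo')
	bld.add_post_fun(waf_unit_test.summary)        # print the pass/fail report
	bld.add_post_fun(waf_unit_test.set_exit_code)  # turn test failures into a build failure
# force all tests to run even when nothing changed: ./waf build --alltests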
opt.add_option('--clear-failed',action='store_true',default=False,help='Force failed unit tests to run again next time',dest='clear_failed_tests') + opt.add_option('--testcmd',action='store',default=False,help='Run the unit tests using the test-cmd string'' example "--test-cmd="valgrind --error-exitcode=1'' %s" to run under valgrind',dest='testcmd') diff -Nru lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/winres.py lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/winres.py --- lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/winres.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/winres.py 2019-06-06 20:12:09.000000000 +0000 @@ -0,0 +1,85 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file + +import re,traceback +from waflib import Task,Logs,Utils +from waflib.TaskGen import extension +from waflib.Tools import c_preproc +@extension('.rc') +def rc_file(self,node): + obj_ext='.rc.o' + if self.env['WINRC_TGT_F']=='/fo': + obj_ext='.res' + rctask=self.create_task('winrc',node,node.change_ext(obj_ext)) + try: + self.compiled_tasks.append(rctask) + except AttributeError: + self.compiled_tasks=[rctask] +re_lines=re.compile('(?:^[ \t]*(#|%:)[ \t]*(ifdef|ifndef|if|else|elif|endif|include|import|define|undef|pragma)[ \t]*(.*?)\s*$)|''(?:^\w+[ \t]*(ICON|BITMAP|CURSOR|HTML|FONT|MESSAGETABLE|TYPELIB|REGISTRY|D3DFX)[ \t]*(.*?)\s*$)',re.IGNORECASE|re.MULTILINE) +class rc_parser(c_preproc.c_parser): + def filter_comments(self,filepath): + code=Utils.readf(filepath) + if c_preproc.use_trigraphs: + for(a,b)in c_preproc.trig_def:code=code.split(a).join(b) + code=c_preproc.re_nl.sub('',code) + code=c_preproc.re_cpp.sub(c_preproc.repl,code) + ret=[] + for m in re.finditer(re_lines,code): + if m.group(2): + ret.append((m.group(2),m.group(3))) + else: + ret.append(('include',m.group(5))) + return ret + def addlines(self,node): + self.currentnode_stack.append(node.parent) + filepath=node.abspath() + self.count_files+=1 + if self.count_files>c_preproc.recursion_limit: + raise c_preproc.PreprocError("recursion limit exceeded") + pc=self.parse_cache + Logs.debug('preproc: reading file %r',filepath) + try: + lns=pc[filepath] + except KeyError: + pass + else: + self.lines.extend(lns) + return + try: + lines=self.filter_comments(filepath) + lines.append((c_preproc.POPFILE,'')) + lines.reverse() + pc[filepath]=lines + self.lines.extend(lines) + except IOError: + raise c_preproc.PreprocError("could not read the file %s"%filepath) + except Exception: + if Logs.verbose>0: + Logs.error("parsing %s failed"%filepath) + traceback.print_exc() +class winrc(Task.Task): + run_str='${WINRC} ${WINRCFLAGS} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${WINRC_TGT_F} ${TGT} ${WINRC_SRC_F} ${SRC}' + color='BLUE' + def scan(self): + tmp=rc_parser(self.generator.includes_nodes) + tmp.start(self.inputs[0],self.env) + nodes=tmp.nodes + names=tmp.names + if Logs.verbose: + Logs.debug('deps: deps for %s: %r; unresolved %r'%(str(self),nodes,names)) + return(nodes,names) +def configure(conf): + v=conf.env + v['WINRC_TGT_F']='-o' + v['WINRC_SRC_F']='-i' + if not conf.env.WINRC: + if v.CC_NAME=='msvc': + conf.find_program('RC',var='WINRC',path_list=v['PATH']) + v['WINRC_TGT_F']='/fo' + v['WINRC_SRC_F']='' + else: + conf.find_program('windres',var='WINRC',path_list=v['PATH']) + if not conf.env.WINRC: + conf.fatal('winrc was not found!') 
+ v['WINRCFLAGS']=[] diff -Nru lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/xlc.py lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/xlc.py --- lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/xlc.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/xlc.py 2019-06-06 20:12:09.000000000 +0000 @@ -0,0 +1,43 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file + +from waflib.Tools import ccroot,ar +from waflib.Configure import conf +@conf +def find_xlc(conf): + cc=conf.find_program(['xlc_r','xlc'],var='CC') + conf.get_xlc_version(cc) + conf.env.CC_NAME='xlc' +@conf +def xlc_common_flags(conf): + v=conf.env + v['CC_SRC_F']=[] + v['CC_TGT_F']=['-c','-o'] + if not v['LINK_CC']:v['LINK_CC']=v['CC'] + v['CCLNK_SRC_F']=[] + v['CCLNK_TGT_F']=['-o'] + v['CPPPATH_ST']='-I%s' + v['DEFINES_ST']='-D%s' + v['LIB_ST']='-l%s' + v['LIBPATH_ST']='-L%s' + v['STLIB_ST']='-l%s' + v['STLIBPATH_ST']='-L%s' + v['RPATH_ST']='-Wl,-rpath,%s' + v['SONAME_ST']=[] + v['SHLIB_MARKER']=[] + v['STLIB_MARKER']=[] + v['LINKFLAGS_cprogram']=['-Wl,-brtl'] + v['cprogram_PATTERN']='%s' + v['CFLAGS_cshlib']=['-fPIC'] + v['LINKFLAGS_cshlib']=['-G','-Wl,-brtl,-bexpfull'] + v['cshlib_PATTERN']='lib%s.so' + v['LINKFLAGS_cstlib']=[] + v['cstlib_PATTERN']='lib%s.a' +def configure(conf): + conf.find_xlc() + conf.find_ar() + conf.xlc_common_flags() + conf.cc_load_tools() + conf.cc_add_flags() + conf.link_add_flags() diff -Nru lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/xlcxx.py lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/xlcxx.py --- lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/xlcxx.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Tools/xlcxx.py 2019-06-06 20:12:09.000000000 +0000 @@ -0,0 +1,43 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
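For orientation, a sketch of how the IBM XL C support above is typically activated; this assumes the usual waf convention of loading compiler tools by name, and the project files are hypothetical:

def configure(conf):
	conf.load('compiler_c')  # probes the platform's compiler list; on AIX, xlc is tried first
	# or, assuming waf's tool-by-name loading, force it outright so that the flag
	# patterns above (cshlib_PATTERN, LINKFLAGS_cshlib, ...) fill conf.env:
	# conf.load('xlc')

def build(bld):
	bld.shlib(source='plugin.c', target='plugin')  # -> libplugin.so via -G -Wl,-brtl,-bexpfull on xlc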
https://waf.io/book/index.html#_obtaining_the_waf_file + +from waflib.Tools import ccroot,ar +from waflib.Configure import conf +@conf +def find_xlcxx(conf): + cxx=conf.find_program(['xlc++_r','xlc++'],var='CXX') + conf.get_xlc_version(cxx) + conf.env.CXX_NAME='xlc++' +@conf +def xlcxx_common_flags(conf): + v=conf.env + v['CXX_SRC_F']=[] + v['CXX_TGT_F']=['-c','-o'] + if not v['LINK_CXX']:v['LINK_CXX']=v['CXX'] + v['CXXLNK_SRC_F']=[] + v['CXXLNK_TGT_F']=['-o'] + v['CPPPATH_ST']='-I%s' + v['DEFINES_ST']='-D%s' + v['LIB_ST']='-l%s' + v['LIBPATH_ST']='-L%s' + v['STLIB_ST']='-l%s' + v['STLIBPATH_ST']='-L%s' + v['RPATH_ST']='-Wl,-rpath,%s' + v['SONAME_ST']=[] + v['SHLIB_MARKER']=[] + v['STLIB_MARKER']=[] + v['LINKFLAGS_cxxprogram']=['-Wl,-brtl'] + v['cxxprogram_PATTERN']='%s' + v['CXXFLAGS_cxxshlib']=['-fPIC'] + v['LINKFLAGS_cxxshlib']=['-G','-Wl,-brtl,-bexpfull'] + v['cxxshlib_PATTERN']='lib%s.so' + v['LINKFLAGS_cxxstlib']=[] + v['cxxstlib_PATTERN']='lib%s.a' +def configure(conf): + conf.find_xlcxx() + conf.find_ar() + conf.xlcxx_common_flags() + conf.cxx_load_tools() + conf.cxx_add_flags() + conf.link_add_flags() diff -Nru lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Utils.py lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Utils.py --- lilv-0.24.4~dfsg0/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Utils.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/.waf-1.8.22-985a667c86981bbd06ccfe1f94032940/waflib/Utils.py 2019-06-06 20:12:09.000000000 +0000 @@ -0,0 +1,468 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file + +import os,sys,errno,traceback,inspect,re,shutil,datetime,gc,platform +import subprocess +from collections import deque,defaultdict +try: + import _winreg as winreg +except ImportError: + try: + import winreg + except ImportError: + winreg=None +from waflib import Errors +try: + from collections import UserDict +except ImportError: + from UserDict import UserDict +try: + from hashlib import md5 +except ImportError: + try: + from md5 import md5 + except ImportError: + pass +try: + import threading +except ImportError: + if not'JOBS'in os.environ: + os.environ['JOBS']='1' + class threading(object): + pass + class Lock(object): + def acquire(self): + pass + def release(self): + pass + threading.Lock=threading.Thread=Lock +else: + run_old=threading.Thread.run + def run(*args,**kwargs): + try: + run_old(*args,**kwargs) + except(KeyboardInterrupt,SystemExit): + raise + except Exception: + sys.excepthook(*sys.exc_info()) + threading.Thread.run=run +SIG_NIL='iluvcuteoverload' +O644=420 +O755=493 +rot_chr=['\\','|','/','-'] +rot_idx=0 +try: + from collections import OrderedDict as ordered_iter_dict +except ImportError: + class ordered_iter_dict(dict): + def __init__(self,*k,**kw): + self.lst=[] + dict.__init__(self,*k,**kw) + def clear(self): + dict.clear(self) + self.lst=[] + def __setitem__(self,key,value): + dict.__setitem__(self,key,value) + try: + self.lst.remove(key) + except ValueError: + pass + self.lst.append(key) + def __delitem__(self,key): + dict.__delitem__(self,key) + try: + self.lst.remove(key) + except ValueError: + pass + def __iter__(self): + for x in self.lst: + yield x + def keys(self): + return self.lst +is_win32=os.sep=='\\'or sys.platform=='win32' +def readf(fname,m='r',encoding='ISO8859-1'): + if sys.hexversion>0x3000000 and not'b'in m: + m+='b' + f=open(fname,m) + try: + txt=f.read() + finally: + f.close() + if encoding: + 
txt=txt.decode(encoding) + else: + txt=txt.decode() + else: + f=open(fname,m) + try: + txt=f.read() + finally: + f.close() + return txt +def writef(fname,data,m='w',encoding='ISO8859-1'): + if sys.hexversion>0x3000000 and not'b'in m: + data=data.encode(encoding) + m+='b' + f=open(fname,m) + try: + f.write(data) + finally: + f.close() +def h_file(fname): + f=open(fname,'rb') + m=md5() + try: + while fname: + fname=f.read(200000) + m.update(fname) + finally: + f.close() + return m.digest() +def readf_win32(f,m='r',encoding='ISO8859-1'): + flags=os.O_NOINHERIT|os.O_RDONLY + if'b'in m: + flags|=os.O_BINARY + if'+'in m: + flags|=os.O_RDWR + try: + fd=os.open(f,flags) + except OSError: + raise IOError('Cannot read from %r'%f) + if sys.hexversion>0x3000000 and not'b'in m: + m+='b' + f=os.fdopen(fd,m) + try: + txt=f.read() + finally: + f.close() + if encoding: + txt=txt.decode(encoding) + else: + txt=txt.decode() + else: + f=os.fdopen(fd,m) + try: + txt=f.read() + finally: + f.close() + return txt +def writef_win32(f,data,m='w',encoding='ISO8859-1'): + if sys.hexversion>0x3000000 and not'b'in m: + data=data.encode(encoding) + m+='b' + flags=os.O_CREAT|os.O_TRUNC|os.O_WRONLY|os.O_NOINHERIT + if'b'in m: + flags|=os.O_BINARY + if'+'in m: + flags|=os.O_RDWR + try: + fd=os.open(f,flags) + except OSError: + raise IOError('Cannot write to %r'%f) + f=os.fdopen(fd,m) + try: + f.write(data) + finally: + f.close() +def h_file_win32(fname): + try: + fd=os.open(fname,os.O_BINARY|os.O_RDONLY|os.O_NOINHERIT) + except OSError: + raise IOError('Cannot read from %r'%fname) + f=os.fdopen(fd,'rb') + m=md5() + try: + while fname: + fname=f.read(200000) + m.update(fname) + finally: + f.close() + return m.digest() +readf_unix=readf +writef_unix=writef +h_file_unix=h_file +if hasattr(os,'O_NOINHERIT')and sys.hexversion<0x3040000: + readf=readf_win32 + writef=writef_win32 + h_file=h_file_win32 +try: + x=''.encode('hex') +except LookupError: + import binascii + def to_hex(s): + ret=binascii.hexlify(s) + if not isinstance(ret,str): + ret=ret.decode('utf-8') + return ret +else: + def to_hex(s): + return s.encode('hex') +to_hex.__doc__=""" +Return the hexadecimal representation of a string + +:param s: string to convert +:type s: string +""" +def listdir_win32(s): + if not s: + try: + import ctypes + except ImportError: + return[x+':\\'for x in list('ABCDEFGHIJKLMNOPQRSTUVWXYZ')] + else: + dlen=4 + maxdrives=26 + buf=ctypes.create_string_buffer(maxdrives*dlen) + ndrives=ctypes.windll.kernel32.GetLogicalDriveStringsA(maxdrives*dlen,ctypes.byref(buf)) + return[str(buf.raw[4*i:4*i+2].decode('ascii'))for i in range(int(ndrives/dlen))] + if len(s)==2 and s[1]==":": + s+=os.sep + if not os.path.isdir(s): + e=OSError('%s is not a directory'%s) + e.errno=errno.ENOENT + raise e + return os.listdir(s) +listdir=os.listdir +if is_win32: + listdir=listdir_win32 +def num2ver(ver): + if isinstance(ver,str): + ver=tuple(ver.split('.')) + if isinstance(ver,tuple): + ret=0 + for i in range(4): + if i0x3000000: + ret=ret.encode('iso8859-1','xmlcharrefreplace') + return ret +reg_subst=re.compile(r"(\\\\)|(\$\$)|\$\{([^}]+)\}") +def subst_vars(expr,params): + def repl_var(m): + if m.group(1): + return'\\' + if m.group(2): + return'$' + try: + return params.get_flat(m.group(3)) + except AttributeError: + return params[m.group(3)] + return reg_subst.sub(repl_var,expr) +def destos_to_binfmt(key): + if key=='darwin': + return'mac-o' + elif key in('win32','cygwin','uwin','msys'): + return'pe' + return'elf' +def unversioned_sys_platform(): + 
s=sys.platform + if s.startswith('java'): + from java.lang import System + s=System.getProperty('os.name') + if s=='Mac OS X': + return'darwin' + elif s.startswith('Windows '): + return'win32' + elif s=='OS/2': + return'os2' + elif s=='HP-UX': + return'hp-ux' + elif s in('SunOS','Solaris'): + return'sunos' + else:s=s.lower() + if s=='powerpc': + return'darwin' + if s=='win32'or s=='os2': + return s + if s=='cli'and os.name=='nt': + return'win32' + return re.split('\d+$',s)[0] +def nada(*k,**kw): + pass +class Timer(object): + def __init__(self): + self.start_time=datetime.datetime.utcnow() + def __str__(self): + delta=datetime.datetime.utcnow()-self.start_time + days=delta.days + hours,rem=divmod(delta.seconds,3600) + minutes,seconds=divmod(rem,60) + seconds+=delta.microseconds*1e-6 + result='' + if days: + result+='%dd'%days + if days or hours: + result+='%dh'%hours + if days or hours or minutes: + result+='%dm'%minutes + return'%s%.3fs'%(result,seconds) +if is_win32: + old=shutil.copy2 + def copy2(src,dst): + old(src,dst) + shutil.copystat(src,dst) + setattr(shutil,'copy2',copy2) +if os.name=='java': + try: + gc.disable() + gc.enable() + except NotImplementedError: + gc.disable=gc.enable +def read_la_file(path): + sp=re.compile(r'^([^=]+)=\'(.*)\'$') + dc={} + for line in readf(path).splitlines(): + try: + _,left,right,_=sp.split(line.strip()) + dc[left]=right + except ValueError: + pass + return dc +def nogc(fun): + def f(*k,**kw): + try: + gc.disable() + ret=fun(*k,**kw) + finally: + gc.enable() + return ret + f.__doc__=fun.__doc__ + return f +def run_once(fun): + cache={} + def wrap(k): + try: + return cache[k] + except KeyError: + ret=fun(k) + cache[k]=ret + return ret + wrap.__cache__=cache + wrap.__name__=fun.__name__ + return wrap +def get_registry_app_path(key,filename): + if not winreg: + return None + try: + result=winreg.QueryValue(key,"Software\\Microsoft\\Windows\\CurrentVersion\\App Paths\\%s.exe"%filename[0]) + except WindowsError: + pass + else: + if os.path.isfile(result): + return result +def lib64(): + if os.sep=='/': + if platform.architecture()[0]=='64bit': + if os.path.exists('/usr/lib64')and not os.path.exists('/usr/lib32'): + return'64' + return'' +def sane_path(p): + return os.path.abspath(os.path.expanduser(p)) diff -Nru lilv-0.24.4~dfsg0/waflib/ansiterm.py lilv-0.24.6/waflib/ansiterm.py --- lilv-0.24.4~dfsg0/waflib/ansiterm.py 2018-06-22 09:25:51.000000000 +0000 +++ lilv-0.24.6/waflib/ansiterm.py 2019-06-06 20:19:08.000000000 +0000 @@ -1,238 +1,342 @@ -#! /usr/bin/env python +#!/usr/bin/env python # encoding: utf-8 -# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file -import os,re,sys +""" +Emulate a vt100 terminal in cmd.exe + +By wrapping sys.stdout / sys.stderr with Ansiterm, +the vt100 escape characters will be interpreted and +the equivalent actions will be performed with Win32 +console commands. 
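Stepping back to the Utils helpers above, a short illustrative demo of subst_vars(), run_once() and Timer(); the expected output in the comments is inferred from the definitions above:

from waflib import Utils

# ${VAR} substitution; params may be a plain dict or a ConfigSet
print(Utils.subst_vars('--prefix=${PREFIX} --libdir=${LIBDIR}',
                       {'PREFIX': '/usr', 'LIBDIR': '/usr/lib'}))
# --prefix=/usr --libdir=/usr/lib

@Utils.run_once
def probe(name):
	print('probing', name)  # printed once per distinct argument
	return name.upper()

probe('gcc'); probe('gcc')  # the second call is answered from wrap.__cache__

t = Utils.Timer()
print(str(t))               # e.g. '0.000s', formatted by Timer.__str__()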
+ +""" + +import os, re, sys from waflib import Utils -wlock=Utils.threading.Lock() + +wlock = Utils.threading.Lock() + try: - from ctypes import Structure,windll,c_short,c_ushort,c_ulong,c_int,byref,c_wchar,POINTER,c_long + from ctypes import Structure, windll, c_short, c_ushort, c_ulong, c_int, byref, c_wchar, POINTER, c_long except ImportError: + class AnsiTerm(object): - def __init__(self,stream): - self.stream=stream + def __init__(self, stream): + self.stream = stream try: - self.errors=self.stream.errors + self.errors = self.stream.errors except AttributeError: - pass - self.encoding=self.stream.encoding - def write(self,txt): + pass # python 2.5 + self.encoding = self.stream.encoding + + def write(self, txt): try: wlock.acquire() self.stream.write(txt) self.stream.flush() finally: wlock.release() + def fileno(self): return self.stream.fileno() + def flush(self): self.stream.flush() + def isatty(self): return self.stream.isatty() else: + class COORD(Structure): - _fields_=[("X",c_short),("Y",c_short)] + _fields_ = [("X", c_short), ("Y", c_short)] + class SMALL_RECT(Structure): - _fields_=[("Left",c_short),("Top",c_short),("Right",c_short),("Bottom",c_short)] + _fields_ = [("Left", c_short), ("Top", c_short), ("Right", c_short), ("Bottom", c_short)] + class CONSOLE_SCREEN_BUFFER_INFO(Structure): - _fields_=[("Size",COORD),("CursorPosition",COORD),("Attributes",c_ushort),("Window",SMALL_RECT),("MaximumWindowSize",COORD)] + _fields_ = [("Size", COORD), ("CursorPosition", COORD), ("Attributes", c_ushort), ("Window", SMALL_RECT), ("MaximumWindowSize", COORD)] + class CONSOLE_CURSOR_INFO(Structure): - _fields_=[('dwSize',c_ulong),('bVisible',c_int)] + _fields_ = [('dwSize', c_ulong), ('bVisible', c_int)] + try: - _type=unicode + _type = unicode except NameError: - _type=str - to_int=lambda number,default:number and int(number)or default - STD_OUTPUT_HANDLE=-11 - STD_ERROR_HANDLE=-12 - windll.kernel32.GetStdHandle.argtypes=[c_ulong] - windll.kernel32.GetStdHandle.restype=c_ulong - windll.kernel32.GetConsoleScreenBufferInfo.argtypes=[c_ulong,POINTER(CONSOLE_SCREEN_BUFFER_INFO)] - windll.kernel32.GetConsoleScreenBufferInfo.restype=c_long - windll.kernel32.SetConsoleTextAttribute.argtypes=[c_ulong,c_ushort] - windll.kernel32.SetConsoleTextAttribute.restype=c_long - windll.kernel32.FillConsoleOutputCharacterW.argtypes=[c_ulong,c_wchar,c_ulong,POINTER(COORD),POINTER(c_ulong)] - windll.kernel32.FillConsoleOutputCharacterW.restype=c_long - windll.kernel32.FillConsoleOutputAttribute.argtypes=[c_ulong,c_ushort,c_ulong,POINTER(COORD),POINTER(c_ulong)] - windll.kernel32.FillConsoleOutputAttribute.restype=c_long - windll.kernel32.SetConsoleCursorPosition.argtypes=[c_ulong,POINTER(COORD)] - windll.kernel32.SetConsoleCursorPosition.restype=c_long - windll.kernel32.SetConsoleCursorInfo.argtypes=[c_ulong,POINTER(CONSOLE_CURSOR_INFO)] - windll.kernel32.SetConsoleCursorInfo.restype=c_long + _type = str + + to_int = lambda number, default: number and int(number) or default + + STD_OUTPUT_HANDLE = -11 + STD_ERROR_HANDLE = -12 + + windll.kernel32.GetStdHandle.argtypes = [c_ulong] + windll.kernel32.GetStdHandle.restype = c_ulong + windll.kernel32.GetConsoleScreenBufferInfo.argtypes = [c_ulong, POINTER(CONSOLE_SCREEN_BUFFER_INFO)] + windll.kernel32.GetConsoleScreenBufferInfo.restype = c_long + windll.kernel32.SetConsoleTextAttribute.argtypes = [c_ulong, c_ushort] + windll.kernel32.SetConsoleTextAttribute.restype = c_long + windll.kernel32.FillConsoleOutputCharacterW.argtypes = [c_ulong, c_wchar, c_ulong, 
POINTER(COORD), POINTER(c_ulong)] + windll.kernel32.FillConsoleOutputCharacterW.restype = c_long + windll.kernel32.FillConsoleOutputAttribute.argtypes = [c_ulong, c_ushort, c_ulong, POINTER(COORD), POINTER(c_ulong) ] + windll.kernel32.FillConsoleOutputAttribute.restype = c_long + windll.kernel32.SetConsoleCursorPosition.argtypes = [c_ulong, POINTER(COORD) ] + windll.kernel32.SetConsoleCursorPosition.restype = c_long + windll.kernel32.SetConsoleCursorInfo.argtypes = [c_ulong, POINTER(CONSOLE_CURSOR_INFO)] + windll.kernel32.SetConsoleCursorInfo.restype = c_long + class AnsiTerm(object): - def __init__(self,s): - self.stream=s + """ + emulate a vt100 terminal in cmd.exe + """ + def __init__(self, s): + self.stream = s try: - self.errors=s.errors + self.errors = s.errors except AttributeError: - pass - self.encoding=s.encoding - self.cursor_history=[] - handle=(s.fileno()==2)and STD_ERROR_HANDLE or STD_OUTPUT_HANDLE - self.hconsole=windll.kernel32.GetStdHandle(handle) - self._sbinfo=CONSOLE_SCREEN_BUFFER_INFO() - self._csinfo=CONSOLE_CURSOR_INFO() - windll.kernel32.GetConsoleCursorInfo(self.hconsole,byref(self._csinfo)) - self._orig_sbinfo=CONSOLE_SCREEN_BUFFER_INFO() - r=windll.kernel32.GetConsoleScreenBufferInfo(self.hconsole,byref(self._orig_sbinfo)) - self._isatty=r==1 + pass # python2.5 + self.encoding = s.encoding + self.cursor_history = [] + + handle = (s.fileno() == 2) and STD_ERROR_HANDLE or STD_OUTPUT_HANDLE + self.hconsole = windll.kernel32.GetStdHandle(handle) + + self._sbinfo = CONSOLE_SCREEN_BUFFER_INFO() + + self._csinfo = CONSOLE_CURSOR_INFO() + windll.kernel32.GetConsoleCursorInfo(self.hconsole, byref(self._csinfo)) + + # just to double check that the console is usable + self._orig_sbinfo = CONSOLE_SCREEN_BUFFER_INFO() + r = windll.kernel32.GetConsoleScreenBufferInfo(self.hconsole, byref(self._orig_sbinfo)) + self._isatty = r == 1 + def screen_buffer_info(self): - windll.kernel32.GetConsoleScreenBufferInfo(self.hconsole,byref(self._sbinfo)) + """ + Updates self._sbinfo and returns it + """ + windll.kernel32.GetConsoleScreenBufferInfo(self.hconsole, byref(self._sbinfo)) return self._sbinfo - def clear_line(self,param): - mode=param and int(param)or 0 - sbinfo=self.screen_buffer_info() - if mode==1: - line_start=COORD(0,sbinfo.CursorPosition.Y) - line_length=sbinfo.Size.X - elif mode==2: - line_start=COORD(sbinfo.CursorPosition.X,sbinfo.CursorPosition.Y) - line_length=sbinfo.Size.X-sbinfo.CursorPosition.X - else: - line_start=sbinfo.CursorPosition - line_length=sbinfo.Size.X-sbinfo.CursorPosition.X - chars_written=c_ulong() - windll.kernel32.FillConsoleOutputCharacterW(self.hconsole,c_wchar(' '),line_length,line_start,byref(chars_written)) - windll.kernel32.FillConsoleOutputAttribute(self.hconsole,sbinfo.Attributes,line_length,line_start,byref(chars_written)) - def clear_screen(self,param): - mode=to_int(param,0) - sbinfo=self.screen_buffer_info() - if mode==1: - clear_start=COORD(0,0) - clear_length=sbinfo.CursorPosition.X*sbinfo.CursorPosition.Y - elif mode==2: - clear_start=COORD(0,0) - clear_length=sbinfo.Size.X*sbinfo.Size.Y - windll.kernel32.SetConsoleCursorPosition(self.hconsole,clear_start) - else: - clear_start=sbinfo.CursorPosition - clear_length=((sbinfo.Size.X-sbinfo.CursorPosition.X)+sbinfo.Size.X*(sbinfo.Size.Y-sbinfo.CursorPosition.Y)) - chars_written=c_ulong() - windll.kernel32.FillConsoleOutputCharacterW(self.hconsole,c_wchar(' '),clear_length,clear_start,byref(chars_written)) - 
windll.kernel32.FillConsoleOutputAttribute(self.hconsole,sbinfo.Attributes,clear_length,clear_start,byref(chars_written)) - def push_cursor(self,param): - sbinfo=self.screen_buffer_info() + + def clear_line(self, param): + mode = param and int(param) or 0 + sbinfo = self.screen_buffer_info() + if mode == 1: # Clear from beginning of line to cursor position + line_start = COORD(0, sbinfo.CursorPosition.Y) + line_length = sbinfo.Size.X + elif mode == 2: # Clear entire line + line_start = COORD(sbinfo.CursorPosition.X, sbinfo.CursorPosition.Y) + line_length = sbinfo.Size.X - sbinfo.CursorPosition.X + else: # Clear from cursor position to end of line + line_start = sbinfo.CursorPosition + line_length = sbinfo.Size.X - sbinfo.CursorPosition.X + chars_written = c_ulong() + windll.kernel32.FillConsoleOutputCharacterW(self.hconsole, c_wchar(' '), line_length, line_start, byref(chars_written)) + windll.kernel32.FillConsoleOutputAttribute(self.hconsole, sbinfo.Attributes, line_length, line_start, byref(chars_written)) + + def clear_screen(self, param): + mode = to_int(param, 0) + sbinfo = self.screen_buffer_info() + if mode == 1: # Clear from beginning of screen to cursor position + clear_start = COORD(0, 0) + clear_length = sbinfo.CursorPosition.X * sbinfo.CursorPosition.Y + elif mode == 2: # Clear entire screen and return cursor to home + clear_start = COORD(0, 0) + clear_length = sbinfo.Size.X * sbinfo.Size.Y + windll.kernel32.SetConsoleCursorPosition(self.hconsole, clear_start) + else: # Clear from cursor position to end of screen + clear_start = sbinfo.CursorPosition + clear_length = ((sbinfo.Size.X - sbinfo.CursorPosition.X) + sbinfo.Size.X * (sbinfo.Size.Y - sbinfo.CursorPosition.Y)) + chars_written = c_ulong() + windll.kernel32.FillConsoleOutputCharacterW(self.hconsole, c_wchar(' '), clear_length, clear_start, byref(chars_written)) + windll.kernel32.FillConsoleOutputAttribute(self.hconsole, sbinfo.Attributes, clear_length, clear_start, byref(chars_written)) + + def push_cursor(self, param): + sbinfo = self.screen_buffer_info() self.cursor_history.append(sbinfo.CursorPosition) - def pop_cursor(self,param): + + def pop_cursor(self, param): if self.cursor_history: - old_pos=self.cursor_history.pop() - windll.kernel32.SetConsoleCursorPosition(self.hconsole,old_pos) - def set_cursor(self,param): - y,sep,x=param.partition(';') - x=to_int(x,1)-1 - y=to_int(y,1)-1 - sbinfo=self.screen_buffer_info() - new_pos=COORD(min(max(0,x),sbinfo.Size.X),min(max(0,y),sbinfo.Size.Y)) - windll.kernel32.SetConsoleCursorPosition(self.hconsole,new_pos) - def set_column(self,param): - x=to_int(param,1)-1 - sbinfo=self.screen_buffer_info() - new_pos=COORD(min(max(0,x),sbinfo.Size.X),sbinfo.CursorPosition.Y) - windll.kernel32.SetConsoleCursorPosition(self.hconsole,new_pos) - def move_cursor(self,x_offset=0,y_offset=0): - sbinfo=self.screen_buffer_info() - new_pos=COORD(min(max(0,sbinfo.CursorPosition.X+x_offset),sbinfo.Size.X),min(max(0,sbinfo.CursorPosition.Y+y_offset),sbinfo.Size.Y)) - windll.kernel32.SetConsoleCursorPosition(self.hconsole,new_pos) - def move_up(self,param): - self.move_cursor(y_offset=-to_int(param,1)) - def move_down(self,param): - self.move_cursor(y_offset=to_int(param,1)) - def move_left(self,param): - self.move_cursor(x_offset=-to_int(param,1)) - def move_right(self,param): - self.move_cursor(x_offset=to_int(param,1)) - def next_line(self,param): - sbinfo=self.screen_buffer_info() - self.move_cursor(x_offset=-sbinfo.CursorPosition.X,y_offset=to_int(param,1)) - def prev_line(self,param): - 
sbinfo=self.screen_buffer_info() - self.move_cursor(x_offset=-sbinfo.CursorPosition.X,y_offset=-to_int(param,1)) - def rgb2bgr(self,c): - return((c&1)<<2)|(c&2)|((c&4)>>2) - def set_color(self,param): - cols=param.split(';') - sbinfo=self.screen_buffer_info() - attr=sbinfo.Attributes + old_pos = self.cursor_history.pop() + windll.kernel32.SetConsoleCursorPosition(self.hconsole, old_pos) + + def set_cursor(self, param): + y, sep, x = param.partition(';') + x = to_int(x, 1) - 1 + y = to_int(y, 1) - 1 + sbinfo = self.screen_buffer_info() + new_pos = COORD( + min(max(0, x), sbinfo.Size.X), + min(max(0, y), sbinfo.Size.Y) + ) + windll.kernel32.SetConsoleCursorPosition(self.hconsole, new_pos) + + def set_column(self, param): + x = to_int(param, 1) - 1 + sbinfo = self.screen_buffer_info() + new_pos = COORD( + min(max(0, x), sbinfo.Size.X), + sbinfo.CursorPosition.Y + ) + windll.kernel32.SetConsoleCursorPosition(self.hconsole, new_pos) + + def move_cursor(self, x_offset=0, y_offset=0): + sbinfo = self.screen_buffer_info() + new_pos = COORD( + min(max(0, sbinfo.CursorPosition.X + x_offset), sbinfo.Size.X), + min(max(0, sbinfo.CursorPosition.Y + y_offset), sbinfo.Size.Y) + ) + windll.kernel32.SetConsoleCursorPosition(self.hconsole, new_pos) + + def move_up(self, param): + self.move_cursor(y_offset = -to_int(param, 1)) + + def move_down(self, param): + self.move_cursor(y_offset = to_int(param, 1)) + + def move_left(self, param): + self.move_cursor(x_offset = -to_int(param, 1)) + + def move_right(self, param): + self.move_cursor(x_offset = to_int(param, 1)) + + def next_line(self, param): + sbinfo = self.screen_buffer_info() + self.move_cursor( + x_offset = -sbinfo.CursorPosition.X, + y_offset = to_int(param, 1) + ) + + def prev_line(self, param): + sbinfo = self.screen_buffer_info() + self.move_cursor( + x_offset = -sbinfo.CursorPosition.X, + y_offset = -to_int(param, 1) + ) + + def rgb2bgr(self, c): + return ((c&1) << 2) | (c&2) | ((c&4)>>2) + + def set_color(self, param): + cols = param.split(';') + sbinfo = self.screen_buffer_info() + attr = sbinfo.Attributes for c in cols: - c=to_int(c,0) - if 29<c<38: - attr=(attr&0xfff0)|self.rgb2bgr(c-30) - elif 39<c<48: - attr=(attr&0xff0f)|(self.rgb2bgr(c-40)<<4) - elif c==0: - attr=self._orig_sbinfo.Attributes - elif c==1: - attr|=0x08 - elif c==4: - attr|=0x80 - elif c==7: - attr=(attr&0xff88)|((attr&0x70)>>4)|((attr&0x07)<<4) - windll.kernel32.SetConsoleTextAttribute(self.hconsole,attr) + c = to_int(c, 0) + if 29 < c < 38: # fgcolor + attr = (attr & 0xfff0) | self.rgb2bgr(c - 30) + elif 39 < c < 48: # bgcolor + attr = (attr & 0xff0f) | (self.rgb2bgr(c - 40) << 4) + elif c == 0: # reset + attr = self._orig_sbinfo.Attributes + elif c == 1: # strong + attr |= 0x08 + elif c == 4: # blink not available -> bg intensity + attr |= 0x80 + elif c == 7: # negative + attr = (attr & 0xff88) | ((attr & 0x70) >> 4) | ((attr & 0x07) << 4) + + windll.kernel32.SetConsoleTextAttribute(self.hconsole, attr) + def show_cursor(self,param): - self._csinfo.bVisible=1 - windll.kernel32.SetConsoleCursorInfo(self.hconsole,byref(self._csinfo)) + self._csinfo.bVisible = 1 + windll.kernel32.SetConsoleCursorInfo(self.hconsole, byref(self._csinfo)) + def hide_cursor(self,param): - self._csinfo.bVisible=0 - windll.kernel32.SetConsoleCursorInfo(self.hconsole,byref(self._csinfo)) - ansi_command_table={'A':move_up,'B':move_down,'C':move_right,'D':move_left,'E':next_line,'F':prev_line,'G':set_column,'H':set_cursor,'f':set_cursor,'J':clear_screen,'K':clear_line,'h':show_cursor,'l':hide_cursor,'m':set_color,'s':push_cursor,'u':pop_cursor,} - ansi_tokens=re.compile('(?:\x1b\[([0-9?;]*)([a-zA-Z])|([^\x1b]+))') - def write(self,text): + self._csinfo.bVisible = 0 + windll.kernel32.SetConsoleCursorInfo(self.hconsole, byref(self._csinfo)) + +
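Taken together with the command table and write() method that follow, the class can simply be swapped in for sys.stdout; a small hedged usage sketch (on non-Windows platforms the ImportError fallback class above forwards the text unchanged, so this runs anywhere):

import sys
from waflib import ansiterm

# wrap the standard stream so vt100 sequences are translated to Win32 console calls
sys.stdout = ansiterm.AnsiTerm(sys.stdout)
# '\x1b[32m...' is tokenised by the ansi_tokens regex below; the final 'm'
# dispatches through ansi_command_table to set_color()
sys.stdout.write('\x1b[32mok\x1b[0m\n')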
ansi_command_table = { + 'A': move_up, + 'B': move_down, + 'C': move_right, + 'D': move_left, + 'E': next_line, + 'F': prev_line, + 'G': set_column, + 'H': set_cursor, + 'f': set_cursor, + 'J': clear_screen, + 'K': clear_line, + 'h': show_cursor, + 'l': hide_cursor, + 'm': set_color, + 's': push_cursor, + 'u': pop_cursor, + } + # Match either the escape sequence or text not containing escape sequence + ansi_tokens = re.compile(r'(?:\x1b\[([0-9?;]*)([a-zA-Z])|([^\x1b]+))') + def write(self, text): try: wlock.acquire() if self._isatty: - for param,cmd,txt in self.ansi_tokens.findall(text): + for param, cmd, txt in self.ansi_tokens.findall(text): if cmd: - cmd_func=self.ansi_command_table.get(cmd) + cmd_func = self.ansi_command_table.get(cmd) if cmd_func: - cmd_func(self,param) + cmd_func(self, param) else: self.writeconsole(txt) else: + # no support for colors in the console, just output the text: + # eclipse or msys may be able to interpret the escape sequences self.stream.write(text) finally: wlock.release() - def writeconsole(self,txt): - chars_written=c_ulong() - writeconsole=windll.kernel32.WriteConsoleA - if isinstance(txt,_type): - writeconsole=windll.kernel32.WriteConsoleW - done=0 - todo=len(txt) - chunk=32<<10 - while todo!=0: - doing=min(chunk,todo) - buf=txt[done:done+doing] - r=writeconsole(self.hconsole,buf,doing,byref(chars_written),None) - if r==0: - chunk>>=1 + + def writeconsole(self, txt): + chars_written = c_ulong() + writeconsole = windll.kernel32.WriteConsoleA + if isinstance(txt, _type): + writeconsole = windll.kernel32.WriteConsoleW + + # MSDN says that there is a shared buffer of 64 KB for the console + # writes. Attempt to not get ERROR_NOT_ENOUGH_MEMORY, see waf issue #746 + done = 0 + todo = len(txt) + chunk = 32<<10 + while todo != 0: + doing = min(chunk, todo) + buf = txt[done:done+doing] + r = writeconsole(self.hconsole, buf, doing, byref(chars_written), None) + if r == 0: + chunk >>= 1 continue - done+=doing - todo-=doing + done += doing + todo -= doing + + def fileno(self): return self.stream.fileno() + def flush(self): pass + def isatty(self): return self._isatty - if sys.stdout.isatty()or sys.stderr.isatty(): - handle=sys.stdout.isatty()and STD_OUTPUT_HANDLE or STD_ERROR_HANDLE - console=windll.kernel32.GetStdHandle(handle) - sbinfo=CONSOLE_SCREEN_BUFFER_INFO() + + if sys.stdout.isatty() or sys.stderr.isatty(): + handle = sys.stdout.isatty() and STD_OUTPUT_HANDLE or STD_ERROR_HANDLE + console = windll.kernel32.GetStdHandle(handle) + sbinfo = CONSOLE_SCREEN_BUFFER_INFO() def get_term_cols(): - windll.kernel32.GetConsoleScreenBufferInfo(console,byref(sbinfo)) - return sbinfo.Size.X-1 + windll.kernel32.GetConsoleScreenBufferInfo(console, byref(sbinfo)) + # Issue 1401 - the progress bar cannot reach the last character + return sbinfo.Size.X - 1 + +# just try and see try: - import struct,fcntl,termios + import struct, fcntl, termios except ImportError: pass else: - if(sys.stdout.isatty()or sys.stderr.isatty())and os.environ.get('TERM','')not in('dumb','emacs'): - FD=sys.stdout.isatty()and sys.stdout.fileno()or sys.stderr.fileno() + if (sys.stdout.isatty() or sys.stderr.isatty()) and os.environ.get('TERM', '') not in ('dumb', 'emacs'): + FD = sys.stdout.isatty() and sys.stdout.fileno() or sys.stderr.fileno() def fun(): - return struct.unpack("HHHH",fcntl.ioctl(FD,termios.TIOCGWINSZ,struct.pack("HHHH",0,0,0,0)))[1] + return struct.unpack("HHHH", fcntl.ioctl(FD, termios.TIOCGWINSZ, struct.pack("HHHH", 0, 0, 0, 0)))[1] try: fun() except Exception as e: pass else: - 
get_term_cols=fun + get_term_cols = fun + diff -Nru lilv-0.24.4~dfsg0/waflib/Build.py lilv-0.24.6/waflib/Build.py --- lilv-0.24.4~dfsg0/waflib/Build.py 2018-06-27 05:22:05.000000000 +0000 +++ lilv-0.24.6/waflib/Build.py 2019-10-19 17:59:11.000000000 +0000 @@ -1,168 +1,346 @@ -#! /usr/bin/env python +#!/usr/bin/env python # encoding: utf-8 -# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file +# Thomas Nagy, 2005-2018 (ita) -import os,sys,errno,re,shutil,stat +""" +Classes related to the build phase (build, clean, install, step, etc) + +The inheritance tree is the following: + +""" + +import os, sys, errno, re, shutil, stat try: import cPickle except ImportError: import pickle as cPickle -from waflib import Node,Runner,TaskGen,Utils,ConfigSet,Task,Logs,Options,Context,Errors -CACHE_DIR='c4che' -CACHE_SUFFIX='_cache.py' -INSTALL=1337 -UNINSTALL=-1337 -SAVED_ATTRS='root node_sigs task_sigs imp_sigs raw_deps node_deps'.split() -CFG_FILES='cfg_files' -POST_AT_ONCE=0 -POST_LAZY=1 -PROTOCOL=-1 -if sys.platform=='cli': - PROTOCOL=0 +from waflib import Node, Runner, TaskGen, Utils, ConfigSet, Task, Logs, Options, Context, Errors + +CACHE_DIR = 'c4che' +"""Name of the cache directory""" + +CACHE_SUFFIX = '_cache.py' +"""ConfigSet cache files for variants are written under :py:attr:´waflib.Build.CACHE_DIR´ in the form ´variant_name´_cache.py""" + +INSTALL = 1337 +"""Positive value '->' install, see :py:attr:`waflib.Build.BuildContext.is_install`""" + +UNINSTALL = -1337 +"""Negative value '<-' uninstall, see :py:attr:`waflib.Build.BuildContext.is_install`""" + +SAVED_ATTRS = 'root node_sigs task_sigs imp_sigs raw_deps node_deps'.split() +"""Build class members to save between the runs; these should be all dicts +except for `root` which represents a :py:class:`waflib.Node.Node` instance +""" + +CFG_FILES = 'cfg_files' +"""Files from the build directory to hash before starting the build (``config.h`` written during the configuration)""" + +POST_AT_ONCE = 0 +"""Post mode: all task generators are posted before any task executed""" + +POST_LAZY = 1 +"""Post mode: post the task generators group after group, the tasks in the next group are created when the tasks in the previous groups are done""" + +PROTOCOL = -1 +if sys.platform == 'cli': + PROTOCOL = 0 + class BuildContext(Context.Context): '''executes the build''' - cmd='build' - variant='' - def __init__(self,**kw): - super(BuildContext,self).__init__(**kw) - self.is_install=0 - self.top_dir=kw.get('top_dir',Context.top_dir) - self.out_dir=kw.get('out_dir',Context.out_dir) - self.run_dir=kw.get('run_dir',Context.run_dir) - self.launch_dir=Context.launch_dir - self.post_mode=POST_LAZY - self.cache_dir=kw.get('cache_dir') + + cmd = 'build' + variant = '' + + def __init__(self, **kw): + super(BuildContext, self).__init__(**kw) + + self.is_install = 0 + """Non-zero value when installing or uninstalling file""" + + self.top_dir = kw.get('top_dir', Context.top_dir) + """See :py:attr:`waflib.Context.top_dir`; prefer :py:attr:`waflib.Build.BuildContext.srcnode`""" + + self.out_dir = kw.get('out_dir', Context.out_dir) + """See :py:attr:`waflib.Context.out_dir`; prefer :py:attr:`waflib.Build.BuildContext.bldnode`""" + + self.run_dir = kw.get('run_dir', Context.run_dir) + """See :py:attr:`waflib.Context.run_dir`""" + + self.launch_dir = Context.launch_dir + """See :py:attr:`waflib.Context.out_dir`; prefer :py:meth:`waflib.Build.BuildContext.launch_node`""" + + self.post_mode = POST_LAZY + """Whether to post the task generators at once or 
group-by-group (default is group-by-group)""" + + self.cache_dir = kw.get('cache_dir') if not self.cache_dir: - self.cache_dir=os.path.join(self.out_dir,CACHE_DIR) - self.all_envs={} - self.node_sigs={} - self.task_sigs={} - self.imp_sigs={} - self.node_deps={} - self.raw_deps={} - self.task_gen_cache_names={} - self.jobs=Options.options.jobs - self.targets=Options.options.targets - self.keep=Options.options.keep - self.progress_bar=Options.options.progress_bar - self.deps_man=Utils.defaultdict(list) - self.current_group=0 - self.groups=[] - self.group_names={} + self.cache_dir = os.path.join(self.out_dir, CACHE_DIR) + + self.all_envs = {} + """Map names to :py:class:`waflib.ConfigSet.ConfigSet`, the empty string must map to the default environment""" + + # ======================================= # + # cache variables + + self.node_sigs = {} + """Dict mapping build nodes to task identifier (uid), it indicates whether a task created a particular file (persists across builds)""" + + self.task_sigs = {} + """Dict mapping task identifiers (uid) to task signatures (persists across builds)""" + + self.imp_sigs = {} + """Dict mapping task identifiers (uid) to implicit task dependencies used for scanning targets (persists across builds)""" + + self.node_deps = {} + """Dict mapping task identifiers (uid) to node dependencies found by :py:meth:`waflib.Task.Task.scan` (persists across builds)""" + + self.raw_deps = {} + """Dict mapping task identifiers (uid) to custom data returned by :py:meth:`waflib.Task.Task.scan` (persists across builds)""" + + self.task_gen_cache_names = {} + + self.jobs = Options.options.jobs + """Amount of jobs to run in parallel""" + + self.targets = Options.options.targets + """List of targets to build (default: \\*)""" + + self.keep = Options.options.keep + """Whether the build should continue past errors""" + + self.progress_bar = Options.options.progress_bar + """ + Level of progress status: + + 0. normal output + 1. progress bar + 2. IDE output + 3. No output at all + """ + + # Manual dependencies. + self.deps_man = Utils.defaultdict(list) + """Manual dependencies set by :py:meth:`waflib.Build.BuildContext.add_manual_dependency`""" + + # just the structure here + self.current_group = 0 + """ + Current build group + """ + + self.groups = [] + """ + List containing lists of task generators + """ + + self.group_names = {} + """ + Map group names to the group lists. See :py:meth:`waflib.Build.BuildContext.add_group` + """ + for v in SAVED_ATTRS: - if not hasattr(self,v): - setattr(self,v,{}) + if not hasattr(self, v): + setattr(self, v, {}) + def get_variant_dir(self): + """Getter for the variant_dir attribute""" if not self.variant: return self.out_dir - return os.path.join(self.out_dir,os.path.normpath(self.variant)) - variant_dir=property(get_variant_dir,None) - def __call__(self,*k,**kw): - kw['bld']=self - ret=TaskGen.task_gen(*k,**kw) - self.task_gen_cache_names={} - self.add_to_group(ret,group=kw.get('group')) + return os.path.join(self.out_dir, os.path.normpath(self.variant)) + variant_dir = property(get_variant_dir, None) + + def __call__(self, *k, **kw): + """ + Create a task generator and add it to the current build group. 
The following forms are equivalent:: + + def build(bld): + tg = bld(a=1, b=2) + + def build(bld): + tg = bld() + tg.a = 1 + tg.b = 2 + + def build(bld): + tg = TaskGen.task_gen(a=1, b=2) + bld.add_to_group(tg, None) + + :param group: group name to add the task generator to + :type group: string + """ + kw['bld'] = self + ret = TaskGen.task_gen(*k, **kw) + self.task_gen_cache_names = {} # reset the cache, each time + self.add_to_group(ret, group=kw.get('group')) return ret + def __copy__(self): + """ + Build contexts cannot be copied + + :raises: :py:class:`waflib.Errors.WafError` + """ raise Errors.WafError('build contexts cannot be copied') + def load_envs(self): - node=self.root.find_node(self.cache_dir) + """ + The configuration command creates files of the form ``build/c4che/NAMEcache.py``. This method + creates a :py:class:`waflib.ConfigSet.ConfigSet` instance for each ``NAME`` by reading those + files and stores them in :py:attr:`waflib.Build.BuildContext.allenvs`. + """ + node = self.root.find_node(self.cache_dir) if not node: raise Errors.WafError('The project was not configured: run "waf configure" first!') - lst=node.ant_glob('**/*%s'%CACHE_SUFFIX,quiet=True) + lst = node.ant_glob('**/*%s' % CACHE_SUFFIX, quiet=True) + if not lst: raise Errors.WafError('The cache directory is empty: reconfigure the project') + for x in lst: - name=x.path_from(node).replace(CACHE_SUFFIX,'').replace('\\','/') - env=ConfigSet.ConfigSet(x.abspath()) - self.all_envs[name]=env + name = x.path_from(node).replace(CACHE_SUFFIX, '').replace('\\', '/') + env = ConfigSet.ConfigSet(x.abspath()) + self.all_envs[name] = env for f in env[CFG_FILES]: - newnode=self.root.find_resource(f) + newnode = self.root.find_resource(f) if not newnode or not newnode.exists(): - raise Errors.WafError('Missing configuration file %r, reconfigure the project!'%f) + raise Errors.WafError('Missing configuration file %r, reconfigure the project!' % f) + def init_dirs(self): - if not(os.path.isabs(self.top_dir)and os.path.isabs(self.out_dir)): + """ + Initialize the project directory and the build directory by creating the nodes + :py:attr:`waflib.Build.BuildContext.srcnode` and :py:attr:`waflib.Build.BuildContext.bldnode` + corresponding to ``top_dir`` and ``variant_dir`` respectively. The ``bldnode`` directory is + created if necessary. + """ + if not (os.path.isabs(self.top_dir) and os.path.isabs(self.out_dir)): raise Errors.WafError('The project was not configured: run "waf configure" first!') - self.path=self.srcnode=self.root.find_dir(self.top_dir) - self.bldnode=self.root.make_node(self.variant_dir) + + self.path = self.srcnode = self.root.find_dir(self.top_dir) + self.bldnode = self.root.make_node(self.variant_dir) self.bldnode.mkdir() + def execute(self): + """ + Restore data from previous builds and call :py:meth:`waflib.Build.BuildContext.execute_build`. 
+ Overrides from :py:func:`waflib.Context.Context.execute` + """ self.restore() if not self.all_envs: self.load_envs() self.execute_build() + def execute_build(self): - Logs.info("Waf: Entering directory `%s'",self.variant_dir) + """ + Execute the build by: + + * reading the scripts (see :py:meth:`waflib.Context.Context.recurse`) + * calling :py:meth:`waflib.Build.BuildContext.pre_build` to call user build functions + * calling :py:meth:`waflib.Build.BuildContext.compile` to process the tasks + * calling :py:meth:`waflib.Build.BuildContext.post_build` to call user build functions + """ + + Logs.info("Waf: Entering directory `%s'", self.variant_dir) self.recurse([self.run_dir]) self.pre_build() - self.timer=Utils.Timer() + + # display the time elapsed in the progress bar + self.timer = Utils.Timer() + try: self.compile() finally: - if self.progress_bar==1 and sys.stderr.isatty(): - c=self.producer.processed or 1 - m=self.progress_line(c,c,Logs.colors.BLUE,Logs.colors.NORMAL) - Logs.info(m,extra={'stream':sys.stderr,'c1':Logs.colors.cursor_off,'c2':Logs.colors.cursor_on}) - Logs.info("Waf: Leaving directory `%s'",self.variant_dir) + if self.progress_bar == 1 and sys.stderr.isatty(): + c = self.producer.processed or 1 + m = self.progress_line(c, c, Logs.colors.BLUE, Logs.colors.NORMAL) + Logs.info(m, extra={'stream': sys.stderr, 'c1': Logs.colors.cursor_off, 'c2' : Logs.colors.cursor_on}) + Logs.info("Waf: Leaving directory `%s'", self.variant_dir) try: - self.producer.bld=None + self.producer.bld = None del self.producer except AttributeError: pass self.post_build() + def restore(self): + """ + Load data from a previous run, sets the attributes listed in :py:const:`waflib.Build.SAVED_ATTRS` + """ try: - env=ConfigSet.ConfigSet(os.path.join(self.cache_dir,'build.config.py')) + env = ConfigSet.ConfigSet(os.path.join(self.cache_dir, 'build.config.py')) except EnvironmentError: pass else: - if env.version').ljust(cols) - msg=Logs.indicator%(left,bar,right) + return '' + + n = len(str(total)) + + Utils.rot_idx += 1 + ind = Utils.rot_chr[Utils.rot_idx % 4] + + pc = (100. 
* idx)/total + fs = "[%%%dd/%%d][%%s%%2d%%%%%%s][%s][" % (n, ind) + left = fs % (idx, total, col1, pc, col2) + right = '][%s%s%s]' % (col1, self.timer, col2) + + cols = Logs.get_term_cols() - len(left) - len(right) + 2*len(col1) + 2*len(col2) + if cols < 7: + cols = 7 + + ratio = ((cols * idx)//total) - 1 + + bar = ('='*ratio+'>').ljust(cols) + msg = Logs.indicator % (left, bar, right) + return msg - def declare_chain(self,*k,**kw): - return TaskGen.declare_chain(*k,**kw) + + def declare_chain(self, *k, **kw): + """ + Wraps :py:func:`waflib.TaskGen.declare_chain` for convenience + """ + return TaskGen.declare_chain(*k, **kw) + def pre_build(self): - for m in getattr(self,'pre_funs',[]): + """Executes user-defined methods before the build starts, see :py:meth:`waflib.Build.BuildContext.add_pre_fun`""" + for m in getattr(self, 'pre_funs', []): m(self) + def post_build(self): - for m in getattr(self,'post_funs',[]): + """Executes user-defined methods after the build is successful, see :py:meth:`waflib.Build.BuildContext.add_post_fun`""" + for m in getattr(self, 'post_funs', []): m(self) - def add_pre_fun(self,meth): + + def add_pre_fun(self, meth): + """ + Binds a callback method to execute after the scripts are read and before the build starts:: + + def mycallback(bld): + print("Hello, world!") + + def build(bld): + bld.add_pre_fun(mycallback) + """ try: self.pre_funs.append(meth) except AttributeError: - self.pre_funs=[meth] - def add_post_fun(self,meth): + self.pre_funs = [meth] + + def add_post_fun(self, meth): + """ + Binds a callback method to execute immediately after the build is successful:: + + def call_ldconfig(bld): + bld.exec_command('/sbin/ldconfig') + + def build(bld): + if bld.cmd == 'install': + bld.add_pre_fun(call_ldconfig) + """ try: self.post_funs.append(meth) except AttributeError: - self.post_funs=[meth] - def get_group(self,x): + self.post_funs = [meth] + + def get_group(self, x): + """ + Returns the build group named `x`, or the current group if `x` is None + + :param x: name or number or None + :type x: string, int or None + """ if not self.groups: self.add_group() if x is None: @@ -288,490 +591,922 @@ if x in self.group_names: return self.group_names[x] return self.groups[x] - def add_to_group(self,tgen,group=None): - assert(isinstance(tgen,TaskGen.task_gen)or isinstance(tgen,Task.Task)) - tgen.bld=self + + def add_to_group(self, tgen, group=None): + """Adds a task or a task generator to the build; there is no attempt to remove it if it was already added.""" + assert(isinstance(tgen, TaskGen.task_gen) or isinstance(tgen, Task.Task)) + tgen.bld = self self.get_group(group).append(tgen) - def get_group_name(self,g): - if not isinstance(g,list): - g=self.groups[g] + + def get_group_name(self, g): + """ + Returns the name of the input build group + + :param g: build group object or build group index + :type g: integer or list + :return: name + :rtype: string + """ + if not isinstance(g, list): + g = self.groups[g] for x in self.group_names: - if id(self.group_names[x])==id(g): + if id(self.group_names[x]) == id(g): return x - return'' - def get_group_idx(self,tg): - se=id(tg) - for i,tmp in enumerate(self.groups): + return '' + + def get_group_idx(self, tg): + """ + Returns the index of the group containing the task generator given as argument:: + + def build(bld): + tg = bld(name='nada') + 0 == bld.get_group_idx(tg) + + :param tg: Task generator object + :type tg: :py:class:`waflib.TaskGen.task_gen` + :rtype: int + """ + se = id(tg) + for i, tmp in 
enumerate(self.groups): for t in tmp: - if id(t)==se: + if id(t) == se: return i return None - def add_group(self,name=None,move=True): + + def add_group(self, name=None, move=True): + """ + Adds a new group of tasks/task generators. By default the new group becomes + the default group for new task generators (make sure to create build groups in order). + + :param name: name for this group + :type name: string + :param move: set this new group as default group (True by default) + :type move: bool + :raises: :py:class:`waflib.Errors.WafError` if a group by the name given already exists + """ if name and name in self.group_names: - raise Errors.WafError('add_group: name %s already present',name) - g=[] - self.group_names[name]=g + raise Errors.WafError('add_group: name %s already present', name) + g = [] + self.group_names[name] = g self.groups.append(g) if move: - self.current_group=len(self.groups)-1 - def set_group(self,idx): - if isinstance(idx,str): - g=self.group_names[idx] - for i,tmp in enumerate(self.groups): - if id(g)==id(tmp): - self.current_group=i + self.current_group = len(self.groups) - 1 + + def set_group(self, idx): + """ + Sets the build group at position idx as current so that newly added + task generators are added to this one by default:: + + def build(bld): + bld(rule='touch ${TGT}', target='foo.txt') + bld.add_group() # now the current group is 1 + bld(rule='touch ${TGT}', target='bar.txt') + bld.set_group(0) # now the current group is 0 + bld(rule='touch ${TGT}', target='truc.txt') # build truc.txt before bar.txt + + :param idx: group name or group index + :type idx: string or int + """ + if isinstance(idx, str): + g = self.group_names[idx] + for i, tmp in enumerate(self.groups): + if id(g) == id(tmp): + self.current_group = i break else: - self.current_group=idx + self.current_group = idx + def total(self): - total=0 + """ + Approximate task count: this value may be inaccurate if task generators + are posted lazily (see :py:attr:`waflib.Build.BuildContext.post_mode`). + The value :py:attr:`waflib.Runner.Parallel.total` is updated during the task execution. + + :rtype: int + """ + total = 0 for group in self.groups: for tg in group: try: - total+=len(tg.tasks) + total += len(tg.tasks) except AttributeError: - total+=1 + total += 1 return total + def get_targets(self): - to_post=[] - min_grp=0 + """ + This method returns a pair containing the index of the last build group to post, + and the list of task generator objects corresponding to the target names. + + This is used internally by :py:meth:`waflib.Build.BuildContext.get_build_iterator` + to perform partial builds:: + + $ waf --targets=myprogram,myshlib + + :return: the minimum build group index, and list of task generators + :rtype: tuple + """ + to_post = [] + min_grp = 0 for name in self.targets.split(','): - tg=self.get_tgen_by_name(name) - m=self.get_group_idx(tg) - if m>min_grp: - min_grp=m - to_post=[tg] - elif m==min_grp: + tg = self.get_tgen_by_name(name) + m = self.get_group_idx(tg) + if m > min_grp: + min_grp = m + to_post = [tg] + elif m == min_grp: to_post.append(tg) - return(min_grp,to_post) + return (min_grp, to_post) + def get_all_task_gen(self): - lst=[] + """ + Returns a list of all task generators for troubleshooting purposes. 
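To make the group machinery concrete, a tiny wscript sketch in the spirit of the set_group() docstring above; the rule and file names are illustrative:

def build(bld):
	bld(rule='touch ${TGT}', target='first.txt')   # goes into group 0
	bld.add_group()                                # new current group
	bld(rule='touch ${TGT}', target='second.txt')  # built after group 0 completes
# a partial build resolved through get_targets() above: ./waf build --targets=second.txt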
+ """ + lst = [] for g in self.groups: lst.extend(g) return lst + def post_group(self): + """ + Post task generators from the group indexed by self.current_group; used internally + by :py:meth:`waflib.Build.BuildContext.get_build_iterator` + """ def tgpost(tg): try: - f=tg.post + f = tg.post except AttributeError: pass else: f() - if self.targets=='*': + + if self.targets == '*': for tg in self.groups[self.current_group]: tgpost(tg) elif self.targets: - if self.current_group self.current_group: + for tg in g: + if is_post(tg, ln): + return True + + if self.post_mode == POST_LAZY and ln != self.srcnode: + # partial folder builds require all targets from a previous build group + if is_post_group(): + ln = self.srcnode + + for tg in self.groups[self.current_group]: + if is_post(tg, ln): + tgpost(tg) + + def get_tasks_group(self, idx): + """ + Returns all task instances for the build group at position idx, + used internally by :py:meth:`waflib.Build.BuildContext.get_build_iterator` + + :rtype: list of :py:class:`waflib.Task.Task` + """ + tasks = [] for tg in self.groups[idx]: try: tasks.extend(tg.tasks) - except AttributeError: + except AttributeError: # not a task generator tasks.append(tg) return tasks + def get_build_iterator(self): - if self.targets and self.targets!='*': - (self._min_grp,self._exact_tg)=self.get_targets() - if self.post_mode!=POST_LAZY: - for self.current_group,_ in enumerate(self.groups): + """ + Creates a Python generator object that returns lists of tasks that may be processed in parallel. + + :return: tasks which can be executed immediately + :rtype: generator returning lists of :py:class:`waflib.Task.Task` + """ + if self.targets and self.targets != '*': + (self._min_grp, self._exact_tg) = self.get_targets() + + if self.post_mode != POST_LAZY: + for self.current_group, _ in enumerate(self.groups): self.post_group() - for self.current_group,_ in enumerate(self.groups): - if self.post_mode!=POST_AT_ONCE: + + for self.current_group, _ in enumerate(self.groups): + # first post the task generators for the group + if self.post_mode != POST_AT_ONCE: self.post_group() - tasks=self.get_tasks_group(self.current_group) + + # then extract the tasks + tasks = self.get_tasks_group(self.current_group) + + # if the constraints are set properly (ext_in/ext_out, before/after) + # the call to set_file_constraints may be removed (can be a 15% penalty on no-op rebuilds) + # (but leave set_file_constraints for the installation step) + # + # if the tasks have only files, set_file_constraints is required but set_precedence_constraints is not necessary + # Task.set_file_constraints(tasks) Task.set_precedence_constraints(tasks) - self.cur_tasks=tasks + + self.cur_tasks = tasks if tasks: yield tasks + while 1: - yield[] - def install_files(self,dest,files,**kw): + # the build stops once there are no tasks to process + yield [] + + def install_files(self, dest, files, **kw): + """ + Creates a task generator to install files on the system:: + + def build(bld): + bld.install_files('${DATADIR}', self.path.find_resource('wscript')) + + :param dest: path representing the destination directory + :type dest: :py:class:`waflib.Node.Node` or string (absolute path) + :param files: input files + :type files: list of strings or list of :py:class:`waflib.Node.Node` + :param env: configuration set to expand *dest* + :type env: :py:class:`waflib.ConfigSet.ConfigSet` + :param relative_trick: preserve the folder hierarchy when installing whole folders + :type relative_trick: bool + :param cwd: parent node for 
searching srcfile, when srcfile is not an instance of :py:class:`waflib.Node.Node` + :type cwd: :py:class:`waflib.Node.Node` + :param postpone: execute the task immediately to perform the installation (False by default) + :type postpone: bool + """ assert(dest) - tg=self(features='install_task',install_to=dest,install_from=files,**kw) - tg.dest=tg.install_to - tg.type='install_files' - if not kw.get('postpone',True): + tg = self(features='install_task', install_to=dest, install_from=files, **kw) + tg.dest = tg.install_to + tg.type = 'install_files' + if not kw.get('postpone', True): tg.post() return tg - def install_as(self,dest,srcfile,**kw): + + def install_as(self, dest, srcfile, **kw): + """ + Creates a task generator to install a file on the system with a different name:: + + def build(bld): + bld.install_as('${PREFIX}/bin', 'myapp', chmod=Utils.O755) + + :param dest: destination file + :type dest: :py:class:`waflib.Node.Node` or string (absolute path) + :param srcfile: input file + :type srcfile: string or :py:class:`waflib.Node.Node` + :param cwd: parent node for searching srcfile, when srcfile is not an instance of :py:class:`waflib.Node.Node` + :type cwd: :py:class:`waflib.Node.Node` + :param env: configuration set for performing substitutions in dest + :type env: :py:class:`waflib.ConfigSet.ConfigSet` + :param postpone: execute the task immediately to perform the installation (False by default) + :type postpone: bool + """ assert(dest) - tg=self(features='install_task',install_to=dest,install_from=srcfile,**kw) - tg.dest=tg.install_to - tg.type='install_as' - if not kw.get('postpone',True): + tg = self(features='install_task', install_to=dest, install_from=srcfile, **kw) + tg.dest = tg.install_to + tg.type = 'install_as' + if not kw.get('postpone', True): tg.post() return tg - def symlink_as(self,dest,src,**kw): + + def symlink_as(self, dest, src, **kw): + """ + Creates a task generator to install a symlink:: + + def build(bld): + bld.symlink_as('${PREFIX}/lib/libfoo.so', 'libfoo.so.1.2.3') + + :param dest: absolute path of the symlink + :type dest: :py:class:`waflib.Node.Node` or string (absolute path) + :param src: link contents, which is a relative or absolute path which may exist or not + :type src: string + :param env: configuration set for performing substitutions in dest + :type env: :py:class:`waflib.ConfigSet.ConfigSet` + :param add: add the task created to a build group - set ``False`` only if the installation task is created after the build has started + :type add: bool + :param postpone: execute the task immediately to perform the installation + :type postpone: bool + :param relative_trick: make the symlink relative (default: ``False``) + :type relative_trick: bool + """ assert(dest) - tg=self(features='install_task',install_to=dest,install_from=src,**kw) - tg.dest=tg.install_to - tg.type='symlink_as' - tg.link=src - if not kw.get('postpone',True): + tg = self(features='install_task', install_to=dest, install_from=src, **kw) + tg.dest = tg.install_to + tg.type = 'symlink_as' + tg.link = src + # TODO if add: self.add_to_group(tsk) + if not kw.get('postpone', True): tg.post() return tg + @TaskGen.feature('install_task') -@TaskGen.before_method('process_rule','process_source') +@TaskGen.before_method('process_rule', 'process_source') def process_install_task(self): + """Creates the installation task for the current task generator; uses :py:func:`waflib.Build.add_install_task` internally.""" self.add_install_task(**self.__dict__) + @TaskGen.taskgen_method -def 
add_install_task(self,**kw): +def add_install_task(self, **kw): + """ + Creates the installation task for the current task generator, and executes it immediately if necessary + + :returns: An installation task + :rtype: :py:class:`waflib.Build.inst` + """ if not self.bld.is_install: return if not kw['install_to']: return - if kw['type']=='symlink_as'and Utils.is_win32: + + if kw['type'] == 'symlink_as' and Utils.is_win32: if kw.get('win32_install'): - kw['type']='install_as' + kw['type'] = 'install_as' else: + # just exit return - tsk=self.install_task=self.create_task('inst') - tsk.chmod=kw.get('chmod',Utils.O644) - tsk.link=kw.get('link','')or kw.get('install_from','') - tsk.relative_trick=kw.get('relative_trick',False) - tsk.type=kw['type'] - tsk.install_to=tsk.dest=kw['install_to'] - tsk.install_from=kw['install_from'] - tsk.relative_base=kw.get('cwd')or kw.get('relative_base',self.path) - tsk.install_user=kw.get('install_user') - tsk.install_group=kw.get('install_group') + + tsk = self.install_task = self.create_task('inst') + tsk.chmod = kw.get('chmod', Utils.O644) + tsk.link = kw.get('link', '') or kw.get('install_from', '') + tsk.relative_trick = kw.get('relative_trick', False) + tsk.type = kw['type'] + tsk.install_to = tsk.dest = kw['install_to'] + tsk.install_from = kw['install_from'] + tsk.relative_base = kw.get('cwd') or kw.get('relative_base', self.path) + tsk.install_user = kw.get('install_user') + tsk.install_group = kw.get('install_group') tsk.init_files() - if not kw.get('postpone',True): + if not kw.get('postpone', True): tsk.run_now() return tsk + @TaskGen.taskgen_method -def add_install_files(self,**kw): - kw['type']='install_files' +def add_install_files(self, **kw): + """ + Creates an installation task for files + + :returns: An installation task + :rtype: :py:class:`waflib.Build.inst` + """ + kw['type'] = 'install_files' return self.add_install_task(**kw) + @TaskGen.taskgen_method -def add_install_as(self,**kw): - kw['type']='install_as' +def add_install_as(self, **kw): + """ + Creates an installation task for a single file + + :returns: An installation task + :rtype: :py:class:`waflib.Build.inst` + """ + kw['type'] = 'install_as' return self.add_install_task(**kw) + @TaskGen.taskgen_method -def add_symlink_as(self,**kw): - kw['type']='symlink_as' +def add_symlink_as(self, **kw): + """ + Creates an installation task for a symbolic link + + :returns: An installation task + :rtype: :py:class:`waflib.Build.inst` + """ + kw['type'] = 'symlink_as' return self.add_install_task(**kw) + class inst(Task.Task): + """Task that installs files or symlinks; it is typically executed by :py:class:`waflib.Build.InstallContext` and :py:class:`waflib.Build.UnInstallContext`""" def __str__(self): - return'' + """Returns an empty string to disable the standard task display""" + return '' + def uid(self): - lst=self.inputs+self.outputs+[self.link,self.generator.path.abspath()] + """Returns a unique identifier for the task""" + lst = self.inputs + self.outputs + [self.link, self.generator.path.abspath()] return Utils.h_list(lst) + def init_files(self): - if self.type=='symlink_as': - inputs=[] + """ + Initializes the task input and output nodes + """ + if self.type == 'symlink_as': + inputs = [] else: - inputs=self.generator.to_nodes(self.install_from) - if self.type=='install_as': - assert len(inputs)==1 + inputs = self.generator.to_nodes(self.install_from) + if self.type == 'install_as': + assert len(inputs) == 1 self.set_inputs(inputs) - dest=self.get_install_path() - outputs=[] - if 
self.type=='symlink_as': + + dest = self.get_install_path() + outputs = [] + if self.type == 'symlink_as': if self.relative_trick: - self.link=os.path.relpath(self.link,os.path.dirname(dest)) + self.link = os.path.relpath(self.link, os.path.dirname(dest)) outputs.append(self.generator.bld.root.make_node(dest)) - elif self.type=='install_as': + elif self.type == 'install_as': outputs.append(self.generator.bld.root.make_node(dest)) else: for y in inputs: if self.relative_trick: - destfile=os.path.join(dest,y.path_from(self.relative_base)) + destfile = os.path.join(dest, y.path_from(self.relative_base)) else: - destfile=os.path.join(dest,y.name) + destfile = os.path.join(dest, y.name) outputs.append(self.generator.bld.root.make_node(destfile)) self.set_outputs(outputs) + def runnable_status(self): - ret=super(inst,self).runnable_status() - if ret==Task.SKIP_ME and self.generator.bld.is_install: + """ + Installation tasks are always executed, so this method returns either :py:const:`waflib.Task.ASK_LATER` or :py:const:`waflib.Task.RUN_ME`. + """ + ret = super(inst, self).runnable_status() + if ret == Task.SKIP_ME and self.generator.bld.is_install: return Task.RUN_ME return ret + def post_run(self): + """ + Disables any post-run operations + """ pass - def get_install_path(self,destdir=True): - if isinstance(self.install_to,Node.Node): - dest=self.install_to.abspath() + + def get_install_path(self, destdir=True): + """ + Returns the destination path where files will be installed, pre-pending `destdir`. + + Relative paths will be interpreted relative to `PREFIX` if no `destdir` is given. + + :rtype: string + """ + if isinstance(self.install_to, Node.Node): + dest = self.install_to.abspath() else: - dest=Utils.subst_vars(self.install_to,self.env) + dest = os.path.normpath(Utils.subst_vars(self.install_to, self.env)) + if not os.path.isabs(dest): + dest = os.path.join(self.env.PREFIX, dest) if destdir and Options.options.destdir: - dest=os.path.join(Options.options.destdir,os.path.splitdrive(dest)[1].lstrip(os.sep)) + dest = os.path.join(Options.options.destdir, os.path.splitdrive(dest)[1].lstrip(os.sep)) return dest - def copy_fun(self,src,tgt): - if Utils.is_win32 and len(tgt)>259 and not tgt.startswith('\\\\?\\'): - tgt='\\\\?\\'+tgt - shutil.copy2(src,tgt) + + def copy_fun(self, src, tgt): + """ + Copies a file from src to tgt, preserving permissions and trying to work + around path limitations on Windows platforms. On Unix-like platforms, + the owner/group of the target file may be set through install_user/install_group + + :param src: absolute path + :type src: string + :param tgt: absolute path + :type tgt: string + """ + # override this if you want to strip executables + # kw['tsk'].source is the task that created the files in the build + if Utils.is_win32 and len(tgt) > 259 and not tgt.startswith('\\\\?\\'): + tgt = '\\\\?\\' + tgt + shutil.copy2(src, tgt) self.fix_perms(tgt) - def rm_empty_dirs(self,tgt): + + def rm_empty_dirs(self, tgt): + """ + Removes empty folders recursively when uninstalling. + + :param tgt: absolute path + :type tgt: string + """ while tgt: - tgt=os.path.dirname(tgt) + tgt = os.path.dirname(tgt) try: os.rmdir(tgt) except OSError: break + def run(self): - is_install=self.generator.bld.is_install - if not is_install: + """ + Performs file or symlink installation + """ + is_install = self.generator.bld.is_install + if not is_install: # unnecessary? 
return + for x in self.outputs: - if is_install==INSTALL: + if is_install == INSTALL: x.parent.mkdir() - if self.type=='symlink_as': - fun=is_install==INSTALL and self.do_link or self.do_unlink - fun(self.link,self.outputs[0].abspath()) - else: - fun=is_install==INSTALL and self.do_install or self.do_uninstall - launch_node=self.generator.bld.launch_node() - for x,y in zip(self.inputs,self.outputs): - fun(x.abspath(),y.abspath(),x.path_from(launch_node)) + if self.type == 'symlink_as': + fun = is_install == INSTALL and self.do_link or self.do_unlink + fun(self.link, self.outputs[0].abspath()) + else: + fun = is_install == INSTALL and self.do_install or self.do_uninstall + launch_node = self.generator.bld.launch_node() + for x, y in zip(self.inputs, self.outputs): + fun(x.abspath(), y.abspath(), x.path_from(launch_node)) + def run_now(self): - status=self.runnable_status() - if status not in(Task.RUN_ME,Task.SKIP_ME): - raise Errors.TaskNotReady('Could not process %r: status %r'%(self,status)) + """ + Try executing the installation task right now + + :raises: :py:class:`waflib.Errors.TaskNotReady` + """ + status = self.runnable_status() + if status not in (Task.RUN_ME, Task.SKIP_ME): + raise Errors.TaskNotReady('Could not process %r: status %r' % (self, status)) self.run() - self.hasrun=Task.SUCCESS - def do_install(self,src,tgt,lbl,**kw): + self.hasrun = Task.SUCCESS + + def do_install(self, src, tgt, lbl, **kw): + """ + Copies a file from src to tgt with given file permissions. The actual copy is only performed + if the source and target file sizes or timestamps differ. When the copy occurs, + the file is always first removed and then copied so as to prevent stale inodes. + + :param src: file name as absolute path + :type src: string + :param tgt: file destination, as absolute path + :type tgt: string + :param lbl: file source description + :type lbl: string + :param chmod: installation mode + :type chmod: int + :raises: :py:class:`waflib.Errors.WafError` if the file cannot be written + """ if not Options.options.force: + # check if the file is already there to avoid a copy try: - st1=os.stat(tgt) - st2=os.stat(src) + st1 = os.stat(tgt) + st2 = os.stat(src) except OSError: pass else: - if st1.st_mtime+2>=st2.st_mtime and st1.st_size==st2.st_size: + # same size and identical timestamps -> make no copy + if st1.st_mtime + 2 >= st2.st_mtime and st1.st_size == st2.st_size: if not self.generator.bld.progress_bar: - Logs.info('- install %s (from %s)',tgt,lbl) + + c1 = Logs.colors.NORMAL + c2 = Logs.colors.BLUE + + Logs.info('%s- install %s%s%s (from %s)', c1, c2, tgt, c1, lbl) return False + if not self.generator.bld.progress_bar: - Logs.info('+ install %s (from %s)',tgt,lbl) + + c1 = Logs.colors.NORMAL + c2 = Logs.colors.BLUE + + Logs.info('%s+ install %s%s%s (from %s)', c1, c2, tgt, c1, lbl) + + # Give best attempt at making destination overwritable, + # like the 'install' utility used by 'make install' does. 
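The `do_install` hunk above skips the copy when the target already looks current; that test reduces to the following standalone sketch (plain os.stat, outside waf; the function name is illustrative):

    import os

    def target_is_fresh(src, tgt):
        # Same test as inst.do_install: identical size, and a target mtime
        # within 2 seconds of the source mtime, means no copy is needed.
        try:
            st_tgt = os.stat(tgt)
            st_src = os.stat(src)
        except OSError:
            return False
        return st_tgt.st_mtime + 2 >= st_src.st_mtime and st_tgt.st_size == st_src.st_size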
try: - os.chmod(tgt,Utils.O644|stat.S_IMODE(os.stat(tgt).st_mode)) + os.chmod(tgt, Utils.O644 | stat.S_IMODE(os.stat(tgt).st_mode)) except EnvironmentError: pass + + # following is for shared libs and stale inodes (-_-) try: os.remove(tgt) except OSError: pass + try: - self.copy_fun(src,tgt) + self.copy_fun(src, tgt) except EnvironmentError as e: if not os.path.exists(src): - Logs.error('File %r does not exist',src) + Logs.error('File %r does not exist', src) elif not os.path.isfile(src): - Logs.error('Input %r is not a file',src) - raise Errors.WafError('Could not install the file %r'%tgt,e) - def fix_perms(self,tgt): + Logs.error('Input %r is not a file', src) + raise Errors.WafError('Could not install the file %r' % tgt, e) + + def fix_perms(self, tgt): + """ + Change the ownership of the file/folder/link pointed by the given path + This looks up for `install_user` or `install_group` attributes + on the task or on the task generator:: + + def build(bld): + bld.install_as('${PREFIX}/wscript', + 'wscript', + install_user='nobody', install_group='nogroup') + bld.symlink_as('${PREFIX}/wscript_link', + Utils.subst_vars('${PREFIX}/wscript', bld.env), + install_user='nobody', install_group='nogroup') + """ if not Utils.is_win32: - user=getattr(self,'install_user',None)or getattr(self.generator,'install_user',None) - group=getattr(self,'install_group',None)or getattr(self.generator,'install_group',None) + user = getattr(self, 'install_user', None) or getattr(self.generator, 'install_user', None) + group = getattr(self, 'install_group', None) or getattr(self.generator, 'install_group', None) if user or group: - Utils.lchown(tgt,user or-1,group or-1) + Utils.lchown(tgt, user or -1, group or -1) if not os.path.islink(tgt): - os.chmod(tgt,self.chmod) - def do_link(self,src,tgt,**kw): - if os.path.islink(tgt)and os.readlink(tgt)==src: + os.chmod(tgt, self.chmod) + + def do_link(self, src, tgt, **kw): + """ + Creates a symlink from tgt to src. 
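The copy sequence just above (chmod writable, unlink, copy2) can be summarized in a standalone sketch; the function name and default mode are illustrative:

    import os, shutil, stat

    def force_install(src, tgt, mode=0o644):
        # Mirror inst.do_install/copy_fun: make any existing target writable,
        # unlink it so in-use shared libraries keep their old inode, then
        # copy contents and metadata and set the final permissions.
        try:
            os.chmod(tgt, mode | stat.S_IMODE(os.stat(tgt).st_mode))
        except OSError:
            pass
        try:
            os.remove(tgt)
        except OSError:
            pass
        shutil.copy2(src, tgt)
        os.chmod(tgt, mode)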
+ + :param src: file name as absolute path + :type src: string + :param tgt: file destination, as absolute path + :type tgt: string + """ + if os.path.islink(tgt) and os.readlink(tgt) == src: if not self.generator.bld.progress_bar: - Logs.info('- symlink %s (to %s)',tgt,src) + c1 = Logs.colors.NORMAL + c2 = Logs.colors.BLUE + Logs.info('%s- symlink %s%s%s (to %s)', c1, c2, tgt, c1, src) else: try: os.remove(tgt) except OSError: pass if not self.generator.bld.progress_bar: - Logs.info('+ symlink %s (to %s)',tgt,src) - os.symlink(src,tgt) + c1 = Logs.colors.NORMAL + c2 = Logs.colors.BLUE + Logs.info('%s+ symlink %s%s%s (to %s)', c1, c2, tgt, c1, src) + os.symlink(src, tgt) self.fix_perms(tgt) - def do_uninstall(self,src,tgt,lbl,**kw): + + def do_uninstall(self, src, tgt, lbl, **kw): + """ + See :py:meth:`waflib.Build.inst.do_install` + """ if not self.generator.bld.progress_bar: - Logs.info('- remove %s',tgt) + c1 = Logs.colors.NORMAL + c2 = Logs.colors.BLUE + Logs.info('%s- remove %s%s%s', c1, c2, tgt, c1) + + #self.uninstall.append(tgt) try: os.remove(tgt) except OSError as e: - if e.errno!=errno.ENOENT: - if not getattr(self,'uninstall_error',None): - self.uninstall_error=True + if e.errno != errno.ENOENT: + if not getattr(self, 'uninstall_error', None): + self.uninstall_error = True Logs.warn('build: some files could not be uninstalled (retry with -vv to list them)') - if Logs.verbose>1: - Logs.warn('Could not remove %s (error code %r)',e.filename,e.errno) + if Logs.verbose > 1: + Logs.warn('Could not remove %s (error code %r)', e.filename, e.errno) self.rm_empty_dirs(tgt) - def do_unlink(self,src,tgt,**kw): + + def do_unlink(self, src, tgt, **kw): + """ + See :py:meth:`waflib.Build.inst.do_link` + """ try: if not self.generator.bld.progress_bar: - Logs.info('- remove %s',tgt) + c1 = Logs.colors.NORMAL + c2 = Logs.colors.BLUE + Logs.info('%s- remove %s%s%s', c1, c2, tgt, c1) os.remove(tgt) except OSError: pass self.rm_empty_dirs(tgt) + class InstallContext(BuildContext): '''installs the targets on the system''' - cmd='install' - def __init__(self,**kw): - super(InstallContext,self).__init__(**kw) - self.is_install=INSTALL + cmd = 'install' + + def __init__(self, **kw): + super(InstallContext, self).__init__(**kw) + self.is_install = INSTALL + class UninstallContext(InstallContext): '''removes the targets installed''' - cmd='uninstall' - def __init__(self,**kw): - super(UninstallContext,self).__init__(**kw) - self.is_install=UNINSTALL + cmd = 'uninstall' + + def __init__(self, **kw): + super(UninstallContext, self).__init__(**kw) + self.is_install = UNINSTALL + class CleanContext(BuildContext): '''cleans the project''' - cmd='clean' + cmd = 'clean' def execute(self): + """ + See :py:func:`waflib.Build.BuildContext.execute`. + """ self.restore() if not self.all_envs: self.load_envs() + self.recurse([self.run_dir]) try: self.clean() finally: self.store() + def clean(self): + """ + Remove most files from the build directory, and reset all caches. + + Custom lists of files to clean can be declared as `bld.clean_files`. 
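For reference, the idempotent link handling in `inst.do_link`/`do_unlink` above reduces to this standalone sketch (illustrative name, outside waf):

    import os

    def install_symlink(src, tgt):
        # Leave an existing, identical link untouched; otherwise replace it.
        if os.path.islink(tgt) and os.readlink(tgt) == src:
            return
        try:
            os.remove(tgt)
        except OSError:
            pass
        os.symlink(src, tgt)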
+ For example, exclude `build/program/myprogram` from getting removed:: + + def build(bld): + bld.clean_files = bld.bldnode.ant_glob('**', + excl='.lock* config.log c4che/* config.h program/myprogram', + quiet=True, generator=True) + """ Logs.debug('build: clean called') - if hasattr(self,'clean_files'): + + if hasattr(self, 'clean_files'): for n in self.clean_files: n.delete() - elif self.bldnode!=self.srcnode: - lst=[] + elif self.bldnode != self.srcnode: + # would lead to a disaster if top == out + lst = [] for env in self.all_envs.values(): - lst.extend(self.root.find_or_declare(f)for f in env[CFG_FILES]) - for n in self.bldnode.ant_glob('**/*',excl='.lock* *conf_check_*/** config.log c4che/*',quiet=True): + lst.extend(self.root.find_or_declare(f) for f in env[CFG_FILES]) + excluded_dirs = '.lock* *conf_check_*/** config.log %s/*' % CACHE_DIR + for n in self.bldnode.ant_glob('**/*', excl=excluded_dirs, quiet=True): if n in lst: continue n.delete() - self.root.children={} + self.root.children = {} + for v in SAVED_ATTRS: - if v=='root': + if v == 'root': continue - setattr(self,v,{}) + setattr(self, v, {}) + class ListContext(BuildContext): '''lists the targets to execute''' - cmd='list' + cmd = 'list' + def execute(self): + """ + In addition to printing the name of each build target, + a description column will include text for each task + generator which has a "description" field set. + + See :py:func:`waflib.Build.BuildContext.execute`. + """ self.restore() if not self.all_envs: self.load_envs() + self.recurse([self.run_dir]) self.pre_build() - self.timer=Utils.Timer() + + # display the time elapsed in the progress bar + self.timer = Utils.Timer() + for g in self.groups: for tg in g: try: - f=tg.post + f = tg.post except AttributeError: pass else: f() + try: + # force the cache initialization self.get_tgen_by_name('') except Errors.WafError: pass - targets=sorted(self.task_gen_cache_names) - line_just=max(len(t)for t in targets)if targets else 0 + + targets = sorted(self.task_gen_cache_names) + + # figure out how much to left-justify, for largest target name + line_just = max(len(t) for t in targets) if targets else 0 + for target in targets: - tgen=self.task_gen_cache_names[target] - descript=getattr(tgen,'description','') + tgen = self.task_gen_cache_names[target] + + # Support displaying the description for the target + # if it was set on the tgen + descript = getattr(tgen, 'description', '') if descript: - target=target.ljust(line_just) - descript=': %s'%descript - Logs.pprint('GREEN',target,label=descript) + target = target.ljust(line_just) + descript = ': %s' % descript + + Logs.pprint('GREEN', target, label=descript) + class StepContext(BuildContext): '''executes tasks in a step-by-step fashion, for debugging''' - cmd='step' - def __init__(self,**kw): - super(StepContext,self).__init__(**kw) - self.files=Options.options.files + cmd = 'step' + + def __init__(self, **kw): + super(StepContext, self).__init__(**kw) + self.files = Options.options.files + def compile(self): + """ + Overrides :py:meth:`waflib.Build.BuildContext.compile` to perform a partial build + on tasks matching the input/output pattern given (regular expression matching):: + + $ waf step --files=foo.c,bar.c,in:truc.c,out:bar.o + $ waf step --files=in:foo.cpp.1.o # link task only + + """ if not self.files: Logs.warn('Add a pattern for the debug build, for example "waf step --files=main.c,app"') BuildContext.compile(self) return - targets=[] - if self.targets and self.targets!='*': - 
targets=self.targets.split(',') + + targets = [] + if self.targets and self.targets != '*': + targets = self.targets.split(',') + for g in self.groups: for tg in g: if targets and tg.name not in targets: continue + try: - f=tg.post + f = tg.post except AttributeError: pass else: f() + for pat in self.files.split(','): - matcher=self.get_matcher(pat) + matcher = self.get_matcher(pat) for tg in g: - if isinstance(tg,Task.Task): - lst=[tg] + if isinstance(tg, Task.Task): + lst = [tg] else: - lst=tg.tasks + lst = tg.tasks for tsk in lst: - do_exec=False + do_exec = False for node in tsk.inputs: - if matcher(node,output=False): - do_exec=True + if matcher(node, output=False): + do_exec = True break for node in tsk.outputs: - if matcher(node,output=True): - do_exec=True + if matcher(node, output=True): + do_exec = True break if do_exec: - ret=tsk.run() - Logs.info('%s -> exit %r',tsk,ret) - def get_matcher(self,pat): - inn=True - out=True + ret = tsk.run() + Logs.info('%s -> exit %r', tsk, ret) + + def get_matcher(self, pat): + """ + Converts a step pattern into a function + + :param: pat: pattern of the form in:truc.c,out:bar.o + :returns: Python function that uses Node objects as inputs and returns matches + :rtype: function + """ + # this returns a function + inn = True + out = True if pat.startswith('in:'): - out=False - pat=pat.replace('in:','') + out = False + pat = pat.replace('in:', '') elif pat.startswith('out:'): - inn=False - pat=pat.replace('out:','') - anode=self.root.find_node(pat) - pattern=None + inn = False + pat = pat.replace('out:', '') + + anode = self.root.find_node(pat) + pattern = None if not anode: if not pat.startswith('^'): - pat='^.+?%s'%pat + pat = '^.+?%s' % pat if not pat.endswith('$'): - pat='%s$'%pat - pattern=re.compile(pat) - def match(node,output): + pat = '%s$' % pat + pattern = re.compile(pat) + + def match(node, output): if output and not out: return False if not output and not inn: return False + if anode: - return anode==node + return anode == node else: return pattern.match(node.abspath()) return match + class EnvContext(BuildContext): - fun=cmd=None + """Subclass EnvContext to create commands that require configuration data in 'env'""" + fun = cmd = None def execute(self): + """ + See :py:func:`waflib.Build.BuildContext.execute`. + """ self.restore() if not self.all_envs: self.load_envs() self.recurse([self.run_dir]) + diff -Nru lilv-0.24.4~dfsg0/waflib/ConfigSet.py lilv-0.24.6/waflib/ConfigSet.py --- lilv-0.24.4~dfsg0/waflib/ConfigSet.py 2018-06-27 05:22:05.000000000 +0000 +++ lilv-0.24.6/waflib/ConfigSet.py 2019-06-06 20:19:08.000000000 +0000 @@ -1,165 +1,361 @@ -#! /usr/bin/env python +#!/usr/bin/env python # encoding: utf-8 -# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file +# Thomas Nagy, 2005-2018 (ita) + +""" + +ConfigSet: a special dict + +The values put in :py:class:`ConfigSet` must be serializable (dicts, lists, strings) +""" + +import copy, re, os +from waflib import Logs, Utils +re_imp = re.compile(r'^(#)*?([^#=]*?)\ =\ (.*?)$', re.M) -import copy,re,os -from waflib import Logs,Utils -re_imp=re.compile('^(#)*?([^#=]*?)\ =\ (.*?)$',re.M) class ConfigSet(object): - __slots__=('table','parent') - def __init__(self,filename=None): - self.table={} + """ + A copy-on-write dict with human-readable serialized format. The serialization format + is human-readable (python-like) and performed by using eval() and repr(). + For high performance prefer pickle. Do not store functions as they are not serializable. 
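The eval()/repr() serialization described above is what the `re_imp` regular expression in this hunk parses back; a self-contained sketch of the load step, with made-up keys and values:

    import re

    re_imp = re.compile(r'^(#)*?([^#=]*?)\ =\ (.*?)$', re.M)  # same pattern as the hunk

    table = {}
    for m in re_imp.finditer("PREFIX = '/usr/local'\nCFLAGS = ['-O2', '-g']\n"):
        table[m.group(2)] = eval(m.group(3))  # values were written with repr()/ascii()
    assert table['CFLAGS'] == ['-O2', '-g']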
+ + The values can be accessed by attributes or by keys:: + + from waflib.ConfigSet import ConfigSet + env = ConfigSet() + env.FOO = 'test' + env['FOO'] = 'test' + """ + __slots__ = ('table', 'parent') + def __init__(self, filename=None): + self.table = {} + """ + Internal dict holding the object values + """ + #self.parent = None + if filename: self.load(filename) - def __contains__(self,key): + + def __contains__(self, key): + """ + Enables the *in* syntax:: + + if 'foo' in env: + print(env['foo']) + """ if key in self.table: return True try: return self.parent.__contains__(key) except AttributeError: - return False + return False # parent may not exist + def keys(self): - keys=set() - cur=self + """Dict interface""" + keys = set() + cur = self while cur: keys.update(cur.table.keys()) - cur=getattr(cur,'parent',None) - keys=list(keys) + cur = getattr(cur, 'parent', None) + keys = list(keys) keys.sort() return keys + def __iter__(self): return iter(self.keys()) + def __str__(self): - return"\n".join(["%r %r"%(x,self.__getitem__(x))for x in self.keys()]) - def __getitem__(self,key): + """Text representation of the ConfigSet (for debugging purposes)""" + return "\n".join(["%r %r" % (x, self.__getitem__(x)) for x in self.keys()]) + + def __getitem__(self, key): + """ + Dictionary interface: get value from key:: + + def configure(conf): + conf.env['foo'] = {} + print(env['foo']) + """ try: while 1: - x=self.table.get(key) + x = self.table.get(key) if not x is None: return x - self=self.parent + self = self.parent except AttributeError: - return[] - def __setitem__(self,key,value): - self.table[key]=value - def __delitem__(self,key): - self[key]=[] - def __getattr__(self,name): + return [] + + def __setitem__(self, key, value): + """ + Dictionary interface: set value from key + """ + self.table[key] = value + + def __delitem__(self, key): + """ + Dictionary interface: mark the value as missing + """ + self[key] = [] + + def __getattr__(self, name): + """ + Attribute access provided for convenience. The following forms are equivalent:: + + def configure(conf): + conf.env.value + conf.env['value'] + """ if name in self.__slots__: - return object.__getattribute__(self,name) + return object.__getattribute__(self, name) else: return self[name] - def __setattr__(self,name,value): + + def __setattr__(self, name, value): + """ + Attribute access provided for convenience. The following forms are equivalent:: + + def configure(conf): + conf.env.value = x + env['value'] = x + """ if name in self.__slots__: - object.__setattr__(self,name,value) + object.__setattr__(self, name, value) else: - self[name]=value - def __delattr__(self,name): + self[name] = value + + def __delattr__(self, name): + """ + Attribute access provided for convenience. The following forms are equivalent:: + + def configure(conf): + del env.value + del env['value'] + """ if name in self.__slots__: - object.__delattr__(self,name) + object.__delattr__(self, name) else: del self[name] + def derive(self): - newenv=ConfigSet() - newenv.parent=self + """ + Returns a new ConfigSet deriving from self. The copy returned + will be a shallow copy:: + + from waflib.ConfigSet import ConfigSet + env = ConfigSet() + env.append_value('CFLAGS', ['-O2']) + child = env.derive() + child.CFLAGS.append('test') # warning! this will modify 'env' + child.CFLAGS = ['-O3'] # new list, ok + child.append_value('CFLAGS', ['-O3']) # ok + + Use :py:func:`ConfigSet.detach` to detach the child from the parent. 
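A short illustration of the copy-on-write behaviour documented here, runnable wherever waflib is importable:

    from waflib.ConfigSet import ConfigSet

    env = ConfigSet()
    env.append_value('CFLAGS', ['-O2'])
    child = env.derive()             # lookups fall through to the parent
    assert child.CFLAGS == ['-O2']
    child.detach()                   # deep-copies values and drops the parent link
    child.append_value('CFLAGS', ['-g'])
    assert env.CFLAGS == ['-O2']     # the parent is unaffected after detach()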
+ """ + newenv = ConfigSet() + newenv.parent = self return newenv + def detach(self): - tbl=self.get_merged_dict() + """ + Detaches this instance from its parent (if present) + + Modifying the parent :py:class:`ConfigSet` will not change the current object + Modifying this :py:class:`ConfigSet` will not modify the parent one. + """ + tbl = self.get_merged_dict() try: - delattr(self,'parent') + delattr(self, 'parent') except AttributeError: pass else: - keys=tbl.keys() + keys = tbl.keys() for x in keys: - tbl[x]=copy.deepcopy(tbl[x]) - self.table=tbl + tbl[x] = copy.deepcopy(tbl[x]) + self.table = tbl return self - def get_flat(self,key): - s=self[key] - if isinstance(s,str): + + def get_flat(self, key): + """ + Returns a value as a string. If the input is a list, the value returned is space-separated. + + :param key: key to use + :type key: string + """ + s = self[key] + if isinstance(s, str): return s - return' '.join(s) - def _get_list_value_for_modification(self,key): + return ' '.join(s) + + def _get_list_value_for_modification(self, key): + """ + Returns a list value for further modification. + + The list may be modified inplace and there is no need to do this afterwards:: + + self.table[var] = value + """ try: - value=self.table[key] + value = self.table[key] except KeyError: try: - value=self.parent[key] + value = self.parent[key] except AttributeError: - value=[] + value = [] else: - if isinstance(value,list): - value=value[:] + if isinstance(value, list): + # force a copy + value = value[:] else: - value=[value] - self.table[key]=value + value = [value] + self.table[key] = value else: - if not isinstance(value,list): - self.table[key]=value=[value] + if not isinstance(value, list): + self.table[key] = value = [value] return value - def append_value(self,var,val): - if isinstance(val,str): - val=[val] - current_value=self._get_list_value_for_modification(var) + + def append_value(self, var, val): + """ + Appends a value to the specified config key:: + + def build(bld): + bld.env.append_value('CFLAGS', ['-O2']) + + The value must be a list or a tuple + """ + if isinstance(val, str): # if there were string everywhere we could optimize this + val = [val] + current_value = self._get_list_value_for_modification(var) current_value.extend(val) - def prepend_value(self,var,val): - if isinstance(val,str): - val=[val] - self.table[var]=val+self._get_list_value_for_modification(var) - def append_unique(self,var,val): - if isinstance(val,str): - val=[val] - current_value=self._get_list_value_for_modification(var) + + def prepend_value(self, var, val): + """ + Prepends a value to the specified item:: + + def configure(conf): + conf.env.prepend_value('CFLAGS', ['-O2']) + + The value must be a list or a tuple + """ + if isinstance(val, str): + val = [val] + self.table[var] = val + self._get_list_value_for_modification(var) + + def append_unique(self, var, val): + """ + Appends a value to the specified item only if it's not already present:: + + def build(bld): + bld.env.append_unique('CFLAGS', ['-O2', '-g']) + + The value must be a list or a tuple + """ + if isinstance(val, str): + val = [val] + current_value = self._get_list_value_for_modification(var) + for x in val: if x not in current_value: current_value.append(x) + def get_merged_dict(self): - table_list=[] - env=self + """ + Computes the merged dictionary from the fusion of self and all its parent + + :rtype: a ConfigSet object + """ + table_list = [] + env = self while 1: - table_list.insert(0,env.table) + table_list.insert(0, env.table) 
try: - env=env.parent + env = env.parent except AttributeError: break - merged_table={} + merged_table = {} for table in table_list: merged_table.update(table) return merged_table - def store(self,filename): + + def store(self, filename): + """ + Serializes the :py:class:`ConfigSet` data to a file. See :py:meth:`ConfigSet.load` for reading such files. + + :param filename: file to use + :type filename: string + """ try: os.makedirs(os.path.split(filename)[0]) except OSError: pass - buf=[] - merged_table=self.get_merged_dict() - keys=list(merged_table.keys()) + + buf = [] + merged_table = self.get_merged_dict() + keys = list(merged_table.keys()) keys.sort() + try: - fun=ascii + fun = ascii except NameError: - fun=repr + fun = repr + for k in keys: - if k!='undo_stack': - buf.append('%s = %s\n'%(k,fun(merged_table[k]))) - Utils.writef(filename,''.join(buf)) - def load(self,filename): - tbl=self.table - code=Utils.readf(filename,m='rU') + if k != 'undo_stack': + buf.append('%s = %s\n' % (k, fun(merged_table[k]))) + Utils.writef(filename, ''.join(buf)) + + def load(self, filename): + """ + Restores contents from a file (current values are not cleared). Files are written using :py:meth:`ConfigSet.store`. + + :param filename: file to use + :type filename: string + """ + tbl = self.table + code = Utils.readf(filename, m='r') for m in re_imp.finditer(code): - g=m.group - tbl[g(2)]=eval(g(3)) - Logs.debug('env: %s',self.table) - def update(self,d): + g = m.group + tbl[g(2)] = eval(g(3)) + Logs.debug('env: %s', self.table) + + def update(self, d): + """ + Dictionary interface: replace values with the ones from another dict + + :param d: object to use the value from + :type d: dict-like object + """ self.table.update(d) + def stash(self): - orig=self.table - tbl=self.table=self.table.copy() + """ + Stores the object state to provide transactionality semantics:: + + env = ConfigSet() + env.stash() + try: + env.append_value('CFLAGS', '-O3') + call_some_method(env) + finally: + env.revert() + + The history is kept in a stack, and is lost during the serialization by :py:meth:`ConfigSet.store` + """ + orig = self.table + tbl = self.table = self.table.copy() for x in tbl.keys(): - tbl[x]=copy.deepcopy(tbl[x]) - self.undo_stack=self.undo_stack+[orig] + tbl[x] = copy.deepcopy(tbl[x]) + self.undo_stack = self.undo_stack + [orig] + def commit(self): + """ + Commits transactional changes. See :py:meth:`ConfigSet.stash` + """ self.undo_stack.pop(-1) + def revert(self): - self.table=self.undo_stack.pop(-1) + """ + Reverts the object to a previous state. See :py:meth:`ConfigSet.stash` + """ + self.table = self.undo_stack.pop(-1) + diff -Nru lilv-0.24.4~dfsg0/waflib/Configure.py lilv-0.24.6/waflib/Configure.py --- lilv-0.24.4~dfsg0/waflib/Configure.py 2018-06-27 05:22:05.000000000 +0000 +++ lilv-0.24.6/waflib/Configure.py 2019-10-19 17:59:11.000000000 +0000 @@ -1,368 +1,649 @@ -#! /usr/bin/env python +#!/usr/bin/env python # encoding: utf-8 -# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file +# Thomas Nagy, 2005-2018 (ita) -import os,re,shlex,shutil,sys,time,traceback -from waflib import ConfigSet,Utils,Options,Logs,Context,Build,Errors -WAF_CONFIG_LOG='config.log' -autoconfig=False -conf_template='''# project %(app)s configured on %(now)s by +""" +Configuration system + +A :py:class:`waflib.Configure.ConfigurationContext` instance is created when ``waf configure`` is called, it is used to: + +* create data dictionaries (ConfigSet instances) +* store the list of modules to import +* hold configuration routines such as ``find_program``, etc +""" + +import os, re, shlex, shutil, sys, time, traceback +from waflib import ConfigSet, Utils, Options, Logs, Context, Build, Errors + +WAF_CONFIG_LOG = 'config.log' +"""Name of the configuration log file""" + +autoconfig = False +"""Execute the configuration automatically""" + +conf_template = '''# project %(app)s configured on %(now)s by # waf %(wafver)s (abi %(abi)s, python %(pyver)x on %(systype)s) # using %(args)s #''' + class ConfigurationContext(Context.Context): '''configures the project''' - cmd='configure' - error_handlers=[] - def __init__(self,**kw): - super(ConfigurationContext,self).__init__(**kw) - self.environ=dict(os.environ) - self.all_envs={} - self.top_dir=None - self.out_dir=None - self.tools=[] - self.hash=0 - self.files=[] - self.tool_cache=[] + + cmd = 'configure' + + error_handlers = [] + """ + Additional functions to handle configuration errors + """ + + def __init__(self, **kw): + super(ConfigurationContext, self).__init__(**kw) + self.environ = dict(os.environ) + self.all_envs = {} + + self.top_dir = None + self.out_dir = None + + self.tools = [] # tools loaded in the configuration, and that will be loaded when building + + self.hash = 0 + self.files = [] + + self.tool_cache = [] + self.setenv('') - def setenv(self,name,env=None): + + def setenv(self, name, env=None): + """ + Set a new config set for conf.env. If a config set of that name already exists, + recall it without modification. + + The name is the filename prefix to save to ``c4che/NAME_cache.py``, and it + is also used as *variants* by the build commands. 
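The variant workflow this enables looks roughly as follows in a wscript; the 'debug' variant name and flags are illustrative:

    def configure(cfg):
        cfg.env.CFLAGS = ['-O2']
        cfg.setenv('debug', env=cfg.env)   # derive the 'debug' set from the default one
        cfg.env.CFLAGS = ['-O0', '-g']     # modifies the 'debug' set only

    def build(bld):
        print(bld.all_envs['debug'].CFLAGS)  # ['-O0', '-g']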
+ Though related to variants, whatever kind of data may be stored in the config set:: + + def configure(cfg): + cfg.env.ONE = 1 + cfg.setenv('foo') + cfg.env.ONE = 2 + + def build(bld): + 2 == bld.env_of_name('foo').ONE + + :param name: name of the configuration set + :type name: string + :param env: ConfigSet to copy, or an empty ConfigSet is created + :type env: :py:class:`waflib.ConfigSet.ConfigSet` + """ if name not in self.all_envs or env: if not env: - env=ConfigSet.ConfigSet() + env = ConfigSet.ConfigSet() self.prepare_env(env) else: - env=env.derive() - self.all_envs[name]=env - self.variant=name + env = env.derive() + self.all_envs[name] = env + self.variant = name + def get_env(self): + """Getter for the env property""" return self.all_envs[self.variant] - def set_env(self,val): - self.all_envs[self.variant]=val - env=property(get_env,set_env) + def set_env(self, val): + """Setter for the env property""" + self.all_envs[self.variant] = val + + env = property(get_env, set_env) + def init_dirs(self): - top=self.top_dir + """ + Initialize the project directory and the build directory + """ + + top = self.top_dir if not top: - top=Options.options.top + top = Options.options.top if not top: - top=getattr(Context.g_module,Context.TOP,None) + top = getattr(Context.g_module, Context.TOP, None) if not top: - top=self.path.abspath() - top=os.path.abspath(top) - self.srcnode=(os.path.isabs(top)and self.root or self.path).find_dir(top) + top = self.path.abspath() + top = os.path.abspath(top) + + self.srcnode = (os.path.isabs(top) and self.root or self.path).find_dir(top) assert(self.srcnode) - out=self.out_dir + + out = self.out_dir if not out: - out=Options.options.out + out = Options.options.out if not out: - out=getattr(Context.g_module,Context.OUT,None) + out = getattr(Context.g_module, Context.OUT, None) if not out: - out=Options.lockfile.replace('.lock-waf_%s_'%sys.platform,'').replace('.lock-waf','') - out=os.path.realpath(out) - self.bldnode=(os.path.isabs(out)and self.root or self.path).make_node(out) + out = Options.lockfile.replace('.lock-waf_%s_' % sys.platform, '').replace('.lock-waf', '') + + # someone can be messing with symlinks + out = os.path.realpath(out) + + self.bldnode = (os.path.isabs(out) and self.root or self.path).make_node(out) self.bldnode.mkdir() + if not os.path.isdir(self.bldnode.abspath()): - conf.fatal('Could not create the build directory %s'%self.bldnode.abspath()) + self.fatal('Could not create the build directory %s' % self.bldnode.abspath()) + def execute(self): + """ + See :py:func:`waflib.Context.Context.execute` + """ self.init_dirs() - self.cachedir=self.bldnode.make_node(Build.CACHE_DIR) + + self.cachedir = self.bldnode.make_node(Build.CACHE_DIR) self.cachedir.mkdir() - path=os.path.join(self.bldnode.abspath(),WAF_CONFIG_LOG) - self.logger=Logs.make_logger(path,'cfg') - app=getattr(Context.g_module,'APPNAME','') + + path = os.path.join(self.bldnode.abspath(), WAF_CONFIG_LOG) + self.logger = Logs.make_logger(path, 'cfg') + + app = getattr(Context.g_module, 'APPNAME', '') if app: - ver=getattr(Context.g_module,'VERSION','') + ver = getattr(Context.g_module, 'VERSION', '') if ver: - app="%s (%s)"%(app,ver) - params={'now':time.ctime(),'pyver':sys.hexversion,'systype':sys.platform,'args':" ".join(sys.argv),'wafver':Context.WAFVERSION,'abi':Context.ABI,'app':app} - self.to_log(conf_template%params) - self.msg('Setting top to',self.srcnode.abspath()) - self.msg('Setting out to',self.bldnode.abspath()) - if id(self.srcnode)==id(self.bldnode): + app = "%s 
(%s)" % (app, ver) + + params = {'now': time.ctime(), 'pyver': sys.hexversion, 'systype': sys.platform, 'args': " ".join(sys.argv), 'wafver': Context.WAFVERSION, 'abi': Context.ABI, 'app': app} + self.to_log(conf_template % params) + self.msg('Setting top to', self.srcnode.abspath()) + self.msg('Setting out to', self.bldnode.abspath()) + + if id(self.srcnode) == id(self.bldnode): Logs.warn('Setting top == out') - elif id(self.path)!=id(self.srcnode): + elif id(self.path) != id(self.srcnode): if self.srcnode.is_child_of(self.path): Logs.warn('Are you certain that you do not want to set top="." ?') - super(ConfigurationContext,self).execute() + + super(ConfigurationContext, self).execute() + self.store() - Context.top_dir=self.srcnode.abspath() - Context.out_dir=self.bldnode.abspath() - env=ConfigSet.ConfigSet() - env.argv=sys.argv - env.options=Options.options.__dict__ - env.config_cmd=self.cmd - env.run_dir=Context.run_dir - env.top_dir=Context.top_dir - env.out_dir=Context.out_dir - env.hash=self.hash - env.files=self.files - env.environ=dict(self.environ) - if not(self.env.NO_LOCK_IN_RUN or env.environ.get('NO_LOCK_IN_RUN')or getattr(Options.options,'no_lock_in_run')): - env.store(os.path.join(Context.run_dir,Options.lockfile)) - if not(self.env.NO_LOCK_IN_TOP or env.environ.get('NO_LOCK_IN_TOP')or getattr(Options.options,'no_lock_in_top')): - env.store(os.path.join(Context.top_dir,Options.lockfile)) - if not(self.env.NO_LOCK_IN_OUT or env.environ.get('NO_LOCK_IN_OUT')or getattr(Options.options,'no_lock_in_out')): - env.store(os.path.join(Context.out_dir,Options.lockfile)) - def prepare_env(self,env): + + Context.top_dir = self.srcnode.abspath() + Context.out_dir = self.bldnode.abspath() + + # this will write a configure lock so that subsequent builds will + # consider the current path as the root directory (see prepare_impl). 
+ # to remove: use 'waf distclean' + env = ConfigSet.ConfigSet() + env.argv = sys.argv + env.options = Options.options.__dict__ + env.config_cmd = self.cmd + + env.run_dir = Context.run_dir + env.top_dir = Context.top_dir + env.out_dir = Context.out_dir + + # conf.hash & conf.files hold wscript files paths and hash + # (used only by Configure.autoconfig) + env.hash = self.hash + env.files = self.files + env.environ = dict(self.environ) + env.launch_dir = Context.launch_dir + + if not (self.env.NO_LOCK_IN_RUN or env.environ.get('NO_LOCK_IN_RUN') or getattr(Options.options, 'no_lock_in_run')): + env.store(os.path.join(Context.run_dir, Options.lockfile)) + if not (self.env.NO_LOCK_IN_TOP or env.environ.get('NO_LOCK_IN_TOP') or getattr(Options.options, 'no_lock_in_top')): + env.store(os.path.join(Context.top_dir, Options.lockfile)) + if not (self.env.NO_LOCK_IN_OUT or env.environ.get('NO_LOCK_IN_OUT') or getattr(Options.options, 'no_lock_in_out')): + env.store(os.path.join(Context.out_dir, Options.lockfile)) + + def prepare_env(self, env): + """ + Insert *PREFIX*, *BINDIR* and *LIBDIR* values into ``env`` + + :type env: :py:class:`waflib.ConfigSet.ConfigSet` + :param env: a ConfigSet, usually ``conf.env`` + """ if not env.PREFIX: if Options.options.prefix or Utils.is_win32: - env.PREFIX=Options.options.prefix + env.PREFIX = Options.options.prefix else: - env.PREFIX='/' + env.PREFIX = '/' if not env.BINDIR: if Options.options.bindir: - env.BINDIR=Options.options.bindir + env.BINDIR = Options.options.bindir else: - env.BINDIR=Utils.subst_vars('${PREFIX}/bin',env) + env.BINDIR = Utils.subst_vars('${PREFIX}/bin', env) if not env.LIBDIR: if Options.options.libdir: - env.LIBDIR=Options.options.libdir + env.LIBDIR = Options.options.libdir else: - env.LIBDIR=Utils.subst_vars('${PREFIX}/lib%s'%Utils.lib64(),env) + env.LIBDIR = Utils.subst_vars('${PREFIX}/lib%s' % Utils.lib64(), env) + def store(self): - n=self.cachedir.make_node('build.config.py') - n.write('version = 0x%x\ntools = %r\n'%(Context.HEXVERSION,self.tools)) + """Save the config results into the cache file""" + n = self.cachedir.make_node('build.config.py') + n.write('version = 0x%x\ntools = %r\n' % (Context.HEXVERSION, self.tools)) + if not self.all_envs: self.fatal('nothing to store in the configuration context!') + for key in self.all_envs: - tmpenv=self.all_envs[key] - tmpenv.store(os.path.join(self.cachedir.abspath(),key+Build.CACHE_SUFFIX)) - def load(self,tool_list,tooldir=None,funs=None,with_sys_path=True,cache=False): - tools=Utils.to_list(tool_list) + tmpenv = self.all_envs[key] + tmpenv.store(os.path.join(self.cachedir.abspath(), key + Build.CACHE_SUFFIX)) + + def load(self, tool_list, tooldir=None, funs=None, with_sys_path=True, cache=False): + """ + Load Waf tools, which will be imported whenever a build is started. 
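Typical wscript usage of the tool loader described here ('compiler_c' ships with waf; the project-local tool name and directory are hypothetical):

    def options(opt):
        opt.load('compiler_c')

    def configure(conf):
        conf.load('compiler_c')                   # also recorded for build time
        conf.load('my_tool', tooldir='waftools')  # hypothetical local tool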
+ + :param tool_list: waf tools to import + :type tool_list: list of string + :param tooldir: paths for the imports + :type tooldir: list of string + :param funs: functions to execute from the waf tools + :type funs: list of string + :param cache: whether to prevent the tool from running twice + :type cache: bool + """ + + tools = Utils.to_list(tool_list) if tooldir: - tooldir=Utils.to_list(tooldir) + tooldir = Utils.to_list(tooldir) for tool in tools: + # avoid loading the same tool more than once with the same functions + # used by composite projects + if cache: - mag=(tool,id(self.env),tooldir,funs) + mag = (tool, id(self.env), tooldir, funs) if mag in self.tool_cache: - self.to_log('(tool %s is already loaded, skipping)'%tool) + self.to_log('(tool %s is already loaded, skipping)' % tool) continue self.tool_cache.append(mag) - module=None + + module = None try: - module=Context.load_tool(tool,tooldir,ctx=self,with_sys_path=with_sys_path) + module = Context.load_tool(tool, tooldir, ctx=self, with_sys_path=with_sys_path) except ImportError as e: - self.fatal('Could not load the Waf tool %r from %r\n%s'%(tool,getattr(e,'waf_sys_path',sys.path),e)) + self.fatal('Could not load the Waf tool %r from %r\n%s' % (tool, getattr(e, 'waf_sys_path', sys.path), e)) except Exception as e: - self.to_log('imp %r (%r & %r)'%(tool,tooldir,funs)) + self.to_log('imp %r (%r & %r)' % (tool, tooldir, funs)) self.to_log(traceback.format_exc()) raise + if funs is not None: self.eval_rules(funs) else: - func=getattr(module,'configure',None) + func = getattr(module, 'configure', None) if func: - if type(func)is type(Utils.readf): + if type(func) is type(Utils.readf): func(self) else: self.eval_rules(func) - self.tools.append({'tool':tool,'tooldir':tooldir,'funs':funs}) - def post_recurse(self,node): - super(ConfigurationContext,self).post_recurse(node) - self.hash=Utils.h_list((self.hash,node.read('rb'))) + + self.tools.append({'tool':tool, 'tooldir':tooldir, 'funs':funs}) + + def post_recurse(self, node): + """ + Records the path and a hash of the scripts visited, see :py:meth:`waflib.Context.Context.post_recurse` + + :param node: script + :type node: :py:class:`waflib.Node.Node` + """ + super(ConfigurationContext, self).post_recurse(node) + self.hash = Utils.h_list((self.hash, node.read('rb'))) self.files.append(node.abspath()) - def eval_rules(self,rules): - self.rules=Utils.to_list(rules) + + def eval_rules(self, rules): + """ + Execute configuration tests provided as list of functions to run + + :param rules: list of configuration method names + :type rules: list of string + """ + self.rules = Utils.to_list(rules) for x in self.rules: - f=getattr(self,x) + f = getattr(self, x) if not f: - self.fatal('No such configuration function %r'%x) + self.fatal('No such configuration function %r' % x) f() + def conf(f): - def fun(*k,**kw): - mandatory=kw.pop('mandatory',True) + """ + Decorator: attach new configuration functions to :py:class:`waflib.Build.BuildContext` and + :py:class:`waflib.Configure.ConfigurationContext`. 
The methods bound will accept a parameter + named 'mandatory' to disable the configuration errors:: + + def configure(conf): + conf.find_program('abc', mandatory=False) + + :param f: method to bind + :type f: function + """ + def fun(*k, **kw): + mandatory = kw.pop('mandatory', True) try: - return f(*k,**kw) + return f(*k, **kw) except Errors.ConfigurationError: if mandatory: raise - fun.__name__=f.__name__ - setattr(ConfigurationContext,f.__name__,fun) - setattr(Build.BuildContext,f.__name__,fun) + + fun.__name__ = f.__name__ + setattr(ConfigurationContext, f.__name__, fun) + setattr(Build.BuildContext, f.__name__, fun) return f + @conf -def add_os_flags(self,var,dest=None,dup=False): +def add_os_flags(self, var, dest=None, dup=False): + """ + Import operating system environment values into ``conf.env`` dict:: + + def configure(conf): + conf.add_os_flags('CFLAGS') + + :param var: variable to use + :type var: string + :param dest: destination variable, by default the same as var + :type dest: string + :param dup: add the same set of flags again + :type dup: bool + """ try: - flags=shlex.split(self.environ[var]) + flags = shlex.split(self.environ[var]) except KeyError: return - if dup or''.join(flags)not in''.join(Utils.to_list(self.env[dest or var])): - self.env.append_value(dest or var,flags) + if dup or ''.join(flags) not in ''.join(Utils.to_list(self.env[dest or var])): + self.env.append_value(dest or var, flags) + @conf -def cmd_to_list(self,cmd): - if isinstance(cmd,str): +def cmd_to_list(self, cmd): + """ + Detect if a command is written in pseudo shell like ``ccache g++`` and return a list. + + :param cmd: command + :type cmd: a string or a list of string + """ + if isinstance(cmd, str): if os.path.isfile(cmd): - return[cmd] - if os.sep=='/': + # do not take any risk + return [cmd] + if os.sep == '/': return shlex.split(cmd) else: try: - return shlex.split(cmd,posix=False) + return shlex.split(cmd, posix=False) except TypeError: + # Python 2.5 on windows? 
return shlex.split(cmd) return cmd + @conf -def check_waf_version(self,mini='1.9.99',maxi='2.1.0',**kw): - self.start_msg('Checking for waf version in %s-%s'%(str(mini),str(maxi)),**kw) - ver=Context.HEXVERSION - if Utils.num2ver(mini)>ver: - self.fatal('waf version should be at least %r (%r found)'%(Utils.num2ver(mini),ver)) - if Utils.num2ver(maxi)<ver: - self.fatal('waf version should be at most %r (%r found)'%(Utils.num2ver(maxi),ver)) - self.end_msg('ok',**kw) +def check_waf_version(self, mini='1.9.99', maxi='2.1.0', **kw): + """ + Raise a Configuration error if the Waf version does not strictly match the given bounds:: + + conf.check_waf_version(mini='1.8.99', maxi='1.9.0') + + :type mini: number, tuple or string + :param mini: Minimum required version + :type maxi: number, tuple or string + :param maxi: Maximum allowed version + """ + self.start_msg('Checking for waf version in %s-%s' % (str(mini), str(maxi)), **kw) + ver = Context.HEXVERSION + if Utils.num2ver(mini) > ver: + self.fatal('waf version should be at least %r (%r found)' % (Utils.num2ver(mini), ver)) + if Utils.num2ver(maxi) < ver: + self.fatal('waf version should be at most %r (%r found)' % (Utils.num2ver(maxi), ver)) + self.end_msg('ok', **kw) + @conf -def find_file(self,filename,path_list=[]): +def find_file(self, filename, path_list=[]): + """ + Find a file in a list of paths + + :param filename: name of the file to search for + :param path_list: list of directories to search + :return: the first matching filename; else a configuration exception is raised + """ for n in Utils.to_list(filename): for d in Utils.to_list(path_list): - p=os.path.expanduser(os.path.join(d,n)) + p = os.path.expanduser(os.path.join(d, n)) if os.path.exists(p): return p - self.fatal('Could not find %r'%filename) + self.fatal('Could not find %r' % filename) + @conf -def find_program(self,filename,**kw): - exts=kw.get('exts',Utils.is_win32 and'.exe,.com,.bat,.cmd'or',.sh,.pl,.py') - environ=kw.get('environ',getattr(self,'environ',os.environ)) - ret='' - filename=Utils.to_list(filename) - msg=kw.get('msg',', '.join(filename)) - var=kw.get('var','') +def find_program(self, filename, **kw): + """ + Search for a program on the operating system + + When var is used, you may set os.environ[var] to help find a specific program version, for example:: + + $ CC='ccache gcc' waf configure + + :param path_list: paths to use for searching + :type param_list: list of string + :param var: store the result to conf.env[var] where var defaults to filename.upper() if not provided; the result is stored as a list of strings + :type var: string + :param value: obtain the program from the value passed exclusively + :type value: list or string (list is preferred) + :param exts: list of extensions for the binary (do not add an extension for portability) + :type exts: list of string + :param msg: name to display in the log, by default filename is used + :type msg: string + :param interpreter: interpreter for the program + :type interpreter: ConfigSet variable key + :raises: :py:class:`waflib.Errors.ConfigurationError` + """ + + exts = kw.get('exts', Utils.is_win32 and '.exe,.com,.bat,.cmd' or ',.sh,.pl,.py') + + environ = kw.get('environ', getattr(self, 'environ', os.environ)) + + ret = '' + + filename = Utils.to_list(filename) + msg = kw.get('msg', ', '.join(filename)) + + var = kw.get('var', '') if not var: - var=re.sub(r'[-.]','_',filename[0].upper()) - path_list=kw.get('path_list','') + var = re.sub(r'[-.]', '_', filename[0].upper()) + + path_list = kw.get('path_list', '') if path_list: - path_list=Utils.to_list(path_list) + path_list = Utils.to_list(path_list) else: - path_list=environ.get('PATH','').split(os.pathsep) + path_list = environ.get('PATH', '').split(os.pathsep) + if kw.get('value'): - ret=self.cmd_to_list(kw['value']) + # user-provided in command-line options and passed to find_program + ret = self.cmd_to_list(kw['value']) elif environ.get(var): - ret=self.cmd_to_list(environ[var]) + # user-provided in the os environment + ret = self.cmd_to_list(environ[var]) elif self.env[var]: - ret=self.cmd_to_list(self.env[var]) + # a default option in the wscript file + ret =
self.cmd_to_list(self.env[var]) else: if not ret: - ret=self.find_binary(filename,exts.split(','),path_list) + ret = self.find_binary(filename, exts.split(','), path_list) if not ret and Utils.winreg: - ret=Utils.get_registry_app_path(Utils.winreg.HKEY_CURRENT_USER,filename) + ret = Utils.get_registry_app_path(Utils.winreg.HKEY_CURRENT_USER, filename) if not ret and Utils.winreg: - ret=Utils.get_registry_app_path(Utils.winreg.HKEY_LOCAL_MACHINE,filename) - ret=self.cmd_to_list(ret) + ret = Utils.get_registry_app_path(Utils.winreg.HKEY_LOCAL_MACHINE, filename) + ret = self.cmd_to_list(ret) + if ret: - if len(ret)==1: - retmsg=ret[0] + if len(ret) == 1: + retmsg = ret[0] else: - retmsg=ret + retmsg = ret else: - retmsg=False - self.msg('Checking for program %r'%msg,retmsg,**kw) + retmsg = False + + self.msg('Checking for program %r' % msg, retmsg, **kw) if not kw.get('quiet'): - self.to_log('find program=%r paths=%r var=%r -> %r'%(filename,path_list,var,ret)) + self.to_log('find program=%r paths=%r var=%r -> %r' % (filename, path_list, var, ret)) + if not ret: - self.fatal(kw.get('errmsg','')or'Could not find the program %r'%filename) - interpreter=kw.get('interpreter') + self.fatal(kw.get('errmsg', '') or 'Could not find the program %r' % filename) + + interpreter = kw.get('interpreter') if interpreter is None: - if not Utils.check_exe(ret[0],env=environ): - self.fatal('Program %r is not executable'%ret) - self.env[var]=ret + if not Utils.check_exe(ret[0], env=environ): + self.fatal('Program %r is not executable' % ret) + self.env[var] = ret else: - self.env[var]=self.env[interpreter]+ret + self.env[var] = self.env[interpreter] + ret + return ret + @conf -def find_binary(self,filenames,exts,paths): +def find_binary(self, filenames, exts, paths): for f in filenames: for ext in exts: - exe_name=f+ext + exe_name = f + ext if os.path.isabs(exe_name): if os.path.isfile(exe_name): return exe_name else: for path in paths: - x=os.path.expanduser(os.path.join(path,exe_name)) + x = os.path.expanduser(os.path.join(path, exe_name)) if os.path.isfile(x): return x return None + @conf -def run_build(self,*k,**kw): - lst=[str(v)for(p,v)in kw.items()if p!='env'] - h=Utils.h_list(lst) - dir=self.bldnode.abspath()+os.sep+(not Utils.is_win32 and'.'or'')+'conf_check_'+Utils.to_hex(h) +def run_build(self, *k, **kw): + """ + Create a temporary build context to execute a build. A reference to that build + context is kept on self.test_bld for debugging purposes, and you should not rely + on it too much (read the note on the cache below). + The parameters given in the arguments to this function are passed as arguments for + a single task generator created in the build. Only three parameters are obligatory: + + :param features: features to pass to a task generator created in the build + :type features: list of string + :param compile_filename: file to create for the compilation (default: *test.c*) + :type compile_filename: string + :param code: code to write in the filename to compile + :type code: string + + Though this function returns *0* by default, the build may set an attribute named *retval* on the + build context object to return a particular value. See :py:func:`waflib.Tools.c_config.test_exec_fun` for example. 
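In practice `run_build` is usually reached through wrappers such as `conf.check` from the c_config tool rather than called directly; a hedged sketch of such a configuration test (the fragment and message are illustrative):

    def configure(conf):
        conf.load('compiler_c')
        conf.check(features='c cprogram',
                   fragment='int main(void) { return 0; }',
                   msg='Checking for a working C compiler',
                   mandatory=False)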
+ + This function also features a cache which can be enabled by the following option:: + + def options(opt): + opt.add_option('--confcache', dest='confcache', default=0, + action='count', help='Use a configuration cache') + + And execute the configuration with the following command-line:: + + $ waf configure --confcache + + """ + buf = [] + for key in sorted(kw.keys()): + v = kw[key] + if hasattr(v, '__call__'): + buf.append(Utils.h_fun(v)) + else: + buf.append(str(v)) + h = Utils.h_list(buf) + dir = self.bldnode.abspath() + os.sep + (not Utils.is_win32 and '.' or '') + 'conf_check_' + Utils.to_hex(h) + + cachemode = kw.get('confcache', getattr(Options.options, 'confcache', None)) + + if not cachemode and os.path.exists(dir): + shutil.rmtree(dir) + try: os.makedirs(dir) except OSError: pass + try: os.stat(dir) except OSError: - self.fatal('cannot use the configuration test folder %r'%dir) - cachemode=getattr(Options.options,'confcache',None) - if cachemode==1: + self.fatal('cannot use the configuration test folder %r' % dir) + + if cachemode == 1: try: - proj=ConfigSet.ConfigSet(os.path.join(dir,'cache_run_build')) + proj = ConfigSet.ConfigSet(os.path.join(dir, 'cache_run_build')) except EnvironmentError: pass else: - ret=proj['cache_run_build'] - if isinstance(ret,str)and ret.startswith('Test does not build'): + ret = proj['cache_run_build'] + if isinstance(ret, str) and ret.startswith('Test does not build'): self.fatal(ret) return ret - bdir=os.path.join(dir,'testbuild') + + bdir = os.path.join(dir, 'testbuild') + if not os.path.exists(bdir): os.makedirs(bdir) - cls_name=kw.get('run_build_cls')or getattr(self,'run_build_cls','build') - self.test_bld=bld=Context.create_context(cls_name,top_dir=dir,out_dir=bdir) + + cls_name = kw.get('run_build_cls') or getattr(self, 'run_build_cls', 'build') + self.test_bld = bld = Context.create_context(cls_name, top_dir=dir, out_dir=bdir) bld.init_dirs() - bld.progress_bar=0 - bld.targets='*' - bld.logger=self.logger - bld.all_envs.update(self.all_envs) - bld.env=kw['env'] - bld.kw=kw - bld.conf=self + bld.progress_bar = 0 + bld.targets = '*' + + bld.logger = self.logger + bld.all_envs.update(self.all_envs) # not really necessary + bld.env = kw['env'] + + bld.kw = kw + bld.conf = self kw['build_fun'](bld) - ret=-1 + ret = -1 try: try: bld.compile() except Errors.WafError: - ret='Test does not build: %s'%traceback.format_exc() + ret = 'Test does not build: %s' % traceback.format_exc() self.fatal(ret) else: - ret=getattr(bld,'retval',0) + ret = getattr(bld, 'retval', 0) finally: - if cachemode==1: - proj=ConfigSet.ConfigSet() - proj['cache_run_build']=ret - proj.store(os.path.join(dir,'cache_run_build')) + if cachemode: + # cache the results each time + proj = ConfigSet.ConfigSet() + proj['cache_run_build'] = ret + proj.store(os.path.join(dir, 'cache_run_build')) else: shutil.rmtree(dir) return ret + @conf -def ret_msg(self,msg,args): - if isinstance(msg,str): +def ret_msg(self, msg, args): + if isinstance(msg, str): return msg return msg(args) + @conf -def test(self,*k,**kw): - if not'env'in kw: - kw['env']=self.env.derive() +def test(self, *k, **kw): + + if not 'env' in kw: + kw['env'] = self.env.derive() + + # validate_c for example if kw.get('validate'): kw['validate'](kw) - self.start_msg(kw['msg'],**kw) - ret=None + + self.start_msg(kw['msg'], **kw) + ret = None try: - ret=self.run_build(*k,**kw) + ret = self.run_build(*k, **kw) except self.errors.ConfigurationError: - self.end_msg(kw['errmsg'],'YELLOW',**kw) - if Logs.verbose>1: + 
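
Since run_build above hashes the keyword arguments (callables via ``Utils.h_fun``) to name the ``conf_check_*`` folder and returns ``bld.retval`` when set, it can also be driven directly with a custom ``build_fun``; a minimal sketch, assuming a C compiler was configured earlier (the ``env`` keyword is mandatory)::

	def custom_check(bld):
		bld.srcnode.make_node('main.c').write('int main(void) { return 0; }\n')
		bld(features='c cprogram', source='main.c', target='app')
		bld.retval = 42  # picked up by run_build via getattr(bld, 'retval', 0)

	def configure(conf):
		ret = conf.run_build(build_fun=custom_check, env=conf.env.derive())
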
self.end_msg(kw['errmsg'], 'YELLOW', **kw) + if Logs.verbose > 1: raise else: self.fatal('The configuration failed') else: - kw['success']=ret + kw['success'] = ret + if kw.get('post_check'): - ret=kw['post_check'](kw) + ret = kw['post_check'](kw) + if ret: - self.end_msg(kw['errmsg'],'YELLOW',**kw) - self.fatal('The configuration failed %r'%ret) + self.end_msg(kw['errmsg'], 'YELLOW', **kw) + self.fatal('The configuration failed %r' % ret) else: - self.end_msg(self.ret_msg(kw['okmsg'],kw),**kw) + self.end_msg(self.ret_msg(kw['okmsg'], kw), **kw) return ret + diff -Nru lilv-0.24.4~dfsg0/waflib/Context.py lilv-0.24.6/waflib/Context.py --- lilv-0.24.4~dfsg0/waflib/Context.py 2018-07-09 14:42:42.000000000 +0000 +++ lilv-0.24.6/waflib/Context.py 2019-10-19 17:59:11.000000000 +0000 @@ -1,128 +1,288 @@ -#! /usr/bin/env python +#!/usr/bin/env python # encoding: utf-8 -# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file +# Thomas Nagy, 2010-2018 (ita) -import os,re,imp,sys -from waflib import Utils,Errors,Logs +""" +Classes and functions enabling the command system +""" + +import os, re, imp, sys +from waflib import Utils, Errors, Logs import waflib.Node -HEXVERSION=0x2000900 -WAFVERSION="2.0.9" -WAFREVISION="8a950e7bca9a3a9b1ae62aae039ef76e2adc4177" -ABI=20 -DBFILE='.wafpickle-%s-%d-%d'%(sys.platform,sys.hexversion,ABI) -APPNAME='APPNAME' -VERSION='VERSION' -TOP='top' -OUT='out' -WSCRIPT_FILE='wscript' -launch_dir='' -run_dir='' -top_dir='' -out_dir='' -waf_dir='' -default_encoding=Utils.console_encoding() -g_module=None -STDOUT=1 -STDERR=-1 -BOTH=0 -classes=[] -def create_context(cmd_name,*k,**kw): + +# the following 3 constants are updated on each new release (do not touch) +HEXVERSION=0x2001200 +"""Constant updated on new releases""" + +WAFVERSION="2.0.18" +"""Constant updated on new releases""" + +WAFREVISION="314689b8994259a84f0de0aaef74d7ce91f541ad" +"""Git revision when the waf version is updated""" + +ABI = 20 +"""Version of the build data cache file format (used in :py:const:`waflib.Context.DBFILE`)""" + +DBFILE = '.wafpickle-%s-%d-%d' % (sys.platform, sys.hexversion, ABI) +"""Name of the pickle file for storing the build data""" + +APPNAME = 'APPNAME' +"""Default application name (used by ``waf dist``)""" + +VERSION = 'VERSION' +"""Default application version (used by ``waf dist``)""" + +TOP = 'top' +"""The variable name for the top-level directory in wscript files""" + +OUT = 'out' +"""The variable name for the output directory in wscript files""" + +WSCRIPT_FILE = 'wscript' +"""Name of the waf script files""" + +launch_dir = '' +"""Directory from which waf has been called""" +run_dir = '' +"""Location of the wscript file to use as the entry point""" +top_dir = '' +"""Location of the project directory (top), if the project was configured""" +out_dir = '' +"""Location of the build directory (out), if the project was configured""" +waf_dir = '' +"""Directory containing the waf modules""" + +default_encoding = Utils.console_encoding() +"""Encoding to use when reading outputs from other processes""" + +g_module = None +""" +Module representing the top-level wscript file (see :py:const:`waflib.Context.run_dir`) +""" + +STDOUT = 1 +STDERR = -1 +BOTH = 0 + +classes = [] +""" +List of :py:class:`waflib.Context.Context` subclasses that can be used as waf commands. The classes +are added automatically by a metaclass. +""" + +def create_context(cmd_name, *k, **kw): + """ + Returns a new :py:class:`waflib.Context.Context` instance corresponding to the given command. 
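
HEXVERSION packs one release component per byte (``0x2001200`` is 2.0.18, matching WAFVERSION above), which is what lets check_waf_version in Configure.py compare versions with plain integer operators. Mirroring the ``Utils.num2ver`` encoding::

	>>> hex(2 * 256**3 + 0 * 256**2 + 18 * 256)  # '2.0.18'
	'0x2001200'
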
+ Used in particular by :py:func:`waflib.Scripting.run_command` + + :param cmd_name: command name + :type cmd_name: string + :param k: arguments to give to the context class initializer + :type k: list + :param k: keyword arguments to give to the context class initializer + :type k: dict + :return: Context object + :rtype: :py:class:`waflib.Context.Context` + """ for x in classes: - if x.cmd==cmd_name: - return x(*k,**kw) - ctx=Context(*k,**kw) - ctx.fun=cmd_name + if x.cmd == cmd_name: + return x(*k, **kw) + ctx = Context(*k, **kw) + ctx.fun = cmd_name return ctx + class store_context(type): - def __init__(cls,name,bases,dct): - super(store_context,cls).__init__(name,bases,dct) - name=cls.__name__ - if name in('ctx','Context'): + """ + Metaclass that registers command classes into the list :py:const:`waflib.Context.classes` + Context classes must provide an attribute 'cmd' representing the command name, and a function + attribute 'fun' representing the function name that the command uses. + """ + def __init__(cls, name, bases, dct): + super(store_context, cls).__init__(name, bases, dct) + name = cls.__name__ + + if name in ('ctx', 'Context'): return + try: cls.cmd except AttributeError: - raise Errors.WafError('Missing command for the context class %r (cmd)'%name) - if not getattr(cls,'fun',None): - cls.fun=cls.cmd - classes.insert(0,cls) -ctx=store_context('ctx',(object,),{}) + raise Errors.WafError('Missing command for the context class %r (cmd)' % name) + + if not getattr(cls, 'fun', None): + cls.fun = cls.cmd + + classes.insert(0, cls) + +ctx = store_context('ctx', (object,), {}) +"""Base class for all :py:class:`waflib.Context.Context` classes""" + class Context(ctx): - errors=Errors - tools={} - def __init__(self,**kw): + """ + Default context for waf commands, and base class for new command contexts. + + Context objects are passed to top-level functions:: + + def foo(ctx): + print(ctx.__class__.__name__) # waflib.Context.Context + + Subclasses must define the class attributes 'cmd' and 'fun': + + :param cmd: command to execute as in ``waf cmd`` + :type cmd: string + :param fun: function name to execute when the command is called + :type fun: string + + .. 
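
Because the metaclass registers every subclass in ``classes``, adding a new waf command is just a class definition in a wscript or tool; a minimal sketch with an illustrative command name::

	from waflib.Context import Context

	class SlocContext(Context):
		cmd = 'sloc'  # runs as: waf sloc
		fun = 'sloc'  # executes the function sloc(ctx) from the wscript
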
inheritance-diagram:: waflib.Context.Context waflib.Build.BuildContext waflib.Build.InstallContext waflib.Build.UninstallContext waflib.Build.StepContext waflib.Build.ListContext waflib.Configure.ConfigurationContext waflib.Scripting.Dist waflib.Scripting.DistCheck waflib.Build.CleanContext + + """ + + errors = Errors + """ + Shortcut to :py:mod:`waflib.Errors` provided for convenience + """ + + tools = {} + """ + A module cache for wscript files; see :py:meth:`Context.Context.load` + """ + + def __init__(self, **kw): try: - rd=kw['run_dir'] + rd = kw['run_dir'] except KeyError: - rd=run_dir - self.node_class=type('Nod3',(waflib.Node.Node,),{}) - self.node_class.__module__='waflib.Node' - self.node_class.ctx=self - self.root=self.node_class('',None) - self.cur_script=None - self.path=self.root.find_dir(rd) - self.stack_path=[] - self.exec_dict={'ctx':self,'conf':self,'bld':self,'opt':self} - self.logger=None + rd = run_dir + + # binds the context to the nodes in use to avoid a context singleton + self.node_class = type('Nod3', (waflib.Node.Node,), {}) + self.node_class.__module__ = 'waflib.Node' + self.node_class.ctx = self + + self.root = self.node_class('', None) + self.cur_script = None + self.path = self.root.find_dir(rd) + + self.stack_path = [] + self.exec_dict = {'ctx':self, 'conf':self, 'bld':self, 'opt':self} + self.logger = None + def finalize(self): + """ + Called to free resources such as logger files + """ try: - logger=self.logger + logger = self.logger except AttributeError: pass else: Logs.free_logger(logger) - delattr(self,'logger') - def load(self,tool_list,*k,**kw): - tools=Utils.to_list(tool_list) - path=Utils.to_list(kw.get('tooldir','')) - with_sys_path=kw.get('with_sys_path',True) + delattr(self, 'logger') + + def load(self, tool_list, *k, **kw): + """ + Loads a Waf tool as a module, and try calling the function named :py:const:`waflib.Context.Context.fun` + from it. A ``tooldir`` argument may be provided as a list of module paths. + + :param tool_list: list of Waf tool names to load + :type tool_list: list of string or space-separated string + """ + tools = Utils.to_list(tool_list) + path = Utils.to_list(kw.get('tooldir', '')) + with_sys_path = kw.get('with_sys_path', True) + for t in tools: - module=load_tool(t,path,with_sys_path=with_sys_path) - fun=getattr(module,kw.get('name',self.fun),None) + module = load_tool(t, path, with_sys_path=with_sys_path) + fun = getattr(module, kw.get('name', self.fun), None) if fun: fun(self) + def execute(self): + """ + Here, it calls the function name in the top-level wscript file. Most subclasses + redefine this method to provide additional functionality. + """ self.recurse([os.path.dirname(g_module.root_path)]) - def pre_recurse(self,node): + + def pre_recurse(self, node): + """ + Method executed immediately before a folder is read by :py:meth:`waflib.Context.Context.recurse`. + The current script is bound as a Node object on ``self.cur_script``, and the current path + is bound to ``self.path`` + + :param node: script + :type node: :py:class:`waflib.Node.Node` + """ self.stack_path.append(self.cur_script) - self.cur_script=node - self.path=node.parent - def post_recurse(self,node): - self.cur_script=self.stack_path.pop() + + self.cur_script = node + self.path = node.parent + + def post_recurse(self, node): + """ + Restores ``self.cur_script`` and ``self.path`` right after :py:meth:`waflib.Context.Context.recurse` terminates. 
+ + :param node: script + :type node: :py:class:`waflib.Node.Node` + """ + self.cur_script = self.stack_path.pop() if self.cur_script: - self.path=self.cur_script.parent - def recurse(self,dirs,name=None,mandatory=True,once=True,encoding=None): + self.path = self.cur_script.parent + + def recurse(self, dirs, name=None, mandatory=True, once=True, encoding=None): + """ + Runs user-provided functions from the supplied list of directories. + The directories can be either absolute, or relative to the directory + of the wscript file + + The methods :py:meth:`waflib.Context.Context.pre_recurse` and + :py:meth:`waflib.Context.Context.post_recurse` are called immediately before + and after a script has been executed. + + :param dirs: List of directories to visit + :type dirs: list of string or space-separated string + :param name: Name of function to invoke from the wscript + :type name: string + :param mandatory: whether sub wscript files are required to exist + :type mandatory: bool + :param once: read the script file once for a particular context + :type once: bool + """ try: - cache=self.recurse_cache + cache = self.recurse_cache except AttributeError: - cache=self.recurse_cache={} + cache = self.recurse_cache = {} + for d in Utils.to_list(dirs): + if not os.path.isabs(d): - d=os.path.join(self.path.abspath(),d) - WSCRIPT=os.path.join(d,WSCRIPT_FILE) - WSCRIPT_FUN=WSCRIPT+'_'+(name or self.fun) - node=self.root.find_node(WSCRIPT_FUN) - if node and(not once or node not in cache): - cache[node]=True + # absolute paths only + d = os.path.join(self.path.abspath(), d) + + WSCRIPT = os.path.join(d, WSCRIPT_FILE) + WSCRIPT_FUN = WSCRIPT + '_' + (name or self.fun) + + node = self.root.find_node(WSCRIPT_FUN) + if node and (not once or node not in cache): + cache[node] = True self.pre_recurse(node) try: - function_code=node.read('rU',encoding) - exec(compile(function_code,node.abspath(),'exec'),self.exec_dict) + function_code = node.read('r', encoding) + exec(compile(function_code, node.abspath(), 'exec'), self.exec_dict) finally: self.post_recurse(node) elif not node: - node=self.root.find_node(WSCRIPT) - tup=(node,name or self.fun) - if node and(not once or tup not in cache): - cache[tup]=True + node = self.root.find_node(WSCRIPT) + tup = (node, name or self.fun) + if node and (not once or tup not in cache): + cache[tup] = True self.pre_recurse(node) try: - wscript_module=load_module(node.abspath(),encoding=encoding) - user_function=getattr(wscript_module,(name or self.fun),None) + wscript_module = load_module(node.abspath(), encoding=encoding) + user_function = getattr(wscript_module, (name or self.fun), None) if not user_function: if not mandatory: continue - raise Errors.WafError('No function %r defined in %s'%(name or self.fun,node.abspath())) + raise Errors.WafError('No function %r defined in %s' % (name or self.fun, node.abspath())) user_function(self) finally: self.post_recurse(node) @@ -132,127 +292,221 @@ try: os.listdir(d) except OSError: - raise Errors.WafError('Cannot read the folder %r'%d) - raise Errors.WafError('No wscript file in directory %s'%d) - def log_command(self,cmd,kw): + raise Errors.WafError('Cannot read the folder %r' % d) + raise Errors.WafError('No wscript file in directory %s' % d) + + def log_command(self, cmd, kw): if Logs.verbose: - fmt=os.environ.get('WAF_CMD_FORMAT') - if fmt=='string': - if not isinstance(cmd,str): - cmd=Utils.shell_escape(cmd) - Logs.debug('runner: %r',cmd) - Logs.debug('runner_env: kw=%s',kw) - def exec_command(self,cmd,**kw): - 
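
Together with the ``recurse_cache`` above, the ``once`` default means each sub-wscript is executed a single time per context; a typical top-level wscript::

	def configure(conf):
		conf.recurse('src tests')      # runs configure() in src/wscript and tests/wscript

	def build(bld):
		bld.recurse(['src', 'tests'])  # a list or a space-separated string both work
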
subprocess=Utils.subprocess - kw['shell']=isinstance(cmd,str) - self.log_command(cmd,kw) + fmt = os.environ.get('WAF_CMD_FORMAT') + if fmt == 'string': + if not isinstance(cmd, str): + cmd = Utils.shell_escape(cmd) + Logs.debug('runner: %r', cmd) + Logs.debug('runner_env: kw=%s', kw) + + def exec_command(self, cmd, **kw): + """ + Runs an external process and returns the exit status:: + + def run(tsk): + ret = tsk.generator.bld.exec_command('touch foo.txt') + return ret + + If the context has the attribute 'log', then captures and logs the process stderr/stdout. + Unlike :py:meth:`waflib.Context.Context.cmd_and_log`, this method does not return the + stdout/stderr values captured. + + :param cmd: command argument for subprocess.Popen + :type cmd: string or list + :param kw: keyword arguments for subprocess.Popen. The parameters input/timeout will be passed to wait/communicate. + :type kw: dict + :returns: process exit status + :rtype: integer + :raises: :py:class:`waflib.Errors.WafError` if an invalid executable is specified for a non-shell process + :raises: :py:class:`waflib.Errors.WafError` in case of execution failure + """ + subprocess = Utils.subprocess + kw['shell'] = isinstance(cmd, str) + self.log_command(cmd, kw) + if self.logger: self.logger.info(cmd) - if'stdout'not in kw: - kw['stdout']=subprocess.PIPE - if'stderr'not in kw: - kw['stderr']=subprocess.PIPE - if Logs.verbose and not kw['shell']and not Utils.check_exe(cmd[0]): - raise Errors.WafError('Program %s not found!'%cmd[0]) - cargs={} - if'timeout'in kw: - if sys.hexversion>=0x3030000: - cargs['timeout']=kw['timeout'] - if not'start_new_session'in kw: - kw['start_new_session']=True + + if 'stdout' not in kw: + kw['stdout'] = subprocess.PIPE + if 'stderr' not in kw: + kw['stderr'] = subprocess.PIPE + + if Logs.verbose and not kw['shell'] and not Utils.check_exe(cmd[0]): + raise Errors.WafError('Program %s not found!' 
% cmd[0]) + + cargs = {} + if 'timeout' in kw: + if sys.hexversion >= 0x3030000: + cargs['timeout'] = kw['timeout'] + if not 'start_new_session' in kw: + kw['start_new_session'] = True del kw['timeout'] - if'input'in kw: + if 'input' in kw: if kw['input']: - cargs['input']=kw['input'] - kw['stdin']=subprocess.PIPE + cargs['input'] = kw['input'] + kw['stdin'] = subprocess.PIPE del kw['input'] - if'cwd'in kw: - if not isinstance(kw['cwd'],str): - kw['cwd']=kw['cwd'].abspath() - encoding=kw.pop('decode_as',default_encoding) + + if 'cwd' in kw: + if not isinstance(kw['cwd'], str): + kw['cwd'] = kw['cwd'].abspath() + + encoding = kw.pop('decode_as', default_encoding) + try: - ret,out,err=Utils.run_process(cmd,kw,cargs) + ret, out, err = Utils.run_process(cmd, kw, cargs) except Exception as e: - raise Errors.WafError('Execution failure: %s'%str(e),ex=e) + raise Errors.WafError('Execution failure: %s' % str(e), ex=e) + if out: - if not isinstance(out,str): - out=out.decode(encoding,errors='replace') + if not isinstance(out, str): + out = out.decode(encoding, errors='replace') if self.logger: - self.logger.debug('out: %s',out) + self.logger.debug('out: %s', out) else: - Logs.info(out,extra={'stream':sys.stdout,'c1':''}) + Logs.info(out, extra={'stream':sys.stdout, 'c1': ''}) if err: - if not isinstance(err,str): - err=err.decode(encoding,errors='replace') + if not isinstance(err, str): + err = err.decode(encoding, errors='replace') if self.logger: - self.logger.error('err: %s'%err) + self.logger.error('err: %s' % err) else: - Logs.info(err,extra={'stream':sys.stderr,'c1':''}) + Logs.info(err, extra={'stream':sys.stderr, 'c1': ''}) + return ret - def cmd_and_log(self,cmd,**kw): - subprocess=Utils.subprocess - kw['shell']=isinstance(cmd,str) - self.log_command(cmd,kw) - quiet=kw.pop('quiet',None) - to_ret=kw.pop('output',STDOUT) - if Logs.verbose and not kw['shell']and not Utils.check_exe(cmd[0]): - raise Errors.WafError('Program %r not found!'%cmd[0]) - kw['stdout']=kw['stderr']=subprocess.PIPE + + def cmd_and_log(self, cmd, **kw): + """ + Executes a process and returns stdout/stderr if the execution is successful. + An exception is thrown when the exit status is non-0. In that case, both stderr and stdout + will be bound to the WafError object (configuration tests):: + + def configure(conf): + out = conf.cmd_and_log(['echo', 'hello'], output=waflib.Context.STDOUT, quiet=waflib.Context.BOTH) + (out, err) = conf.cmd_and_log(['echo', 'hello'], output=waflib.Context.BOTH) + (out, err) = conf.cmd_and_log(cmd, input='\\n'.encode(), output=waflib.Context.STDOUT) + try: + conf.cmd_and_log(['which', 'someapp'], output=waflib.Context.BOTH) + except Errors.WafError as e: + print(e.stdout, e.stderr) + + :param cmd: args for subprocess.Popen + :type cmd: list or string + :param kw: keyword arguments for subprocess.Popen. The parameters input/timeout will be passed to wait/communicate. + :type kw: dict + :returns: a tuple containing the contents of stdout and stderr + :rtype: string + :raises: :py:class:`waflib.Errors.WafError` if an invalid executable is specified for a non-shell process + :raises: :py:class:`waflib.Errors.WafError` in case of execution failure; stdout/stderr/returncode are bound to the exception object + """ + subprocess = Utils.subprocess + kw['shell'] = isinstance(cmd, str) + self.log_command(cmd, kw) + + quiet = kw.pop('quiet', None) + to_ret = kw.pop('output', STDOUT) + + if Logs.verbose and not kw['shell'] and not Utils.check_exe(cmd[0]): + raise Errors.WafError('Program %r not found!' 
% cmd[0]) + + kw['stdout'] = kw['stderr'] = subprocess.PIPE if quiet is None: self.to_log(cmd) - cargs={} - if'timeout'in kw: - if sys.hexversion>=0x3030000: - cargs['timeout']=kw['timeout'] - if not'start_new_session'in kw: - kw['start_new_session']=True + + cargs = {} + if 'timeout' in kw: + if sys.hexversion >= 0x3030000: + cargs['timeout'] = kw['timeout'] + if not 'start_new_session' in kw: + kw['start_new_session'] = True del kw['timeout'] - if'input'in kw: + if 'input' in kw: if kw['input']: - cargs['input']=kw['input'] - kw['stdin']=subprocess.PIPE + cargs['input'] = kw['input'] + kw['stdin'] = subprocess.PIPE del kw['input'] - if'cwd'in kw: - if not isinstance(kw['cwd'],str): - kw['cwd']=kw['cwd'].abspath() - encoding=kw.pop('decode_as',default_encoding) + + if 'cwd' in kw: + if not isinstance(kw['cwd'], str): + kw['cwd'] = kw['cwd'].abspath() + + encoding = kw.pop('decode_as', default_encoding) + try: - ret,out,err=Utils.run_process(cmd,kw,cargs) + ret, out, err = Utils.run_process(cmd, kw, cargs) except Exception as e: - raise Errors.WafError('Execution failure: %s'%str(e),ex=e) - if not isinstance(out,str): - out=out.decode(encoding,errors='replace') - if not isinstance(err,str): - err=err.decode(encoding,errors='replace') - if out and quiet!=STDOUT and quiet!=BOTH: - self.to_log('out: %s'%out) - if err and quiet!=STDERR and quiet!=BOTH: - self.to_log('err: %s'%err) + raise Errors.WafError('Execution failure: %s' % str(e), ex=e) + + if not isinstance(out, str): + out = out.decode(encoding, errors='replace') + if not isinstance(err, str): + err = err.decode(encoding, errors='replace') + + if out and quiet != STDOUT and quiet != BOTH: + self.to_log('out: %s' % out) + if err and quiet != STDERR and quiet != BOTH: + self.to_log('err: %s' % err) + if ret: - e=Errors.WafError('Command %r returned %r'%(cmd,ret)) - e.returncode=ret - e.stderr=err - e.stdout=out + e = Errors.WafError('Command %r returned %r' % (cmd, ret)) + e.returncode = ret + e.stderr = err + e.stdout = out raise e - if to_ret==BOTH: - return(out,err) - elif to_ret==STDERR: + + if to_ret == BOTH: + return (out, err) + elif to_ret == STDERR: return err return out - def fatal(self,msg,ex=None): + + def fatal(self, msg, ex=None): + """ + Prints an error message in red and stops command execution; this is + usually used in the configuration section:: + + def configure(conf): + conf.fatal('a requirement is missing') + + :param msg: message to display + :type msg: string + :param ex: optional exception object + :type ex: exception + :raises: :py:class:`waflib.Errors.ConfigurationError` + """ if self.logger: - self.logger.info('from %s: %s'%(self.path.abspath(),msg)) + self.logger.info('from %s: %s' % (self.path.abspath(), msg)) try: - logfile=self.logger.handlers[0].baseFilename + logfile = self.logger.handlers[0].baseFilename except AttributeError: pass else: if os.environ.get('WAF_PRINT_FAILURE_LOG'): - msg='Log from (%s):\n%s\n'%(logfile,Utils.readf(logfile)) + # see #1930 + msg = 'Log from (%s):\n%s\n' % (logfile, Utils.readf(logfile)) else: - msg='%s\n(complete log in %s)'%(msg,logfile) - raise self.errors.ConfigurationError(msg,ex=ex) - def to_log(self,msg): + msg = '%s\n(complete log in %s)' % (msg, logfile) + raise self.errors.ConfigurationError(msg, ex=ex) + + def to_log(self, msg): + """ + Logs information to the logger (if present), or to stderr. + Empty messages are not printed:: + + def build(bld): + bld.to_log('starting the build') + + Provide a logger on the context class or override this method if necessary. 
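
Both exec_command and cmd_and_log route ``input`` and ``timeout`` to the process wait/communicate calls (``timeout`` only on Python >= 3.3, per the ``sys.hexversion`` check above). A hedged sketch, with a Unix-only command chosen for brevity::

	import waflib.Context

	def configure(conf):
		out = conf.cmd_and_log(['cat'], input=b'hello\n', timeout=10,
		                       output=waflib.Context.STDOUT,
		                       quiet=waflib.Context.BOTH)
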
+ + :param msg: message + :type msg: string + """ if not msg: return if self.logger: @@ -260,147 +514,224 @@ else: sys.stderr.write(str(msg)) sys.stderr.flush() - def msg(self,*k,**kw): + + + def msg(self, *k, **kw): + """ + Prints a configuration message of the form ``msg: result``. + The second part of the message will be in colors. The output + can be disabled easly by setting ``in_msg`` to a positive value:: + + def configure(conf): + self.in_msg = 1 + conf.msg('Checking for library foo', 'ok') + # no output + + :param msg: message to display to the user + :type msg: string + :param result: result to display + :type result: string or boolean + :param color: color to use, see :py:const:`waflib.Logs.colors_lst` + :type color: string + """ try: - msg=kw['msg'] + msg = kw['msg'] except KeyError: - msg=k[0] - self.start_msg(msg,**kw) + msg = k[0] + + self.start_msg(msg, **kw) + try: - result=kw['result'] + result = kw['result'] except KeyError: - result=k[1] - color=kw.get('color') - if not isinstance(color,str): - color=result and'GREEN'or'YELLOW' - self.end_msg(result,color,**kw) - def start_msg(self,*k,**kw): + result = k[1] + + color = kw.get('color') + if not isinstance(color, str): + color = result and 'GREEN' or 'YELLOW' + + self.end_msg(result, color, **kw) + + def start_msg(self, *k, **kw): + """ + Prints the beginning of a 'Checking for xxx' message. See :py:meth:`waflib.Context.Context.msg` + """ if kw.get('quiet'): return - msg=kw.get('msg')or k[0] + + msg = kw.get('msg') or k[0] try: if self.in_msg: - self.in_msg+=1 + self.in_msg += 1 return except AttributeError: - self.in_msg=0 - self.in_msg+=1 + self.in_msg = 0 + self.in_msg += 1 + try: - self.line_just=max(self.line_just,len(msg)) + self.line_just = max(self.line_just, len(msg)) except AttributeError: - self.line_just=max(40,len(msg)) - for x in(self.line_just*'-',msg): + self.line_just = max(40, len(msg)) + for x in (self.line_just * '-', msg): self.to_log(x) - Logs.pprint('NORMAL',"%s :"%msg.ljust(self.line_just),sep='') - def end_msg(self,*k,**kw): + Logs.pprint('NORMAL', "%s :" % msg.ljust(self.line_just), sep='') + + def end_msg(self, *k, **kw): + """Prints the end of a 'Checking for' message. See :py:meth:`waflib.Context.Context.msg`""" if kw.get('quiet'): return - self.in_msg-=1 + self.in_msg -= 1 if self.in_msg: return - result=kw.get('result')or k[0] - defcolor='GREEN' + + result = kw.get('result') or k[0] + + defcolor = 'GREEN' if result is True: - msg='ok' + msg = 'ok' elif not result: - msg='not found' - defcolor='YELLOW' + msg = 'not found' + defcolor = 'YELLOW' else: - msg=str(result) + msg = str(result) + self.to_log(msg) try: - color=kw['color'] + color = kw['color'] except KeyError: - if len(k)>1 and k[1]in Logs.colors_lst: - color=k[1] + if len(k) > 1 and k[1] in Logs.colors_lst: + # compatibility waf 1.7 + color = k[1] else: - color=defcolor - Logs.pprint(color,msg) - def load_special_tools(self,var,ban=[]): + color = defcolor + Logs.pprint(color, msg) + + def load_special_tools(self, var, ban=[]): + """ + Loads third-party extensions modules for certain programming languages + by trying to list certain files in the extras/ directory. 
This method + is typically called once for a programming language group, see for + example :py:mod:`waflib.Tools.compiler_c` + + :param var: glob expression, for example 'cxx\\_\\*.py' + :type var: string + :param ban: list of exact file names to exclude + :type ban: list of string + """ if os.path.isdir(waf_dir): - lst=self.root.find_node(waf_dir).find_node('waflib/extras').ant_glob(var) + lst = self.root.find_node(waf_dir).find_node('waflib/extras').ant_glob(var) for x in lst: if not x.name in ban: - load_tool(x.name.replace('.py','')) + load_tool(x.name.replace('.py', '')) else: from zipfile import PyZipFile - waflibs=PyZipFile(waf_dir) - lst=waflibs.namelist() + waflibs = PyZipFile(waf_dir) + lst = waflibs.namelist() for x in lst: - if not re.match('waflib/extras/%s'%var.replace('*','.*'),var): + if not re.match('waflib/extras/%s' % var.replace('*', '.*'), var): continue - f=os.path.basename(x) - doban=False + f = os.path.basename(x) + doban = False for b in ban: - r=b.replace('*','.*') - if re.match(r,f): - doban=True + r = b.replace('*', '.*') + if re.match(r, f): + doban = True if not doban: - f=f.replace('.py','') + f = f.replace('.py', '') load_tool(f) -cache_modules={} -def load_module(path,encoding=None): + +cache_modules = {} +""" +Dictionary holding already loaded modules (wscript), indexed by their absolute path. +The modules are added automatically by :py:func:`waflib.Context.load_module` +""" + +def load_module(path, encoding=None): + """ + Loads a wscript file as a python module. This method caches results in :py:attr:`waflib.Context.cache_modules` + + :param path: file path + :type path: string + :return: Loaded Python module + :rtype: module + """ try: return cache_modules[path] except KeyError: pass - module=imp.new_module(WSCRIPT_FILE) + + module = imp.new_module(WSCRIPT_FILE) try: - code=Utils.readf(path,m='rU',encoding=encoding) + code = Utils.readf(path, m='r', encoding=encoding) except EnvironmentError: - raise Errors.WafError('Could not read the file %r'%path) - module_dir=os.path.dirname(path) - sys.path.insert(0,module_dir) + raise Errors.WafError('Could not read the file %r' % path) + + module_dir = os.path.dirname(path) + sys.path.insert(0, module_dir) try: - exec(compile(code,path,'exec'),module.__dict__) + exec(compile(code, path, 'exec'), module.__dict__) finally: sys.path.remove(module_dir) - cache_modules[path]=module + + cache_modules[path] = module return module -def load_tool(tool,tooldir=None,ctx=None,with_sys_path=True): - if tool=='java': - tool='javaw' + +def load_tool(tool, tooldir=None, ctx=None, with_sys_path=True): + """ + Imports a Waf tool as a python module, and stores it in the dict :py:const:`waflib.Context.Context.tools` + + :type tool: string + :param tool: Name of the tool + :type tooldir: list + :param tooldir: List of directories to search for the tool module + :type with_sys_path: boolean + :param with_sys_path: whether or not to search the regular sys.path, besides waf_dir and potentially given tooldirs + """ + if tool == 'java': + tool = 'javaw' # jython else: - tool=tool.replace('++','xx') + tool = tool.replace('++', 'xx') + if not with_sys_path: - back_path=sys.path - sys.path=[] + back_path = sys.path + sys.path = [] try: if tooldir: - assert isinstance(tooldir,list) - sys.path=tooldir+sys.path + assert isinstance(tooldir, list) + sys.path = tooldir + sys.path try: __import__(tool) except ImportError as e: - e.waf_sys_path=list(sys.path) + e.waf_sys_path = list(sys.path) raise finally: for d in tooldir: sys.path.remove(d) - 
ret=sys.modules[tool] - Context.tools[tool]=ret + ret = sys.modules[tool] + Context.tools[tool] = ret return ret else: if not with_sys_path: - sys.path.insert(0,waf_dir) + sys.path.insert(0, waf_dir) try: - for x in('waflib.Tools.%s','waflib.extras.%s','waflib.%s','%s'): + for x in ('waflib.Tools.%s', 'waflib.extras.%s', 'waflib.%s', '%s'): try: - __import__(x%tool) + __import__(x % tool) break except ImportError: - x=None - else: + x = None + else: # raise an exception __import__(tool) except ImportError as e: - e.waf_sys_path=list(sys.path) + e.waf_sys_path = list(sys.path) raise finally: if not with_sys_path: sys.path.remove(waf_dir) - ret=sys.modules[x%tool] - Context.tools[tool]=ret + ret = sys.modules[x % tool] + Context.tools[tool] = ret return ret finally: if not with_sys_path: - sys.path+=back_path + sys.path += back_path + diff -Nru lilv-0.24.4~dfsg0/waflib/COPYING lilv-0.24.6/waflib/COPYING --- lilv-0.24.4~dfsg0/waflib/COPYING 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/COPYING 2019-06-06 20:19:08.000000000 +0000 @@ -0,0 +1,25 @@ +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions +are met: + +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +3. The name of the author may not be used to endorse or promote products + derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR +IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, +INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, +STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING +IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. diff -Nru lilv-0.24.4~dfsg0/waflib/Errors.py lilv-0.24.6/waflib/Errors.py --- lilv-0.24.4~dfsg0/waflib/Errors.py 2018-06-27 05:22:05.000000000 +0000 +++ lilv-0.24.6/waflib/Errors.py 2019-06-06 20:19:08.000000000 +0000 @@ -1,39 +1,68 @@ -#! /usr/bin/env python +#!/usr/bin/env python # encoding: utf-8 -# WARNING! Do not edit! 
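
The import loop in load_tool tries ``waflib.Tools.<tool>``, ``waflib.extras.<tool>``, ``waflib.<tool>`` and finally a bare ``<tool>``, so built-in and project-local tools use the same entry point; for example (the second tool name is hypothetical)::

	def options(opt):
		opt.load('compiler_c')                   # resolved as waflib.Tools.compiler_c

	def configure(conf):
		conf.load('my_tool', tooldir=['tools'])  # loads tools/my_tool.py
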
https://waf.io/book/index.html#_obtaining_the_waf_file +# Thomas Nagy, 2010-2018 (ita) + +""" +Exceptions used in the Waf code +""" + +import traceback, sys -import traceback,sys class WafError(Exception): - def __init__(self,msg='',ex=None): + """Base class for all Waf errors""" + def __init__(self, msg='', ex=None): + """ + :param msg: error message + :type msg: string + :param ex: exception causing this error (optional) + :type ex: exception + """ Exception.__init__(self) - self.msg=msg - assert not isinstance(msg,Exception) - self.stack=[] + self.msg = msg + assert not isinstance(msg, Exception) + + self.stack = [] if ex: if not msg: - self.msg=str(ex) - if isinstance(ex,WafError): - self.stack=ex.stack + self.msg = str(ex) + if isinstance(ex, WafError): + self.stack = ex.stack else: - self.stack=traceback.extract_tb(sys.exc_info()[2]) - self.stack+=traceback.extract_stack()[:-1] - self.verbose_msg=''.join(traceback.format_list(self.stack)) + self.stack = traceback.extract_tb(sys.exc_info()[2]) + self.stack += traceback.extract_stack()[:-1] + self.verbose_msg = ''.join(traceback.format_list(self.stack)) + def __str__(self): return str(self.msg) + class BuildError(WafError): - def __init__(self,error_tasks=[]): - self.tasks=error_tasks - WafError.__init__(self,self.format_error()) + """Error raised during the build and install phases""" + def __init__(self, error_tasks=[]): + """ + :param error_tasks: tasks that could not complete normally + :type error_tasks: list of task objects + """ + self.tasks = error_tasks + WafError.__init__(self, self.format_error()) + def format_error(self): - lst=['Build failed'] + """Formats the error messages from the tasks that failed""" + lst = ['Build failed'] for tsk in self.tasks: - txt=tsk.format_error() + txt = tsk.format_error() if txt: lst.append(txt) - return'\n'.join(lst) + return '\n'.join(lst) + class ConfigurationError(WafError): + """Configuration exception raised in particular by :py:meth:`waflib.Context.Context.fatal`""" pass + class TaskRescan(WafError): + """Task-specific exception type signalling required signature recalculations""" pass + class TaskNotReady(WafError): + """Task-specific exception type signalling that task signatures cannot be computed""" pass + diff -Nru lilv-0.24.4~dfsg0/waflib/extras/autoship.py lilv-0.24.6/waflib/extras/autoship.py --- lilv-0.24.4~dfsg0/waflib/extras/autoship.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/extras/autoship.py 2019-11-10 21:40:56.000000000 +0000 @@ -0,0 +1,650 @@ +#!/usr/bin/env python + +import sys +import os + + +def report(msg): + sys.stderr.write(msg + "\n") + + +def warn(msg): + sys.stderr.write("warning: %s\n" % msg) + + +def error_exit(msg): + sys.stderr.write("error: %s\n" % msg) + sys.exit(1) + + +def ensure(condition, message): + if not condition: + error_exit(message) + + +def get_project_info(top=None): + """Load wscript to get project information (name, version, and so on)""" + + import importlib + + project_dir = top or os.getcwd() + wscript_path = os.path.join(project_dir, "wscript") + sys.path.insert(0, os.path.dirname(wscript_path)) + + loader = importlib.machinery.SourceFileLoader("wscript", wscript_path) + spec = importlib.util.spec_from_loader("wscript", loader) + wscript = importlib.util.module_from_spec(spec) + spec.loader.exec_module(wscript) + + return { + "name": wscript.APPNAME, + "version": wscript.VERSION, + "uri": getattr(wscript, "uri", None), + "title": getattr(wscript, "title", wscript.APPNAME.title()), + "dist_pattern": 
wscript.dist_pattern, + "post_tags": wscript.post_tags, + } + + +def parse_version(revision): + """Convert semver string `revision` to a tuple of integers""" + return tuple(map(int, revision.split("."))) + + +def is_release_version(version): + """Return true if `version` is a stable version number""" + if isinstance(version, tuple): + return version[len(version) - 1] % 2 == 0 + + return is_release_version(parse_version(version)) + + +def get_blurb(in_file): + """Get the first paragraph of a Markdown file""" + with open(in_file, "r") as f: + f.readline() # Title + f.readline() # Title underline + f.readline() # Blank + + out = "" + line = f.readline() + while len(line) > 0 and line != "\n": + out += line.replace("\n", " ") + line = f.readline() + + return out.strip() + + +def get_items_markdown(items, indent=""): + """Return a list of NEWS entries as a Markdown list""" + return "".join([indent + "* %s\n" % item for item in items]) + + +def get_release_json(title, entry): + """Return a release description in Gitlab JSON format""" + import json + + version = entry["revision"] + desc = { + "name": "%s %s" % (title, version), + "tag_name": "v%s" % version, + "description": get_items_markdown(entry["items"]), + "released_at": entry["date"].isoformat(), + } + + return json.dumps(desc) + + +def read_text_news(in_file, preserve_timezones=False, dist_pattern=None): + """Read NEWS entries""" + + import datetime + import email.utils + import re + + entries = {} + with open(in_file, "r") as f: + while True: + # Read header line + head = f.readline() + matches = re.match(r"([^ ]*) \((.*)\) ([a-zA-z]*)", head) + if matches is None: + break + + e = { + "name": matches.group(1), + "revision": matches.group(2), + "status": matches.group(3), + "items": [], + } + + semver = parse_version(e["revision"]) + if is_release_version(semver) and dist_pattern is not None: + e["dist"] = dist_pattern % semver + + # Read blank line after header + if f.readline() != "\n": + raise SyntaxError("expected blank line after NEWS header") + + def add_item(item): + if len(item) > 0: + e["items"] += [item.replace("\n", " ").strip()] + + # Read entries for this revision + item = "" + line = f.readline() + while line: + if line.startswith(" * "): + add_item(item) + item = line[3:].lstrip() + elif line == "\n": + add_item(item) + break + else: + item += line.lstrip() + + line = f.readline() + + matches = re.match(r" -- (.*) <(.*)> (.*)", f.readline()) + date = email.utils.parsedate_to_datetime(matches.group(3)) + if not preserve_timezones: + date = date.astimezone(datetime.timezone.utc) + + e.update( + { + "date": date, + "blamee_name": matches.group(1), + "blamee_mbox": matches.group(2), + } + ) + + entries[semver] = e + + # Skip trailing blank line before next entry + space = f.readline() + if space != "\n" and space != "": + raise SyntaxError("expected blank line, not '%s'" % space) + + return entries + + +def write_text_news(entries, news): + """Write NEWS in standard Debian changelog format""" + import textwrap + + revisions = sorted(entries.keys(), reverse=True) + for r in revisions: + e = entries[r] + summary = "%s (%s) %s" % (e["name"], e["revision"], e["status"]) + news.write("\n" if r != revisions[0] else "") + news.write("%s;\n" % summary) + + for item in e["items"]: + wrapped = textwrap.wrap(item, width=74) + news.write("\n * " + "\n ".join(wrapped)) + + email = e["blamee_mbox"].replace("mailto:", "") + author = "%s <%s>" % (e["blamee_name"], email) + date = e["date"].strftime("%a, %d %b %Y %H:%M:%S %z") + 
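
From the regular expressions above, a text NEWS entry follows the Debian changelog shape; the name, address and date below are placeholders, and is_release_version treats an even final version component as a stable release::

	lilv (0.24.6) stable;

	 * Fix a bug
	 * Update documentation

	 -- Alice Example <alice@example.org>  Sat, 09 Nov 2019 19:17:05 +0000
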
news.write("\n\n -- %s %s\n" % (author, date)) + + +def read_ttl_news(name, in_files, top_entries=None, dist_pattern=None): + """Read news entries from Turtle""" + + import datetime + import rdflib + + doap = rdflib.Namespace("http://usefulinc.com/ns/doap#") + dcs = rdflib.Namespace("http://ontologi.es/doap-changeset#") + rdfs = rdflib.Namespace("http://www.w3.org/2000/01/rdf-schema#") + foaf = rdflib.Namespace("http://xmlns.com/foaf/0.1/") + rdf = rdflib.Namespace("http://www.w3.org/1999/02/22-rdf-syntax-ns#") + g = rdflib.ConjunctiveGraph() + + # Parse input files + for i in in_files: + g.parse(i, format="turtle") + + proj = g.value(None, rdf.type, doap.Project) + for f in g.triples([proj, rdfs.seeAlso, None]): + if f[2].endswith(".ttl"): + g.parse(f[2], format="turtle") + + entries = {} + for r in g.triples([proj, doap.release, None]): + release = r[2] + revision = g.value(release, doap.revision, None) + date = g.value(release, doap.created, None) + blamee = g.value(release, dcs.blame, None) + changeset = g.value(release, dcs.changeset, None) + dist = g.value(release, doap["file-release"], None) + + semver = parse_version(revision) + if not dist: + if dist_pattern is not None: + dist = dist_pattern % semver + else: + warn("No file release for %s %s" % (proj, revision)) + + if revision and date and blamee and changeset: + status = "stable" if is_release_version(revision) else "unstable" + iso_date = datetime.datetime.strptime(date, "%Y-%m-%dT%H:%M:%S%z") + e = { + "name": name, + "revision": str(revision), + "date": iso_date, + "status": status, + "items": [], + } + + if dist is not None: + e["dist"] = dist + + for i in g.triples([changeset, dcs.item, None]): + item = str(g.value(i[2], rdfs.label, None)) + e["items"] += [item] + if dist and top_entries is not None: + if dist not in top_entries: + top_entries[dist] = {"items": []} + top_entries[dist]["items"] += ["%s: %s" % (name, item)] + + e["blamee_name"] = str(g.value(blamee, foaf.name, None)) + e["blamee_mbox"] = str(g.value(blamee, foaf.mbox, None)) + + entries[semver] = e + else: + warn("Ignored incomplete %s release description" % name) + + return entries + + +def write_ttl_news(entries, out_file, template=None, subject_uri=None): + """Write NEWS in Turtle format""" + import rdflib + import rdflib.namespace + import rdflib.resource + import datetime + + # Set up namespaces and make a graph for the output + doap = rdflib.Namespace("http://usefulinc.com/ns/doap#") + dcs = rdflib.Namespace("http://ontologi.es/doap-changeset#") + rdfs = rdflib.Namespace("http://www.w3.org/2000/01/rdf-schema#") + rdf = rdflib.Namespace("http://www.w3.org/1999/02/22-rdf-syntax-ns#") + xsd = rdflib.Namespace("http://www.w3.org/2001/XMLSchema#") + g = rdflib.ConjunctiveGraph() + ns = rdflib.namespace.NamespaceManager(g) + ns.bind("doap", doap) + ns.bind("dcs", dcs) + + # Load given template file + if template is not None: + g.load(template, format="turtle") + + if subject_uri is not None: + # Use given subject uri + subject = rdflib.URIRef(subject_uri) + g.add((subject, rdf.type, doap.Project)) + else: + # Find project URI to use as subject, and optionally the maintainer + subject = g.value(None, rdf.type, doap.Project) + ensure(subject is not None, "Unable to find project URI for subject") + + maintainer = g.value(subject, doap.maintainer, None) + + for r, e in entries.items(): + semver = parse_version(e["revision"]) + ver_string = "%03d%03d%03d" % semver + + release = rdflib.BNode("r%s" % ver_string) + g.add((subject, doap.release, release)) + 
g.add((release, doap.revision, rdflib.Literal(e["revision"]))) + + if "dist" in e: + g.add((release, doap["file-release"], rdflib.URIRef(e["dist"]))) + + utc_date = e["date"].astimezone(datetime.timezone.utc) + date_str = utc_date.strftime("%Y-%m-%dT%H:%M:%S") + "Z" + time = rdflib.Literal(date_str, datatype=xsd.dateTime, normalize=False) + g.add((release, doap.created, time)) + + if maintainer is not None: + g.add((release, dcs.blame, maintainer)) + + changeset = rdflib.BNode("c%s" % ver_string) + g.add((release, dcs.changeset, changeset)) + for index, item in enumerate(e["items"]): + item_node = rdflib.BNode("i%s%08d" % (ver_string, index)) + g.add((changeset, dcs.item, item_node)) + g.add((item_node, rdfs.label, rdflib.Literal(item))) + + g.serialize(out_file, format="turtle") + + +def read_news(path=None, format="NEWS", unsorted=False, utc=True, top=None): + """Read news in either text changelog or Turtle format""" + + if format == "NEWS" and path is None: + path = os.path.join(top or "", "NEWS") + + top = top or os.path.dirname(path) + info = get_project_info(top) + dist_pattern = info.get("dist_pattern", None) + + if format == "NEWS": + entries = read_text_news(path, not utc, dist_pattern) + else: + ensure(path is not None, "Input path must be given for Turtle input") + entries = read_ttl_news(info["name"], [path]) + + if not unsorted: + for r, e in entries.items(): + e["items"] = list(sorted(e["items"])) + + return entries + + +def write_news_file(entries, news, format, template, subject): + """Write news entries to a file object""" + if format == "NEWS": + write_text_news(entries, news) + else: + write_ttl_news(entries, news, template, subject) + + +def write_news(entries, news, format="NEWS", template=None, subject=None): + """Write news entries to a file object or path""" + if isinstance(news, str): + with open(news, "w" if format == "NEWS" else "wb") as f: + write_news_file(entries, f, format, template, subject) + else: + write_news_file(entries, news, format, template, subject) + + +def news_command(): + ap = argparse.ArgumentParser(description="Generate NEWS file") + ap.add_argument("out_path", help="news output file") + ap.add_argument("--in-path", help="input file") + ap.add_argument("--unsorted", action="store_true", help="don't sort items") + ap.add_argument("--in-format", default="NEWS", choices=["NEWS", "turtle"]) + ap.add_argument("--timezones", action="store_true", help="keep timezones") + + args = ap.parse_args(sys.argv[2:]) + entries = read_news( + args.in_path, args.in_format, args.unsorted, not args.timezones + ) + + with open(args.out_path, "w") as news: + write_news(entries, news) + + +def ttl_news_command(): + ap = argparse.ArgumentParser(description="Generate Turtle changeset") + ap.add_argument("--in-path", help="news input file") + ap.add_argument("out_path", help="news output file") + ap.add_argument("--template") + ap.add_argument("--unsorted", action="store_true", help="don't sort items") + ap.add_argument("--in-format", default="NEWS", choices=["NEWS", "turtle"]) + + args = ap.parse_args(sys.argv[2:]) + info = get_project_info() + entries = read_news(args.in_path, args.in_format, info["dist_pattern"]) + + write_ttl_news( + entries, args.out_path, template=args.template, subject_uri=info["uri"] + ) + + +def write_posts(entries, out_dir, meta={}): + """Write news posts in Pelican Markdown format""" + import datetime + + report("Writing posts to %s" % out_dir) + + info = get_project_info() + description = get_blurb("README.md") + title = info["title"] + 
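
On the Turtle side, write_ttl_news emits (and read_ttl_news expects) release descriptions of roughly this shape; the project URI and values are illustrative, and parsing ``doap:created`` with ``%z`` accepts the trailing ``Z`` on Python 3.7+::

	@prefix dcs:  <http://ontologi.es/doap-changeset#> .
	@prefix doap: <http://usefulinc.com/ns/doap#> .
	@prefix foaf: <http://xmlns.com/foaf/0.1/> .
	@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
	@prefix xsd:  <http://www.w3.org/2001/XMLSchema#> .

	<http://example.org/lilv>
		a doap:Project ;
		doap:release [
			doap:revision "0.24.6" ;
			doap:created "2019-11-09T19:17:05Z"^^xsd:dateTime ;
			dcs:blame [ foaf:name "Alice Example" ;
			            foaf:mbox <mailto:alice@example.org> ] ;
			dcs:changeset [ dcs:item [ rdfs:label "Fix a bug" ] ]
		] .
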
meta["Tags"] = ", ".join(info["post_tags"]) + meta["Author"] = meta.get("Author", os.getenv("USER")) + + try: + os.mkdir(out_dir) + except Exception: + pass + + for r, e in entries.items(): + name = e["name"] + revision = e["revision"] + if "dist" not in e: + warn("No file release for %s %s" % (name, revision)) + continue + + date = e["date"].astimezone(datetime.timezone.utc) + date_str = date.strftime("%Y-%m-%d") + datetime_str = date.strftime("%Y-%m-%d %H:%M") + slug_version = revision.replace(".", "-") + filename = "%s-%s-%s.md" % (date_str, name, slug_version) + + with open(os.path.join(out_dir, filename), "w") as post: + slug = "%s-%s" % (name, slug_version) + post.write("Title: %s %s\n" % (title, revision)) + post.write("Date: %s\n" % datetime_str) + post.write("Slug: %s\n" % slug) + for k in sorted(meta.keys()): + post.write("%s: %s\n" % (k, meta[k])) + + url = e["dist"] + link = "[%s %s](%s)" % (title, revision, url) + post.write("\n%s has been released." % link) + post.write(" " + description + "\n") + + if e["items"] != ["Initial release"]: + post.write("\nChanges:\n\n") + post.write(get_items_markdown(e["items"], indent=" ")) + + +def posts_command(): + ap = argparse.ArgumentParser(description="Generate Pelican posts") + ap.add_argument("out_dir", help="output directory") + ap.add_argument("--author", help="post author") + ap.add_argument("--in-path", help="input file") + ap.add_argument("--in-format", default="NEWS", choices=["NEWS", "turtle"]) + ap.add_argument("--title", help="Title for posts") + + args = ap.parse_args(sys.argv[2:]) + info = get_project_info() + entries = read_news(args.in_path, args.in_format, info["dist_pattern"]) + meta = {"Author": args.author} if args.author else {} + + write_posts(entries, args.out_dir, meta) + + +def json_command(): + ap = argparse.ArgumentParser(description="Get release description in JSON") + ap.add_argument("version", help="Version number") + ap.add_argument("--in-path", default="NEWS", help="input file") + ap.add_argument("--in-format", default="NEWS", choices=["NEWS", "turtle"]) + + args = ap.parse_args(sys.argv[2:]) + info = get_project_info() + semver = parse_version(args.version) + entries = read_news(args.in_path, args.in_format, info["dist_pattern"]) + + print(get_release_json(info["title"], entries[semver])) + + +def post_lab_release(version, lab, group, token, dry_run=False): + import json + import shlex + import subprocess + + def run_cmd(cmd): + if dry_run: + print(" ".join([shlex.quote(i) for i in cmd])) + else: + subprocess.check_call(cmd) + + info = get_project_info() + name = info["name"] + title = info["title"] + semver = parse_version(version) + entries = read_news() + url = "https://%s/api/v4/projects/%s%%2F%s" % (lab, group, name) + dry_run = dry_run + + # Check that this is a release version + ensure(is_release_version(semver), "%s is an unstable version" % version) + + # Post Gitlab release + post_cmd = [ + "curl", + "-XPOST", + "-HContent-Type: application/json", + "-HPRIVATE-TOKEN: " + token, + "-d" + get_release_json(title, entries[semver]), + "%s/releases" % url, + ] + run_cmd(post_cmd) + + report("Posted Gitlab release %s %s" % (name, version)) + + +def post_lab_release_command(): + ap = argparse.ArgumentParser(description="Post Gitlab release") + ap.add_argument("version", help="Version number") + ap.add_argument("group", help="Gitlab user or group for project") + ap.add_argument("token", help="Gitlab access token") + ap.add_argument("--lab", default="gitlab.com", help="Gitlab instance") + 
ap.add_argument("--dry-run", action="store_true", help="do nothing") + args = ap.parse_args(sys.argv[2:]) + + post_lab_release(args.version, args.lab, args.group, args.token, args.dry_run) + + +def release(args, posts_dir=None, remote_dist_dir=None, dist_name=None): + import json + import os + import shlex + import subprocess + + def run_cmd(cmd): + if args.dry_run: + print(" ".join([shlex.quote(i) for i in cmd])) + else: + subprocess.check_call(cmd) + + info = get_project_info() + name = info["name"] + title = info["title"] + version = info["version"] + semver = parse_version(version) + dry_run = args.dry_run + + # Check that this is a release version first of all + ensure(is_release_version(semver), "%s is an unstable version" % version) + report("Releasing %s %s" % (name, version)) + + # Check that NEWS is up to date + entries = read_news() + ensure(semver in entries, "%s has no NEWS entries" % version) + + # Check that working copy is up to date + fetch_cmd = ["git", "fetch", "--dry-run"] + fetch_status = subprocess.check_output(fetch_cmd).decode("utf-8") + ensure(len(fetch_status) == 0, "Local copy is out of date") + + # Remove distribution if one was already built + dist = "%s-%s.tar.bz2" % (dist_name or name.lower(), version) + sig = dist + ".sig" + try: + os.remove(dist) + os.remove(sig) + except Exception: + pass + + # Check that working copy is clean + branch_cmd = ["git", "rev-parse", "--abbrev-ref", "HEAD"] + branch = subprocess.check_output(branch_cmd).decode("ascii").strip() + status_cmd = ["git", "status", "--porcelain", "-b"] + status = subprocess.check_output(status_cmd).decode("utf-8") + sys.stdout.write(status) + expected_status = "## %s...origin/%s\n" % (branch, branch) + ensure(status == expected_status, "Working copy is dirty") + + # Fetch project description and ensure it matches + url = "https://%s/api/v4/projects/%s%%2F%s" % (args.lab, args.group, name) + desc_cmd = ["curl", "-HPRIVATE-TOKEN: " + args.token, url] + desc = json.loads(subprocess.check_output(desc_cmd)) + proj_name = desc["name"] + ensure(proj_name == name, "Project name '%s' != '%s'" % (proj_name, name)) + + # Build distribution + run_cmd(["./waf", "configure", "--docs"]) + run_cmd(["./waf", "build"]) + run_cmd(["./waf", "distcheck"]) + ensure(dry_run or os.path.exists(dist), "%s was not created" % dist) + + # Sign distribution + run_cmd(["gpg", "-b", dist]) + ensure(dry_run or os.path.exists(sig), "%s.sig was not created" % dist) + run_cmd(["gpg", "--verify", sig]) + + # Tag release + tag = "v" + version + run_cmd(["git", "tag", "-s", tag, "-m", "%s %s" % (title, version)]) + run_cmd(["git", "push", "--tags"]) + + # Generate posts + if posts_dir is not None: + write_posts(entries, posts_dir) + + # Upload distribution and signature + if remote_dist_dir is not None: + run_cmd(["scp", dist, os.path.join(remote_dist_dir, dist)]) + run_cmd(["scp", sig, os.path.join(remote_dist_dir, sig)]) + + # Post Gitlab release + post_lab_release(version, args.lab, args.group, args.token, dry_run) + + report("Released %s %s" % (name, version)) + report("Remember to upload posts and push to other remotes!") + + +def release_command(): + ap = argparse.ArgumentParser(description="Release project") + ap.add_argument("group", help="Gitlab user or group for project") + ap.add_argument("token", help="Gitlab access token") + ap.add_argument("--lab", default="gitlab.com", help="Gitlab instance") + ap.add_argument("--dry-run", action="store_true", help="do nothing") + ap.add_argument("--posts", help="Pelican posts directory") + 
ap.add_argument("--scp", help="SSH path to distribution directory") + args = ap.parse_args(sys.argv[2:]) + + release(args, posts_dir=args.posts, remote_dist_dir=args.scp) + + +if __name__ == "__main__": + import argparse + + # Get list of command names from handler functions for help text + global_names = list(globals().keys()) + handlers = [k[0:-8] for k in global_names if k.endswith("_command")] + + # Run simple top level argument parser to get command name + ap = argparse.ArgumentParser( + description="Automatic release building", + epilog="commands: " + " ".join(handlers), + ) + ap.add_argument("command", help="Subcommand to run") + args = ap.parse_args(sys.argv[1:2]) + + # Check that a handler is defined for the given command + function_name = args.command + "_command" + if function_name not in globals(): + sys.stderr.write("error: Unknown command '%s'\n" % args.command) + ap.print_help() + sys.exit(1) + + # Dispatch to command handler + globals()[function_name]() + sys.exit(0) diff -Nru lilv-0.24.4~dfsg0/waflib/extras/autowaf.py lilv-0.24.6/waflib/extras/autowaf.py --- lilv-0.24.4~dfsg0/waflib/extras/autowaf.py 2018-07-22 18:07:39.000000000 +0000 +++ lilv-0.24.6/waflib/extras/autowaf.py 2019-11-02 16:26:18.000000000 +0000 @@ -1,806 +1,1168 @@ -#! /usr/bin/env python -# encoding: utf-8 -# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file - import glob import os import subprocess import sys -from waflib import Build,Logs,Options,Utils -from waflib.TaskGen import feature,before,after -global g_is_child -g_is_child=False -global g_step -g_step=0 -global line_just -line_just=40 -class TestContext(Build.BuildContext): - cmd='test' - fun='test' -@feature('c','cxx') +import time + +from waflib import Configure, ConfigSet, Build, Context, Logs, Options, Utils +from waflib.TaskGen import feature, before, after + +NONEMPTY = -10 + +if sys.platform == 'win32': + lib_path_name = 'PATH' +elif sys.platform == 'darwin': + lib_path_name = 'DYLD_LIBRARY_PATH' +else: + lib_path_name = 'LD_LIBRARY_PATH' + +# Compute dependencies globally +# import preproc +# preproc.go_absolute = True + +@feature('c', 'cxx') @after('apply_incpaths') def include_config_h(self): - self.env.append_value('INCPATHS',self.bld.bldnode.abspath()) -def set_options(opt,debug_by_default=False,test=False): - global g_step - if g_step>0: - return - opts=opt.get_option_group('Configuration options') - opts.add_option('--bindir',type='string',help="executable programs [default: PREFIX/bin]") - opts.add_option('--configdir',type='string',help="configuration data [default: PREFIX/etc]") - opts.add_option('--datadir',type='string',help="shared data [default: PREFIX/share]") - opts.add_option('--includedir',type='string',help="header files [default: PREFIX/include]") - opts.add_option('--libdir',type='string',help="libraries [default: PREFIX/lib]") - opts.add_option('--mandir',type='string',help="manual pages [default: DATADIR/man]") - opts.add_option('--docdir',type='string',help="HTML documentation [default: DATADIR/doc]") - if debug_by_default: - opts.add_option('--optimize',action='store_false',default=True,dest='debug',help="build optimized binaries") - else: - opts.add_option('--debug',action='store_true',default=False,dest='debug',help="build debuggable binaries") - opts.add_option('--pardebug',action='store_true',default=False,dest='pardebug',help="build parallel-installable debuggable libraries with D suffix") - opts.add_option('--strict',action='store_true',default=False,dest='strict',help="use strict 
compiler flags and show all warnings") - opts.add_option('--ultra-strict',action='store_true',default=False,dest='ultra_strict',help="use extremely strict compiler flags (likely noisy)") - opts.add_option('--docs',action='store_true',default=False,dest='docs',help="build documentation (requires doxygen)") - if test: - test_opts=opt.add_option_group('Test options','') - opts.add_option('--test',action='store_true',dest='build_tests',help='build unit tests') - opts.add_option('--no-coverage',action='store_true',dest='no_coverage',help='do not instrument code for test coverage') - test_opts.add_option('--test-wrapper',type='string',dest='test_wrapper',help='command prefix for tests (e.g. valgrind)') - test_opts.add_option('--verbose-tests',action='store_true',default=False,dest='verbose_tests',help='always show test output') - g_step=1 -def get_check_func(conf,lang): - if lang=='c': - return conf.check_cc - elif lang=='cxx': - return conf.check_cxx - else: - Logs.error("Unknown header language `%s'"%lang) -def check_header(conf,lang,name,define='',mandatory=True): - check_func=get_check_func(conf,lang) - if define!='': - check_func(header_name=name,define_name=define,mandatory=mandatory) - else: - check_func(header_name=name,mandatory=mandatory) -def check_function(conf,lang,name,**args): - header_names=Utils.to_list(args['header_name']) - includes=''.join(['#include <%s>\n'%x for x in header_names]) - fragment=''' + self.env.append_value('INCPATHS', self.bld.bldnode.abspath()) + +class OptionsContext(Options.OptionsContext): + def __init__(self, **kwargs): + super(OptionsContext, self).__init__(**kwargs) + set_options(self) + + def configuration_options(self): + return self.get_option_group('Configuration options') + + def add_flags(self, group, flags): + """Tersely add flags (a dictionary of longname:desc) to a group""" + for name, desc in flags.items(): + group.add_option('--' + name, action='store_true', + dest=name.replace('-', '_'), help=desc) + +def set_options(opt, debug_by_default=False): + "Add standard autowaf options" + opts = opt.get_option_group('Configuration options') + + # Standard directory options + opts.add_option('--bindir', type='string', + help="executable programs [default: PREFIX/bin]") + opts.add_option('--configdir', type='string', + help="configuration data [default: PREFIX/etc]") + opts.add_option('--datadir', type='string', + help="shared data [default: PREFIX/share]") + opts.add_option('--includedir', type='string', + help="header files [default: PREFIX/include]") + opts.add_option('--libdir', type='string', + help="libraries [default: PREFIX/lib]") + opts.add_option('--mandir', type='string', + help="manual pages [default: DATADIR/man]") + opts.add_option('--docdir', type='string', + help="HTML documentation [default: DATADIR/doc]") + + # Build options + if debug_by_default: + opts.add_option('--optimize', action='store_false', default=True, + dest='debug', help="build optimized binaries") + else: + opts.add_option('-d', '--debug', action='store_true', default=False, + dest='debug', help="build debuggable binaries") + opts.add_option('--pardebug', action='store_true', default=False, + dest='pardebug', + help="build debug libraries with D suffix") + + opts.add_option('-s', '--strict', action='store_true', default=False, + dest='strict', + help="use strict compiler flags and show all warnings") + opts.add_option('-S', '--ultra-strict', action='store_true', default=False, + dest='ultra_strict', + help="use extremely strict compiler flags (likely noisy)") + 
opts.add_option('--docs', action='store_true', default=False, dest='docs', + help="build documentation (requires doxygen)") + opts.add_option('-w', '--werror', action='store_true', dest='werror', + help="Treat warnings as errors") + + # Test options + if hasattr(Context.g_module, 'test'): + test_opts = opt.add_option_group('Test options', '') + opts.add_option('-T', '--test', action='store_true', dest='build_tests', + help='build unit tests') + opts.add_option('--no-coverage', action='store_true', + dest='no_coverage', + help='do not instrument code for test coverage') + test_opts.add_option('--wrapper', type='string', + dest='test_wrapper', + help='command prefix for tests (e.g. valgrind)') + test_opts.add_option('--test-filter', type='string', + dest='test_filter', + help='regular expression for tests to run') + + # Run options + run_opts = opt.add_option_group('Run options') + run_opts.add_option('--cmd', type='string', dest='cmd', + help='command to run from build directory') + +class ConfigureContext(Configure.ConfigurationContext): + """configures the project""" + + def __init__(self, **kwargs): + self.line_just = 45 + if hasattr(Context.g_module, 'line_just'): + self.line_just = Context.g_module.line_just + + super(ConfigureContext, self).__init__(**kwargs) + self.run_env = ConfigSet.ConfigSet() + self.system_include_paths = set() + + def pre_recurse(self, node): + if len(self.stack_path) == 1: + Logs.pprint('BOLD', 'Configuring %s' % node.parent.srcpath()) + super(ConfigureContext, self).pre_recurse(node) + + def store(self): + self.env.AUTOWAF_RUN_ENV = self.run_env.get_merged_dict() + for path in sorted(self.system_include_paths): + if 'COMPILER_CC' in self.env: + self.env.append_value('CFLAGS', ['-isystem', path]) + if 'COMPILER_CXX' in self.env: + self.env.append_value('CXXFLAGS', ['-isystem', path]) + + super(ConfigureContext, self).store() + + def check_pkg(self, *args, **kwargs): + return check_pkg(self, *args, **kwargs) + + def check_function(self, *args, **kwargs): + return check_function(self, *args, **kwargs) + + def build_path(self, path='.'): + """Return `path` within the build directory""" + return str(self.path.get_bld().make_node(path)) + +def get_check_func(conf, lang): + if lang == 'c': + return conf.check_cc + elif lang == 'cxx': + return conf.check_cxx + else: + Logs.error("Unknown header language `%s'" % lang) + +def check_header(conf, lang, name, define='', mandatory=True): + "Check for a header" + check_func = get_check_func(conf, lang) + if define != '': + check_func(header_name=name, + define_name=define, + mandatory=mandatory) + else: + check_func(header_name=name, mandatory=mandatory) + +def check_function(conf, lang, name, **args): + "Check for a function" + header_names = Utils.to_list(args['header_name']) + includes = ''.join(['#include <%s>\n' % x for x in header_names]) + fragment = ''' %s int main() { return !(void(*)())(%s); } -'''%(includes,name) - check_func=get_check_func(conf,lang) - args['msg']='Checking for %s'%name - check_func(fragment=fragment,**args) +''' % (includes, name) + + check_func = get_check_func(conf, lang) + args['msg'] = 'Checking for %s' % name + check_func(fragment=fragment, **args) + def nameify(name): - return name.replace('/','_').replace('++','PP').replace('-','_').replace('.','_') -def define(conf,var_name,value): - conf.define(var_name,value) - conf.env[var_name]=value -def check_pkg(conf,name,**args): - if args['uselib_store'].lower()in conf.env['AUTOWAF_LOCAL_LIBS']: - return - class CheckType: - OPTIONAL=1 - 
MANDATORY=2 - var_name='CHECKED_'+nameify(args['uselib_store']) - check=var_name not in conf.env - mandatory='mandatory'not in args or args['mandatory'] - if not check and'atleast_version'in args: - checked_version=conf.env['VERSION_'+name] - if checked_version and checked_version<args['atleast_version']: - check=True +def check_pkg(conf, spec, **kwargs): + "Check for a package iff it hasn't been checked for yet" + import re + match = re.match('([^ ]*) >= [0-9\.]*', spec) + args = [] + if match: + name = match.group(1) + args = [spec] + elif spec.find(' ') == -1: + name = spec + else: + Logs.error("Invalid package spec: %s" % spec) + + found = None + pkg_var_name = 'PKG_' + name.replace('-', '_') + pkg_name = name + args += kwargs.get('args', []) + + if conf.env.PARDEBUG: + kwargs['mandatory'] = False # Smash mandatory arg + found = conf.check_cfg(package=pkg_name + 'D', + args=args + ['--cflags', '--libs']) + if found: + pkg_name += 'D' + + args['mandatory'] = mandatory # Unsmash mandatory arg + + if not found: + found = conf.check_cfg(package=spec, + args=args + ['--cflags', '--libs'], + **kwargs) + + if not conf.env.MSVC_COMPILER and 'system' in kwargs and kwargs['system']: + conf.system_include_paths.update( + conf.env['INCLUDES_' + nameify(kwargs['uselib_store'])]) + def normpath(path): - if sys.platform=='win32': - return os.path.normpath(path).replace('\\','/') - else: - return os.path.normpath(path) + if sys.platform == 'win32': + return os.path.normpath(path).replace('\\', '/') + else: + return os.path.normpath(path) + def configure(conf): - global g_step - if g_step>1: - return - def append_cxx_flags(flags): - conf.env.append_value('CFLAGS',flags) - conf.env.append_value('CXXFLAGS',flags) - if Options.options.docs: - conf.load('doxygen') - try: - conf.load('clang_compilation_database') - except: - pass - conf.env['DOCS']=Options.options.docs and conf.env.DOXYGEN - conf.env['DEBUG']=Options.options.debug or Options.options.pardebug - conf.env['PARDEBUG']=Options.options.pardebug - conf.env['PREFIX']=normpath(os.path.abspath(os.path.expanduser(conf.env['PREFIX']))) - def config_dir(var,opt,default): - if opt: - conf.env[var]=normpath(opt) - else: - conf.env[var]=normpath(default) - opts=Options.options - prefix=conf.env['PREFIX'] - config_dir('BINDIR',opts.bindir,os.path.join(prefix,'bin')) - config_dir('SYSCONFDIR',opts.configdir,os.path.join(prefix,'etc')) - config_dir('DATADIR',opts.datadir,os.path.join(prefix,'share')) - config_dir('INCLUDEDIR',opts.includedir,os.path.join(prefix,'include')) - config_dir('LIBDIR',opts.libdir,os.path.join(prefix,'lib')) - config_dir('MANDIR',opts.mandir,os.path.join(conf.env['DATADIR'],'man')) - config_dir('DOCDIR',opts.docdir,os.path.join(conf.env['DATADIR'],'doc')) - if Options.options.debug: - if conf.env['MSVC_COMPILER']: - conf.env['CFLAGS']=['/Od','/Z7','/MTd','/FS'] - conf.env['CXXFLAGS']=['/Od','/Z7','/MTd','/FS'] - conf.env['LINKFLAGS']=['/DEBUG','/MANIFEST'] - else: - conf.env['CFLAGS']=['-O0','-g'] - conf.env['CXXFLAGS']=['-O0','-g'] - else: - if conf.env['MSVC_COMPILER']: - conf.env['CFLAGS']=['/MD','/FS','/DNDEBUG'] - conf.env['CXXFLAGS']=['/MD','/FS','/DNDEBUG'] - else: - append_cxx_flags(['-DNDEBUG']) - if conf.env.MSVC_COMPILER: - Options.options.no_coverage=True - if Options.options.strict: - conf.env.append_value('CFLAGS',['/Wall']) - conf.env.append_value('CXXFLAGS',['/Wall']) - else: - if Options.options.ultra_strict: - Options.options.strict=True - conf.env.append_value('CFLAGS',['-Wredundant-decls','-Wstrict-prototypes','-Wmissing-prototypes','-Wcast-qual']) - conf.env.append_value('CXXFLAGS',['-Wcast-qual']) - if Options.options.strict: - conf.env.append_value('CFLAGS',['-pedantic','-Wshadow']) - 
if conf.env.DEST_OS!="darwin": - conf.env.append_value('LINKFLAGS',['-Wl,--no-undefined']) - conf.env.append_value('CXXFLAGS',['-Wnon-virtual-dtor','-Woverloaded-virtual']) - append_cxx_flags(['-Wall','-Wcast-align','-Wextra','-Wmissing-declarations','-Wno-unused-parameter','-Wstrict-overflow','-Wundef','-Wwrite-strings','-fstrict-overflow']) - extra_flags=['-Wlogical-op','-Wsuggest-attribute=noreturn','-Wunsafe-loop-optimizations'] - if conf.check_cc(cflags=['-Werror']+extra_flags,mandatory=False,msg="Checking for extra C warning flags"): - conf.env.append_value('CFLAGS',extra_flags) - if'COMPILER_CXX'in conf.env: - if conf.check_cxx(cxxflags=['-Werror']+extra_flags,mandatory=False,msg="Checking for extra C++ warning flags"): - conf.env.append_value('CXXFLAGS',extra_flags) - if not conf.env['MSVC_COMPILER']: - append_cxx_flags(['-fshow-column']) - conf.env.NO_COVERAGE=True - conf.env.BUILD_TESTS=False - try: - conf.env.BUILD_TESTS=Options.options.build_tests - conf.env.NO_COVERAGE=Options.options.no_coverage - if not Options.options.no_coverage: - if conf.is_defined('CLANG'): - for cov in[conf.env.CC[0].replace('clang','llvm-cov'),'llvm-cov']: - if conf.find_program(cov,var='LLVM_COV',mandatory=False): - break - else: - conf.check_cc(lib='gcov',define_name='HAVE_GCOV',mandatory=False) - except: - pass - conf.env.prepend_value('CFLAGS','-I'+os.path.abspath('.')) - conf.env.prepend_value('CXXFLAGS','-I'+os.path.abspath('.')) - g_step=2 -def display_summary(conf): - global g_is_child - Logs.pprint('','') - if not g_is_child: - display_msg(conf,"Install prefix",conf.env['PREFIX']) - display_msg(conf,"Debuggable build",bool(conf.env['DEBUG'])) - display_msg(conf,"Build documentation",bool(conf.env['DOCS'])) -def set_c_lang(conf,lang): - if conf.env.MSVC_COMPILER: - conf.env.append_unique('CFLAGS',['-TP']) - else: - flag='-std=%s'%lang - conf.check(cflags=['-Werror',flag],msg="Checking for flag '%s'"%flag) - conf.env.append_unique('CFLAGS',[flag]) -def set_cxx_lang(conf,lang): - if conf.env.MSVC_COMPILER: - if lang!='c++14': - lang='c++latest' - conf.env.append_unique('CXXFLAGS',['/std:%s'%lang]) - else: - flag='-std=%s'%lang - conf.check(cxxflags=['-Werror',flag],msg="Checking for flag '%s'"%flag) - conf.env.append_unique('CXXFLAGS',[flag]) + def append_cxx_flags(flags): + conf.env.append_value('CFLAGS', flags) + conf.env.append_value('CXXFLAGS', flags) + + if Options.options.docs: + conf.load('doxygen') + + try: + conf.load('clang_compilation_database') + except Exception: + pass + + prefix = normpath(os.path.abspath(os.path.expanduser(conf.env['PREFIX']))) + + conf.env['DOCS'] = Options.options.docs and conf.env.DOXYGEN + conf.env['DEBUG'] = Options.options.debug or Options.options.pardebug + conf.env['PARDEBUG'] = Options.options.pardebug + conf.env['PREFIX'] = prefix + + def config_dir(var, opt, default): + if opt: + conf.env[var] = normpath(opt) + else: + conf.env[var] = normpath(default) + + opts = Options.options + + config_dir('BINDIR', opts.bindir, os.path.join(prefix, 'bin')) + config_dir('SYSCONFDIR', opts.configdir, os.path.join(prefix, 'etc')) + config_dir('DATADIR', opts.datadir, os.path.join(prefix, 'share')) + config_dir('INCLUDEDIR', opts.includedir, os.path.join(prefix, 'include')) + config_dir('LIBDIR', opts.libdir, os.path.join(prefix, 'lib')) + + datadir = conf.env['DATADIR'] + config_dir('MANDIR', opts.mandir, os.path.join(datadir, 'man')) + config_dir('DOCDIR', opts.docdir, os.path.join(datadir, 'doc')) + + if Options.options.debug: + if conf.env['MSVC_COMPILER']: + 
conf.env['CFLAGS'] = ['/Od', '/Z7', '/MTd', '/FS'] + conf.env['CXXFLAGS'] = ['/Od', '/Z7', '/MTd', '/FS'] + conf.env['LINKFLAGS'] = ['/DEBUG', '/MANIFEST'] + else: + conf.env['CFLAGS'] = ['-O0', '-g'] + conf.env['CXXFLAGS'] = ['-O0', '-g'] + else: + if conf.env['MSVC_COMPILER']: + append_cxx_flags(['/MD', '/FS', '/DNDEBUG']) + else: + append_cxx_flags(['-DNDEBUG']) + + if conf.env.MSVC_COMPILER: + Options.options.no_coverage = True + append_cxx_flags(['/nologo', + '/FS', + '/D_CRT_SECURE_NO_WARNINGS', + '/experimental:external', + '/external:W0', + '/external:anglebrackets']) + conf.env.append_value('LINKFLAGS', '/nologo') + if Options.options.strict or Options.options.ultra_strict: + ms_strict_flags = ['/Wall', + '/wd4061', + '/wd4200', + '/wd4514', + '/wd4571', + '/wd4625', + '/wd4626', + '/wd4706', + '/wd4710', + '/wd4820', + '/wd5026', + '/wd5027', + '/wd5045'] + conf.env.append_value('CFLAGS', ms_strict_flags) + conf.env.append_value('CXXFLAGS', ms_strict_flags) + conf.env.append_value('CXXFLAGS', ['/EHsc']) + else: + if Options.options.ultra_strict: + Options.options.strict = True + conf.env.append_value('CFLAGS', ['-Wredundant-decls', + '-Wstrict-prototypes', + '-Wmissing-prototypes', + '-Wcast-qual']) + conf.env.append_value('CXXFLAGS', ['-Wcast-qual']) + + if Options.options.strict: + conf.env.append_value('CFLAGS', ['-pedantic', '-Wshadow']) + if conf.env.DEST_OS != "darwin": + conf.env.append_value('LINKFLAGS', ['-Wl,--no-undefined']) + conf.env.append_value('CXXFLAGS', ['-Wnon-virtual-dtor', + '-Woverloaded-virtual']) + append_cxx_flags(['-Wall', + '-Wcast-align', + '-Wextra', + '-Wmissing-declarations', + '-Wno-unused-parameter', + '-Wno-parentheses', + '-Wstrict-overflow', + '-Wundef', + '-Wwrite-strings', + '-fstrict-overflow']) + + # Add less universal flags after checking they work + extra_flags = ['-Wlogical-op', + '-Wsuggest-attribute=noreturn', + '-Wunsafe-loop-optimizations'] + if conf.check_cc(cflags=['-Werror'] + extra_flags, mandatory=False, + msg="Checking for extra C warning flags"): + conf.env.append_value('CFLAGS', extra_flags) + if 'COMPILER_CXX' in conf.env: + if conf.check_cxx(cxxflags=['-Werror'] + extra_flags, + mandatory=False, + msg="Checking for extra C++ warning flags"): + conf.env.append_value('CXXFLAGS', extra_flags) + + if not conf.env['MSVC_COMPILER']: + append_cxx_flags(['-fshow-column']) + + if Options.options.werror: + if conf.env.MSVC_COMPILER: + append_cxx_flags('/WX') + else: + append_cxx_flags('-Werror') + + conf.env.NO_COVERAGE = True + conf.env.BUILD_TESTS = False + try: + conf.env.BUILD_TESTS = Options.options.build_tests + conf.env.NO_COVERAGE = Options.options.no_coverage + if not Options.options.no_coverage: + # Set up unit test code coverage + if conf.is_defined('CLANG'): + for cov in [conf.env.CC[0].replace('clang', 'llvm-cov'), + 'llvm-cov']: + if conf.find_program(cov, var='LLVM_COV', mandatory=False): + break + else: + conf.check_cc(lib='gcov', define_name='HAVE_GCOV', + mandatory=False) + except Exception: + pass # Test options do not exist + + # Define version in configuration + appname = getattr(Context.g_module, Context.APPNAME, 'noname') + version = getattr(Context.g_module, Context.VERSION, '0.0.0') + defname = appname.upper().replace('-', '_').replace('.', '_') + conf.define(defname + '_VERSION', version) + conf.env[defname + '_VERSION'] = version + + conf.env.prepend_value('CFLAGS', '-I' + os.path.abspath('.')) + conf.env.prepend_value('CXXFLAGS', '-I' + os.path.abspath('.')) + +def display_summary(conf, msgs=None): + if 
len(conf.stack_path) == 1: + display_msg(conf, "Install prefix", conf.env['PREFIX']) + if 'COMPILER_CC' in conf.env: + display_msg(conf, "C Flags", ' '.join(conf.env['CFLAGS'])) + if 'COMPILER_CXX' in conf.env: + display_msg(conf, "C++ Flags", ' '.join(conf.env['CXXFLAGS'])) + display_msg(conf, "Debuggable", bool(conf.env['DEBUG'])) + display_msg(conf, "Build documentation", bool(conf.env['DOCS'])) + + if msgs is not None: + display_msgs(conf, msgs) + +def set_c_lang(conf, lang): + "Set a specific C language standard, like 'c99' or 'c11'" + if conf.env.MSVC_COMPILER: + # MSVC has no hope or desire to compile C99, just compile as C++ + conf.env.append_unique('CFLAGS', ['/TP']) + else: + flag = '-std=%s' % lang + conf.check(cflags=['-Werror', flag], + msg="Checking for flag '%s'" % flag) + conf.env.append_unique('CFLAGS', [flag]) + +def set_cxx_lang(conf, lang): + "Set a specific C++ language standard, like 'c++11', 'c++14', or 'c++17'" + if conf.env.MSVC_COMPILER: + if lang != 'c++14': + lang = 'c++latest' + conf.env.append_unique('CXXFLAGS', ['/std:%s' % lang]) + else: + flag = '-std=%s' % lang + conf.check(cxxflags=['-Werror', flag], + msg="Checking for flag '%s'" % flag) + conf.env.append_unique('CXXFLAGS', [flag]) + def set_modern_c_flags(conf): - if'COMPILER_CC'in conf.env: - if conf.env.MSVC_COMPILER: - conf.env.append_unique('CFLAGS',['-TP']) - else: - for flag in['-std=c11','-std=c99']: - if conf.check(cflags=['-Werror',flag],mandatory=False,msg="Checking for flag '%s'"%flag): - conf.env.append_unique('CFLAGS',[flag]) - break -def set_modern_cxx_flags(conf,mandatory=False): - if'COMPILER_CXX'in conf.env: - if conf.env.MSVC_COMPILER: - conf.env.append_unique('CXXFLAGS',['/std:c++latest']) - else: - for flag in['-std=c++14','-std=c++1y','-std=c++11','-std=c++0x']: - if conf.check(cxxflags=['-Werror',flag],mandatory=False,msg="Checking for flag '%s'"%flag): - conf.env.append_unique('CXXFLAGS',[flag]) - break -def set_local_lib(conf,name,has_objects): - var_name='HAVE_'+nameify(name.upper()) - define(conf,var_name,1) - if has_objects: - if type(conf.env['AUTOWAF_LOCAL_LIBS'])!=dict: - conf.env['AUTOWAF_LOCAL_LIBS']={} - conf.env['AUTOWAF_LOCAL_LIBS'][name.lower()]=True - else: - if type(conf.env['AUTOWAF_LOCAL_HEADERS'])!=dict: - conf.env['AUTOWAF_LOCAL_HEADERS']={} - conf.env['AUTOWAF_LOCAL_HEADERS'][name.lower()]=True -def append_property(obj,key,val): - if hasattr(obj,key): - setattr(obj,key,getattr(obj,key)+val) - else: - setattr(obj,key,val) -def use_lib(bld,obj,libs): - abssrcdir=os.path.abspath('.') - libs_list=libs.split() - for l in libs_list: - in_headers=l.lower()in bld.env['AUTOWAF_LOCAL_HEADERS'] - in_libs=l.lower()in bld.env['AUTOWAF_LOCAL_LIBS'] - if in_libs: - append_property(obj,'use',' lib%s '%l.lower()) - append_property(obj,'framework',bld.env['FRAMEWORK_'+l]) - if in_headers or in_libs: - if bld.env.MSVC_COMPILER: - inc_flag='/I'+os.path.join(abssrcdir,l.lower()) - else: - inc_flag='-iquote '+os.path.join(abssrcdir,l.lower()) - for f in['CFLAGS','CXXFLAGS']: - if inc_flag not in bld.env[f]: - bld.env.prepend_value(f,inc_flag) - else: - append_property(obj,'uselib',' '+l) -@feature('c','cxx') + "Use the most modern C language available" + if 'COMPILER_CC' in conf.env: + if conf.env.MSVC_COMPILER: + # MSVC has no hope or desire to compile C99, just compile as C++ + conf.env.append_unique('CFLAGS', ['/TP']) + else: + for flag in ['-std=c11', '-std=c99']: + if conf.check(cflags=['-Werror', flag], mandatory=False, + msg="Checking for flag '%s'" % flag): + 
conf.env.append_unique('CFLAGS', [flag]) + break + +def set_modern_cxx_flags(conf, mandatory=False): + "Use the most modern C++ language available" + if 'COMPILER_CXX' in conf.env: + if conf.env.MSVC_COMPILER: + conf.env.append_unique('CXXFLAGS', ['/std:c++latest']) + else: + for lang in ['c++14', 'c++1y', 'c++11', 'c++0x']: + flag = '-std=%s' % lang + if conf.check(cxxflags=['-Werror', flag], mandatory=False, + msg="Checking for flag '%s'" % flag): + conf.env.append_unique('CXXFLAGS', [flag]) + break + +def set_local_lib(conf, name, has_objects): + var_name = 'HAVE_' + nameify(name.upper()) + conf.define(var_name, 1) + conf.env[var_name] = 1 + if has_objects: + if type(conf.env['AUTOWAF_LOCAL_LIBS']) != dict: + conf.env['AUTOWAF_LOCAL_LIBS'] = {} + conf.env['AUTOWAF_LOCAL_LIBS'][name.lower()] = True + else: + if type(conf.env['AUTOWAF_LOCAL_HEADERS']) != dict: + conf.env['AUTOWAF_LOCAL_HEADERS'] = {} + conf.env['AUTOWAF_LOCAL_HEADERS'][name.lower()] = True + +def append_property(obj, key, val): + if hasattr(obj, key): + setattr(obj, key, getattr(obj, key) + val) + else: + setattr(obj, key, val) + +@feature('c', 'cxx') @before('apply_link') def version_lib(self): - if self.env.DEST_OS=='win32': - self.vnum=None - if self.env['PARDEBUG']: - applicable=['cshlib','cxxshlib','cstlib','cxxstlib'] - if[x for x in applicable if x in self.features]: - self.target=self.target+'D' -def set_lib_env(conf,name,version): - 'Set up environment for local library as if found via pkg-config.' - NAME=name.upper() - major_ver=version.split('.')[0] - pkg_var_name='PKG_'+name.replace('-','_')+'_'+major_ver - lib_name='%s-%s'%(name,major_ver) - if conf.env.PARDEBUG: - lib_name+='D' - conf.env[pkg_var_name]=lib_name - conf.env['INCLUDES_'+NAME]=['${INCLUDEDIR}/%s-%s'%(name,major_ver)] - conf.env['LIBPATH_'+NAME]=[conf.env.LIBDIR] - conf.env['LIB_'+NAME]=[lib_name] -def set_line_just(conf,width): - global line_just - line_just=max(line_just,width) - conf.line_just=line_just -def display_header(title): - global g_is_child - if g_is_child: - Logs.pprint('BOLD',title) -def display_msg(conf,msg,status=None,color=None): - color='CYAN' - if type(status)==bool and status: - color='GREEN' - status='yes' - elif type(status)==bool and not status or status=="False": - color='YELLOW' - status='no' - Logs.pprint('NORMAL',' %s'%msg.ljust(conf.line_just-2),sep='') - Logs.pprint('NORMAL',":",sep='') - Logs.pprint(color,status) -def link_flags(env,lib): - return' '.join(map(lambda x:env['LIB_ST']%x,env['LIB_'+lib])) -def compile_flags(env,lib): - return' '.join(map(lambda x:env['CPPPATH_ST']%x,env['INCLUDES_'+lib])) -def set_recursive(): - global g_is_child - g_is_child=True -def is_child(): - global g_is_child - return g_is_child -def build_pc(bld,name,version,version_suffix,libs,subst_dict={}): - '''Build a pkg-config file for a library. + if self.env.DEST_OS == 'win32': + self.vnum = None # Prevent waf from automatically appending -0 + if self.env['PARDEBUG']: + applicable = ['cshlib', 'cxxshlib', 'cstlib', 'cxxstlib'] + if [x for x in applicable if x in self.features]: + self.target = self.target + 'D' + +def set_lib_env(conf, + name, + version, + has_objects=True, + include_path=None, + lib_path=None): + "Set up environment for local library as if found via pkg-config." 
+ NAME = name.upper() + major_ver = version.split('.')[0] + pkg_var_name = 'PKG_' + name.replace('-', '_') + '_' + major_ver + lib_name = '%s-%s' % (name, major_ver) + + if lib_path is None: + lib_path = str(conf.path.get_bld()) + + if include_path is None: + include_path = str(conf.path) + + if conf.env.PARDEBUG: + lib_name += 'D' + + conf.env[pkg_var_name] = lib_name + conf.env['INCLUDES_' + NAME] = [include_path] + conf.env['LIBPATH_' + NAME] = [lib_path] + if has_objects: + conf.env['LIB_' + NAME] = [lib_name] + + conf.run_env.append_unique(lib_path_name, [lib_path]) + conf.define(NAME + '_VERSION', version) + +def display_msg(conf, msg, status=None, color=None): + color = 'CYAN' + if type(status) == bool and status: + color = 'GREEN' + status = 'yes' + elif type(status) == bool and not status or status == "False": + color = 'YELLOW' + status = 'no' + Logs.pprint('BOLD', '%s' % msg.ljust(conf.line_just), sep='') + Logs.pprint('BOLD', ":", sep='') + Logs.pprint(color, status) + +def display_msgs(conf, msgs): + for k, v in msgs.items(): + display_msg(conf, k, v) + +def link_flags(env, lib): + return ' '.join(map(lambda x: env['LIB_ST'] % x, + env['LIB_' + lib])) + +def compile_flags(env, lib): + return ' '.join(map(lambda x: env['CPPPATH_ST'] % x, + env['INCLUDES_' + lib])) + +def build_pc(bld, name, version, version_suffix, libs, subst_dict={}): + """Build a pkg-config file for a library. + name -- uppercase variable name (e.g. 'SOMENAME') version -- version string (e.g. '1.2.3') version_suffix -- name version suffix (e.g. '2') libs -- string/list of dependencies (e.g. 'LIBFOO GLIB') - ''' - pkg_prefix=bld.env['PREFIX'] - if pkg_prefix[-1]=='/': - pkg_prefix=pkg_prefix[:-1] - target=name.lower() - if version_suffix!='': - target+='-'+version_suffix - if bld.env['PARDEBUG']: - target+='D' - target+='.pc' - libdir=bld.env['LIBDIR'] - if libdir.startswith(pkg_prefix): - libdir=libdir.replace(pkg_prefix,'${exec_prefix}') - includedir=bld.env['INCLUDEDIR'] - if includedir.startswith(pkg_prefix): - includedir=includedir.replace(pkg_prefix,'${prefix}') - obj=bld(features='subst',source='%s.pc.in'%name.lower(),target=target,install_path=os.path.join(bld.env['LIBDIR'],'pkgconfig'),exec_prefix='${prefix}',PREFIX=pkg_prefix,EXEC_PREFIX='${prefix}',LIBDIR=libdir,INCLUDEDIR=includedir) - if type(libs)!=list: - libs=libs.split() - subst_dict[name+'_VERSION']=version - subst_dict[name+'_MAJOR_VERSION']=version[0:version.find('.')] - for i in libs: - subst_dict[i+'_LIBS']=link_flags(bld.env,i) - lib_cflags=compile_flags(bld.env,i) - if lib_cflags=='': - lib_cflags=' ' - subst_dict[i+'_CFLAGS']=lib_cflags - obj.__dict__.update(subst_dict) -def build_dir(name,subdir): - if is_child(): - return os.path.join('build',name,subdir) - else: - return os.path.join('build',subdir) + """ + + pkg_prefix = bld.env['PREFIX'] + if len(pkg_prefix) > 1 and pkg_prefix[-1] == '/': + pkg_prefix = pkg_prefix[:-1] + + target = name.lower() + if version_suffix != '': + target += '-' + version_suffix + + if bld.env['PARDEBUG']: + target += 'D' + + target += '.pc' + + libdir = bld.env['LIBDIR'] + if libdir.startswith(pkg_prefix): + libdir = libdir.replace(pkg_prefix, '${exec_prefix}') + + includedir = bld.env['INCLUDEDIR'] + if includedir.startswith(pkg_prefix): + includedir = includedir.replace(pkg_prefix, '${prefix}') + + obj = bld(features='subst', + source='%s.pc.in' % name.lower(), + target=target, + install_path=os.path.join(bld.env['LIBDIR'], 'pkgconfig'), + exec_prefix='${prefix}', + PREFIX=pkg_prefix, + 
EXEC_PREFIX='${prefix}', + LIBDIR=libdir, + INCLUDEDIR=includedir) + + if type(libs) != list: + libs = libs.split() + + subst_dict[name + '_VERSION'] = version + subst_dict[name + '_MAJOR_VERSION'] = version[0:version.find('.')] + for i in libs: + subst_dict[i + '_LIBS'] = link_flags(bld.env, i) + lib_cflags = compile_flags(bld.env, i) + if lib_cflags == '': + lib_cflags = ' ' + subst_dict[i + '_CFLAGS'] = lib_cflags + + obj.__dict__.update(subst_dict) + def make_simple_dox(name): - name=name.lower() - NAME=name.upper() - try: - top=os.getcwd() - os.chdir(build_dir(name,'doc/html')) - page='group__%s.html'%name - if not os.path.exists(page): - return - for i in[['%s_API '%NAME,''],['%s_DEPRECATED '%NAME,''],['group__%s.html'%name,''],['&#160;',''],['<script.*><\/script>',''],['<hr\/><a name="details" id="details"><\/a><h2>.*<\/h2>',''],['<link href=\"tabs.css\" rel=\"stylesheet\" type=\"text\/css\"\/>',''],['<img class=\"footer\" src=\"doxygen.png\" alt=\"doxygen\"\/>','Doxygen']]: - os.system("sed -i 's/%s/%s/g' %s"%(i[0],i[1],page)) - os.rename('group__%s.html'%name,'index.html') - for i in(glob.glob('*.png')+glob.glob('*.html')+glob.glob('*.js')+glob.glob('*.css')): - if i!='index.html'and i!='style.css': - os.remove(i) - os.chdir(top) - os.chdir(build_dir(name,'doc/man/man3')) - for i in glob.glob('*.3'): - os.system("sed -i 's/%s_API //' %s"%(NAME,i)) - for i in glob.glob('_*'): - os.remove(i) - os.chdir(top) - except Exception as e: - Logs.error("Failed to fix up %s documentation: %s"%(name,e)) -def build_dox(bld,name,version,srcdir,blddir,outdir='',versioned=True): - if not bld.env['DOCS']: - return - if is_child(): - src_dir=os.path.join(srcdir,name.lower()) - else: - src_dir=srcdir - subst_tg=bld(features='subst',source='doc/reference.doxygen.in',target='doc/reference.doxygen',install_path='',name='doxyfile') - subst_dict={name+'_VERSION':version,name+'_SRCDIR':os.path.abspath(src_dir),name+'_DOC_DIR':''} - subst_tg.__dict__.update(subst_dict) - subst_tg.post() - docs=bld(features='doxygen',doxyfile='doc/reference.doxygen') - docs.post() - outname=name.lower() - if versioned: - outname+='-%d'%int(version[0:version.find('.')]) - bld.install_files(os.path.join('${DOCDIR}',outname,outdir,'html'),bld.path.get_bld().ant_glob('doc/html/*')) - for i in range(1,8): - bld.install_files('${MANDIR}/man%d'%i,bld.path.get_bld().ant_glob('doc/man/man%d/*'%i,excl='**/_*')) -def build_version_files(header_path,source_path,domain,major,minor,micro): - header_path=os.path.abspath(header_path) - source_path=os.path.abspath(source_path) - text="int "+domain+"_major_version = "+str(major)+";\n" - text+="int "+domain+"_minor_version = "+str(minor)+";\n" - text+="int "+domain+"_micro_version = "+str(micro)+";\n" - try: - o=open(source_path,'w') - o.write(text) - o.close() - except IOError: - Logs.error('Failed to open %s for writing\n'%source_path) - sys.exit(-1) - text="#ifndef __"+domain+"_version_h__\n" - text+="#define __"+domain+"_version_h__\n" - text+="extern const char* "+domain+"_revision;\n" - text+="extern int "+domain+"_major_version;\n" - text+="extern int "+domain+"_minor_version;\n" - text+="extern int "+domain+"_micro_version;\n" - text+="#endif /* __"+domain+"_version_h__ */\n" - try: - o=open(header_path,'w') - o.write(text) - o.close() - except IOError: - Logs.warn('Failed to open %s for writing\n'%header_path) - sys.exit(-1) - return None -def build_i18n_pot(bld,srcdir,dir,name,sources,copyright_holder=None): - Logs.info('Generating pot file from %s'%name) - pot_file='%s.pot'%name - cmd=['xgettext','--keyword=_','--keyword=N_','--keyword=S_','--from-code=UTF-8','-o',pot_file] - if copyright_holder: - cmd+=['--copyright-holder="%s"'%copyright_holder] - cmd+=sources - Logs.info('Updating '+pot_file) - subprocess.call(cmd,cwd=os.path.join(srcdir,dir)) -def build_i18n_po(bld,srcdir,dir,name,sources,copyright_holder=None): - pwd=os.getcwd() - os.chdir(os.path.join(srcdir,dir)) - pot_file='%s.pot'%name - po_files=glob.glob('po/*.po') - for po_file in po_files: - cmd=['msgmerge','--update',po_file,pot_file] - Logs.info('Updating '+po_file) - subprocess.call(cmd) - os.chdir(pwd) -def build_i18n_mo(bld,srcdir,dir,name,sources,copyright_holder=None): - pwd=os.getcwd() - os.chdir(os.path.join(srcdir,dir)) - po_files=glob.glob('po/*.po') - for po_file in po_files: - mo_file=po_file.replace('.po','.mo') - cmd=['msgfmt','-c','-f','-o',mo_file,po_file] - Logs.info('Generating '+po_file) - subprocess.call(cmd) - os.chdir(pwd) 
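The build_i18n_pot, build_i18n_po, and build_i18n_mo helpers above, removed here and re-added in expanded form later in this patch, drive the standard gettext pipeline: xgettext extracts translatable strings into a POT template, msgmerge folds the template into each language's PO file, and msgfmt compiles each PO file into the binary MO catalogue that gettext loads at runtime. A minimal standalone sketch of that pipeline, assuming a hypothetical 'myapp' domain and a po/ directory of translations:

    import glob
    import subprocess

    def update_translations(sources, podir='po', domain='myapp'):
        pot = '%s.pot' % domain
        # Extract translatable strings from the sources into a POT template
        subprocess.call(['xgettext', '--keyword=_', '--from-code=UTF-8',
                         '-o', pot] + sources)
        for po in glob.glob('%s/*.po' % podir):
            # Merge new and changed strings into each language's PO file
            subprocess.call(['msgmerge', '--update', po, pot])
            # Compile the PO file into the binary MO catalogue
            subprocess.call(['msgfmt', '-c', '-o',
                             po.replace('.po', '.mo'), po])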
-def build_i18n(bld,srcdir,dir,name,sources,copyright_holder=None): - build_i18n_pot(bld,srcdir,dir,name,sources,copyright_holder) - build_i18n_po(bld,srcdir,dir,name,sources,copyright_holder) - build_i18n_mo(bld,srcdir,dir,name,sources,copyright_holder) -def cd_to_build_dir(ctx,appname): - top_level=(len(ctx.stack_path)>1) - if top_level: - os.chdir(os.path.join('build',appname)) - else: - os.chdir('build') - Logs.pprint('GREEN',"Waf: Entering directory `%s'"%os.path.abspath(os.getcwd())) -def cd_to_orig_dir(ctx,child): - if child: - os.chdir(os.path.join('..','..')) - else: - os.chdir('..') -def pre_test(ctx,appname,dirs=['src']): - if not hasattr(ctx,'autowaf_tests_total'): - ctx.autowaf_tests_total=0 - ctx.autowaf_tests_failed=0 - ctx.autowaf_local_tests_total=0 - ctx.autowaf_local_tests_failed=0 - ctx.autowaf_tests={} - ctx.autowaf_tests[appname]={'total':0,'failed':0} - cd_to_build_dir(ctx,appname) - if not ctx.env.NO_COVERAGE: - diropts='' - for i in dirs: - diropts+=' -d '+i - clear_log=open('lcov-clear.log','w') - try: - try: - subprocess.call(('lcov %s -z'%diropts).split(),stdout=clear_log,stderr=clear_log) - except: - Logs.warn('Failed to run lcov, no coverage report will be generated') - finally: - clear_log.close() -def post_test(ctx,appname,dirs=['src'],remove=['*boost*','c++*']): - if not ctx.env.NO_COVERAGE: - diropts='' - for i in dirs: - diropts+=' -d '+i - coverage_log=open('lcov-coverage.log','w') - coverage_lcov=open('coverage.lcov','w') - coverage_stripped_lcov=open('coverage-stripped.lcov','w') - try: - try: - base='.' - if g_is_child: - base='..' - lcov_cmd='lcov -c %s -b %s'%(diropts,base) - if ctx.env.LLVM_COV: - lcov_cmd+=' --gcov-tool %s'%ctx.env.LLVM_COV[0] - subprocess.call(lcov_cmd.split(),stdout=coverage_lcov,stderr=coverage_log) - subprocess.call(['lcov','--remove','coverage.lcov']+remove,stdout=coverage_stripped_lcov,stderr=coverage_log) - if not os.path.isdir('coverage'): - os.makedirs('coverage') - subprocess.call('genhtml -o coverage coverage-stripped.lcov'.split(),stdout=coverage_log,stderr=coverage_log) - except: - Logs.warn('Failed to run lcov, no coverage report will be generated') - finally: - coverage_stripped_lcov.close() - coverage_lcov.close() - coverage_log.close() - if ctx.autowaf_tests[appname]['failed']>0: - Logs.pprint('RED','\nSummary: %d / %d %s tests failed'%(ctx.autowaf_tests[appname]['failed'],ctx.autowaf_tests[appname]['total'],appname)) - else: - Logs.pprint('GREEN','\nSummary: All %d %s tests passed'%(ctx.autowaf_tests[appname]['total'],appname)) - if not ctx.env.NO_COVERAGE: - Logs.pprint('GREEN','Coverage: <file://%s>\n'%os.path.abspath('coverage/index.html')) - Logs.pprint('GREEN',"Waf: Leaving directory `%s'"%os.path.abspath(os.getcwd())) - top_level=(len(ctx.stack_path)>1) - if top_level: - cd_to_orig_dir(ctx,top_level) -def run_test(ctx,appname,test,desired_status=0,dirs=['src'],name='',header=False,quiet=False): - ctx.autowaf_tests_total+=1 - ctx.autowaf_local_tests_total+=1 - ctx.autowaf_tests[appname]['total']+=1 - out=(None,None) - if type(test)==list: - name=test[0] - returncode=test[1] - elif callable(test): - returncode=test() - else: - s=test - if isinstance(test,type([])): - s=' '.join(test) - if header and not quiet: - Logs.pprint('Green','\n** Test %s'%s) - cmd=test - if Options.options.test_wrapper: - cmd=Options.options.test_wrapper+' '+test - if name=='': - name=test - proc=subprocess.Popen(cmd,shell=True,stdout=subprocess.PIPE,stderr=subprocess.PIPE) - out=proc.communicate() - returncode=proc.returncode - 
success=desired_status is None or returncode==desired_status - if success: - if not quiet: - Logs.pprint('GREEN','** Pass %s'%name) - else: - Logs.pprint('RED','** FAIL %s'%name) - ctx.autowaf_tests_failed+=1 - ctx.autowaf_tests[appname]['failed']+=1 - if type(test)!=list and not callable(test): - Logs.pprint('RED',test) - if Options.options.verbose_tests and type(test)!=list and not callable(test): - sys.stdout.write(out[0]) - sys.stderr.write(out[1]) - return(success,out) -def tests_name(ctx,appname,name='*'): - if name=='*': - return appname - else: - return'%s.%s'%(appname,name) -def begin_tests(ctx,appname,name='*'): - ctx.autowaf_local_tests_failed=0 - ctx.autowaf_local_tests_total=0 - Logs.pprint('GREEN','\n** Begin %s tests'%tests_name(ctx,appname,name)) - class Handle: - def __enter__(self): - pass - def __exit__(self,type,value,traceback): - end_tests(ctx,appname,name) - return Handle() -def end_tests(ctx,appname,name='*'): - failures=ctx.autowaf_local_tests_failed - if failures==0: - Logs.pprint('GREEN','** Passed all %d %s tests'%(ctx.autowaf_local_tests_total,tests_name(ctx,appname,name))) - else: - Logs.pprint('RED','** Failed %d / %d %s tests'%(failures,ctx.autowaf_local_tests_total,tests_name(ctx,appname,name))) -def run_tests(ctx,appname,tests,desired_status=0,dirs=['src'],name='*',headers=False): - begin_tests(ctx,appname,name) - diropts='' - for i in dirs: - diropts+=' -d '+i - for i in tests: - run_test(ctx,appname,i,desired_status,dirs,i,headers) - end_tests(ctx,appname,name) + "Clean up messy Doxygen documentation after it is built" + name = name.lower() + NAME = name.upper() + try: + top = os.getcwd() + os.chdir(build_dir(name, 'doc/html')) + page = 'group__%s.html' % name + if not os.path.exists(page): + return + for i in [ + ['%s_API ' % NAME, ''], + ['%s_DEPRECATED ' % NAME, ''], + ['group__%s.html' % name, ''], + ['&#160;', ''], + [r'<script.*><\/script>', ''], + [r'<hr\/><a name="details" id="details"><\/a><h2>.*<\/h2>', ''], + [r'<link href=\"tabs.css\" rel=\"stylesheet\" type=\"text\/css\"\/>', + ''], + [r'<img class=\"footer\" src=\"doxygen.png\" alt=\"doxygen\"\/>', + 'Doxygen']]: + os.system("sed -i 's/%s/%s/g' %s" % (i[0], i[1], page)) + os.rename('group__%s.html' % name, 'index.html') + for i in (glob.glob('*.png') + + glob.glob('*.html') + + glob.glob('*.js') + + glob.glob('*.css')): + if i != 'index.html' and i != 'style.css': + os.remove(i) + os.chdir(top) + os.chdir(build_dir(name, 'doc/man/man3')) + for i in glob.glob('*.3'): + os.system("sed -i 's/%s_API //' %s" % (NAME, i)) + for i in glob.glob('_*'): + os.remove(i) + os.chdir(top) + except Exception as e: + Logs.error("Failed to fix up %s documentation: %s" % (name, e)) + finally: + os.chdir(top) + +def build_dox(bld, name, version, srcdir, blddir, outdir='', versioned=True): + """Build Doxygen API documentation""" + if not bld.env['DOCS']: + return + + # Doxygen paths in are relative to the doxygen file + src_dir = bld.path.srcpath() + subst_tg = bld(features='subst', + source='doc/reference.doxygen.in', + target='doc/reference.doxygen', + install_path='', + name='doxyfile') + + subst_dict = { + name + '_VERSION': version, + name + '_SRCDIR': os.path.abspath(src_dir), + name + '_DOC_DIR': '' + } + + subst_tg.__dict__.update(subst_dict) + + subst_tg.post() + + docs = bld(features='doxygen', + doxyfile='doc/reference.doxygen') + + docs.post() + + outname = name.lower() + if versioned: + outname += '-%d' % int(version[0:version.find('.')]) + bld.install_files( + os.path.join('${DOCDIR}', outname, outdir, 'html'), + bld.path.get_bld().ant_glob('doc/html/*')) + for i in range(1, 8): + bld.install_files('${MANDIR}/man%d' % i, + bld.path.get_bld().ant_glob('doc/man/man%d/*' % i, + excl='**/_*')) + +def build_version_files(header_path, source_path, domain, major, minor, micro): + """Generate version code header""" + header_path = os.path.abspath(header_path) + source_path = os.path.abspath(source_path) + text = "int " + domain + "_major_version = " + str(major) + ";\n" + text += "int " + domain + "_minor_version = " + str(minor) + ";\n" + text += "int " + domain + "_micro_version = " + str(micro) + ";\n" + try: + o = open(source_path, 'w') + o.write(text) + o.close() + except IOError: + Logs.error('Failed to open %s for writing\n' % source_path) + sys.exit(-1) + + text = "#ifndef __" + domain + "_version_h__\n" + text += "#define __" + domain + "_version_h__\n" + text += "extern const char* " + domain + "_revision;\n" + text += "extern int " + domain + "_major_version;\n" + text += "extern int " + domain + "_minor_version;\n" + text += "extern int " + domain + "_micro_version;\n" + text += "#endif /* __" + domain + "_version_h__ */\n" + try: + o = open(header_path, 'w') + o.write(text) + o.close() + except IOError: + Logs.warn('Failed to open %s for writing\n' % header_path) + sys.exit(-1) + + return None + +def build_i18n_pot(bld, srcdir, dir, name, sources, copyright_holder=None): + Logs.info('Generating pot file from %s' % name) + pot_file = '%s.pot' % name + + cmd = ['xgettext', + '--keyword=_', + '--keyword=N_', + '--keyword=S_', + '--from-code=UTF-8', + '-o', pot_file] + + if copyright_holder: + cmd += ['--copyright-holder="%s"' % copyright_holder] + + cmd += sources + Logs.info('Updating ' + pot_file) + subprocess.call(cmd, cwd=os.path.join(srcdir, dir)) + +def build_i18n_po(bld, srcdir, dir, name, sources, copyright_holder=None): + pwd = os.getcwd() + os.chdir(os.path.join(srcdir, dir)) + pot_file = '%s.pot' % name + po_files = glob.glob('po/*.po') + for po_file in po_files: + cmd = ['msgmerge', + '--update', + po_file, + pot_file] + 
Logs.info('Updating ' + po_file) + subprocess.call(cmd) + os.chdir(pwd) + +def build_i18n_mo(bld, srcdir, dir, name, sources, copyright_holder=None): + pwd = os.getcwd() + os.chdir(os.path.join(srcdir, dir)) + po_files = glob.glob('po/*.po') + for po_file in po_files: + mo_file = po_file.replace('.po', '.mo') + cmd = ['msgfmt', + '-c', + '-f', + '-o', + mo_file, + po_file] + Logs.info('Generating ' + po_file) + subprocess.call(cmd) + os.chdir(pwd) + +def build_i18n(bld, srcdir, dir, name, sources, copyright_holder=None): + build_i18n_pot(bld, srcdir, dir, name, sources, copyright_holder) + build_i18n_po(bld, srcdir, dir, name, sources, copyright_holder) + build_i18n_mo(bld, srcdir, dir, name, sources, copyright_holder) + +class ExecutionEnvironment: + """Context that sets system environment variables for program execution""" + def __init__(self, changes): + self.original_environ = os.environ.copy() + + self.diff = {} + for path_name, paths in changes.items(): + value = os.pathsep.join(paths) + if path_name in os.environ: + value += os.pathsep + os.environ[path_name] + + self.diff[path_name] = value + + os.environ.update(self.diff) + + def __str__(self): + return '\n'.join({'%s="%s"' % (k, v) for k, v in self.diff.items()}) + + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + os.environ = self.original_environ + +class RunContext(Build.BuildContext): + "runs an executable from the build directory" + cmd = 'run' + + def execute(self): + self.restore() + if not self.all_envs: + self.load_envs() + + with ExecutionEnvironment(self.env.AUTOWAF_RUN_ENV) as env: + if Options.options.verbose: + Logs.pprint('GREEN', str(env) + '\n') + + if Options.options.cmd: + Logs.pprint('GREEN', 'Running %s' % Options.options.cmd) + subprocess.call(Options.options.cmd, shell=True) + else: + Logs.error("error: Missing --cmd option for run command") + +def show_diff(from_lines, to_lines, from_filename, to_filename): + import difflib + import sys + + same = True + for line in difflib.unified_diff( + from_lines, to_lines, + fromfile=os.path.abspath(from_filename), + tofile=os.path.abspath(to_filename)): + sys.stderr.write(line) + same = False + + return same + +def test_file_equals(patha, pathb): + import filecmp + import io + + for path in (patha, pathb): + if not os.access(path, os.F_OK): + Logs.pprint('RED', 'error: missing file %s' % path) + return False + + if filecmp.cmp(patha, pathb, shallow=False): + return True + + with io.open(patha, 'rU', encoding='utf-8') as fa: + with io.open(pathb, 'rU', encoding='utf-8') as fb: + return show_diff(fa.readlines(), fb.readlines(), patha, pathb) + +def bench_time(): + if hasattr(time, 'perf_counter'): # Added in Python 3.3 + return time.perf_counter() + else: + return time.time() + +class TestOutput: + """Test output that is truthy if result is as expected""" + def __init__(self, expected, result=None): + self.stdout = self.stderr = None + self.expected = expected + self.result = result + + def __bool__(self): + return self.expected is None or self.result == self.expected + + __nonzero__ = __bool__ + +def is_string(s): + if sys.version_info[0] < 3: + return isinstance(s, basestring) + return isinstance(s, str) + +class TestScope: + """Scope for running tests that maintains pass/fail statistics""" + def __init__(self, tst, name, defaults): + self.tst = tst + self.name = name + self.defaults = defaults + self.n_failed = 0 + self.n_total = 0 + + def run(self, test, **kwargs): + if type(test) == list and 'name' not in kwargs: + import pipes 
+ kwargs['name'] = ' '.join(map(pipes.quote, test)) + + if Options.options.test_filter and 'name' in kwargs: + import re + found = False + for scope in self.tst.stack: + if re.search(Options.options.test_filter, scope.name): + found = True + break + + if (not found and + not re.search(Options.options.test_filter, self.name) and + not re.search(Options.options.test_filter, kwargs['name'])): + return True + + if callable(test): + output = self._run_callable(test, **kwargs) + elif type(test) == list: + output = self._run_command(test, **kwargs) + else: + raise Exception("Unknown test type") + + if not output: + self.tst.log_bad('FAILED', kwargs['name']) + + return self.tst.test_result(output) + + def _run_callable(self, test, **kwargs): + expected = kwargs['expected'] if 'expected' in kwargs else True + return TestOutput(expected, test()) + + def _run_command(self, test, **kwargs): + if 'stderr' in kwargs and kwargs['stderr'] == NONEMPTY: + # Run with a temp file for stderr and check that it is non-empty + import tempfile + with tempfile.TemporaryFile() as stderr: + kwargs['stderr'] = stderr + output = self.run(test, **kwargs) + stderr.seek(0, 2) # Seek to end + return (output if not output else + self.run( + lambda: stderr.tell() > 0, + name=kwargs['name'] + ' error message')) + + try: + # Run with stdout and stderr set to the appropriate streams + out_stream = self._stream('stdout', kwargs) + err_stream = self._stream('stderr', kwargs) + return self._exec(test, **kwargs) + finally: + out_stream = out_stream.close() if out_stream else None + err_stream = err_stream.close() if err_stream else None + + def _stream(self, stream_name, kwargs): + s = kwargs[stream_name] if stream_name in kwargs else None + if is_string(s): + kwargs[stream_name] = open(s, 'wb') + return kwargs[stream_name] + return None + + def _exec(self, + test, + expected=0, + name='', + stdin=None, + stdout=None, + stderr=None, + verbosity=1): + def stream(s): + return open(s, 'wb') if type(s) == str else s + + if verbosity > 1: + self.tst.log_good('RUN ', name) + + if Options.options.test_wrapper: + import shlex + test = shlex.split(Options.options.test_wrapper) + test + + output = TestOutput(expected) + with open(os.devnull, 'wb') as null: + out = null if verbosity < 3 and not stdout else stdout + err = null if verbosity < 2 and not stderr else stderr + proc = subprocess.Popen(test, stdin=stdin, stdout=out, stderr=err) + output.stdout, output.stderr = proc.communicate() + output.result = proc.returncode + + if output and verbosity > 0: + self.tst.log_good(' OK', name) + + return output + +class TestContext(Build.BuildContext): + "runs test suite" + fun = cmd = 'test' + + def __init__(self, **kwargs): + super(TestContext, self).__init__(**kwargs) + self.start_time = bench_time() + self.max_depth = 1 + + defaults = {'verbosity': Options.options.verbose} + self.stack = [TestScope(self, Context.g_module.APPNAME, defaults)] + + def defaults(self): + return self.stack[-1].defaults + + def finalize(self): + if self.stack[-1].n_failed > 0: + sys.exit(1) + + super(TestContext, self).finalize() + + def __call__(self, test, **kwargs): + return self.stack[-1].run(test, **self.args(**kwargs)) + + def file_equals(self, from_path, to_path, **kwargs): + kwargs.update({'expected': True, + 'name': '%s == %s' % (from_path, to_path)}) + return self(lambda: test_file_equals(from_path, to_path), **kwargs) + + def log_good(self, title, fmt, *args): + Logs.pprint('GREEN', '[%s] %s' % (title.center(10), fmt % args)) + + def log_bad(self, title, fmt, 
*args): + Logs.pprint('RED', '[%s] %s' % (title.center(10), fmt % args)) + + def pre_recurse(self, node): + wscript_module = Context.load_module(node.abspath()) + group_name = wscript_module.APPNAME + self.stack.append(TestScope(self, group_name, self.defaults())) + self.max_depth = max(self.max_depth, len(self.stack) - 1) + + bld_dir = node.get_bld().parent + if bld_dir != self.path.get_bld(): + Logs.info('') + + self.original_dir = os.getcwd() + Logs.info("Waf: Entering directory `%s'\n", bld_dir) + os.chdir(str(bld_dir)) + + if not self.env.NO_COVERAGE and str(node.parent) == Context.top_dir: + self.clear_coverage() + + self.log_good('=' * 10, 'Running %s tests', group_name) + super(TestContext, self).pre_recurse(node) + + def test_result(self, success): + self.stack[-1].n_total += 1 + self.stack[-1].n_failed += 1 if not success else 0 + return success + + def pop(self): + scope = self.stack.pop() + self.stack[-1].n_total += scope.n_total + self.stack[-1].n_failed += scope.n_failed + return scope + + def post_recurse(self, node): + super(TestContext, self).post_recurse(node) + + scope = self.pop() + duration = (bench_time() - self.start_time) * 1000.0 + is_top = str(node.parent) == str(Context.top_dir) + + if is_top and self.max_depth > 1: + Logs.info('') + + self.log_good('=' * 10, '%d tests from %s ran (%d ms total)', + scope.n_total, scope.name, duration) + + if not self.env.NO_COVERAGE: + if is_top: + self.gen_coverage() + + if os.path.exists('coverage/index.html'): + self.log_good('REPORT', '<file://%s>', + os.path.abspath('coverage/index.html')) + + successes = scope.n_total - scope.n_failed + Logs.pprint('GREEN', '[ PASSED ] %d tests' % successes) + if scope.n_failed > 0: + Logs.pprint('RED', '[ FAILED ] %d tests' % scope.n_failed) + if is_top: + Logs.info("\nWaf: Leaving directory `%s'" % os.getcwd()) + + os.chdir(self.original_dir) + + def execute(self): + self.restore() + if not self.all_envs: + self.load_envs() + + if not self.env.BUILD_TESTS: + self.fatal('Configuration does not include tests') + + with ExecutionEnvironment(self.env.AUTOWAF_RUN_ENV) as env: + if self.defaults()['verbosity'] > 0: + Logs.pprint('GREEN', str(env) + '\n') + self.recurse([self.run_dir]) + + def src_path(self, path): + return os.path.relpath(os.path.join(str(self.path), path)) + + def args(self, **kwargs): + all_kwargs = self.defaults().copy() + all_kwargs.update(kwargs) + return all_kwargs + + def group(self, name, **kwargs): + return TestGroup( + self, self.stack[-1].name, name, **self.args(**kwargs)) + + def set_test_defaults(self, **kwargs): + """Set default arguments to be passed to all tests""" + self.stack[-1].defaults.update(kwargs) + + def clear_coverage(self): + """Zero old coverage data""" + try: + with open('cov-clear.log', 'w') as log: + subprocess.call(['lcov', '-z', '-d', str(self.path)], + stdout=log, stderr=log) + + except Exception: + Logs.warn('Failed to run lcov to clear old coverage data') + + def gen_coverage(self): + """Generate coverage data and report""" + try: + with open('cov.lcov', 'w') as out: + with open('cov.log', 'w') as err: + subprocess.call(['lcov', '-c', '--no-external', + '--rc', 'lcov_branch_coverage=1', + '-b', '.', + '-d', str(self.path)], + stdout=out, stderr=err) + + if not os.path.isdir('coverage'): + os.makedirs('coverage') + + with open('genhtml.log', 'w') as log: + subprocess.call(['genhtml', + '-o', 'coverage', + '--rc', 'genhtml_branch_coverage=1', + 'cov.lcov'], + stdout=log, stderr=log) + + summary = subprocess.check_output( + ['lcov', '--summary', + '--rc', 
'lcov_branch_coverage=1', + 'cov.lcov'], + stderr=subprocess.STDOUT).decode('ascii') + + import re + lines = re.search('lines\.*: (.*)%.*', summary).group(1) + functions = re.search('functions\.*: (.*)%.*', summary).group(1) + branches = re.search('branches\.*: (.*)%.*', summary).group(1) + self.log_good('COVERAGE', '%s%% lines, %s%% functions, %s%% branches', + lines, functions, branches) + + except Exception: + Logs.warn('Failed to run lcov to generate coverage report') + +class TestGroup: + def __init__(self, tst, suitename, name, **kwargs): + self.tst = tst + self.suitename = suitename + self.name = name + self.kwargs = kwargs + self.start_time = bench_time() + tst.stack.append(TestScope(tst, name, tst.defaults())) + + def label(self): + return self.suitename + '.%s' % self.name if self.name else '' + + def args(self, **kwargs): + all_kwargs = self.tst.args(**self.kwargs) + all_kwargs.update(kwargs) + return all_kwargs + + def __enter__(self): + if 'verbosity' in self.kwargs and self.kwargs['verbosity'] > 0: + self.tst.log_good('-' * 10, self.label()) + return self + + def __call__(self, test, **kwargs): + return self.tst(test, **self.args(**kwargs)) + + def file_equals(self, from_path, to_path, **kwargs): + return self.tst.file_equals(from_path, to_path, **kwargs) + + def __exit__(self, type, value, traceback): + duration = (bench_time() - self.start_time) * 1000.0 + scope = self.tst.pop() + n_passed = scope.n_total - scope.n_failed + if scope.n_failed == 0: + self.tst.log_good('-' * 10, '%d tests from %s (%d ms total)', + scope.n_total, self.label(), duration) + else: + self.tst.log_bad('-' * 10, '%d/%d tests from %s (%d ms total)', + n_passed, scope.n_total, self.label(), duration) + def run_ldconfig(ctx): - if(ctx.cmd=='install'and not ctx.env['RAN_LDCONFIG']and ctx.env['LIBDIR']and'DESTDIR'not in os.environ and not Options.options.destdir): - try: - Logs.info("Waf: Running `/sbin/ldconfig %s'"%ctx.env['LIBDIR']) - subprocess.call(['/sbin/ldconfig',ctx.env['LIBDIR']]) - ctx.env['RAN_LDCONFIG']=True - except: - pass -def get_rdf_news(name,in_files,top_entries=None,extra_entries=None,dev_dist=None): - import rdflib - from time import strptime - doap=rdflib.Namespace('http://usefulinc.com/ns/doap#') - dcs=rdflib.Namespace('http://ontologi.es/doap-changeset#') - rdfs=rdflib.Namespace('http://www.w3.org/2000/01/rdf-schema#') - foaf=rdflib.Namespace('http://xmlns.com/foaf/0.1/') - rdf=rdflib.Namespace('http://www.w3.org/1999/02/22-rdf-syntax-ns#') - m=rdflib.ConjunctiveGraph() - try: - for i in in_files: - m.parse(i,format='n3') - except: - Logs.warn('Error parsing data, unable to generate NEWS') - return - proj=m.value(None,rdf.type,doap.Project) - for f in m.triples([proj,rdfs.seeAlso,None]): - if f[2].endswith('.ttl'): - m.parse(f[2],format='n3') - entries={} - for r in m.triples([proj,doap.release,None]): - release=r[2] - revision=m.value(release,doap.revision,None) - date=m.value(release,doap.created,None) - blamee=m.value(release,dcs.blame,None) - changeset=m.value(release,dcs.changeset,None) - dist=m.value(release,doap['file-release'],None) - if not dist: - Logs.warn('No file release for %s %s'%(proj,revision)) - dist=dev_dist - if revision and date and blamee and changeset: - entry={} - entry['name']=str(name) - entry['revision']=str(revision) - entry['date']=strptime(str(date),'%Y-%m-%d') - entry['status']='stable'if dist!=dev_dist else'unstable' - entry['dist']=str(dist) - entry['items']=[] - for i in m.triples([changeset,dcs.item,None]): - 
item=str(m.value(i[2],rdfs.label,None)) - entry['items']+=[item] - if dist and top_entries is not None: - if not str(dist)in top_entries: - top_entries[str(dist)]={'items':[]} - top_entries[str(dist)]['items']+=['%s: %s'%(name,item)] - if extra_entries and dist: - for i in extra_entries[str(dist)]: - entry['items']+=extra_entries[str(dist)]['items'] - entry['blamee_name']=str(m.value(blamee,foaf.name,None)) - entry['blamee_mbox']=str(m.value(blamee,foaf.mbox,None)) - entries[(str(date),str(revision))]=entry - else: - Logs.warn('Ignored incomplete %s release description'%name) - return entries -def write_news(entries,out_file): - import textwrap - from time import strftime - if len(entries)==0: - return - news=open(out_file,'w') - for e in sorted(entries.keys(),reverse=True): - entry=entries[e] - news.write('%s (%s) %s;\n'%(entry['name'],entry['revision'],entry['status'])) - for item in entry['items']: - wrapped=textwrap.wrap(item,width=79) - news.write('\n * '+'\n '.join(wrapped)) - news.write('\n\n --') - news.write(' %s <%s>'%(entry['blamee_name'],entry['blamee_mbox'].replace('mailto:',''))) - news.write(' %s\n\n'%(strftime('%a, %d %b %Y %H:%M:%S +0000',entry['date']))) - news.close() -def write_posts(entries,meta,out_dir,status='stable'): - from time import strftime - try: - os.mkdir(out_dir) - except: - pass - for i in entries: - entry=entries[i] - revision=i[1] - if entry['status']!=status: - continue - date_str=strftime('%Y-%m-%d',entry['date']) - datetime_str=strftime('%Y-%m-%d %H:%M',entry['date']) - path=os.path.join(out_dir,'%s-%s-%s.md'%(date_str,entry['name'],revision.replace('.','-'))) - post=open(path,'w') - title=entry['title']if'title'in entry else entry['name'] - post.write('Title: %s %s\n'%(title,revision)) - post.write('Date: %s\n'%datetime_str) - post.write('Slug: %s-%s\n'%(entry['name'],revision.replace('.','-'))) - for k in meta: - post.write('%s: %s\n'%(k,meta[k])) - post.write('\n') - url=entry['dist'] - if entry['status']==status: - post.write('[%s %s](%s) has been released.'%((entry['name'],revision,url))) - if'description'in entry: - post.write(' '+entry['description']) - post.write('\n') - if(len(entry['items'])>0 and not(len(entry['items'])==1 and entry['items'][0]=='Initial release')): - post.write('\nChanges:\n\n') - for i in entry['items']: - post.write(' * %s\n'%i) - post.close() -def get_blurb(in_file): - f=open(in_file,'r') - f.readline() - f.readline() - f.readline() - out='' - line=f.readline() - while len(line)>0 and line!='\n': - out+=line.replace('\n',' ') - line=f.readline() - return out.strip() -def get_news(in_file,entry_props={}): - import re - import rfc822 - f=open(in_file,'r') - entries={} - while True: - head=f.readline() - matches=re.compile('([^ ]*) \((.*)\) ([a-zA-z]*);').match(head) - if matches is None: - break - entry={} - entry['name']=matches.group(1) - entry['revision']=matches.group(2) - entry['status']=matches.group(3) - entry['items']=[] - if'dist_pattern'in entry_props: - entry['dist']=entry_props['dist_pattern']%entry['revision'] - if f.readline()!='\n': - raise SyntaxError('expected blank line after NEWS header') - def add_item(item): - if len(item)>0: - entry['items']+=[item.replace('\n',' ').strip()] - item='' - line='' - while line!='\n': - line=f.readline() - if line.startswith(' * '): - add_item(item) - item=line[3:].lstrip() - else: - item+=line.lstrip() - add_item(item) - foot=f.readline() - matches=re.compile(' -- (.*) <(.*)> (.*)').match(foot) - entry['date']=rfc822.parsedate(matches.group(3)) - 
entry['blamee_name']=matches.group(1)
-		entry['blamee_mbox']=matches.group(2)
-		entry.update(entry_props)
-		entries[(entry['date'],entry['revision'])]=entry
-		f.readline()
-	f.close()
-	return entries
-def news_to_posts(news_file,entry_props,post_meta,default_post_dir):
-	post_dir=os.getenv('POST_DIR')
-	if not post_dir:
-		post_dir=default_post_dir
-		sys.stderr.write('POST_DIR not set in environment, writing to %s\n'%post_dir)
-	else:
-		sys.stderr.write('writing posts to %s\n'%post_dir)
-	entries=get_news(news_file,entry_props)
-	write_posts(entries,post_meta,post_dir)
+    should_run = (ctx.cmd == 'install' and
+                  not ctx.env['RAN_LDCONFIG'] and
+                  ctx.env['LIBDIR'] and
+                  'DESTDIR' not in os.environ and
+                  not Options.options.destdir)
+
+    if should_run:
+        try:
+            Logs.info("Waf: Running `/sbin/ldconfig %s'" % ctx.env['LIBDIR'])
+            subprocess.call(['/sbin/ldconfig', ctx.env['LIBDIR']])
+            ctx.env['RAN_LDCONFIG'] = True
+        except Exception:
+            pass
+
 def run_script(cmds):
-	for cmd in cmds:
-		subprocess.check_call(cmd,shell=True)
-def release(name,version,dist_name=None):
-	if dist_name is None:
-		dist_name=name.lower()
-	dist='%s-%s.tar.bz2'%(dist_name or name.lower(),version)
-	try:
-		os.remove(dist)
-		os.remove(dist+'.sig')
-	except:
-		pass
-	status=subprocess.check_output('git status --porcelain',shell=True)
-	if status:
-		Logs.error('error: git working copy is dirty\n'+status)
-		raise Exception('git working copy is dirty')
-	head=subprocess.check_output('git show -s --oneline',shell=True)
-	head_summary=head[8:].strip().lower()
-	expected_summary='%s %s'%(name.lower(),version)
-	if head_summary!=expected_summary:
-		raise Exception('latest commit "%s" does not match "%s"'%(head_summary,expected_summary))
-	run_script(['./waf configure --docs','./waf','./waf distcheck','./waf posts','gpg -b %s'%dist,'git tag -s v%s -m "%s %s"'%(version,name,version)])
+    for cmd in cmds:
+        subprocess.check_call(cmd, shell=True)
diff -Nru lilv-0.24.4~dfsg0/waflib/extras/batched_cc.py lilv-0.24.6/waflib/extras/batched_cc.py
--- lilv-0.24.4~dfsg0/waflib/extras/batched_cc.py	1970-01-01 00:00:00.000000000 +0000
+++ lilv-0.24.6/waflib/extras/batched_cc.py	2019-06-06 20:19:08.000000000 +0000
@@ -0,0 +1,173 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2006-2015 (ita)
+
+"""
+Instead of compiling object files one by one, c/c++ compilers are often able to compile at once:
+cc -c ../file1.c ../file2.c ../file3.c
+
+Files are output in the directory where the compiler is called, and dependencies are more difficult
+to track (do not run the command on all source files if only one file changes).
+As such, we act as if the files were compiled one by one, but no command is actually run:
+each cc/cpp Task is replaced by a TaskSlave. A new task called TaskMaster collects the
+signatures from each slave and finds out the command-line to run.
+
+Just import this module to start using it:
+def build(bld):
+	bld.load('batched_cc')
+
+Note that this is provided as an example, unity builds are recommended
+for best performance results (fewer tasks and fewer jobs to execute).
+See waflib/extras/unity.py.
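+
+As an illustration, a complete wscript could look as follows (a minimal
+sketch; the glob pattern and target name are placeholders, not part of this
+tool):
+
+def options(opt):
+	opt.load('compiler_c')
+def configure(conf):
+	conf.load('compiler_c')
+def build(bld):
+	bld.load('batched_cc')
+	bld.program(source=bld.path.ant_glob('src/*.c'), target='app')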
+""" + +from waflib import Task, Utils +from waflib.TaskGen import extension, feature, after_method +from waflib.Tools import c, cxx + +MAX_BATCH = 50 + +c_str = '${CC} ${ARCH_ST:ARCH} ${CFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${tsk.batch_incpaths()} ${DEFINES_ST:DEFINES} -c ${SRCLST} ${CXX_TGT_F_BATCHED} ${CPPFLAGS}' +c_fun, _ = Task.compile_fun_noshell(c_str) + +cxx_str = '${CXX} ${ARCH_ST:ARCH} ${CXXFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${tsk.batch_incpaths()} ${DEFINES_ST:DEFINES} -c ${SRCLST} ${CXX_TGT_F_BATCHED} ${CPPFLAGS}' +cxx_fun, _ = Task.compile_fun_noshell(cxx_str) + +count = 70000 +class batch(Task.Task): + color = 'PINK' + + after = ['c', 'cxx'] + before = ['cprogram', 'cshlib', 'cstlib', 'cxxprogram', 'cxxshlib', 'cxxstlib'] + + def uid(self): + return Utils.h_list([Task.Task.uid(self), self.generator.idx, self.generator.path.abspath(), self.generator.target]) + + def __str__(self): + return 'Batch compilation for %d slaves' % len(self.slaves) + + def __init__(self, *k, **kw): + Task.Task.__init__(self, *k, **kw) + self.slaves = [] + self.inputs = [] + self.hasrun = 0 + + global count + count += 1 + self.idx = count + + def add_slave(self, slave): + self.slaves.append(slave) + self.set_run_after(slave) + + def runnable_status(self): + for t in self.run_after: + if not t.hasrun: + return Task.ASK_LATER + + for t in self.slaves: + #if t.executed: + if t.hasrun != Task.SKIPPED: + return Task.RUN_ME + + return Task.SKIP_ME + + def get_cwd(self): + return self.slaves[0].outputs[0].parent + + def batch_incpaths(self): + st = self.env.CPPPATH_ST + return [st % node.abspath() for node in self.generator.includes_nodes] + + def run(self): + self.outputs = [] + + srclst = [] + slaves = [] + for t in self.slaves: + if t.hasrun != Task.SKIPPED: + slaves.append(t) + srclst.append(t.inputs[0].abspath()) + + self.env.SRCLST = srclst + + if self.slaves[0].__class__.__name__ == 'c': + ret = c_fun(self) + else: + ret = cxx_fun(self) + + if ret: + return ret + + for t in slaves: + t.old_post_run() + +def hook(cls_type): + def n_hook(self, node): + + ext = '.obj' if self.env.CC_NAME == 'msvc' else '.o' + name = node.name + k = name.rfind('.') + if k >= 0: + basename = name[:k] + ext + else: + basename = name + ext + + outdir = node.parent.get_bld().make_node('%d' % self.idx) + outdir.mkdir() + out = outdir.find_or_declare(basename) + + task = self.create_task(cls_type, node, out) + + try: + self.compiled_tasks.append(task) + except AttributeError: + self.compiled_tasks = [task] + + if not getattr(self, 'masters', None): + self.masters = {} + self.allmasters = [] + + def fix_path(tsk): + if self.env.CC_NAME == 'msvc': + tsk.env.append_unique('CXX_TGT_F_BATCHED', '/Fo%s\\' % outdir.abspath()) + + if not node.parent in self.masters: + m = self.masters[node.parent] = self.master = self.create_task('batch') + fix_path(m) + self.allmasters.append(m) + else: + m = self.masters[node.parent] + if len(m.slaves) > MAX_BATCH: + m = self.masters[node.parent] = self.master = self.create_task('batch') + fix_path(m) + self.allmasters.append(m) + m.add_slave(task) + return task + return n_hook + +extension('.c')(hook('c')) +extension('.cpp','.cc','.cxx','.C','.c++')(hook('cxx')) + +@feature('cprogram', 'cshlib', 'cstaticlib', 'cxxprogram', 'cxxshlib', 'cxxstlib') +@after_method('apply_link') +def link_after_masters(self): + if getattr(self, 'allmasters', None): + for m in self.allmasters: + self.link_task.set_run_after(m) + +# Modify the c and cxx task classes - in theory it would be best to +# create 
subclasses and to re-map the c/c++ extensions +for x in ('c', 'cxx'): + t = Task.classes[x] + def run(self): + pass + + def post_run(self): + pass + + setattr(t, 'oldrun', getattr(t, 'run', None)) + setattr(t, 'run', run) + setattr(t, 'old_post_run', t.post_run) + setattr(t, 'post_run', post_run) + diff -Nru lilv-0.24.4~dfsg0/waflib/extras/biber.py lilv-0.24.6/waflib/extras/biber.py --- lilv-0.24.4~dfsg0/waflib/extras/biber.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/extras/biber.py 2019-06-06 20:19:08.000000000 +0000 @@ -0,0 +1,58 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Thomas Nagy, 2011 (ita) + +""" +Latex processing using "biber" +""" + +import os +from waflib import Task, Logs + +from waflib.Tools import tex as texmodule + +class tex(texmodule.tex): + biber_fun, _ = Task.compile_fun('${BIBER} ${BIBERFLAGS} ${SRCFILE}',shell=False) + biber_fun.__doc__ = """ + Execute the program **biber** + """ + + def bibfile(self): + return None + + def bibunits(self): + self.env.env = {} + self.env.env.update(os.environ) + self.env.env.update({'BIBINPUTS': self.texinputs(), 'BSTINPUTS': self.texinputs()}) + self.env.SRCFILE = self.aux_nodes[0].name[:-4] + + if not self.env['PROMPT_LATEX']: + self.env.append_unique('BIBERFLAGS', '--quiet') + + path = self.aux_nodes[0].abspath()[:-4] + '.bcf' + if os.path.isfile(path): + Logs.warn('calling biber') + self.check_status('error when calling biber, check %s.blg for errors' % (self.env.SRCFILE), self.biber_fun()) + else: + super(tex, self).bibfile() + super(tex, self).bibunits() + +class latex(tex): + texfun, vars = Task.compile_fun('${LATEX} ${LATEXFLAGS} ${SRCFILE}', shell=False) +class pdflatex(tex): + texfun, vars = Task.compile_fun('${PDFLATEX} ${PDFLATEXFLAGS} ${SRCFILE}', shell=False) +class xelatex(tex): + texfun, vars = Task.compile_fun('${XELATEX} ${XELATEXFLAGS} ${SRCFILE}', shell=False) + +def configure(self): + """ + Almost the same as in tex.py, but try to detect 'biber' + """ + v = self.env + for p in ' biber tex latex pdflatex xelatex bibtex dvips dvipdf ps2pdf makeindex pdf2ps'.split(): + try: + self.find_program(p, var=p.upper()) + except self.errors.ConfigurationError: + pass + v['DVIPSFLAGS'] = '-Ppdf' + diff -Nru lilv-0.24.4~dfsg0/waflib/extras/bjam.py lilv-0.24.6/waflib/extras/bjam.py --- lilv-0.24.4~dfsg0/waflib/extras/bjam.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/extras/bjam.py 2019-06-06 20:19:08.000000000 +0000 @@ -0,0 +1,128 @@ +#! /usr/bin/env python +# per rosengren 2011 + +from os import sep, readlink +from waflib import Logs +from waflib.TaskGen import feature, after_method +from waflib.Task import Task, always_run + +def options(opt): + grp = opt.add_option_group('Bjam Options') + grp.add_option('--bjam_src', default=None, help='You can find it in /tools/jam/src') + grp.add_option('--bjam_uname', default='linuxx86_64', help='bjam is built in /bin./bjam') + grp.add_option('--bjam_config', default=None) + grp.add_option('--bjam_toolset', default=None) + +def configure(cnf): + if not cnf.env.BJAM_SRC: + cnf.env.BJAM_SRC = cnf.options.bjam_src + if not cnf.env.BJAM_UNAME: + cnf.env.BJAM_UNAME = cnf.options.bjam_uname + try: + cnf.find_program('bjam', path_list=[ + cnf.env.BJAM_SRC + sep + 'bin.' 
+ cnf.env.BJAM_UNAME + ]) + except Exception: + cnf.env.BJAM = None + if not cnf.env.BJAM_CONFIG: + cnf.env.BJAM_CONFIG = cnf.options.bjam_config + if not cnf.env.BJAM_TOOLSET: + cnf.env.BJAM_TOOLSET = cnf.options.bjam_toolset + +@feature('bjam') +@after_method('process_rule') +def process_bjam(self): + if not self.bld.env.BJAM: + self.create_task('bjam_creator') + self.create_task('bjam_build') + self.create_task('bjam_installer') + if getattr(self, 'always', False): + always_run(bjam_creator) + always_run(bjam_build) + always_run(bjam_installer) + +class bjam_creator(Task): + ext_out = 'bjam_exe' + vars=['BJAM_SRC', 'BJAM_UNAME'] + def run(self): + env = self.env + gen = self.generator + bjam = gen.bld.root.find_dir(env.BJAM_SRC) + if not bjam: + Logs.error('Can not find bjam source') + return -1 + bjam_exe_relpath = 'bin.' + env.BJAM_UNAME + '/bjam' + bjam_exe = bjam.find_resource(bjam_exe_relpath) + if bjam_exe: + env.BJAM = bjam_exe.srcpath() + return 0 + bjam_cmd = ['./build.sh'] + Logs.debug('runner: ' + bjam.srcpath() + '> ' + str(bjam_cmd)) + result = self.exec_command(bjam_cmd, cwd=bjam.srcpath()) + if not result == 0: + Logs.error('bjam failed') + return -1 + bjam_exe = bjam.find_resource(bjam_exe_relpath) + if bjam_exe: + env.BJAM = bjam_exe.srcpath() + return 0 + Logs.error('bjam failed') + return -1 + +class bjam_build(Task): + ext_in = 'bjam_exe' + ext_out = 'install' + vars = ['BJAM_TOOLSET'] + def run(self): + env = self.env + gen = self.generator + path = gen.path + bld = gen.bld + if hasattr(gen, 'root'): + build_root = path.find_node(gen.root) + else: + build_root = path + jam = bld.srcnode.find_resource(env.BJAM_CONFIG) + if jam: + Logs.debug('bjam: Using jam configuration from ' + jam.srcpath()) + jam_rel = jam.relpath_gen(build_root) + else: + Logs.warn('No build configuration in build_config/user-config.jam. 
Using default') + jam_rel = None + bjam_exe = bld.srcnode.find_node(env.BJAM) + if not bjam_exe: + Logs.error('env.BJAM is not set') + return -1 + bjam_exe_rel = bjam_exe.relpath_gen(build_root) + cmd = ([bjam_exe_rel] + + (['--user-config=' + jam_rel] if jam_rel else []) + + ['--stagedir=' + path.get_bld().path_from(build_root)] + + ['--debug-configuration'] + + ['--with-' + lib for lib in self.generator.target] + + (['toolset=' + env.BJAM_TOOLSET] if env.BJAM_TOOLSET else []) + + ['link=' + 'shared'] + + ['variant=' + 'release'] + ) + Logs.debug('runner: ' + build_root.srcpath() + '> ' + str(cmd)) + ret = self.exec_command(cmd, cwd=build_root.srcpath()) + if ret != 0: + return ret + self.set_outputs(path.get_bld().ant_glob('lib/*') + path.get_bld().ant_glob('bin/*')) + return 0 + +class bjam_installer(Task): + ext_in = 'install' + def run(self): + gen = self.generator + path = gen.path + for idir, pat in (('${LIBDIR}', 'lib/*'), ('${BINDIR}', 'bin/*')): + files = [] + for n in path.get_bld().ant_glob(pat): + try: + t = readlink(n.srcpath()) + gen.bld.symlink_as(sep.join([idir, n.name]), t, postpone=False) + except OSError: + files.append(n) + gen.bld.install_files(idir, files, postpone=False) + return 0 + diff -Nru lilv-0.24.4~dfsg0/waflib/extras/blender.py lilv-0.24.6/waflib/extras/blender.py --- lilv-0.24.4~dfsg0/waflib/extras/blender.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/extras/blender.py 2019-06-06 20:19:08.000000000 +0000 @@ -0,0 +1,108 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Michal Proszek, 2014 (poxip) + +""" +Detect the version of Blender, path +and install the extension: + + def options(opt): + opt.load('blender') + def configure(cnf): + cnf.load('blender') + def build(bld): + bld(name='io_mesh_raw', + feature='blender', + files=['file1.py', 'file2.py'] + ) +If name variable is empty, files are installed in scripts/addons, otherwise scripts/addons/name +Use ./waf configure --system to set the installation directory to system path +""" +import os +import re +from getpass import getuser + +from waflib import Utils +from waflib.TaskGen import feature +from waflib.Configure import conf + +def options(opt): + opt.add_option( + '-s', '--system', + dest='directory_system', + default=False, + action='store_true', + help='determines installation directory (default: user)' + ) + +@conf +def find_blender(ctx): + '''Return version number of blender, if not exist return None''' + blender = ctx.find_program('blender') + output = ctx.cmd_and_log(blender + ['--version']) + m = re.search(r'Blender\s*((\d+(\.|))*)', output) + if not m: + ctx.fatal('Could not retrieve blender version') + + try: + blender_version = m.group(1) + except IndexError: + ctx.fatal('Could not retrieve blender version') + + ctx.env['BLENDER_VERSION'] = blender_version + return blender + +@conf +def configure_paths(ctx): + """Setup blender paths""" + # Get the username + user = getuser() + _platform = Utils.unversioned_sys_platform() + config_path = {'user': '', 'system': ''} + if _platform.startswith('linux'): + config_path['user'] = '/home/%s/.config/blender/' % user + config_path['system'] = '/usr/share/blender/' + elif _platform == 'darwin': + # MAC OS X + config_path['user'] = \ + '/Users/%s/Library/Application Support/Blender/' % user + config_path['system'] = '/Library/Application Support/Blender/' + elif Utils.is_win32: + # Windows + appdata_path = ctx.getenv('APPDATA').replace('\\', '/') + homedrive = ctx.getenv('HOMEDRIVE').replace('\\', '/') + + config_path['user'] = '%s/Blender 
Foundation/Blender/' % appdata_path
+        config_path['system'] = \
+            '%sAll Users/AppData/Roaming/Blender Foundation/Blender/' % homedrive
+    else:
+        ctx.fatal(
+            'Unsupported platform. '
+            'Available platforms: Linux, OSX, MS-Windows.'
+        )
+
+    blender_version = ctx.env['BLENDER_VERSION']
+
+    config_path['user'] += blender_version + '/'
+    config_path['system'] += blender_version + '/'
+
+    ctx.env['BLENDER_CONFIG_DIR'] = os.path.abspath(config_path['user'])
+    if ctx.options.directory_system:
+        ctx.env['BLENDER_CONFIG_DIR'] = config_path['system']
+
+    ctx.env['BLENDER_ADDONS_DIR'] = os.path.join(
+        ctx.env['BLENDER_CONFIG_DIR'], 'scripts/addons'
+    )
+    Utils.check_dir(ctx.env['BLENDER_ADDONS_DIR'])
+
+def configure(ctx):
+    ctx.find_blender()
+    ctx.configure_paths()
+
+@feature('blender_list')
+def blender(self):
+    # Two ways to install a blender extension: as a module or just .py files
+    dest_dir = os.path.join(self.env.BLENDER_ADDONS_DIR, self.get_name())
+    Utils.check_dir(dest_dir)
+    self.add_install_files(install_to=dest_dir, install_from=getattr(self, 'files', '.'))
+
diff -Nru lilv-0.24.4~dfsg0/waflib/extras/boo.py lilv-0.24.6/waflib/extras/boo.py
--- lilv-0.24.4~dfsg0/waflib/extras/boo.py	1970-01-01 00:00:00.000000000 +0000
+++ lilv-0.24.6/waflib/extras/boo.py	2019-06-06 20:19:08.000000000 +0000
@@ -0,0 +1,81 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# Yannick LM 2011
+
+"""
+Support for the boo programming language, for example::
+
+	bld(features = "boo",       # necessary feature
+		source   = "src.boo",   # list of boo files
+		gen      = "world.dll", # target
+		type     = "library",   # library/exe ("-target:xyz" flag)
+		name     = "world"      # necessary if the target is referenced by 'use'
+		)
+"""
+
+from waflib import Task
+from waflib.Configure import conf
+from waflib.TaskGen import feature, after_method, before_method, extension
+
+@extension('.boo')
+def boo_hook(self, node):
+	# Nothing here yet ...
+	# TODO filter the non-boo source files in 'apply_booc' and remove this method
+	pass
+
+@feature('boo')
+@before_method('process_source')
+def apply_booc(self):
+	"""Create a booc task """
+	src_nodes = self.to_nodes(self.source)
+	out_node = self.path.find_or_declare(self.gen)
+
+	self.boo_task = self.create_task('booc', src_nodes, [out_node])
+
+	# Set variables used by the 'booc' task
+	self.boo_task.env.OUT = '-o:%s' % out_node.abspath()
+
+	# type is "exe" by default
+	type = getattr(self, "type", "exe")
+	self.boo_task.env.BOO_TARGET_TYPE = "-target:%s" % type
+
+@feature('boo')
+@after_method('apply_boo')
+def use_boo(self):
+	"""
+	boo applications honor the **use** keyword::
+	"""
+	dep_names = self.to_list(getattr(self, 'use', []))
+	for dep_name in dep_names:
+		dep_task_gen = self.bld.get_tgen_by_name(dep_name)
+		if not dep_task_gen:
+			continue
+		dep_task_gen.post()
+		dep_task = getattr(dep_task_gen, 'boo_task', None)
+		if not dep_task:
+			# Try a cs task:
+			dep_task = getattr(dep_task_gen, 'cs_task', None)
+			if not dep_task:
+				# Try a link task:
+				dep_task = getattr(dep_task_gen, 'link_task', None)
+				if not dep_task:
+					# Abort ...
+					continue
+		self.boo_task.set_run_after(dep_task) # order
+		self.boo_task.dep_nodes.extend(dep_task.outputs) # dependency
+		self.boo_task.env.append_value('BOO_FLAGS', '-reference:%s' % dep_task.outputs[0].abspath())
+
+class booc(Task.Task):
+	"""Compiles .boo files """
+	color = 'YELLOW'
+	run_str = '${BOOC} ${BOO_FLAGS} ${BOO_TARGET_TYPE} ${OUT} ${SRC}'
+
+@conf
+def check_booc(self):
+	self.find_program('booc', 'BOOC')
+	self.env.BOO_FLAGS = ['-nologo']
+
+def configure(self):
+	"""Check that booc is available """
+	self.check_booc()
+
diff -Nru lilv-0.24.4~dfsg0/waflib/extras/boost.py lilv-0.24.6/waflib/extras/boost.py
--- lilv-0.24.4~dfsg0/waflib/extras/boost.py	1970-01-01 00:00:00.000000000 +0000
+++ lilv-0.24.6/waflib/extras/boost.py	2019-06-06 20:19:08.000000000 +0000
@@ -0,0 +1,525 @@
+#!/usr/bin/env python
+# encoding: utf-8
+#
+# partially based on boost.py written by Gernot Vormayr
+# written by Ruediger Sonderfeld , 2008
+# modified by Bjoern Michaelsen, 2008
+# modified by Luca Fossati, 2008
+# rewritten for waf 1.5.1, Thomas Nagy, 2008
+# rewritten for waf 1.6.2, Sylvain Rouquette, 2011
+
+'''
+
+This is an extra tool, not bundled with the default waf binary.
+To add the boost tool to the waf file:
+$ ./waf-light --tools=compat15,boost
+	or, if you have waf >= 1.6.2
+$ ./waf update --files=boost
+
+When using this tool, the wscript will look like:
+
+	def options(opt):
+		opt.load('compiler_cxx boost')
+
+	def configure(conf):
+		conf.load('compiler_cxx boost')
+		conf.check_boost(lib='system filesystem')
+
+	def build(bld):
+		bld(source='main.cpp', target='app', use='BOOST')
+
+Options are generated, in order to specify the location of boost includes/libraries.
+The `check_boost` configuration function lets you specify the boost libraries to use.
+It can also provide default values for the corresponding command-line options (e.g. --boost-mt).
+Everything will be packaged together in a BOOST component that you can use.
+
+When using MSVC, a lot of compilation flags need to match your BOOST build configuration:
+ - you may have to add /EHsc to your CXXFLAGS or define boost::throw_exception if BOOST_NO_EXCEPTIONS is defined.
+   Errors: C4530
+ - boost libraries will try to be smart and use the (pretty but often not useful) auto-linking feature of MSVC,
+   so before calling `conf.check_boost` you may want to disable it by adding
+		conf.env.DEFINES_BOOST += ['BOOST_ALL_NO_LIB']
+   Errors:
+ - boost might also be compiled with /MT, which links the runtime statically.
+   If you have problems with redefined symbols,
+		self.env['DEFINES_%s' % var] += ['BOOST_ALL_NO_LIB']
+		self.env['CXXFLAGS_%s' % var] += ['/MD', '/EHsc']
+Passing `--boost-linkage_autodetect` may help to ensure correct linkage in some basic cases.
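+
+As a sketch only (the library list is an assumption, adjust it to your
+project), the MSVC hints above can be combined in one configure body:
+
+	def configure(conf):
+		conf.load('compiler_cxx boost')
+		if conf.env.CXX_NAME == 'msvc':
+			conf.env.DEFINES_BOOST += ['BOOST_ALL_NO_LIB']
+			conf.env.CXXFLAGS_BOOST += ['/MD', '/EHsc']
+		conf.check_boost(lib='system filesystem')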
+ +''' + +import sys +import re +from waflib import Utils, Logs, Errors +from waflib.Configure import conf +from waflib.TaskGen import feature, after_method + +BOOST_LIBS = ['/usr/lib', '/usr/local/lib', '/opt/local/lib', '/sw/lib', '/lib'] +BOOST_INCLUDES = ['/usr/include', '/usr/local/include', '/opt/local/include', '/sw/include'] +BOOST_VERSION_FILE = 'boost/version.hpp' +BOOST_VERSION_CODE = ''' +#include +#include +int main() { std::cout << BOOST_LIB_VERSION << ":" << BOOST_VERSION << std::endl; } +''' + +BOOST_ERROR_CODE = ''' +#include +int main() { boost::system::error_code c; } +''' + +PTHREAD_CODE = ''' +#include +static void* f(void*) { return 0; } +int main() { + pthread_t th; + pthread_attr_t attr; + pthread_attr_init(&attr); + pthread_create(&th, &attr, &f, 0); + pthread_join(th, 0); + pthread_cleanup_push(0, 0); + pthread_cleanup_pop(0); + pthread_attr_destroy(&attr); +} +''' + +BOOST_THREAD_CODE = ''' +#include +int main() { boost::thread t; } +''' + +BOOST_LOG_CODE = ''' +#include +#include +#include +int main() { + using namespace boost::log; + add_common_attributes(); + add_console_log(std::clog, keywords::format = "%Message%"); + BOOST_LOG_TRIVIAL(debug) << "log is working" << std::endl; +} +''' + +# toolsets from {boost_dir}/tools/build/v2/tools/common.jam +PLATFORM = Utils.unversioned_sys_platform() +detect_intel = lambda env: (PLATFORM == 'win32') and 'iw' or 'il' +detect_clang = lambda env: (PLATFORM == 'darwin') and 'clang-darwin' or 'clang' +detect_mingw = lambda env: (re.search('MinGW', env.CXX[0])) and 'mgw' or 'gcc' +BOOST_TOOLSETS = { + 'borland': 'bcb', + 'clang': detect_clang, + 'como': 'como', + 'cw': 'cw', + 'darwin': 'xgcc', + 'edg': 'edg', + 'g++': detect_mingw, + 'gcc': detect_mingw, + 'icpc': detect_intel, + 'intel': detect_intel, + 'kcc': 'kcc', + 'kylix': 'bck', + 'mipspro': 'mp', + 'mingw': 'mgw', + 'msvc': 'vc', + 'qcc': 'qcc', + 'sun': 'sw', + 'sunc++': 'sw', + 'tru64cxx': 'tru', + 'vacpp': 'xlc' +} + + +def options(opt): + opt = opt.add_option_group('Boost Options') + opt.add_option('--boost-includes', type='string', + default='', dest='boost_includes', + help='''path to the directory where the boost includes are, + e.g., /path/to/boost_1_55_0/stage/include''') + opt.add_option('--boost-libs', type='string', + default='', dest='boost_libs', + help='''path to the directory where the boost libs are, + e.g., path/to/boost_1_55_0/stage/lib''') + opt.add_option('--boost-mt', action='store_true', + default=False, dest='boost_mt', + help='select multi-threaded libraries') + opt.add_option('--boost-abi', type='string', default='', dest='boost_abi', + help='''select libraries with tags (gd for debug, static is automatically added), + see doc Boost, Getting Started, chapter 6.1''') + opt.add_option('--boost-linkage_autodetect', action="store_true", dest='boost_linkage_autodetect', + help="auto-detect boost linkage options (don't get used to it / might break other stuff)") + opt.add_option('--boost-toolset', type='string', + default='', dest='boost_toolset', + help='force a toolset e.g. 
msvc, vc90, \ + gcc, mingw, mgw45 (default: auto)') + py_version = '%d%d' % (sys.version_info[0], sys.version_info[1]) + opt.add_option('--boost-python', type='string', + default=py_version, dest='boost_python', + help='select the lib python with this version \ + (default: %s)' % py_version) + + +@conf +def __boost_get_version_file(self, d): + if not d: + return None + dnode = self.root.find_dir(d) + if dnode: + return dnode.find_node(BOOST_VERSION_FILE) + return None + +@conf +def boost_get_version(self, d): + """silently retrieve the boost version number""" + node = self.__boost_get_version_file(d) + if node: + try: + txt = node.read() + except EnvironmentError: + Logs.error("Could not read the file %r", node.abspath()) + else: + re_but1 = re.compile('^#define\\s+BOOST_LIB_VERSION\\s+"(.+)"', re.M) + m1 = re_but1.search(txt) + re_but2 = re.compile('^#define\\s+BOOST_VERSION\\s+(\\d+)', re.M) + m2 = re_but2.search(txt) + if m1 and m2: + return (m1.group(1), m2.group(1)) + return self.check_cxx(fragment=BOOST_VERSION_CODE, includes=[d], execute=True, define_ret=True).split(":") + +@conf +def boost_get_includes(self, *k, **kw): + includes = k and k[0] or kw.get('includes') + if includes and self.__boost_get_version_file(includes): + return includes + for d in self.environ.get('INCLUDE', '').split(';') + BOOST_INCLUDES: + if self.__boost_get_version_file(d): + return d + if includes: + self.end_msg('headers not found in %s' % includes) + self.fatal('The configuration failed') + else: + self.end_msg('headers not found, please provide a --boost-includes argument (see help)') + self.fatal('The configuration failed') + + +@conf +def boost_get_toolset(self, cc): + toolset = cc + if not cc: + build_platform = Utils.unversioned_sys_platform() + if build_platform in BOOST_TOOLSETS: + cc = build_platform + else: + cc = self.env.CXX_NAME + if cc in BOOST_TOOLSETS: + toolset = BOOST_TOOLSETS[cc] + return isinstance(toolset, str) and toolset or toolset(self.env) + + +@conf +def __boost_get_libs_path(self, *k, **kw): + ''' return the lib path and all the files in it ''' + if 'files' in kw: + return self.root.find_dir('.'), Utils.to_list(kw['files']) + libs = k and k[0] or kw.get('libs') + if libs: + path = self.root.find_dir(libs) + files = path.ant_glob('*boost_*') + if not libs or not files: + for d in self.environ.get('LIB', '').split(';') + BOOST_LIBS: + if not d: + continue + path = self.root.find_dir(d) + if path: + files = path.ant_glob('*boost_*') + if files: + break + path = self.root.find_dir(d + '64') + if path: + files = path.ant_glob('*boost_*') + if files: + break + if not path: + if libs: + self.end_msg('libs not found in %s' % libs) + self.fatal('The configuration failed') + else: + self.end_msg('libs not found, please provide a --boost-libs argument (see help)') + self.fatal('The configuration failed') + + self.to_log('Found the boost path in %r with the libraries:' % path) + for x in files: + self.to_log(' %r' % x) + return path, files + +@conf +def boost_get_libs(self, *k, **kw): + ''' + return the lib path and the required libs + according to the parameters + ''' + path, files = self.__boost_get_libs_path(**kw) + files = sorted(files, key=lambda f: (len(f.name), f.name), reverse=True) + toolset = self.boost_get_toolset(kw.get('toolset', '')) + toolset_pat = '(-%s[0-9]{0,3})' % toolset + version = '-%s' % self.env.BOOST_VERSION + + def find_lib(re_lib, files): + for file in files: + if re_lib.search(file.name): + self.to_log('Found boost lib %s' % file) + return file + return None + + 
def format_lib_name(name): + if name.startswith('lib') and self.env.CC_NAME != 'msvc': + name = name[3:] + return name[:name.rfind('.')] + + def match_libs(lib_names, is_static): + libs = [] + lib_names = Utils.to_list(lib_names) + if not lib_names: + return libs + t = [] + if kw.get('mt', False): + t.append('-mt') + if kw.get('abi'): + t.append('%s%s' % (is_static and '-s' or '-', kw['abi'])) + elif is_static: + t.append('-s') + tags_pat = t and ''.join(t) or '' + ext = is_static and self.env.cxxstlib_PATTERN or self.env.cxxshlib_PATTERN + ext = ext.partition('%s')[2] # remove '%s' or 'lib%s' from PATTERN + + for lib in lib_names: + if lib == 'python': + # for instance, with python='27', + # accepts '-py27', '-py2', '27', '-2.7' and '2' + # but will reject '-py3', '-py26', '26' and '3' + tags = '({0})?((-py{2})|(-py{1}(?=[^0-9]))|({2})|(-{1}.{3})|({1}(?=[^0-9]))|(?=[^0-9])(?!-py))'.format(tags_pat, kw['python'][0], kw['python'], kw['python'][1]) + else: + tags = tags_pat + # Trying libraries, from most strict match to least one + for pattern in ['boost_%s%s%s%s%s$' % (lib, toolset_pat, tags, version, ext), + 'boost_%s%s%s%s$' % (lib, tags, version, ext), + # Give up trying to find the right version + 'boost_%s%s%s%s$' % (lib, toolset_pat, tags, ext), + 'boost_%s%s%s$' % (lib, tags, ext), + 'boost_%s%s$' % (lib, ext), + 'boost_%s' % lib]: + self.to_log('Trying pattern %s' % pattern) + file = find_lib(re.compile(pattern), files) + if file: + libs.append(format_lib_name(file.name)) + break + else: + self.end_msg('lib %s not found in %s' % (lib, path.abspath())) + self.fatal('The configuration failed') + return libs + + return path.abspath(), match_libs(kw.get('lib'), False), match_libs(kw.get('stlib'), True) + +@conf +def _check_pthread_flag(self, *k, **kw): + ''' + Computes which flags should be added to CXXFLAGS and LINKFLAGS to compile in multi-threading mode + + Yes, we *need* to put the -pthread thing in CPPFLAGS because with GCC3, + boost/thread.hpp will trigger a #error if -pthread isn't used: + boost/config/requires_threads.hpp:47:5: #error "Compiler threading support + is not turned on. Please set the correct command line options for + threading: -pthread (Linux), -pthreads (Solaris) or -mthreads (Mingw32)" + + Based on _BOOST_PTHREAD_FLAG(): https://github.com/tsuna/boost.m4/blob/master/build-aux/boost.m4 + ''' + + var = kw.get('uselib_store', 'BOOST') + + self.start_msg('Checking the flags needed to use pthreads') + + # The ordering *is* (sometimes) important. Some notes on the + # individual items follow: + # (none): in case threads are in libc; should be tried before -Kthread and + # other compiler flags to prevent continual compiler warnings + # -lpthreads: AIX (must check this before -lpthread) + # -Kthread: Sequent (threads in libc, but -Kthread needed for pthread.h) + # -kthread: FreeBSD kernel threads (preferred to -pthread since SMP-able) + # -llthread: LinuxThreads port on FreeBSD (also preferred to -pthread) + # -pthread: GNU Linux/GCC (kernel threads), BSD/GCC (userland threads) + # -pthreads: Solaris/GCC + # -mthreads: MinGW32/GCC, Lynx/GCC + # -mt: Sun Workshop C (may only link SunOS threads [-lthread], but it + # doesn't hurt to check since this sometimes defines pthreads too; + # also defines -D_REENTRANT) + # ... -mt is also the pthreads flag for HP/aCC + # -lpthread: GNU Linux, etc. 
+ # --thread-safe: KAI C++ + if Utils.unversioned_sys_platform() == "sunos": + # On Solaris (at least, for some versions), libc contains stubbed + # (non-functional) versions of the pthreads routines, so link-based + # tests will erroneously succeed. (We need to link with -pthreads/-mt/ + # -lpthread.) (The stubs are missing pthread_cleanup_push, or rather + # a function called by this macro, so we could check for that, but + # who knows whether they'll stub that too in a future libc.) So, + # we'll just look for -pthreads and -lpthread first: + boost_pthread_flags = ["-pthreads", "-lpthread", "-mt", "-pthread"] + else: + boost_pthread_flags = ["", "-lpthreads", "-Kthread", "-kthread", "-llthread", "-pthread", + "-pthreads", "-mthreads", "-lpthread", "--thread-safe", "-mt"] + + for boost_pthread_flag in boost_pthread_flags: + try: + self.env.stash() + self.env.append_value('CXXFLAGS_%s' % var, boost_pthread_flag) + self.env.append_value('LINKFLAGS_%s' % var, boost_pthread_flag) + self.check_cxx(code=PTHREAD_CODE, msg=None, use=var, execute=False) + + self.end_msg(boost_pthread_flag) + return + except self.errors.ConfigurationError: + self.env.revert() + self.end_msg('None') + +@conf +def check_boost(self, *k, **kw): + """ + Initialize boost libraries to be used. + + Keywords: you can pass the same parameters as with the command line (without "--boost-"). + Note that the command line has the priority, and should preferably be used. + """ + if not self.env['CXX']: + self.fatal('load a c++ compiler first, conf.load("compiler_cxx")') + + params = { + 'lib': k and k[0] or kw.get('lib'), + 'stlib': kw.get('stlib') + } + for key, value in self.options.__dict__.items(): + if not key.startswith('boost_'): + continue + key = key[len('boost_'):] + params[key] = value and value or kw.get(key, '') + + var = kw.get('uselib_store', 'BOOST') + + self.find_program('dpkg-architecture', var='DPKG_ARCHITECTURE', mandatory=False) + if self.env.DPKG_ARCHITECTURE: + deb_host_multiarch = self.cmd_and_log([self.env.DPKG_ARCHITECTURE[0], '-qDEB_HOST_MULTIARCH']) + BOOST_LIBS.insert(0, '/usr/lib/%s' % deb_host_multiarch.strip()) + + self.start_msg('Checking boost includes') + self.env['INCLUDES_%s' % var] = inc = self.boost_get_includes(**params) + versions = self.boost_get_version(inc) + self.env.BOOST_VERSION = versions[0] + self.env.BOOST_VERSION_NUMBER = int(versions[1]) + self.end_msg("%d.%d.%d" % (int(versions[1]) / 100000, + int(versions[1]) / 100 % 1000, + int(versions[1]) % 100)) + if Logs.verbose: + Logs.pprint('CYAN', ' path : %s' % self.env['INCLUDES_%s' % var]) + + if not params['lib'] and not params['stlib']: + return + if 'static' in kw or 'static' in params: + Logs.warn('boost: static parameter is deprecated, use stlib instead.') + self.start_msg('Checking boost libs') + path, libs, stlibs = self.boost_get_libs(**params) + self.env['LIBPATH_%s' % var] = [path] + self.env['STLIBPATH_%s' % var] = [path] + self.env['LIB_%s' % var] = libs + self.env['STLIB_%s' % var] = stlibs + self.end_msg('ok') + if Logs.verbose: + Logs.pprint('CYAN', ' path : %s' % path) + Logs.pprint('CYAN', ' shared libs : %s' % libs) + Logs.pprint('CYAN', ' static libs : %s' % stlibs) + + def has_shlib(lib): + return params['lib'] and lib in params['lib'] + def has_stlib(lib): + return params['stlib'] and lib in params['stlib'] + def has_lib(lib): + return has_shlib(lib) or has_stlib(lib) + if has_lib('thread'): + # not inside try_link to make check visible in the output + self._check_pthread_flag(k, kw) + + def try_link(): + if 
has_lib('system'): + self.check_cxx(fragment=BOOST_ERROR_CODE, use=var, execute=False) + if has_lib('thread'): + self.check_cxx(fragment=BOOST_THREAD_CODE, use=var, execute=False) + if has_lib('log'): + if not has_lib('thread'): + self.env['DEFINES_%s' % var] += ['BOOST_LOG_NO_THREADS'] + if has_shlib('log'): + self.env['DEFINES_%s' % var] += ['BOOST_LOG_DYN_LINK'] + self.check_cxx(fragment=BOOST_LOG_CODE, use=var, execute=False) + + if params.get('linkage_autodetect', False): + self.start_msg("Attempting to detect boost linkage flags") + toolset = self.boost_get_toolset(kw.get('toolset', '')) + if toolset in ('vc',): + # disable auto-linking feature, causing error LNK1181 + # because the code wants to be linked against + self.env['DEFINES_%s' % var] += ['BOOST_ALL_NO_LIB'] + + # if no dlls are present, we guess the .lib files are not stubs + has_dlls = False + for x in Utils.listdir(path): + if x.endswith(self.env.cxxshlib_PATTERN % ''): + has_dlls = True + break + if not has_dlls: + self.env['STLIBPATH_%s' % var] = [path] + self.env['STLIB_%s' % var] = libs + del self.env['LIB_%s' % var] + del self.env['LIBPATH_%s' % var] + + # we attempt to play with some known-to-work CXXFLAGS combinations + for cxxflags in (['/MD', '/EHsc'], []): + self.env.stash() + self.env["CXXFLAGS_%s" % var] += cxxflags + try: + try_link() + except Errors.ConfigurationError as e: + self.env.revert() + exc = e + else: + self.end_msg("ok: winning cxxflags combination: %s" % (self.env["CXXFLAGS_%s" % var])) + exc = None + self.env.commit() + break + + if exc is not None: + self.end_msg("Could not auto-detect boost linking flags combination, you may report it to boost.py author", ex=exc) + self.fatal('The configuration failed') + else: + self.end_msg("Boost linkage flags auto-detection not implemented (needed ?) for this toolchain") + self.fatal('The configuration failed') + else: + self.start_msg('Checking for boost linkage') + try: + try_link() + except Errors.ConfigurationError as e: + self.end_msg("Could not link against boost libraries using supplied options") + self.fatal('The configuration failed') + self.end_msg('ok') + + +@feature('cxx') +@after_method('apply_link') +def install_boost(self): + if install_boost.done or not Utils.is_win32 or not self.bld.cmd.startswith('install'): + return + install_boost.done = True + inst_to = getattr(self, 'install_path', '${BINDIR}') + for lib in self.env.LIB_BOOST: + try: + file = self.bld.find_file(self.env.cxxshlib_PATTERN % lib, self.env.LIBPATH_BOOST) + self.add_install_files(install_to=inst_to, install_from=self.bld.root.find_node(file)) + except: + continue +install_boost.done = False + diff -Nru lilv-0.24.4~dfsg0/waflib/extras/buildcopy.py lilv-0.24.6/waflib/extras/buildcopy.py --- lilv-0.24.4~dfsg0/waflib/extras/buildcopy.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/extras/buildcopy.py 2019-06-06 20:19:08.000000000 +0000 @@ -0,0 +1,85 @@ +#! /usr/bin/env python +# encoding: utf-8 +# Calle Rosenquist, 2017 (xbreak) +""" +Create task that copies source files to the associated build node. +This is useful to e.g. construct a complete Python package so it can be unit tested +without installation. + +Source files to be copied can be specified either in `buildcopy_source` attribute, or +`source` attribute. If both are specified `buildcopy_source` has priority. 
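+
+Note that only regular files can be copied; a directory in the source list
+raises an error (see `to_src_nodes` below).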
+ +Examples:: + + def build(bld): + bld(name = 'bar', + features = 'py buildcopy', + source = bld.path.ant_glob('src/bar/*.py')) + + bld(name = 'py baz', + features = 'buildcopy', + buildcopy_source = bld.path.ant_glob('src/bar/*.py') + ['src/bar/resource.txt']) + +""" +import os, shutil +from waflib import Errors, Task, TaskGen, Utils, Node, Logs + +@TaskGen.before_method('process_source') +@TaskGen.feature('buildcopy') +def make_buildcopy(self): + """ + Creates the buildcopy task. + """ + def to_src_nodes(lst): + """Find file nodes only in src, TaskGen.to_nodes will not work for this since it gives + preference to nodes in build. + """ + if isinstance(lst, Node.Node): + if not lst.is_src(): + raise Errors.WafError('buildcopy: node %s is not in src'%lst) + if not os.path.isfile(lst.abspath()): + raise Errors.WafError('buildcopy: Cannot copy directory %s (unsupported action)'%lst) + return lst + + if isinstance(lst, str): + lst = [x for x in Utils.split_path(lst) if x and x != '.'] + + node = self.bld.path.get_src().search_node(lst) + if node: + if not os.path.isfile(node.abspath()): + raise Errors.WafError('buildcopy: Cannot copy directory %s (unsupported action)'%node) + return node + + node = self.bld.path.get_src().find_node(lst) + if node: + if not os.path.isfile(node.abspath()): + raise Errors.WafError('buildcopy: Cannot copy directory %s (unsupported action)'%node) + return node + raise Errors.WafError('buildcopy: File not found in src: %s'%os.path.join(*lst)) + + nodes = [ to_src_nodes(n) for n in getattr(self, 'buildcopy_source', getattr(self, 'source', [])) ] + if not nodes: + Logs.warn('buildcopy: No source files provided to buildcopy in %s (set `buildcopy_source` or `source`)', + self) + return + node_pairs = [(n, n.get_bld()) for n in nodes] + self.create_task('buildcopy', [n[0] for n in node_pairs], [n[1] for n in node_pairs], node_pairs=node_pairs) + +class buildcopy(Task.Task): + """ + Copy for each pair `n` in `node_pairs`: n[0] -> n[1]. + + Attribute `node_pairs` should contain a list of tuples describing source and target: + + node_pairs = [(in, out), ...] + + """ + color = 'PINK' + + def keyword(self): + return 'Copying' + + def run(self): + for f,t in self.node_pairs: + t.parent.mkdir() + shutil.copy2(f.abspath(), t.abspath()) diff -Nru lilv-0.24.4~dfsg0/waflib/extras/build_file_tracker.py lilv-0.24.6/waflib/extras/build_file_tracker.py --- lilv-0.24.4~dfsg0/waflib/extras/build_file_tracker.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/extras/build_file_tracker.py 2019-06-06 20:19:08.000000000 +0000 @@ -0,0 +1,28 @@ +#! /usr/bin/env python +# encoding: utf-8 +# Thomas Nagy, 2015 + +""" +Force files to depend on the timestamps of those located in the build directory. You may +want to use this to force partial rebuilds, see playground/track_output_files/ for a working example. 
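+
+One possible way to enable it (a sketch; loading the tool simply imports the
+module, which patches Node.get_bld_sig globally):
+
+	def build(bld):
+		bld.load('build_file_tracker')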
+ +Note that there is a variety of ways to implement this, one may want use timestamps on source files too for example, +or one may want to hash the files in the source directory only under certain conditions (md5_tstamp tool) +or to hash the file in the build directory with its timestamp +""" + +import os +from waflib import Node, Utils + +def get_bld_sig(self): + if not self.is_bld() or self.ctx.bldnode is self.ctx.srcnode: + return Utils.h_file(self.abspath()) + + try: + # add the creation time to the signature + return self.sig + str(os.stat(self.abspath()).st_mtime) + except AttributeError: + return None + +Node.Node.get_bld_sig = get_bld_sig + diff -Nru lilv-0.24.4~dfsg0/waflib/extras/build_logs.py lilv-0.24.6/waflib/extras/build_logs.py --- lilv-0.24.4~dfsg0/waflib/extras/build_logs.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/extras/build_logs.py 2019-06-06 20:19:08.000000000 +0000 @@ -0,0 +1,110 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Thomas Nagy, 2013 (ita) + +""" +A system for recording all outputs to a log file. Just add the following to your wscript file:: + + def init(ctx): + ctx.load('build_logs') +""" + +import atexit, sys, time, os, shutil, threading +from waflib import ansiterm, Logs, Context + +# adding the logs under the build/ directory will clash with the clean/ command +try: + up = os.path.dirname(Context.g_module.__file__) +except AttributeError: + up = '.' +LOGFILE = os.path.join(up, 'logs', time.strftime('%Y_%m_%d_%H_%M.log')) + +wlock = threading.Lock() +class log_to_file(object): + def __init__(self, stream, fileobj, filename): + self.stream = stream + self.encoding = self.stream.encoding + self.fileobj = fileobj + self.filename = filename + self.is_valid = True + def replace_colors(self, data): + for x in Logs.colors_lst.values(): + if isinstance(x, str): + data = data.replace(x, '') + return data + def write(self, data): + try: + wlock.acquire() + self.stream.write(data) + self.stream.flush() + if self.is_valid: + self.fileobj.write(self.replace_colors(data)) + finally: + wlock.release() + def fileno(self): + return self.stream.fileno() + def flush(self): + self.stream.flush() + if self.is_valid: + self.fileobj.flush() + def isatty(self): + return self.stream.isatty() + +def init(ctx): + global LOGFILE + filename = os.path.abspath(LOGFILE) + try: + os.makedirs(os.path.dirname(os.path.abspath(filename))) + except OSError: + pass + + if hasattr(os, 'O_NOINHERIT'): + fd = os.open(LOGFILE, os.O_CREAT | os.O_TRUNC | os.O_WRONLY | os.O_NOINHERIT) + fileobj = os.fdopen(fd, 'w') + else: + fileobj = open(LOGFILE, 'w') + old_stderr = sys.stderr + + # sys.stdout has already been replaced, so __stdout__ will be faster + #sys.stdout = log_to_file(sys.stdout, fileobj, filename) + #sys.stderr = log_to_file(sys.stderr, fileobj, filename) + def wrap(stream): + if stream.isatty(): + return ansiterm.AnsiTerm(stream) + return stream + sys.stdout = log_to_file(wrap(sys.__stdout__), fileobj, filename) + sys.stderr = log_to_file(wrap(sys.__stderr__), fileobj, filename) + + # now mess with the logging module... 
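+	# handlers that still hold a reference to the old stderr stream would
+	# write past the log file, so re-point them at the replacement object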
+ for x in Logs.log.handlers: + try: + stream = x.stream + except AttributeError: + pass + else: + if id(stream) == id(old_stderr): + x.stream = sys.stderr + +def exit_cleanup(): + try: + fileobj = sys.stdout.fileobj + except AttributeError: + pass + else: + sys.stdout.is_valid = False + sys.stderr.is_valid = False + fileobj.close() + filename = sys.stdout.filename + + Logs.info('Output logged to %r', filename) + + # then copy the log file to "latest.log" if possible + up = os.path.dirname(os.path.abspath(filename)) + try: + shutil.copy(filename, os.path.join(up, 'latest.log')) + except OSError: + # this may fail on windows due to processes spawned + pass + +atexit.register(exit_cleanup) + diff -Nru lilv-0.24.4~dfsg0/waflib/extras/cabal.py lilv-0.24.6/waflib/extras/cabal.py --- lilv-0.24.4~dfsg0/waflib/extras/cabal.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/extras/cabal.py 2019-06-06 20:19:08.000000000 +0000 @@ -0,0 +1,152 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Anton Feldmann, 2012 +# "Base for cabal" + +from waflib import Task, Utils +from waflib.TaskGen import extension +from waflib.Utils import threading +from shutil import rmtree + +lock = threading.Lock() +registering = False + +def configure(self): + self.find_program('cabal', var='CABAL') + self.find_program('ghc-pkg', var='GHCPKG') + pkgconfd = self.bldnode.abspath() + '/package.conf.d' + self.env.PREFIX = self.bldnode.abspath() + '/dist' + self.env.PKGCONFD = pkgconfd + if self.root.find_node(pkgconfd + '/package.cache'): + self.msg('Using existing package database', pkgconfd, color='CYAN') + else: + pkgdir = self.root.find_dir(pkgconfd) + if pkgdir: + self.msg('Deleting corrupt package database', pkgdir.abspath(), color ='RED') + rmtree(pkgdir.abspath()) + pkgdir = None + + self.cmd_and_log(self.env.GHCPKG + ['init', pkgconfd]) + self.msg('Created package database', pkgconfd, color = 'YELLOW' if pkgdir else 'GREEN') + +@extension('.cabal') +def process_cabal(self, node): + out_dir_node = self.bld.root.find_dir(self.bld.out_dir) + package_node = node.change_ext('.package') + package_node = out_dir_node.find_or_declare(package_node.name) + build_node = node.parent.get_bld() + build_path = build_node.abspath() + config_node = build_node.find_or_declare('setup-config') + inplace_node = build_node.find_or_declare('package.conf.inplace') + + config_task = self.create_task('cabal_configure', node) + config_task.cwd = node.parent.abspath() + config_task.depends_on = getattr(self, 'depends_on', '') + config_task.build_path = build_path + config_task.set_outputs(config_node) + + build_task = self.create_task('cabal_build', config_node) + build_task.cwd = node.parent.abspath() + build_task.build_path = build_path + build_task.set_outputs(inplace_node) + + copy_task = self.create_task('cabal_copy', inplace_node) + copy_task.cwd = node.parent.abspath() + copy_task.depends_on = getattr(self, 'depends_on', '') + copy_task.build_path = build_path + + last_task = copy_task + task_list = [config_task, build_task, copy_task] + + if (getattr(self, 'register', False)): + register_task = self.create_task('cabal_register', inplace_node) + register_task.cwd = node.parent.abspath() + register_task.set_run_after(copy_task) + register_task.build_path = build_path + + pkgreg_task = self.create_task('ghcpkg_register', inplace_node) + pkgreg_task.cwd = node.parent.abspath() + pkgreg_task.set_run_after(register_task) + pkgreg_task.build_path = build_path + + last_task = pkgreg_task + task_list += [register_task, pkgreg_task] + + 
touch_task = self.create_task('cabal_touch', inplace_node) + touch_task.set_run_after(last_task) + touch_task.set_outputs(package_node) + touch_task.build_path = build_path + + task_list += [touch_task] + + return task_list + +def get_all_src_deps(node): + hs_deps = node.ant_glob('**/*.hs') + hsc_deps = node.ant_glob('**/*.hsc') + lhs_deps = node.ant_glob('**/*.lhs') + c_deps = node.ant_glob('**/*.c') + cpp_deps = node.ant_glob('**/*.cpp') + proto_deps = node.ant_glob('**/*.proto') + return sum([hs_deps, hsc_deps, lhs_deps, c_deps, cpp_deps, proto_deps], []) + +class Cabal(Task.Task): + def scan(self): + return (get_all_src_deps(self.generator.path), ()) + +class cabal_configure(Cabal): + run_str = '${CABAL} configure -v0 --prefix=${PREFIX} --global --user --package-db=${PKGCONFD} --builddir=${tsk.build_path}' + shell = True + + def scan(self): + out_node = self.generator.bld.root.find_dir(self.generator.bld.out_dir) + deps = [out_node.find_or_declare(dep).change_ext('.package') for dep in Utils.to_list(self.depends_on)] + return (deps, ()) + +class cabal_build(Cabal): + run_str = '${CABAL} build -v1 --builddir=${tsk.build_path}/' + shell = True + +class cabal_copy(Cabal): + run_str = '${CABAL} copy -v0 --builddir=${tsk.build_path}' + shell = True + +class cabal_register(Cabal): + run_str = '${CABAL} register -v0 --gen-pkg-config=${tsk.build_path}/pkg.config --builddir=${tsk.build_path}' + shell = True + +class ghcpkg_register(Cabal): + run_str = '${GHCPKG} update -v0 --global --user --package-conf=${PKGCONFD} ${tsk.build_path}/pkg.config' + shell = True + + def runnable_status(self): + global lock, registering + + val = False + lock.acquire() + val = registering + lock.release() + + if val: + return Task.ASK_LATER + + ret = Task.Task.runnable_status(self) + if ret == Task.RUN_ME: + lock.acquire() + registering = True + lock.release() + + return ret + + def post_run(self): + global lock, registering + + lock.acquire() + registering = False + lock.release() + + return Task.Task.post_run(self) + +class cabal_touch(Cabal): + run_str = 'touch ${TGT}' + diff -Nru lilv-0.24.4~dfsg0/waflib/extras/c_bgxlc.py lilv-0.24.6/waflib/extras/c_bgxlc.py --- lilv-0.24.4~dfsg0/waflib/extras/c_bgxlc.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/extras/c_bgxlc.py 2019-06-06 20:19:08.000000000 +0000 @@ -0,0 +1,32 @@ +#! 
/usr/bin/env python +# encoding: utf-8 +# harald at klimachs.de + +""" +IBM XL Compiler for Blue Gene +""" + +from waflib.Tools import ccroot,ar +from waflib.Configure import conf + +from waflib.Tools import xlc # method xlc_common_flags +from waflib.Tools.compiler_c import c_compiler +c_compiler['linux'].append('c_bgxlc') + +@conf +def find_bgxlc(conf): + cc = conf.find_program(['bgxlc_r','bgxlc'], var='CC') + conf.get_xlc_version(cc) + conf.env.CC = cc + conf.env.CC_NAME = 'bgxlc' + +def configure(conf): + conf.find_bgxlc() + conf.find_ar() + conf.xlc_common_flags() + conf.env.LINKFLAGS_cshlib = ['-G','-Wl,-bexpfull'] + conf.env.LINKFLAGS_cprogram = [] + conf.cc_load_tools() + conf.cc_add_flags() + conf.link_add_flags() + diff -Nru lilv-0.24.4~dfsg0/waflib/extras/c_dumbpreproc.py lilv-0.24.6/waflib/extras/c_dumbpreproc.py --- lilv-0.24.4~dfsg0/waflib/extras/c_dumbpreproc.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/extras/c_dumbpreproc.py 2019-06-06 20:19:08.000000000 +0000 @@ -0,0 +1,72 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Thomas Nagy, 2006-2010 (ita) + +""" +Dumb C/C++ preprocessor for finding dependencies + +It will look at all include files it can find after removing the comments, so the following +will always add the dependency on both "a.h" and "b.h":: + + #include "a.h" + #ifdef B + #include "b.h" + #endif + int main() { + return 0; + } + +To use:: + + def configure(conf): + conf.load('compiler_c') + conf.load('c_dumbpreproc') +""" + +import re +from waflib.Tools import c_preproc + +re_inc = re.compile( + '^[ \t]*(#|%:)[ \t]*(include)[ \t]*[<"](.*)[>"]\r*$', + re.IGNORECASE | re.MULTILINE) + +def lines_includes(node): + code = node.read() + if c_preproc.use_trigraphs: + for (a, b) in c_preproc.trig_def: + code = code.split(a).join(b) + code = c_preproc.re_nl.sub('', code) + code = c_preproc.re_cpp.sub(c_preproc.repl, code) + return [(m.group(2), m.group(3)) for m in re.finditer(re_inc, code)] + +parser = c_preproc.c_parser +class dumb_parser(parser): + def addlines(self, node): + if node in self.nodes[:-1]: + return + self.currentnode_stack.append(node.parent) + + # Avoid reading the same files again + try: + lines = self.parse_cache[node] + except KeyError: + lines = self.parse_cache[node] = lines_includes(node) + + self.lines = lines + [(c_preproc.POPFILE, '')] + self.lines + + def start(self, node, env): + try: + self.parse_cache = node.ctx.parse_cache + except AttributeError: + self.parse_cache = node.ctx.parse_cache = {} + + self.addlines(node) + while self.lines: + (x, y) = self.lines.pop(0) + if x == c_preproc.POPFILE: + self.currentnode_stack.pop() + continue + self.tryfind(y) + +c_preproc.c_parser = dumb_parser + diff -Nru lilv-0.24.4~dfsg0/waflib/extras/c_emscripten.py lilv-0.24.6/waflib/extras/c_emscripten.py --- lilv-0.24.4~dfsg0/waflib/extras/c_emscripten.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/extras/c_emscripten.py 2019-06-06 20:19:08.000000000 +0000 @@ -0,0 +1,87 @@ +#!/usr/bin/env python +# -*- coding: utf-8 vi:ts=4:noexpandtab + +import subprocess, shlex, sys + +from waflib.Tools import ccroot, gcc, gxx +from waflib.Configure import conf +from waflib.TaskGen import after_method, feature + +from waflib.Tools.compiler_c import c_compiler +from waflib.Tools.compiler_cxx import cxx_compiler + +for supported_os in ('linux', 'darwin', 'gnu', 'aix'): + c_compiler[supported_os].append('c_emscripten') + cxx_compiler[supported_os].append('c_emscripten') + + +@conf +def get_emscripten_version(conf, cc): + """ + Emscripten 
doesn't support processing '-' like clang/gcc
+	"""
+
+	dummy = conf.cachedir.parent.make_node("waf-emscripten.c")
+	dummy.write("")
+	cmd = cc + ['-dM', '-E', '-x', 'c', dummy.abspath()]
+	env = conf.env.env or None
+	try:
+		p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env)
+		out = p.communicate()[0]
+	except Exception as e:
+		conf.fatal('Could not determine emscripten version %r: %s' % (cmd, e))
+
+	if not isinstance(out, str):
+		out = out.decode(sys.stdout.encoding or 'latin-1')
+
+	k = {}
+	out = out.splitlines()
+	for line in out:
+		lst = shlex.split(line)
+		if len(lst)>2:
+			key = lst[1]
+			val = lst[2]
+			k[key] = val
+
+	if not ('__clang__' in k and 'EMSCRIPTEN' in k):
+		conf.fatal('Could not determine the emscripten compiler version.')
+
+	conf.env.DEST_OS = 'generic'
+	conf.env.DEST_BINFMT = 'elf'
+	conf.env.DEST_CPU = 'asm-js'
+	conf.env.CC_VERSION = (k['__clang_major__'], k['__clang_minor__'], k['__clang_patchlevel__'])
+	return k
+
+@conf
+def find_emscripten(conf):
+	cc = conf.find_program(['emcc'], var='CC')
+	conf.get_emscripten_version(cc)
+	conf.env.CC = cc
+	conf.env.CC_NAME = 'emscripten'
+	cxx = conf.find_program(['em++'], var='CXX')
+	conf.env.CXX = cxx
+	conf.env.CXX_NAME = 'emscripten'
+	conf.find_program(['emar'], var='AR')
+
+def configure(conf):
+	conf.find_emscripten()
+	conf.find_ar()
+	conf.gcc_common_flags()
+	conf.gxx_common_flags()
+	conf.cc_load_tools()
+	conf.cc_add_flags()
+	conf.cxx_load_tools()
+	conf.cxx_add_flags()
+	conf.link_add_flags()
+	conf.env.ARFLAGS = ['rcs']
+	conf.env.cshlib_PATTERN = '%s.js'
+	conf.env.cxxshlib_PATTERN = '%s.js'
+	conf.env.cstlib_PATTERN = '%s.a'
+	conf.env.cxxstlib_PATTERN = '%s.a'
+	conf.env.cprogram_PATTERN = '%s.html'
+	conf.env.cxxprogram_PATTERN = '%s.html'
+	conf.env.CXX_TGT_F = ['-c', '-o', '']
+	conf.env.CC_TGT_F = ['-c', '-o', '']
+	conf.env.CXXLNK_TGT_F = ['-o', '']
+	conf.env.CCLNK_TGT_F = ['-o', '']
+	conf.env.append_value('LINKFLAGS',['-Wl,--enable-auto-import'])
diff -Nru lilv-0.24.4~dfsg0/waflib/extras/cfg_altoptions.py lilv-0.24.6/waflib/extras/cfg_altoptions.py
--- lilv-0.24.4~dfsg0/waflib/extras/cfg_altoptions.py	1970-01-01 00:00:00.000000000 +0000
+++ lilv-0.24.6/waflib/extras/cfg_altoptions.py	2019-06-06 20:19:08.000000000 +0000
@@ -0,0 +1,110 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+# Tool to extend c_config.check_cfg()
+
+__author__ = __maintainer__ = "Jérôme Carretero "
+__copyright__ = "Jérôme Carretero, 2014"
+
+"""
+
+This tool makes it possible to work around the absence of ``*-config`` programs
+on systems, by keeping the same clean configuration syntax but inferring
+values or permitting their modification via the options interface.
+
+Note that pkg-config can also support setting ``PKG_CONFIG_PATH``,
+so you can put custom files in a folder containing new .pc files.
+This tool could also be implemented by taking advantage of this fact.
+
+Usage::
+
+    def options(opt):
+        opt.load('cfg_altoptions')
+        opt.add_package_option('package')
+
+    def configure(conf):
+        conf.load('cfg_altoptions')
+        conf.check_cfg(...)
+
+Known issues:
+
+- Behavior with different build contexts...
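+
+(Concrete illustration of the usage above, with a hypothetical package name
+and path: after ``opt.add_package_option('zlib')``, run
+``./waf configure --zlib-root=/opt/zlib``; the given root is expected to
+contain ``include`` and ``lib`` subfolders.)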
+ +""" + +import os +import functools +from waflib import Configure, Options, Errors + +def name_to_dest(x): + return x.lower().replace('-', '_') + + +def options(opt): + def x(opt, param): + dest = name_to_dest(param) + gr = opt.get_option_group("configure options") + gr.add_option('--%s-root' % dest, + help="path containing include and lib subfolders for %s" \ + % param, + ) + + opt.add_package_option = functools.partial(x, opt) + + +check_cfg_old = getattr(Configure.ConfigurationContext, 'check_cfg') + +@Configure.conf +def check_cfg(conf, *k, **kw): + if k: + lst = k[0].split() + kw['package'] = lst[0] + kw['args'] = ' '.join(lst[1:]) + + if not 'package' in kw: + return check_cfg_old(conf, **kw) + + package = kw['package'] + + package_lo = name_to_dest(package) + package_hi = package.upper().replace('-', '_') # TODO FIXME + package_hi = kw.get('uselib_store', package_hi) + + def check_folder(path, name): + try: + assert os.path.isdir(path) + except AssertionError: + raise Errors.ConfigurationError( + "%s_%s (%s) is not a folder!" \ + % (package_lo, name, path)) + return path + + root = getattr(Options.options, '%s_root' % package_lo, None) + + if root is None: + return check_cfg_old(conf, **kw) + else: + def add_manual_var(k, v): + conf.start_msg('Adding for %s a manual var' % (package)) + conf.env["%s_%s" % (k, package_hi)] = v + conf.end_msg("%s = %s" % (k, v)) + + + check_folder(root, 'root') + + pkg_inc = check_folder(os.path.join(root, "include"), 'inc') + add_manual_var('INCLUDES', [pkg_inc]) + pkg_lib = check_folder(os.path.join(root, "lib"), 'libpath') + add_manual_var('LIBPATH', [pkg_lib]) + add_manual_var('LIB', [package]) + + for x in kw.get('manual_deps', []): + for k, v in sorted(conf.env.get_merged_dict().items()): + if k.endswith('_%s' % x): + k = k.replace('_%s' % x, '') + conf.start_msg('Adding for %s a manual dep' \ + %(package)) + conf.env["%s_%s" % (k, package_hi)] += v + conf.end_msg('%s += %s' % (k, v)) + + return True + diff -Nru lilv-0.24.4~dfsg0/waflib/extras/clang_compilation_database.py lilv-0.24.6/waflib/extras/clang_compilation_database.py --- lilv-0.24.4~dfsg0/waflib/extras/clang_compilation_database.py 2018-06-27 05:22:05.000000000 +0000 +++ lilv-0.24.6/waflib/extras/clang_compilation_database.py 2019-06-06 20:19:08.000000000 +0000 @@ -1,55 +1,85 @@ -#! /usr/bin/env python +#!/usr/bin/env python # encoding: utf-8 -# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file +# Christoph Koke, 2013 -import sys,os,json,shlex,pipes -from waflib import Logs,TaskGen,Task -Task.Task.keep_last_cmd=True -@TaskGen.feature('c','cxx') +""" +Writes the c and cpp compile commands into build/compile_commands.json +see http://clang.llvm.org/docs/JSONCompilationDatabase.html + +Usage: + + def configure(conf): + conf.load('compiler_cxx') + ... 
+ conf.load('clang_compilation_database') +""" + +import sys, os, json, shlex, pipes +from waflib import Logs, TaskGen, Task + +Task.Task.keep_last_cmd = True + +@TaskGen.feature('c', 'cxx') @TaskGen.after_method('process_use') def collect_compilation_db_tasks(self): + "Add a compilation database entry for compiled tasks" try: - clang_db=self.bld.clang_compilation_database_tasks + clang_db = self.bld.clang_compilation_database_tasks except AttributeError: - clang_db=self.bld.clang_compilation_database_tasks=[] + clang_db = self.bld.clang_compilation_database_tasks = [] self.bld.add_post_fun(write_compilation_database) - tup=tuple(y for y in[Task.classes.get(x)for x in('c','cxx')]if y) - for task in getattr(self,'compiled_tasks',[]): - if isinstance(task,tup): + + tup = tuple(y for y in [Task.classes.get(x) for x in ('c', 'cxx')] if y) + for task in getattr(self, 'compiled_tasks', []): + if isinstance(task, tup): clang_db.append(task) + def write_compilation_database(ctx): - database_file=ctx.bldnode.make_node('compile_commands.json') - Logs.info('Build commands will be stored in %s',database_file.path_from(ctx.path)) + "Write the clang compilation database as JSON" + database_file = ctx.bldnode.make_node('compile_commands.json') + Logs.info('Build commands will be stored in %s', database_file.path_from(ctx.path)) try: - root=json.load(database_file) + root = json.load(database_file) except IOError: - root=[] - clang_db=dict((x['file'],x)for x in root) - for task in getattr(ctx,'clang_compilation_database_tasks',[]): + root = [] + clang_db = dict((x['file'], x) for x in root) + for task in getattr(ctx, 'clang_compilation_database_tasks', []): try: - cmd=task.last_cmd + cmd = task.last_cmd except AttributeError: continue - directory=getattr(task,'cwd',ctx.variant_dir) - f_node=task.inputs[0] - filename=os.path.relpath(f_node.abspath(),directory) - entry={"directory":directory,"arguments":cmd,"file":filename,} - clang_db[filename]=entry - root=list(clang_db.values()) - database_file.write(json.dumps(root,indent=2)) -for x in('c','cxx'): + directory = getattr(task, 'cwd', ctx.variant_dir) + f_node = task.inputs[0] + filename = os.path.relpath(f_node.abspath(), directory) + entry = { + "directory": directory, + "arguments": cmd, + "file": filename, + } + clang_db[filename] = entry + root = list(clang_db.values()) + database_file.write(json.dumps(root, indent=2)) + +# Override the runnable_status function to do a dummy/dry run when the file doesn't need to be compiled. +# This will make sure compile_commands.json is always fully up to date. +# Previously you could end up with a partial compile_commands.json if the build failed. 
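+#
+# For illustration, each entry written to compile_commands.json has this shape
+# (the paths and object-file name below are assumed examples):
+#
+#   {
+#     "directory": "/home/user/project/build",
+#     "arguments": ["/usr/bin/clang", "-c", "-o", "main.c.1.o", "../main.c"],
+#     "file": "../main.c"
+#   }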
+for x in ('c', 'cxx'): if x not in Task.classes: continue - t=Task.classes[x] + + t = Task.classes[x] + def runnable_status(self): - def exec_command(cmd,**kw): + def exec_command(cmd, **kw): pass - run_status=self.old_runnable_status() - if run_status==Task.SKIP_ME: - setattr(self,'old_exec_command',getattr(self,'exec_command',None)) - setattr(self,'exec_command',exec_command) + + run_status = self.old_runnable_status() + if run_status == Task.SKIP_ME: + setattr(self, 'old_exec_command', getattr(self, 'exec_command', None)) + setattr(self, 'exec_command', exec_command) self.run() - setattr(self,'exec_command',getattr(self,'old_exec_command',None)) + setattr(self, 'exec_command', getattr(self, 'old_exec_command', None)) return run_status - setattr(t,'old_runnable_status',getattr(t,'runnable_status',None)) - setattr(t,'runnable_status',runnable_status) + + setattr(t, 'old_runnable_status', getattr(t, 'runnable_status', None)) + setattr(t, 'runnable_status', runnable_status) diff -Nru lilv-0.24.4~dfsg0/waflib/extras/clang_cross_common.py lilv-0.24.6/waflib/extras/clang_cross_common.py --- lilv-0.24.4~dfsg0/waflib/extras/clang_cross_common.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/extras/clang_cross_common.py 2019-10-19 17:59:11.000000000 +0000 @@ -0,0 +1,113 @@ +#!/usr/bin/env python +# encoding: utf-8 +# DragoonX6 2018 + +""" +Common routines for cross_clang.py and cross_clangxx.py +""" + +from waflib.Configure import conf +import waflib.Context + +def normalize_target_triple(target_triple): + target_triple = target_triple[:-1] + normalized_triple = target_triple.replace('--', '-unknown-') + + if normalized_triple.startswith('-'): + normalized_triple = 'unknown' + normalized_triple + + if normalized_triple.endswith('-'): + normalized_triple += 'unknown' + + # Normalize MinGW builds to *arch*-w64-mingw32 + if normalized_triple.endswith('windows-gnu'): + normalized_triple = normalized_triple[:normalized_triple.index('-')] + '-w64-mingw32' + + # Strip the vendor when doing msvc builds, since it's unused anyway. + if normalized_triple.endswith('windows-msvc'): + normalized_triple = normalized_triple[:normalized_triple.index('-')] + '-windows-msvc' + + return normalized_triple.replace('-', '_') + +@conf +def clang_modifier_msvc(conf): + import os + + """ + Really basic setup to use clang in msvc mode. + We actually don't really want to do a lot, even though clang is msvc compatible + in this mode, that doesn't mean we're actually using msvc. + It's probably the best to leave it to the user, we can assume msvc mode if the user + uses the clang-cl frontend, but this module only concerns itself with the gcc-like frontend. 
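+
+	For orientation: clang_modifier_target_triple() below maps the compiler's
+	-dumpmachine output to one of these modifiers; e.g. a triple of
+	x86_64-pc-windows-msvc normalizes to x86_64_windows_msvc and selects
+	clang_modifier_x86_64_windows_msvc (defined in clang_cross.py), which in
+	turn calls this function.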
+ """ + v = conf.env + v.cprogram_PATTERN = '%s.exe' + + v.cshlib_PATTERN = '%s.dll' + v.implib_PATTERN = '%s.lib' + v.IMPLIB_ST = '-Wl,-IMPLIB:%s' + v.SHLIB_MARKER = [] + + v.CFLAGS_cshlib = [] + v.LINKFLAGS_cshlib = ['-Wl,-DLL'] + v.cstlib_PATTERN = '%s.lib' + v.STLIB_MARKER = [] + + del(v.AR) + conf.find_program(['llvm-lib', 'lib'], var='AR') + v.ARFLAGS = ['-nologo'] + v.AR_TGT_F = ['-out:'] + + # Default to the linker supplied with llvm instead of link.exe or ld + v.LINK_CC = v.CC + ['-fuse-ld=lld', '-nostdlib'] + v.CCLNK_TGT_F = ['-o'] + v.def_PATTERN = '-Wl,-def:%s' + + v.LINKFLAGS = [] + + v.LIB_ST = '-l%s' + v.LIBPATH_ST = '-Wl,-LIBPATH:%s' + v.STLIB_ST = '-l%s' + v.STLIBPATH_ST = '-Wl,-LIBPATH:%s' + + CFLAGS_CRT_COMMON = [ + '-Xclang', '--dependent-lib=oldnames', + '-Xclang', '-fno-rtti-data', + '-D_MT' + ] + + v.CFLAGS_CRT_MULTITHREADED = CFLAGS_CRT_COMMON + [ + '-Xclang', '-flto-visibility-public-std', + '-Xclang', '--dependent-lib=libcmt', + ] + v.CXXFLAGS_CRT_MULTITHREADED = v.CFLAGS_CRT_MULTITHREADED + + v.CFLAGS_CRT_MULTITHREADED_DBG = CFLAGS_CRT_COMMON + [ + '-D_DEBUG', + '-Xclang', '-flto-visibility-public-std', + '-Xclang', '--dependent-lib=libcmtd', + ] + v.CXXFLAGS_CRT_MULTITHREADED_DBG = v.CFLAGS_CRT_MULTITHREADED_DBG + + v.CFLAGS_CRT_MULTITHREADED_DLL = CFLAGS_CRT_COMMON + [ + '-D_DLL', + '-Xclang', '--dependent-lib=msvcrt' + ] + v.CXXFLAGS_CRT_MULTITHREADED_DLL = v.CFLAGS_CRT_MULTITHREADED_DLL + + v.CFLAGS_CRT_MULTITHREADED_DLL_DBG = CFLAGS_CRT_COMMON + [ + '-D_DLL', + '-D_DEBUG', + '-Xclang', '--dependent-lib=msvcrtd', + ] + v.CXXFLAGS_CRT_MULTITHREADED_DLL_DBG = v.CFLAGS_CRT_MULTITHREADED_DLL_DBG + +@conf +def clang_modifier_target_triple(conf, cpp=False): + compiler = conf.env.CXX if cpp else conf.env.CC + output = conf.cmd_and_log(compiler + ['-dumpmachine'], output=waflib.Context.STDOUT) + + modifier = ('clangxx' if cpp else 'clang') + '_modifier_' + clang_modifier_func = getattr(conf, modifier + normalize_target_triple(output), None) + if clang_modifier_func: + clang_modifier_func() diff -Nru lilv-0.24.4~dfsg0/waflib/extras/clang_cross.py lilv-0.24.6/waflib/extras/clang_cross.py --- lilv-0.24.4~dfsg0/waflib/extras/clang_cross.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/extras/clang_cross.py 2019-10-19 17:59:11.000000000 +0000 @@ -0,0 +1,92 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Krzysztof Kosiński 2014 +# DragoonX6 2018 + +""" +Detect the Clang C compiler +This version is an attempt at supporting the -target and -sysroot flag of Clang. 
+""" + +from waflib.Tools import ccroot, ar, gcc +from waflib.Configure import conf +import waflib.Context +import waflib.extras.clang_cross_common + +def options(opt): + """ + Target triplet for clang:: + $ waf configure --clang-target-triple=x86_64-pc-linux-gnu + """ + cc_compiler_opts = opt.add_option_group('Configuration options') + cc_compiler_opts.add_option('--clang-target-triple', default=None, + help='Target triple for clang', + dest='clang_target_triple') + cc_compiler_opts.add_option('--clang-sysroot', default=None, + help='Sysroot for clang', + dest='clang_sysroot') + +@conf +def find_clang(conf): + """ + Finds the program clang and executes it to ensure it really is clang + """ + + import os + + cc = conf.find_program('clang', var='CC') + + if conf.options.clang_target_triple != None: + conf.env.append_value('CC', ['-target', conf.options.clang_target_triple]) + + if conf.options.clang_sysroot != None: + sysroot = str() + + if os.path.isabs(conf.options.clang_sysroot): + sysroot = conf.options.clang_sysroot + else: + sysroot = os.path.normpath(os.path.join(os.getcwd(), conf.options.clang_sysroot)) + + conf.env.append_value('CC', ['--sysroot', sysroot]) + + conf.get_cc_version(cc, clang=True) + conf.env.CC_NAME = 'clang' + +@conf +def clang_modifier_x86_64_w64_mingw32(conf): + conf.gcc_modifier_win32() + +@conf +def clang_modifier_i386_w64_mingw32(conf): + conf.gcc_modifier_win32() + +@conf +def clang_modifier_x86_64_windows_msvc(conf): + conf.clang_modifier_msvc() + + # Allow the user to override any flags if they so desire. + clang_modifier_user_func = getattr(conf, 'clang_modifier_x86_64_windows_msvc_user', None) + if clang_modifier_user_func: + clang_modifier_user_func() + +@conf +def clang_modifier_i386_windows_msvc(conf): + conf.clang_modifier_msvc() + + # Allow the user to override any flags if they so desire. + clang_modifier_user_func = getattr(conf, 'clang_modifier_i386_windows_msvc_user', None) + if clang_modifier_user_func: + clang_modifier_user_func() + +def configure(conf): + conf.find_clang() + conf.find_program(['llvm-ar', 'ar'], var='AR') + conf.find_ar() + conf.gcc_common_flags() + # Allow the user to provide flags for the target platform. + conf.gcc_modifier_platform() + # And allow more fine grained control based on the compiler's triplet. + conf.clang_modifier_target_triple() + conf.cc_load_tools() + conf.cc_add_flags() + conf.link_add_flags() diff -Nru lilv-0.24.4~dfsg0/waflib/extras/clangxx_cross.py lilv-0.24.6/waflib/extras/clangxx_cross.py --- lilv-0.24.4~dfsg0/waflib/extras/clangxx_cross.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/extras/clangxx_cross.py 2019-10-19 17:59:11.000000000 +0000 @@ -0,0 +1,106 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Thomas Nagy 2009-2018 (ita) +# DragoonX6 2018 + +""" +Detect the Clang++ C++ compiler +This version is an attempt at supporting the -target and -sysroot flag of Clang++. 
+""" + +from waflib.Tools import ccroot, ar, gxx +from waflib.Configure import conf +import waflib.extras.clang_cross_common + +def options(opt): + """ + Target triplet for clang++:: + $ waf configure --clangxx-target-triple=x86_64-pc-linux-gnu + """ + cxx_compiler_opts = opt.add_option_group('Configuration options') + cxx_compiler_opts.add_option('--clangxx-target-triple', default=None, + help='Target triple for clang++', + dest='clangxx_target_triple') + cxx_compiler_opts.add_option('--clangxx-sysroot', default=None, + help='Sysroot for clang++', + dest='clangxx_sysroot') + +@conf +def find_clangxx(conf): + """ + Finds the program clang++, and executes it to ensure it really is clang++ + """ + + import os + + cxx = conf.find_program('clang++', var='CXX') + + if conf.options.clangxx_target_triple != None: + conf.env.append_value('CXX', ['-target', conf.options.clangxx_target_triple]) + + if conf.options.clangxx_sysroot != None: + sysroot = str() + + if os.path.isabs(conf.options.clangxx_sysroot): + sysroot = conf.options.clangxx_sysroot + else: + sysroot = os.path.normpath(os.path.join(os.getcwd(), conf.options.clangxx_sysroot)) + + conf.env.append_value('CXX', ['--sysroot', sysroot]) + + conf.get_cc_version(cxx, clang=True) + conf.env.CXX_NAME = 'clang' + +@conf +def clangxx_modifier_x86_64_w64_mingw32(conf): + conf.gcc_modifier_win32() + +@conf +def clangxx_modifier_i386_w64_mingw32(conf): + conf.gcc_modifier_win32() + +@conf +def clangxx_modifier_msvc(conf): + v = conf.env + v.cxxprogram_PATTERN = v.cprogram_PATTERN + v.cxxshlib_PATTERN = v.cshlib_PATTERN + + v.CXXFLAGS_cxxshlib = [] + v.LINKFLAGS_cxxshlib = v.LINKFLAGS_cshlib + v.cxxstlib_PATTERN = v.cstlib_PATTERN + + v.LINK_CXX = v.CXX + ['-fuse-ld=lld', '-nostdlib'] + v.CXXLNK_TGT_F = v.CCLNK_TGT_F + +@conf +def clangxx_modifier_x86_64_windows_msvc(conf): + conf.clang_modifier_msvc() + conf.clangxx_modifier_msvc() + + # Allow the user to override any flags if they so desire. + clang_modifier_user_func = getattr(conf, 'clangxx_modifier_x86_64_windows_msvc_user', None) + if clang_modifier_user_func: + clang_modifier_user_func() + +@conf +def clangxx_modifier_i386_windows_msvc(conf): + conf.clang_modifier_msvc() + conf.clangxx_modifier_msvc() + + # Allow the user to override any flags if they so desire. + clang_modifier_user_func = getattr(conf, 'clangxx_modifier_i386_windows_msvc_user', None) + if clang_modifier_user_func: + clang_modifier_user_func() + +def configure(conf): + conf.find_clangxx() + conf.find_program(['llvm-ar', 'ar'], var='AR') + conf.find_ar() + conf.gxx_common_flags() + # Allow the user to provide flags for the target platform. + conf.gxx_modifier_platform() + # And allow more fine grained control based on the compiler's triplet. + conf.clang_modifier_target_triple(cpp=True) + conf.cxx_load_tools() + conf.cxx_add_flags() + conf.link_add_flags() diff -Nru lilv-0.24.4~dfsg0/waflib/extras/c_nec.py lilv-0.24.6/waflib/extras/c_nec.py --- lilv-0.24.4~dfsg0/waflib/extras/c_nec.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/extras/c_nec.py 2019-06-06 20:19:08.000000000 +0000 @@ -0,0 +1,74 @@ +#! 
/usr/bin/env python +# encoding: utf-8 +# harald at klimachs.de + +""" +NEC SX Compiler for SX vector systems +""" + +import re +from waflib import Utils +from waflib.Tools import ccroot,ar +from waflib.Configure import conf + +from waflib.Tools import xlc # method xlc_common_flags +from waflib.Tools.compiler_c import c_compiler +c_compiler['linux'].append('c_nec') + +@conf +def find_sxc(conf): + cc = conf.find_program(['sxcc'], var='CC') + conf.get_sxc_version(cc) + conf.env.CC = cc + conf.env.CC_NAME = 'sxcc' + +@conf +def get_sxc_version(conf, fc): + version_re = re.compile(r"C\+\+/SX\s*Version\s*(?P\d*)\.(?P\d*)", re.I).search + cmd = fc + ['-V'] + p = Utils.subprocess.Popen(cmd, stdin=False, stdout=Utils.subprocess.PIPE, stderr=Utils.subprocess.PIPE, env=None) + out, err = p.communicate() + + if out: + match = version_re(out) + else: + match = version_re(err) + if not match: + conf.fatal('Could not determine the NEC C compiler version.') + k = match.groupdict() + conf.env['C_VERSION'] = (k['major'], k['minor']) + +@conf +def sxc_common_flags(conf): + v=conf.env + v['CC_SRC_F']=[] + v['CC_TGT_F']=['-c','-o'] + if not v['LINK_CC']: + v['LINK_CC']=v['CC'] + v['CCLNK_SRC_F']=[] + v['CCLNK_TGT_F']=['-o'] + v['CPPPATH_ST']='-I%s' + v['DEFINES_ST']='-D%s' + v['LIB_ST']='-l%s' + v['LIBPATH_ST']='-L%s' + v['STLIB_ST']='-l%s' + v['STLIBPATH_ST']='-L%s' + v['RPATH_ST']='' + v['SONAME_ST']=[] + v['SHLIB_MARKER']=[] + v['STLIB_MARKER']=[] + v['LINKFLAGS_cprogram']=[''] + v['cprogram_PATTERN']='%s' + v['CFLAGS_cshlib']=['-fPIC'] + v['LINKFLAGS_cshlib']=[''] + v['cshlib_PATTERN']='lib%s.so' + v['LINKFLAGS_cstlib']=[] + v['cstlib_PATTERN']='lib%s.a' + +def configure(conf): + conf.find_sxc() + conf.find_program('sxar',VAR='AR') + conf.sxc_common_flags() + conf.cc_load_tools() + conf.cc_add_flags() + conf.link_add_flags() diff -Nru lilv-0.24.4~dfsg0/waflib/extras/codelite.py lilv-0.24.6/waflib/extras/codelite.py --- lilv-0.24.4~dfsg0/waflib/extras/codelite.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/extras/codelite.py 2019-06-06 20:19:08.000000000 +0000 @@ -0,0 +1,875 @@ +#! /usr/bin/env python +# encoding: utf-8 +# CodeLite Project +# Christian Klein (chrikle@berlios.de) +# Created: Jan 2012 +# As templete for this file I used the msvs.py +# I hope this template will work proper + +""" +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions +are met: + +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +3. The name of the author may not be used to endorse or promote products + derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR +IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, +INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, +STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING +IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. +""" + +""" + + +To add this tool to your project: +def options(conf): + opt.load('codelite') + +It can be a good idea to add the sync_exec tool too. + +To generate solution files: +$ waf configure codelite + +To customize the outputs, provide subclasses in your wscript files: + +from waflib.extras import codelite +class vsnode_target(codelite.vsnode_target): + def get_build_command(self, props): + # likely to be required + return "waf.bat build" + def collect_source(self): + # likely to be required + ... +class codelite_bar(codelite.codelite_generator): + def init(self): + codelite.codelite_generator.init(self) + self.vsnode_target = vsnode_target + +The codelite class re-uses the same build() function for reading the targets (task generators), +you may therefore specify codelite settings on the context object: + +def build(bld): + bld.codelite_solution_name = 'foo.workspace' + bld.waf_command = 'waf.bat' + bld.projects_dir = bld.srcnode.make_node('') + bld.projects_dir.mkdir() + + +ASSUMPTIONS: +* a project can be either a directory or a target, project files are written only for targets that have source files +* each project is a vcxproj file, therefore the project uuid needs only to be a hash of the absolute path +""" + +import os, re, sys +import uuid # requires python 2.5 +from waflib.Build import BuildContext +from waflib import Utils, TaskGen, Logs, Task, Context, Node, Options + +HEADERS_GLOB = '**/(*.h|*.hpp|*.H|*.inl)' + +PROJECT_TEMPLATE = r''' + + + + + + + + + + ${for x in project.source} + ${if (project.get_key(x)=="sourcefile")} + + ${endif} + ${endfor} + + + ${for x in project.source} + ${if (project.get_key(x)=="headerfile")} + + ${endif} + ${endfor} + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + $b = project.build_properties[0]} + ${xml:project.get_rebuild_command(project.build_properties[0])} + ${xml:project.get_clean_command(project.build_properties[0])} + ${xml:project.get_build_command(project.build_properties[0])} + ${xml:project.get_install_command(project.build_properties[0])} + ${xml:project.get_build_and_install_command(project.build_properties[0])} + ${xml:project.get_build_all_command(project.build_properties[0])} + ${xml:project.get_rebuild_all_command(project.build_properties[0])} + ${xml:project.get_clean_all_command(project.build_properties[0])} + ${xml:project.get_build_and_install_all_command(project.build_properties[0])} + + + + None + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +''' + + + + +SOLUTION_TEMPLATE = ''' + +${for p in project.all_projects} + +${endfor} + + +${for p in project.all_projects} + +${endfor} + + +''' + + + +COMPILE_TEMPLATE = '''def f(project): + lst = [] + def xml_escape(value): + return value.replace("&", "&").replace('"', """).replace("'", "'").replace("<", "<").replace(">", ">") + + %s + + #f = open('cmd.txt', 'w') + #f.write(str(lst)) + #f.close() + return ''.join(lst) +''' +reg_act = re.compile(r"(?P\\)|(?P\$\$)|(?P\$\{(?P[^}]*?)\})", re.M) +def compile_template(line): + 
""" + Compile a template expression into a python function (like jsps, but way shorter) + """ + extr = [] + def repl(match): + g = match.group + if g('dollar'): + return "$" + elif g('backslash'): + return "\\" + elif g('subst'): + extr.append(g('code')) + return "<<|@|>>" + return None + + line2 = reg_act.sub(repl, line) + params = line2.split('<<|@|>>') + assert(extr) + + + indent = 0 + buf = [] + app = buf.append + + def app(txt): + buf.append(indent * '\t' + txt) + + for x in range(len(extr)): + if params[x]: + app("lst.append(%r)" % params[x]) + + f = extr[x] + if f.startswith(('if', 'for')): + app(f + ':') + indent += 1 + elif f.startswith('py:'): + app(f[3:]) + elif f.startswith(('endif', 'endfor')): + indent -= 1 + elif f.startswith(('else', 'elif')): + indent -= 1 + app(f + ':') + indent += 1 + elif f.startswith('xml:'): + app('lst.append(xml_escape(%s))' % f[4:]) + else: + #app('lst.append((%s) or "cannot find %s")' % (f, f)) + app('lst.append(%s)' % f) + + if extr: + if params[-1]: + app("lst.append(%r)" % params[-1]) + + fun = COMPILE_TEMPLATE % "\n\t".join(buf) + #print(fun) + return Task.funex(fun) + + +re_blank = re.compile('(\n|\r|\\s)*\n', re.M) +def rm_blank_lines(txt): + txt = re_blank.sub('\r\n', txt) + return txt + +BOM = '\xef\xbb\xbf' +try: + BOM = bytes(BOM, 'latin-1') # python 3 +except (TypeError, NameError): + pass + +def stealth_write(self, data, flags='wb'): + try: + unicode + except NameError: + data = data.encode('utf-8') # python 3 + else: + data = data.decode(sys.getfilesystemencoding(), 'replace') + data = data.encode('utf-8') + + if self.name.endswith('.project'): + data = BOM + data + + try: + txt = self.read(flags='rb') + if txt != data: + raise ValueError('must write') + except (IOError, ValueError): + self.write(data, flags=flags) + else: + Logs.debug('codelite: skipping %r', self) +Node.Node.stealth_write = stealth_write + +re_quote = re.compile("[^a-zA-Z0-9-]") +def quote(s): + return re_quote.sub("_", s) + +def xml_escape(value): + return value.replace("&", "&").replace('"', """).replace("'", "'").replace("<", "<").replace(">", ">") + +def make_uuid(v, prefix = None): + """ + simple utility function + """ + if isinstance(v, dict): + keys = list(v.keys()) + keys.sort() + tmp = str([(k, v[k]) for k in keys]) + else: + tmp = str(v) + d = Utils.md5(tmp.encode()).hexdigest().upper() + if prefix: + d = '%s%s' % (prefix, d[8:]) + gid = uuid.UUID(d, version = 4) + return str(gid).upper() + +def diff(node, fromnode): + # difference between two nodes, but with "(..)" instead of ".." + c1 = node + c2 = fromnode + + c1h = c1.height() + c2h = c2.height() + + lst = [] + up = 0 + + while c1h > c2h: + lst.append(c1.name) + c1 = c1.parent + c1h -= 1 + + while c2h > c1h: + up += 1 + c2 = c2.parent + c2h -= 1 + + while id(c1) != id(c2): + lst.append(c1.name) + up += 1 + + c1 = c1.parent + c2 = c2.parent + + for i in range(up): + lst.append('(..)') + lst.reverse() + return tuple(lst) + +class build_property(object): + pass + +class vsnode(object): + """ + Abstract class representing visual studio elements + We assume that all visual studio nodes have a uuid and a parent + """ + def __init__(self, ctx): + self.ctx = ctx # codelite context + self.name = '' # string, mandatory + self.vspath = '' # path in visual studio (name for dirs, absolute path for projects) + self.uuid = '' # string, mandatory + self.parent = None # parent node for visual studio nesting + + def get_waf(self): + """ + Override in subclasses... 
+ """ + return '%s/%s' % (self.ctx.srcnode.abspath(), getattr(self.ctx, 'waf_command', 'waf')) + + def ptype(self): + """ + Return a special uuid for projects written in the solution file + """ + pass + + def write(self): + """ + Write the project file, by default, do nothing + """ + pass + + def make_uuid(self, val): + """ + Alias for creating uuid values easily (the templates cannot access global variables) + """ + return make_uuid(val) + +class vsnode_vsdir(vsnode): + """ + Nodes representing visual studio folders (which do not match the filesystem tree!) + """ + VS_GUID_SOLUTIONFOLDER = "2150E333-8FDC-42A3-9474-1A3956D46DE8" + def __init__(self, ctx, uuid, name, vspath=''): + vsnode.__init__(self, ctx) + self.title = self.name = name + self.uuid = uuid + self.vspath = vspath or name + + def ptype(self): + return self.VS_GUID_SOLUTIONFOLDER + +class vsnode_project(vsnode): + """ + Abstract class representing visual studio project elements + A project is assumed to be writable, and has a node representing the file to write to + """ + VS_GUID_VCPROJ = "8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942" + def ptype(self): + return self.VS_GUID_VCPROJ + + def __init__(self, ctx, node): + vsnode.__init__(self, ctx) + self.path = node + self.uuid = make_uuid(node.abspath()) + self.name = node.name + self.title = self.path.abspath() + self.source = [] # list of node objects + self.build_properties = [] # list of properties (nmake commands, output dir, etc) + + def dirs(self): + """ + Get the list of parent folders of the source files (header files included) + for writing the filters + """ + lst = [] + def add(x): + if x.height() > self.tg.path.height() and x not in lst: + lst.append(x) + add(x.parent) + for x in self.source: + add(x.parent) + return lst + + def write(self): + Logs.debug('codelite: creating %r', self.path) + #print "self.name:",self.name + + # first write the project file + template1 = compile_template(PROJECT_TEMPLATE) + proj_str = template1(self) + proj_str = rm_blank_lines(proj_str) + self.path.stealth_write(proj_str) + + # then write the filter + #template2 = compile_template(FILTER_TEMPLATE) + #filter_str = template2(self) + #filter_str = rm_blank_lines(filter_str) + #tmp = self.path.parent.make_node(self.path.name + '.filters') + #tmp.stealth_write(filter_str) + + def get_key(self, node): + """ + required for writing the source files + """ + name = node.name + if name.endswith(('.cpp', '.c')): + return 'sourcefile' + return 'headerfile' + + def collect_properties(self): + """ + Returns a list of triplet (configuration, platform, output_directory) + """ + ret = [] + for c in self.ctx.configurations: + for p in self.ctx.platforms: + x = build_property() + x.outdir = '' + + x.configuration = c + x.platform = p + + x.preprocessor_definitions = '' + x.includes_search_path = '' + + # can specify "deploy_dir" too + ret.append(x) + self.build_properties = ret + + def get_build_params(self, props): + opt = '' + return (self.get_waf(), opt) + + def get_build_command(self, props): + return "%s build %s" % self.get_build_params(props) + + def get_clean_command(self, props): + return "%s clean %s" % self.get_build_params(props) + + def get_rebuild_command(self, props): + return "%s clean build %s" % self.get_build_params(props) + + def get_install_command(self, props): + return "%s install %s" % self.get_build_params(props) + def get_build_and_install_command(self, props): + return "%s build install %s" % self.get_build_params(props) + + def get_build_and_install_all_command(self, props): + return 
"%s build install" % self.get_build_params(props)[0] + + def get_clean_all_command(self, props): + return "%s clean" % self.get_build_params(props)[0] + + def get_build_all_command(self, props): + return "%s build" % self.get_build_params(props)[0] + + def get_rebuild_all_command(self, props): + return "%s clean build" % self.get_build_params(props)[0] + + def get_filter_name(self, node): + lst = diff(node, self.tg.path) + return '\\'.join(lst) or '.' + +class vsnode_alias(vsnode_project): + def __init__(self, ctx, node, name): + vsnode_project.__init__(self, ctx, node) + self.name = name + self.output_file = '' + +class vsnode_build_all(vsnode_alias): + """ + Fake target used to emulate the behaviour of "make all" (starting one process by target is slow) + This is the only alias enabled by default + """ + def __init__(self, ctx, node, name='build_all_projects'): + vsnode_alias.__init__(self, ctx, node, name) + self.is_active = True + +class vsnode_install_all(vsnode_alias): + """ + Fake target used to emulate the behaviour of "make install" + """ + def __init__(self, ctx, node, name='install_all_projects'): + vsnode_alias.__init__(self, ctx, node, name) + + def get_build_command(self, props): + return "%s build install %s" % self.get_build_params(props) + + def get_clean_command(self, props): + return "%s clean %s" % self.get_build_params(props) + + def get_rebuild_command(self, props): + return "%s clean build install %s" % self.get_build_params(props) + +class vsnode_project_view(vsnode_alias): + """ + Fake target used to emulate a file system view + """ + def __init__(self, ctx, node, name='project_view'): + vsnode_alias.__init__(self, ctx, node, name) + self.tg = self.ctx() # fake one, cannot remove + self.exclude_files = Node.exclude_regs + ''' +waf-2* +waf3-2*/** +.waf-2* +.waf3-2*/** +**/*.sdf +**/*.suo +**/*.ncb +**/%s + ''' % Options.lockfile + + def collect_source(self): + # this is likely to be slow + self.source = self.ctx.srcnode.ant_glob('**', excl=self.exclude_files) + + def get_build_command(self, props): + params = self.get_build_params(props) + (self.ctx.cmd,) + return "%s %s %s" % params + + def get_clean_command(self, props): + return "" + + def get_rebuild_command(self, props): + return self.get_build_command(props) + +class vsnode_target(vsnode_project): + """ + CodeLite project representing a targets (programs, libraries, etc) and bound + to a task generator + """ + def __init__(self, ctx, tg): + """ + A project is more or less equivalent to a file/folder + """ + base = getattr(ctx, 'projects_dir', None) or tg.path + node = base.make_node(quote(tg.name) + ctx.project_extension) # the project file as a Node + vsnode_project.__init__(self, ctx, node) + self.name = quote(tg.name) + self.tg = tg # task generator + + def get_build_params(self, props): + """ + Override the default to add the target name + """ + opt = '' + if getattr(self, 'tg', None): + opt += " --targets=%s" % self.tg.name + return (self.get_waf(), opt) + + def collect_source(self): + tg = self.tg + source_files = tg.to_nodes(getattr(tg, 'source', [])) + include_dirs = Utils.to_list(getattr(tg, 'codelite_includes', [])) + include_files = [] + for x in include_dirs: + if isinstance(x, str): + x = tg.path.find_node(x) + if x: + lst = [y for y in x.ant_glob(HEADERS_GLOB, flat=False)] + include_files.extend(lst) + + # remove duplicates + self.source.extend(list(set(source_files + include_files))) + self.source.sort(key=lambda x: x.abspath()) + + def collect_properties(self): + """ + CodeLite projects are 
associated with platforms and configurations (for building especially) + """ + super(vsnode_target, self).collect_properties() + for x in self.build_properties: + x.outdir = self.path.parent.abspath() + x.preprocessor_definitions = '' + x.includes_search_path = '' + + try: + tsk = self.tg.link_task + except AttributeError: + pass + else: + x.output_file = tsk.outputs[0].abspath() + x.preprocessor_definitions = ';'.join(tsk.env.DEFINES) + x.includes_search_path = ';'.join(self.tg.env.INCPATHS) + +class codelite_generator(BuildContext): + '''generates a CodeLite workspace''' + cmd = 'codelite' + fun = 'build' + + def init(self): + """ + Some data that needs to be present + """ + if not getattr(self, 'configurations', None): + self.configurations = ['Release'] # LocalRelease, RemoteDebug, etc + if not getattr(self, 'platforms', None): + self.platforms = ['Win32'] + if not getattr(self, 'all_projects', None): + self.all_projects = [] + if not getattr(self, 'project_extension', None): + self.project_extension = '.project' + if not getattr(self, 'projects_dir', None): + self.projects_dir = self.srcnode.make_node('') + self.projects_dir.mkdir() + + # bind the classes to the object, so that subclass can provide custom generators + if not getattr(self, 'vsnode_vsdir', None): + self.vsnode_vsdir = vsnode_vsdir + if not getattr(self, 'vsnode_target', None): + self.vsnode_target = vsnode_target + if not getattr(self, 'vsnode_build_all', None): + self.vsnode_build_all = vsnode_build_all + if not getattr(self, 'vsnode_install_all', None): + self.vsnode_install_all = vsnode_install_all + if not getattr(self, 'vsnode_project_view', None): + self.vsnode_project_view = vsnode_project_view + + self.numver = '11.00' + self.vsver = '2010' + + def execute(self): + """ + Entry point + """ + self.restore() + if not self.all_envs: + self.load_envs() + self.recurse([self.run_dir]) + + # user initialization + self.init() + + # two phases for creating the solution + self.collect_projects() # add project objects into "self.all_projects" + self.write_files() # write the corresponding project and solution files + + def collect_projects(self): + """ + Fill the list self.all_projects with project objects + Fill the list of build targets + """ + self.collect_targets() + #self.add_aliases() + #self.collect_dirs() + default_project = getattr(self, 'default_project', None) + def sortfun(x): + if x.name == default_project: + return '' + return getattr(x, 'path', None) and x.path.abspath() or x.name + self.all_projects.sort(key=sortfun) + + def write_files(self): + """ + Write the project and solution files from the data collected + so far. 
It is unlikely that you will want to change this + """ + for p in self.all_projects: + p.write() + + # and finally write the solution file + node = self.get_solution_node() + node.parent.mkdir() + Logs.warn('Creating %r', node) + #a = dir(self.root) + #for b in a: + # print b + #print self.group_names + #print "Hallo2: ",self.root.listdir() + #print getattr(self, 'codelite_solution_name', None) + template1 = compile_template(SOLUTION_TEMPLATE) + sln_str = template1(self) + sln_str = rm_blank_lines(sln_str) + node.stealth_write(sln_str) + + def get_solution_node(self): + """ + The solution filename is required when writing the .vcproj files + return self.solution_node and if it does not exist, make one + """ + try: + return self.solution_node + except: + pass + + codelite_solution_name = getattr(self, 'codelite_solution_name', None) + if not codelite_solution_name: + codelite_solution_name = getattr(Context.g_module, Context.APPNAME, 'project') + '.workspace' + setattr(self, 'codelite_solution_name', codelite_solution_name) + if os.path.isabs(codelite_solution_name): + self.solution_node = self.root.make_node(codelite_solution_name) + else: + self.solution_node = self.srcnode.make_node(codelite_solution_name) + return self.solution_node + + def project_configurations(self): + """ + Helper that returns all the pairs (config,platform) + """ + ret = [] + for c in self.configurations: + for p in self.platforms: + ret.append((c, p)) + return ret + + def collect_targets(self): + """ + Process the list of task generators + """ + for g in self.groups: + for tg in g: + if not isinstance(tg, TaskGen.task_gen): + continue + + if not hasattr(tg, 'codelite_includes'): + tg.codelite_includes = tg.to_list(getattr(tg, 'includes', [])) + tg.to_list(getattr(tg, 'export_includes', [])) + tg.post() + if not getattr(tg, 'link_task', None): + continue + + p = self.vsnode_target(self, tg) + p.collect_source() # delegate this processing + p.collect_properties() + self.all_projects.append(p) + + def add_aliases(self): + """ + Add a specific target that emulates the "make all" necessary for Visual studio when pressing F7 + We also add an alias for "make install" (disabled by default) + """ + base = getattr(self, 'projects_dir', None) or self.tg.path + + node_project = base.make_node('build_all_projects' + self.project_extension) # Node + p_build = self.vsnode_build_all(self, node_project) + p_build.collect_properties() + self.all_projects.append(p_build) + + node_project = base.make_node('install_all_projects' + self.project_extension) # Node + p_install = self.vsnode_install_all(self, node_project) + p_install.collect_properties() + self.all_projects.append(p_install) + + node_project = base.make_node('project_view' + self.project_extension) # Node + p_view = self.vsnode_project_view(self, node_project) + p_view.collect_source() + p_view.collect_properties() + self.all_projects.append(p_view) + + n = self.vsnode_vsdir(self, make_uuid(self.srcnode.abspath() + 'build_aliases'), "build_aliases") + p_build.parent = p_install.parent = p_view.parent = n + self.all_projects.append(n) + + def collect_dirs(self): + """ + Create the folder structure in the CodeLite project view + """ + seen = {} + def make_parents(proj): + # look at a project, try to make a parent + if getattr(proj, 'parent', None): + # aliases already have parents + return + x = proj.iter_path + if x in seen: + proj.parent = seen[x] + return + + # There is not vsnode_vsdir for x. 
+ # So create a project representing the folder "x" + n = proj.parent = seen[x] = self.vsnode_vsdir(self, make_uuid(x.abspath()), x.name) + n.iter_path = x.parent + self.all_projects.append(n) + + # recurse up to the project directory + if x.height() > self.srcnode.height() + 1: + make_parents(n) + + for p in self.all_projects[:]: # iterate over a copy of all projects + if not getattr(p, 'tg', None): + # but only projects that have a task generator + continue + + # make a folder for each task generator + p.iter_path = p.tg.path + make_parents(p) + diff -Nru lilv-0.24.4~dfsg0/waflib/extras/color_gcc.py lilv-0.24.6/waflib/extras/color_gcc.py --- lilv-0.24.4~dfsg0/waflib/extras/color_gcc.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/extras/color_gcc.py 2019-06-06 20:19:08.000000000 +0000 @@ -0,0 +1,39 @@ +#!/usr/bin/env python +# encoding: utf-8 + +# Replaces the default formatter by one which understands GCC output and colorizes it. + +__author__ = __maintainer__ = "Jérôme Carretero " +__copyright__ = "Jérôme Carretero, 2012" + +import sys +from waflib import Logs + +class ColorGCCFormatter(Logs.formatter): + def __init__(self, colors): + self.colors = colors + Logs.formatter.__init__(self) + def format(self, rec): + frame = sys._getframe() + while frame: + func = frame.f_code.co_name + if func == 'exec_command': + cmd = frame.f_locals.get('cmd') + if isinstance(cmd, list) and ('gcc' in cmd[0] or 'g++' in cmd[0]): + lines = [] + for line in rec.msg.splitlines(): + if 'warning: ' in line: + lines.append(self.colors.YELLOW + line) + elif 'error: ' in line: + lines.append(self.colors.RED + line) + elif 'note: ' in line: + lines.append(self.colors.CYAN + line) + else: + lines.append(line) + rec.msg = "\n".join(lines) + frame = frame.f_back + return Logs.formatter.format(self, rec) + +def options(opt): + Logs.log.handlers[0].setFormatter(ColorGCCFormatter(Logs.colors)) + diff -Nru lilv-0.24.4~dfsg0/waflib/extras/color_msvc.py lilv-0.24.6/waflib/extras/color_msvc.py --- lilv-0.24.4~dfsg0/waflib/extras/color_msvc.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/extras/color_msvc.py 2019-10-19 17:59:11.000000000 +0000 @@ -0,0 +1,59 @@ +#!/usr/bin/env python +# encoding: utf-8 + +# Replaces the default formatter by one which understands MSVC output and colorizes it. 
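+# A sketch of how to enable it from a wscript (loading the tool in options()
+# installs the formatter before any build output is emitted):
+#
+#   def options(opt):
+#       opt.load('color_msvc')
+#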
+# Modified from color_gcc.py + +__author__ = __maintainer__ = "Alibek Omarov " +__copyright__ = "Alibek Omarov, 2019" + +import sys +from waflib import Logs + +class ColorMSVCFormatter(Logs.formatter): + def __init__(self, colors): + self.colors = colors + Logs.formatter.__init__(self) + + def parseMessage(self, line, color): + # Split messaage from 'disk:filepath: type: message' + arr = line.split(':', 3) + if len(arr) < 4: + return line + + colored = self.colors.BOLD + arr[0] + ':' + arr[1] + ':' + self.colors.NORMAL + colored += color + arr[2] + ':' + self.colors.NORMAL + colored += arr[3] + return colored + + def format(self, rec): + frame = sys._getframe() + while frame: + func = frame.f_code.co_name + if func == 'exec_command': + cmd = frame.f_locals.get('cmd') + if isinstance(cmd, list): + # Fix file case, it may be CL.EXE or cl.exe + argv0 = cmd[0].lower() + if 'cl.exe' in argv0: + lines = [] + # This will not work with "localized" versions + # of MSVC + for line in rec.msg.splitlines(): + if ': warning ' in line: + lines.append(self.parseMessage(line, self.colors.YELLOW)) + elif ': error ' in line: + lines.append(self.parseMessage(line, self.colors.RED)) + elif ': fatal error ' in line: + lines.append(self.parseMessage(line, self.colors.RED + self.colors.BOLD)) + elif ': note: ' in line: + lines.append(self.parseMessage(line, self.colors.CYAN)) + else: + lines.append(line) + rec.msg = "\n".join(lines) + frame = frame.f_back + return Logs.formatter.format(self, rec) + +def options(opt): + Logs.log.handlers[0].setFormatter(ColorMSVCFormatter(Logs.colors)) + diff -Nru lilv-0.24.4~dfsg0/waflib/extras/color_rvct.py lilv-0.24.6/waflib/extras/color_rvct.py --- lilv-0.24.4~dfsg0/waflib/extras/color_rvct.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/extras/color_rvct.py 2019-06-06 20:19:08.000000000 +0000 @@ -0,0 +1,51 @@ +#!/usr/bin/env python +# encoding: utf-8 + +# Replaces the default formatter by one which understands RVCT output and colorizes it. + +__author__ = __maintainer__ = "Jérôme Carretero " +__copyright__ = "Jérôme Carretero, 2012" + +import sys +import atexit +from waflib import Logs + +errors = [] + +def show_errors(): + for i, e in enumerate(errors): + if i > 5: + break + print("Error: %s" % e) + +atexit.register(show_errors) + +class RcvtFormatter(Logs.formatter): + def __init__(self, colors): + Logs.formatter.__init__(self) + self.colors = colors + def format(self, rec): + frame = sys._getframe() + while frame: + func = frame.f_code.co_name + if func == 'exec_command': + cmd = frame.f_locals['cmd'] + if isinstance(cmd, list) and ('armcc' in cmd[0] or 'armld' in cmd[0]): + lines = [] + for line in rec.msg.splitlines(): + if 'Warning: ' in line: + lines.append(self.colors.YELLOW + line) + elif 'Error: ' in line: + lines.append(self.colors.RED + line) + errors.append(line) + elif 'note: ' in line: + lines.append(self.colors.CYAN + line) + else: + lines.append(line) + rec.msg = "\n".join(lines) + frame = frame.f_back + return Logs.formatter.format(self, rec) + +def options(opt): + Logs.log.handlers[0].setFormatter(RcvtFormatter(Logs.colors)) + diff -Nru lilv-0.24.4~dfsg0/waflib/extras/compat15.py lilv-0.24.6/waflib/extras/compat15.py --- lilv-0.24.4~dfsg0/waflib/extras/compat15.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/extras/compat15.py 2019-06-06 20:19:08.000000000 +0000 @@ -0,0 +1,406 @@ +#! 
/usr/bin/env python +# encoding: utf-8 +# Thomas Nagy, 2010 (ita) + +""" +This file is provided to enable compatibility with waf 1.5 +It was enabled by default in waf 1.6, but it is not used in waf 1.7 +""" + +import sys +from waflib import ConfigSet, Logs, Options, Scripting, Task, Build, Configure, Node, Runner, TaskGen, Utils, Errors, Context + +# the following is to bring some compatibility with waf 1.5 "import waflib.Configure → import Configure" +sys.modules['Environment'] = ConfigSet +ConfigSet.Environment = ConfigSet.ConfigSet + +sys.modules['Logs'] = Logs +sys.modules['Options'] = Options +sys.modules['Scripting'] = Scripting +sys.modules['Task'] = Task +sys.modules['Build'] = Build +sys.modules['Configure'] = Configure +sys.modules['Node'] = Node +sys.modules['Runner'] = Runner +sys.modules['TaskGen'] = TaskGen +sys.modules['Utils'] = Utils +sys.modules['Constants'] = Context +Context.SRCDIR = '' +Context.BLDDIR = '' + +from waflib.Tools import c_preproc +sys.modules['preproc'] = c_preproc + +from waflib.Tools import c_config +sys.modules['config_c'] = c_config + +ConfigSet.ConfigSet.copy = ConfigSet.ConfigSet.derive +ConfigSet.ConfigSet.set_variant = Utils.nada + +Utils.pproc = Utils.subprocess + +Build.BuildContext.add_subdirs = Build.BuildContext.recurse +Build.BuildContext.new_task_gen = Build.BuildContext.__call__ +Build.BuildContext.is_install = 0 +Node.Node.relpath_gen = Node.Node.path_from + +Utils.pproc = Utils.subprocess +Utils.get_term_cols = Logs.get_term_cols + +def cmd_output(cmd, **kw): + + silent = False + if 'silent' in kw: + silent = kw['silent'] + del(kw['silent']) + + if 'e' in kw: + tmp = kw['e'] + del(kw['e']) + kw['env'] = tmp + + kw['shell'] = isinstance(cmd, str) + kw['stdout'] = Utils.subprocess.PIPE + if silent: + kw['stderr'] = Utils.subprocess.PIPE + + try: + p = Utils.subprocess.Popen(cmd, **kw) + output = p.communicate()[0] + except OSError as e: + raise ValueError(str(e)) + + if p.returncode: + if not silent: + msg = "command execution failed: %s -> %r" % (cmd, str(output)) + raise ValueError(msg) + output = '' + return output +Utils.cmd_output = cmd_output + +def name_to_obj(self, s, env=None): + if Logs.verbose: + Logs.warn('compat: change "name_to_obj(name, env)" by "get_tgen_by_name(name)"') + return self.get_tgen_by_name(s) +Build.BuildContext.name_to_obj = name_to_obj + +def env_of_name(self, name): + try: + return self.all_envs[name] + except KeyError: + Logs.error('no such environment: '+name) + return None +Build.BuildContext.env_of_name = env_of_name + + +def set_env_name(self, name, env): + self.all_envs[name] = env + return env +Configure.ConfigurationContext.set_env_name = set_env_name + +def retrieve(self, name, fromenv=None): + try: + env = self.all_envs[name] + except KeyError: + env = ConfigSet.ConfigSet() + self.prepare_env(env) + self.all_envs[name] = env + else: + if fromenv: + Logs.warn('The environment %s may have been configured already', name) + return env +Configure.ConfigurationContext.retrieve = retrieve + +Configure.ConfigurationContext.sub_config = Configure.ConfigurationContext.recurse +Configure.ConfigurationContext.check_tool = Configure.ConfigurationContext.load +Configure.conftest = Configure.conf +Configure.ConfigurationError = Errors.ConfigurationError +Utils.WafError = Errors.WafError + +Options.OptionsContext.sub_options = Options.OptionsContext.recurse +Options.OptionsContext.tool_options = Context.Context.load +Options.Handler = Options.OptionsContext + +Task.simple_task_type = Task.task_type_from_func = 
Task.task_factory +Task.Task.classes = Task.classes + +def setitem(self, key, value): + if key.startswith('CCFLAGS'): + key = key[1:] + self.table[key] = value +ConfigSet.ConfigSet.__setitem__ = setitem + +@TaskGen.feature('d') +@TaskGen.before('apply_incpaths') +def old_importpaths(self): + if getattr(self, 'importpaths', []): + self.includes = self.importpaths + +from waflib import Context +eld = Context.load_tool +def load_tool(*k, **kw): + ret = eld(*k, **kw) + if 'set_options' in ret.__dict__: + if Logs.verbose: + Logs.warn('compat: rename "set_options" to options') + ret.options = ret.set_options + if 'detect' in ret.__dict__: + if Logs.verbose: + Logs.warn('compat: rename "detect" to "configure"') + ret.configure = ret.detect + return ret +Context.load_tool = load_tool + +def get_curdir(self): + return self.path.abspath() +Context.Context.curdir = property(get_curdir, Utils.nada) + +def get_srcdir(self): + return self.srcnode.abspath() +Configure.ConfigurationContext.srcdir = property(get_srcdir, Utils.nada) + +def get_blddir(self): + return self.bldnode.abspath() +Configure.ConfigurationContext.blddir = property(get_blddir, Utils.nada) + +Configure.ConfigurationContext.check_message_1 = Configure.ConfigurationContext.start_msg +Configure.ConfigurationContext.check_message_2 = Configure.ConfigurationContext.end_msg + +rev = Context.load_module +def load_module(path, encoding=None): + ret = rev(path, encoding) + if 'set_options' in ret.__dict__: + if Logs.verbose: + Logs.warn('compat: rename "set_options" to "options" (%r)', path) + ret.options = ret.set_options + if 'srcdir' in ret.__dict__: + if Logs.verbose: + Logs.warn('compat: rename "srcdir" to "top" (%r)', path) + ret.top = ret.srcdir + if 'blddir' in ret.__dict__: + if Logs.verbose: + Logs.warn('compat: rename "blddir" to "out" (%r)', path) + ret.out = ret.blddir + Utils.g_module = Context.g_module + Options.launch_dir = Context.launch_dir + return ret +Context.load_module = load_module + +old_post = TaskGen.task_gen.post +def post(self): + self.features = self.to_list(self.features) + if 'cc' in self.features: + if Logs.verbose: + Logs.warn('compat: the feature cc does not exist anymore (use "c")') + self.features.remove('cc') + self.features.append('c') + if 'cstaticlib' in self.features: + if Logs.verbose: + Logs.warn('compat: the feature cstaticlib does not exist anymore (use "cstlib" or "cxxstlib")') + self.features.remove('cstaticlib') + self.features.append(('cxx' in self.features) and 'cxxstlib' or 'cstlib') + if getattr(self, 'ccflags', None): + if Logs.verbose: + Logs.warn('compat: "ccflags" was renamed to "cflags"') + self.cflags = self.ccflags + return old_post(self) +TaskGen.task_gen.post = post + +def waf_version(*k, **kw): + Logs.warn('wrong version (waf_version was removed in waf 1.6)') +Utils.waf_version = waf_version + + +import os +@TaskGen.feature('c', 'cxx', 'd') +@TaskGen.before('apply_incpaths', 'propagate_uselib_vars') +@TaskGen.after('apply_link', 'process_source') +def apply_uselib_local(self): + """ + process the uselib_local attribute + execute after apply_link because of the execution order set on 'link_task' + """ + env = self.env + from waflib.Tools.ccroot import stlink_task + + # 1. 
the case of the libs defined in the project (visit ancestors first) + # the ancestors external libraries (uselib) will be prepended + self.uselib = self.to_list(getattr(self, 'uselib', [])) + self.includes = self.to_list(getattr(self, 'includes', [])) + names = self.to_list(getattr(self, 'uselib_local', [])) + get = self.bld.get_tgen_by_name + seen = set() + seen_uselib = set() + tmp = Utils.deque(names) # consume a copy of the list of names + if tmp: + if Logs.verbose: + Logs.warn('compat: "uselib_local" is deprecated, replace by "use"') + while tmp: + lib_name = tmp.popleft() + # visit dependencies only once + if lib_name in seen: + continue + + y = get(lib_name) + y.post() + seen.add(lib_name) + + # object has ancestors to process (shared libraries): add them to the end of the list + if getattr(y, 'uselib_local', None): + for x in self.to_list(getattr(y, 'uselib_local', [])): + obj = get(x) + obj.post() + if getattr(obj, 'link_task', None): + if not isinstance(obj.link_task, stlink_task): + tmp.append(x) + + # link task and flags + if getattr(y, 'link_task', None): + + link_name = y.target[y.target.rfind(os.sep) + 1:] + if isinstance(y.link_task, stlink_task): + env.append_value('STLIB', [link_name]) + else: + # some linkers can link against programs + env.append_value('LIB', [link_name]) + + # the order + self.link_task.set_run_after(y.link_task) + + # for the recompilation + self.link_task.dep_nodes += y.link_task.outputs + + # add the link path too + tmp_path = y.link_task.outputs[0].parent.bldpath() + if not tmp_path in env['LIBPATH']: + env.prepend_value('LIBPATH', [tmp_path]) + + # add ancestors uselib too - but only propagate those that have no staticlib defined + for v in self.to_list(getattr(y, 'uselib', [])): + if v not in seen_uselib: + seen_uselib.add(v) + if not env['STLIB_' + v]: + if not v in self.uselib: + self.uselib.insert(0, v) + + # if the library task generator provides 'export_includes', add to the include path + # the export_includes must be a list of paths relative to the other library + if getattr(y, 'export_includes', None): + self.includes.extend(y.to_incnodes(y.export_includes)) + +@TaskGen.feature('cprogram', 'cxxprogram', 'cstlib', 'cxxstlib', 'cshlib', 'cxxshlib', 'dprogram', 'dstlib', 'dshlib') +@TaskGen.after('apply_link') +def apply_objdeps(self): + "add the .o files produced by some other object files in the same manner as uselib_local" + names = getattr(self, 'add_objects', []) + if not names: + return + names = self.to_list(names) + + get = self.bld.get_tgen_by_name + seen = [] + while names: + x = names[0] + + # visit dependencies only once + if x in seen: + names = names[1:] + continue + + # object does not exist ? + y = get(x) + + # object has ancestors to process first ? 
update the list of names + if getattr(y, 'add_objects', None): + added = 0 + lst = y.to_list(y.add_objects) + lst.reverse() + for u in lst: + if u in seen: + continue + added = 1 + names = [u]+names + if added: + continue # list of names modified, loop + + # safe to process the current object + y.post() + seen.append(x) + + for t in getattr(y, 'compiled_tasks', []): + self.link_task.inputs.extend(t.outputs) + +@TaskGen.after('apply_link') +def process_obj_files(self): + if not hasattr(self, 'obj_files'): + return + for x in self.obj_files: + node = self.path.find_resource(x) + self.link_task.inputs.append(node) + +@TaskGen.taskgen_method +def add_obj_file(self, file): + """Small example on how to link object files as if they were source + obj = bld.create_obj('cc') + obj.add_obj_file('foo.o')""" + if not hasattr(self, 'obj_files'): + self.obj_files = [] + if not 'process_obj_files' in self.meths: + self.meths.append('process_obj_files') + self.obj_files.append(file) + + +old_define = Configure.ConfigurationContext.__dict__['define'] + +@Configure.conf +def define(self, key, val, quote=True, comment=''): + old_define(self, key, val, quote, comment) + if key.startswith('HAVE_'): + self.env[key] = 1 + +old_undefine = Configure.ConfigurationContext.__dict__['undefine'] + +@Configure.conf +def undefine(self, key, comment=''): + old_undefine(self, key, comment) + if key.startswith('HAVE_'): + self.env[key] = 0 + +# some people might want to use export_incdirs, but it was renamed +def set_incdirs(self, val): + Logs.warn('compat: change "export_incdirs" by "export_includes"') + self.export_includes = val +TaskGen.task_gen.export_incdirs = property(None, set_incdirs) + +def install_dir(self, path): + if not path: + return [] + + destpath = Utils.subst_vars(path, self.env) + + if self.is_install > 0: + Logs.info('* creating %s', destpath) + Utils.check_dir(destpath) + elif self.is_install < 0: + Logs.info('* removing %s', destpath) + try: + os.remove(destpath) + except OSError: + pass +Build.BuildContext.install_dir = install_dir + +# before/after names +repl = {'apply_core': 'process_source', + 'apply_lib_vars': 'process_source', + 'apply_obj_vars': 'propagate_uselib_vars', + 'exec_rule': 'process_rule' +} +def after(*k): + k = [repl.get(key, key) for key in k] + return TaskGen.after_method(*k) + +def before(*k): + k = [repl.get(key, key) for key in k] + return TaskGen.before_method(*k) +TaskGen.before = before + diff -Nru lilv-0.24.4~dfsg0/waflib/extras/cppcheck.py lilv-0.24.6/waflib/extras/cppcheck.py --- lilv-0.24.4~dfsg0/waflib/extras/cppcheck.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/extras/cppcheck.py 2019-06-06 20:19:08.000000000 +0000 @@ -0,0 +1,591 @@ +#! /usr/bin/env python +# -*- encoding: utf-8 -*- +# Michel Mooij, michel.mooij7@gmail.com + +""" +Tool Description +================ +This module provides a waf wrapper (i.e. waftool) around the C/C++ source code +checking tool 'cppcheck'. + +See http://cppcheck.sourceforge.net/ for more information on the cppcheck tool +itself. +Note that many linux distributions already provide a ready to install version +of cppcheck. 
On fedora, for instance, it can be installed using yum: + + 'sudo yum install cppcheck' + + +Usage +===== +In order to use this waftool simply add it to the 'options' and 'configure' +functions of your main waf script as shown in the example below: + + def options(opt): + opt.load('cppcheck', tooldir='./waftools') + + def configure(conf): + conf.load('cppcheck') + +Note that example shown above assumes that the cppcheck waftool is located in +the sub directory named 'waftools'. + +When configured as shown in the example above, cppcheck will automatically +perform a source code analysis on all C/C++ build tasks that have been +defined in your waf build system. + +The example shown below for a C program will be used as input for cppcheck when +building the task. + + def build(bld): + bld.program(name='foo', src='foobar.c') + +The result of the source code analysis will be stored both as xml and html +files in the build location for the task. Should any error be detected by +cppcheck the build will be aborted and a link to the html report will be shown. +By default, one index.html file is created for each task generator. A global +index.html file can be obtained by setting the following variable +in the configuration section: + + conf.env.CPPCHECK_SINGLE_HTML = False + +When needed source code checking by cppcheck can be disabled per task, per +detected error or warning for a particular task. It can be also be disabled for +all tasks. + +In order to exclude a task from source code checking add the skip option to the +task as shown below: + + def build(bld): + bld.program( + name='foo', + src='foobar.c' + cppcheck_skip=True + ) + +When needed problems detected by cppcheck may be suppressed using a file +containing a list of suppression rules. The relative or absolute path to this +file can be added to the build task as shown in the example below: + + bld.program( + name='bar', + src='foobar.c', + cppcheck_suppress='bar.suppress' + ) + +A cppcheck suppress file should contain one suppress rule per line. Each of +these rules will be passed as an '--suppress=' argument to cppcheck. + +Dependencies +================ +This waftool depends on the python pygments module, it is used for source code +syntax highlighting when creating the html reports. see http://pygments.org/ for +more information on this package. + +Remarks +================ +The generation of the html report is originally based on the cppcheck-htmlreport.py +script that comes shipped with the cppcheck tool. +""" + +import sys +import xml.etree.ElementTree as ElementTree +from waflib import Task, TaskGen, Logs, Context, Options + +PYGMENTS_EXC_MSG= ''' +The required module 'pygments' could not be found. Please install it using your +platform package manager (e.g. apt-get or yum), using 'pip' or 'easy_install', +see 'http://pygments.org/download/' for installation instructions. 
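+For example: 'pip install pygments'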
+'''
+
+try:
+	import pygments
+	from pygments import formatters, lexers
+except ImportError as e:
+	Logs.warn(PYGMENTS_EXC_MSG)
+	raise e
+
+
+def options(opt):
+	opt.add_option('--cppcheck-skip', dest='cppcheck_skip',
+		default=False, action='store_true',
+		help='do not check C/C++ sources (default=False)')
+
+	opt.add_option('--cppcheck-err-resume', dest='cppcheck_err_resume',
+		default=False, action='store_true',
+		help='continue in case of errors (default=False)')
+
+	opt.add_option('--cppcheck-bin-enable', dest='cppcheck_bin_enable',
+		default='warning,performance,portability,style,unusedFunction', action='store',
+		help="cppcheck option '--enable=' for binaries (default=warning,performance,portability,style,unusedFunction)")
+
+	opt.add_option('--cppcheck-lib-enable', dest='cppcheck_lib_enable',
+		default='warning,performance,portability,style', action='store',
+		help="cppcheck option '--enable=' for libraries (default=warning,performance,portability,style)")
+
+	opt.add_option('--cppcheck-std-c', dest='cppcheck_std_c',
+		default='c99', action='store',
+		help='cppcheck standard to use when checking C (default=c99)')
+
+	opt.add_option('--cppcheck-std-cxx', dest='cppcheck_std_cxx',
+		default='c++03', action='store',
+		help='cppcheck standard to use when checking C++ (default=c++03)')
+
+	opt.add_option('--cppcheck-check-config', dest='cppcheck_check_config',
+		default=False, action='store_true',
+		help='force a check for missing builtin include files, e.g. stdio.h (default=False)')
+
+	opt.add_option('--cppcheck-max-configs', dest='cppcheck_max_configs',
+		default='20', action='store',
+		help='maximum preprocessor (--max-configs) define iterations (default=20)')
+
+	opt.add_option('--cppcheck-jobs', dest='cppcheck_jobs',
+		default='1', action='store',
+		help='number of jobs (-j) to do the checking work (default=1)')
+
+def configure(conf):
+	if conf.options.cppcheck_skip:
+		conf.env.CPPCHECK_SKIP = [True]
+	conf.env.CPPCHECK_STD_C = conf.options.cppcheck_std_c
+	conf.env.CPPCHECK_STD_CXX = conf.options.cppcheck_std_cxx
+	conf.env.CPPCHECK_MAX_CONFIGS = conf.options.cppcheck_max_configs
+	conf.env.CPPCHECK_BIN_ENABLE = conf.options.cppcheck_bin_enable
+	conf.env.CPPCHECK_LIB_ENABLE = conf.options.cppcheck_lib_enable
+	conf.env.CPPCHECK_JOBS = conf.options.cppcheck_jobs
+	if conf.options.cppcheck_jobs != '1' and ('unusedFunction' in conf.options.cppcheck_bin_enable or 'unusedFunction' in conf.options.cppcheck_lib_enable or 'all' in conf.options.cppcheck_bin_enable or 'all' in conf.options.cppcheck_lib_enable):
+		Logs.warn('cppcheck: unusedFunction cannot be used with multiple threads, cppcheck will disable it automatically')
+	conf.find_program('cppcheck', var='CPPCHECK')
+
+	# set to True to get a single index.html file
+	conf.env.CPPCHECK_SINGLE_HTML = False
+
+@TaskGen.feature('c')
+@TaskGen.feature('cxx')
+def cppcheck_execute(self):
+	if hasattr(self.bld, 'conf'):
+		return
+	if len(self.env.CPPCHECK_SKIP) or Options.options.cppcheck_skip:
+		return
+	if getattr(self, 'cppcheck_skip', False):
+		return
+	task = self.create_task('cppcheck')
+	task.cmd = _tgen_create_cmd(self)
+	task.fatal = []
+	if not Options.options.cppcheck_err_resume:
+		task.fatal.append('error')
+
+
+def _tgen_create_cmd(self):
+	features = getattr(self, 'features', [])
+	std_c = self.env.CPPCHECK_STD_C
+	std_cxx = self.env.CPPCHECK_STD_CXX
+	max_configs = self.env.CPPCHECK_MAX_CONFIGS
+	bin_enable = self.env.CPPCHECK_BIN_ENABLE
+	lib_enable = self.env.CPPCHECK_LIB_ENABLE
+	jobs = self.env.CPPCHECK_JOBS
+
+	cmd = 
self.env.CPPCHECK
+	args = ['--inconclusive','--report-progress','--verbose','--xml','--xml-version=2']
+	args.append('--max-configs=%s' % max_configs)
+	args.append('-j %s' % jobs)
+
+	if 'cxx' in features:
+		args.append('--language=c++')
+		args.append('--std=%s' % std_cxx)
+	else:
+		args.append('--language=c')
+		args.append('--std=%s' % std_c)
+
+	if Options.options.cppcheck_check_config:
+		args.append('--check-config')
+
+	if set(['cprogram','cxxprogram']) & set(features):
+		args.append('--enable=%s' % bin_enable)
+	else:
+		args.append('--enable=%s' % lib_enable)
+
+	for src in self.to_list(getattr(self, 'source', [])):
+		if not isinstance(src, str):
+			src = repr(src)
+		args.append(src)
+	for inc in self.to_incnodes(self.to_list(getattr(self, 'includes', []))):
+		if not isinstance(inc, str):
+			inc = repr(inc)
+		args.append('-I%s' % inc)
+	for inc in self.to_incnodes(self.to_list(self.env.INCLUDES)):
+		if not isinstance(inc, str):
+			inc = repr(inc)
+		args.append('-I%s' % inc)
+	return cmd + args
+
+
+class cppcheck(Task.Task):
+	quiet = True
+
+	def run(self):
+		stderr = self.generator.bld.cmd_and_log(self.cmd, quiet=Context.STDERR, output=Context.STDERR)
+		self._save_xml_report(stderr)
+		defects = self._get_defects(stderr)
+		index = self._create_html_report(defects)
+		self._errors_evaluate(defects, index)
+		return 0
+
+	def _save_xml_report(self, s):
+		'''use the cppcheck xml result string, add the command string used to invoke cppcheck
+		and save it as an xml file.
+		'''
+		header = '%s\n' % s.splitlines()[0]
+		root = ElementTree.fromstring(s)
+		cmd = ElementTree.SubElement(root.find('cppcheck'), 'cmd')
+		cmd.text = str(self.cmd)
+		body = ElementTree.tostring(root).decode('us-ascii')
+		body_html_name = 'cppcheck-%s.xml' % self.generator.get_name()
+		if self.env.CPPCHECK_SINGLE_HTML:
+			body_html_name = 'cppcheck.xml'
+		node = self.generator.path.get_bld().find_or_declare(body_html_name)
+		node.write(header + body)
+
+	def _get_defects(self, xml_string):
+		'''evaluate the xml string returned by cppcheck (on stderr) and use it to create
+		a list of defects. 
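+
+		For reference, a simplified sketch of the '--xml-version=2' report that
+		cppcheck writes on stderr (attribute names as read below; the element
+		nesting and all values are only illustrative):
+
+			<results version="2">
+				<cppcheck version="1.86"/>
+				<errors>
+					<error id="nullPointer" severity="error" msg="..." verbose="...">
+						<location file="foo.c" line="7"/>
+					</error>
+				</errors>
+			</results>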
+		'''
+		defects = []
+		for error in ElementTree.fromstring(xml_string).iter('error'):
+			defect = {}
+			defect['id'] = error.get('id')
+			defect['severity'] = error.get('severity')
+			defect['msg'] = str(error.get('msg')).replace('&lt;','<')
+			defect['verbose'] = error.get('verbose')
+			for location in error.findall('location'):
+				defect['file'] = location.get('file')
+				defect['line'] = str(int(location.get('line')) - 1)
+			defects.append(defect)
+		return defects
+
+	def _create_html_report(self, defects):
+		files, css_style_defs = self._create_html_files(defects)
+		index = self._create_html_index(files)
+		self._create_css_file(css_style_defs)
+		return index
+
+	def _create_html_files(self, defects):
+		sources = {}
+		defects = [defect for defect in defects if 'file' in defect]
+		for defect in defects:
+			name = defect['file']
+			if not name in sources:
+				sources[name] = [defect]
+			else:
+				sources[name].append(defect)
+
+		files = {}
+		css_style_defs = None
+		bpath = self.generator.path.get_bld().abspath()
+		names = list(sources.keys())
+		for i in range(0,len(names)):
+			name = names[i]
+			if self.env.CPPCHECK_SINGLE_HTML:
+				htmlfile = 'cppcheck/%i.html' % (i)
+			else:
+				htmlfile = 'cppcheck/%s%i.html' % (self.generator.get_name(),i)
+			errors = sources[name]
+			files[name] = { 'htmlfile': '%s/%s' % (bpath, htmlfile), 'errors': errors }
+			css_style_defs = self._create_html_file(name, htmlfile, errors)
+		return files, css_style_defs
+
+	def _create_html_file(self, sourcefile, htmlfile, errors):
+		name = self.generator.get_name()
+		root = ElementTree.fromstring(CPPCHECK_HTML_FILE)
+		title = root.find('head/title')
+		title.text = 'cppcheck - report - %s' % name
+
+		body = root.find('body')
+		for div in body.findall('div'):
+			if div.get('id') == 'page':
+				page = div
+				break
+		for div in page.findall('div'):
+			if div.get('id') == 'header':
+				h1 = div.find('h1')
+				h1.text = 'cppcheck report - %s' % name
+			if div.get('id') == 'menu':
+				indexlink = div.find('a')
+				if self.env.CPPCHECK_SINGLE_HTML:
+					indexlink.attrib['href'] = 'index.html'
+				else:
+					indexlink.attrib['href'] = 'index-%s.html' % name
+			if div.get('id') == 'content':
+				content = div
+				srcnode = self.generator.bld.root.find_node(sourcefile)
+				hl_lines = [e['line'] for e in errors if 'line' in e]
+				formatter = CppcheckHtmlFormatter(linenos=True, style='colorful', hl_lines=hl_lines, lineanchors='line')
+				formatter.errors = [e for e in errors if 'line' in e]
+				css_style_defs = formatter.get_style_defs('.highlight')
+				lexer = pygments.lexers.guess_lexer_for_filename(sourcefile, "")
+				s = pygments.highlight(srcnode.read(), lexer, formatter)
+				table = ElementTree.fromstring(s)
+				content.append(table)
+
+		s = ElementTree.tostring(root, method='html').decode('us-ascii')
+		s = CCPCHECK_HTML_TYPE + s
+		node = self.generator.path.get_bld().find_or_declare(htmlfile)
+		node.write(s)
+		return css_style_defs
+
+	def _create_html_index(self, files):
+		name = self.generator.get_name()
+		root = ElementTree.fromstring(CPPCHECK_HTML_FILE)
+		title = root.find('head/title')
+		title.text = 'cppcheck - report - %s' % name
+
+		body = root.find('body')
+		for div in body.findall('div'):
+			if div.get('id') == 'page':
+				page = div
+				break
+		for div in page.findall('div'):
+			if div.get('id') == 'header':
+				h1 = div.find('h1')
+				h1.text = 'cppcheck report - %s' % name
+			if div.get('id') == 'content':
+				content = div
+				self._create_html_table(content, files)
+			if div.get('id') == 'menu':
+				indexlink = div.find('a')
+				if self.env.CPPCHECK_SINGLE_HTML:
+					indexlink.attrib['href'] = 
'index.html'
+				else:
+					indexlink.attrib['href'] = 'index-%s.html' % name
+
+		s = ElementTree.tostring(root, method='html').decode('us-ascii')
+		s = CCPCHECK_HTML_TYPE + s
+		index_html_name = 'cppcheck/index-%s.html' % name
+		if self.env.CPPCHECK_SINGLE_HTML:
+			index_html_name = 'cppcheck/index.html'
+		node = self.generator.path.get_bld().find_or_declare(index_html_name)
+		node.write(s)
+		return node
+
+	def _create_html_table(self, content, files):
+		table = ElementTree.fromstring(CPPCHECK_HTML_TABLE)
+		for name, val in files.items():
+			f = val['htmlfile']
+			s = '<tr><td colspan="4"><a href="%s">%s</a></td></tr>\n' % (f,name)
+			row = ElementTree.fromstring(s)
+			table.append(row)
+
+			errors = sorted(val['errors'], key=lambda e: int(e['line']) if 'line' in e else sys.maxsize)
+			for e in errors:
+				if not 'line' in e:
+					s = '<tr><td></td><td>%s</td><td>%s</td><td>%s</td></tr>\n' % (e['id'], e['severity'], e['msg'])
+				else:
+					attr = ''
+					if e['severity'] == 'error':
+						attr = 'class="error"'
+					s = '<tr><td><a href="%s#line-%s">%s</a></td>' % (f, e['line'], e['line'])
+					s+= '<td>%s</td><td>%s</td><td %s>%s</td></tr>\n' % (e['id'], e['severity'], attr, e['msg'])
+				row = ElementTree.fromstring(s)
+				table.append(row)
+		content.append(table)
+
+	def _create_css_file(self, css_style_defs):
+		css = str(CPPCHECK_CSS_FILE)
+		if css_style_defs:
+			css = "%s\n%s\n" % (css, css_style_defs)
+		node = self.generator.path.get_bld().find_or_declare('cppcheck/style.css')
+		node.write(css)
+
+	def _errors_evaluate(self, errors, http_index):
+		name = self.generator.get_name()
+		fatal = self.fatal
+		severity = [err['severity'] for err in errors]
+		problems = [err for err in errors if err['severity'] != 'information']
+
+		if set(fatal) & set(severity):
+			exc = "\n"
+			exc += "\ncppcheck detected fatal error(s) in task '%s', see report for details:" % name
+			exc += "\n    file://%r" % (http_index)
+			exc += "\n"
+			self.generator.bld.fatal(exc)
+
+		elif len(problems):
+			msg = "\ncppcheck detected (possible) problem(s) in task '%s', see report for details:" % name
+			msg += "\n    file://%r" % http_index
+			msg += "\n"
+			Logs.error(msg)
+
+
+class CppcheckHtmlFormatter(pygments.formatters.HtmlFormatter):
+	errors = []
+
+	def wrap(self, source, outfile):
+		line_no = 1
+		for i, t in super(CppcheckHtmlFormatter, self).wrap(source, outfile):
+			# If this is a source code line we want to add a span tag at the end.
+			if i == 1:
+				for error in self.errors:
+					if int(error['line']) == line_no:
+						t = t.replace('\n', CPPCHECK_HTML_ERROR % error['msg'])
+				line_no += 1
+			yield i, t
+
+
+CCPCHECK_HTML_TYPE = \
+'<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">\n'
+
+CPPCHECK_HTML_FILE = """
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd" [<!ENTITY nbsp "&#160;">]>
+<html>
+	<head>
+		<title>cppcheck - report - XXX</title>
+		<link href="style.css" rel="stylesheet" type="text/css" />
+		<style type="text/css">
+		</style>
+	</head>
+	<body class="body">
+		<div id="page-header">&nbsp;</div>
+		<div id="page">
+			<div id="header">
+				<h1>cppcheck report - XXX</h1>
+			</div>
+			<div id="menu">
+				<a href="index.html">Defect list</a>
+			</div>
+			<div id="content">
+			</div>
+			<div id="footer">
+				<div>cppcheck - a tool for static C/C++ code analysis</div>
+				<div>
+				internet: <a href="http://cppcheck.sourceforge.net">http://cppcheck.sourceforge.net</a><br/>
+				programming: <a href="mailto:cppcheck@gmail.com">cppcheck@gmail.com</a>
+				</div>
+				&nbsp;
+			</div>
+			<div id="page-footer">&nbsp;</div>
+		</div>
+	</body>
+</html>
+"""
+
+CPPCHECK_HTML_TABLE = """
+<table>
+	<tr>
+		<th>Line</th>
+		<th>Id</th>
+		<th>Severity</th>
+		<th>Message</th>
+	</tr>
+</table>
+"""
+
+CPPCHECK_HTML_ERROR = \
+'<span class="error">&lt;--- %s</span>\n'
+
+CPPCHECK_CSS_FILE = """
+body.body {
+	font-family: Arial;
+	font-size: 13px;
+	background-color: black;
+	padding: 0px;
+	margin: 0px;
+}
+
+.error {
+	font-family: Arial;
+	font-size: 13px;
+	background-color: #ffb7b7;
+	padding: 0px;
+	margin: 0px;
+}
+
+th, td {
+	min-width: 100px;
+	text-align: left;
+}
+
+#page-header {
+	clear: both;
+	width: 1200px;
+	margin: 20px auto 0px auto;
+	height: 10px;
+	border-bottom-width: 2px;
+	border-bottom-style: solid;
+	border-bottom-color: #aaaaaa;
+}
+
+#page {
+	width: 1160px;
+	margin: auto;
+	border-left-width: 2px;
+	border-left-style: solid;
+	border-left-color: #aaaaaa;
+	border-right-width: 2px;
+	border-right-style: solid;
+	border-right-color: #aaaaaa;
+	background-color: White;
+	padding: 20px;
+}
+
+#page-footer {
+	clear: both;
+	width: 1200px;
+	margin: auto;
+	height: 10px;
+	border-top-width: 2px;
+	border-top-style: solid;
+	border-top-color: #aaaaaa;
+}
+
+#header {
+	width: 100%;
+	height: 70px;
+	background-image: url(logo.png);
+	background-repeat: no-repeat;
+	background-position: left top;
+	border-bottom-style: solid;
+	border-bottom-width: thin;
+	border-bottom-color: #aaaaaa;
+}
+
+#menu {
+	margin-top: 5px;
+	text-align: left;
+	float: left;
+	width: 100px;
+	height: 300px;
+}
+
+#menu > a {
+	margin-left: 10px;
+	display: block;
+}
+
+#content {
+	float: left;
+	width: 1020px;
+	margin: 5px;
+	padding: 0px 10px 10px 10px;
+	border-left-style: solid;
+	border-left-width: thin;
+	border-left-color: #aaaaaa;
+}
+
+#footer {
+	padding-bottom: 5px;
+	padding-top: 5px;
+	border-top-style: solid;
+	border-top-width: thin;
+	border-top-color: #aaaaaa;
+	clear: both;
+	font-size: 10px;
+}
+
+#footer > div {
+	float: left;
+	width: 33%;
+}
+
+"""
+
diff -Nru lilv-0.24.4~dfsg0/waflib/extras/cpplint.py lilv-0.24.6/waflib/extras/cpplint.py
--- lilv-0.24.4~dfsg0/waflib/extras/cpplint.py	1970-01-01 00:00:00.000000000 +0000
+++ lilv-0.24.6/waflib/extras/cpplint.py	2019-06-06 20:19:08.000000000 +0000
@@ -0,0 +1,209 @@
+#! /usr/bin/env python
+# encoding: utf-8
+#
+# written by Sylvain Rouquette, 2014
+
+'''
+
+This is an extra tool, not bundled with the default waf binary.
+To add the cpplint tool to the waf file:
+$ ./waf-light --tools=compat15,cpplint
+
+This tool also requires cpplint for Python. 
+If you have PIP, you can install it like this: pip install cpplint
+
+When using this tool, the wscript will look like:
+
+	def options(opt):
+		opt.load('compiler_cxx cpplint')
+
+	def configure(conf):
+		conf.load('compiler_cxx cpplint')
+		# optional, you can also specify them on the command line
+		conf.env.CPPLINT_FILTERS = ','.join((
+			'-whitespace/newline',      # c++11 lambda
+			'-readability/braces',      # c++11 constructor
+			'-whitespace/braces',       # c++11 constructor
+			'-build/storage_class',     # c++11 for-range
+			'-whitespace/blank_line',   # user pref
+			'-whitespace/labels'        # user pref
+			))
+
+	def build(bld):
+		bld(features='cpplint', source='main.cpp', target='app')
+		# add include files, because they aren't usually built
+		bld(features='cpplint', source=bld.path.ant_glob('**/*.hpp'))
+'''
+
+from __future__ import absolute_import
+import sys, re
+import logging
+from waflib import Errors, Task, TaskGen, Logs, Options, Node, Utils
+
+
+critical_errors = 0
+CPPLINT_FORMAT = '[CPPLINT] %(filename)s:\nline %(linenum)s, severity %(confidence)s, category: %(category)s\n%(message)s\n'
+RE_EMACS = re.compile(r'(?P<filename>.*):(?P<linenum>\d+):  (?P<message>.*)  \[(?P<category>.*)\] \[(?P<confidence>\d+)\]')
+CPPLINT_RE = {
+	'waf': RE_EMACS,
+	'emacs': RE_EMACS,
+	'vs7': re.compile(r'(?P<filename>.*)\((?P<linenum>\d+)\):  (?P<message>.*)  \[(?P<category>.*)\] \[(?P<confidence>\d+)\]'),
+	'eclipse': re.compile(r'(?P<filename>.*):(?P<linenum>\d+): warning: (?P<message>.*)  \[(?P<category>.*)\] \[(?P<confidence>\d+)\]'),
+}
+CPPLINT_STR = ('${CPPLINT} '
+			   '--verbose=${CPPLINT_LEVEL} '
+			   '--output=${CPPLINT_OUTPUT} '
+			   '--filter=${CPPLINT_FILTERS} '
+			   '--root=${CPPLINT_ROOT} '
+			   '--linelength=${CPPLINT_LINE_LENGTH} ')
+
+
+def options(opt):
+	opt.add_option('--cpplint-filters', type='string',
+				   default='', dest='CPPLINT_FILTERS',
+				   help='add filters to cpplint')
+	opt.add_option('--cpplint-length', type='int',
+				   default=80, dest='CPPLINT_LINE_LENGTH',
+				   help='specify the line length (default: 80)')
+	opt.add_option('--cpplint-level', default=1, type='int', dest='CPPLINT_LEVEL',
+				   help='specify the log level (default: 1)')
+	opt.add_option('--cpplint-break', default=5, type='int', dest='CPPLINT_BREAK',
+				   help='break the build if error >= level (default: 5)')
+	opt.add_option('--cpplint-root', type='string',
+				   default='', dest='CPPLINT_ROOT',
+				   help='root directory used to derive header guard')
+	opt.add_option('--cpplint-skip', action='store_true',
+				   default=False, dest='CPPLINT_SKIP',
+				   help='skip cpplint during build')
+	opt.add_option('--cpplint-output', type='string',
+				   default='waf', dest='CPPLINT_OUTPUT',
+				   help='select output format (waf, emacs, vs7, eclipse)')
+
+
+def configure(conf):
+	try:
+		conf.find_program('cpplint', var='CPPLINT')
+	except Errors.ConfigurationError:
+		conf.env.CPPLINT_SKIP = True
+
+
+class cpplint_formatter(Logs.formatter, object):
+	def __init__(self, fmt):
+		logging.Formatter.__init__(self, CPPLINT_FORMAT)
+		self.fmt = fmt
+
+	def format(self, rec):
+		if self.fmt == 'waf':
+			result = CPPLINT_RE[self.fmt].match(rec.msg).groupdict()
+			rec.msg = CPPLINT_FORMAT % result
+		if rec.levelno <= logging.INFO:
+			rec.c1 = Logs.colors.CYAN
+		return super(cpplint_formatter, self).format(rec)
+
+
+class cpplint_handler(Logs.log_handler, object):
+	def __init__(self, stream=sys.stderr, **kw):
+		super(cpplint_handler, self).__init__(stream, **kw)
+		self.stream = stream
+
+	def emit(self, rec):
+		rec.stream = self.stream
+		self.emit_override(rec)
+		self.flush()
+
+
+class cpplint_wrapper(object):
+	def __init__(self, logger, threshold, fmt):
+		self.logger = logger
+		self.threshold = threshold
+		self.fmt = fmt
+
+	def __enter__(self):
+		
return self
+
+	def __exit__(self, exc_type, exc_value, traceback):
+		if isinstance(exc_value, Utils.subprocess.CalledProcessError):
+			messages = [m for m in exc_value.output.splitlines()
+						if 'Done processing' not in m
+						and 'Total errors found' not in m]
+			for message in messages:
+				self.write(message)
+			return True
+
+	def write(self, message):
+		global critical_errors
+		result = CPPLINT_RE[self.fmt].match(message)
+		if not result:
+			return
+		level = int(result.groupdict()['confidence'])
+		if level >= self.threshold:
+			critical_errors += 1
+		if level <= 2:
+			self.logger.info(message)
+		elif level <= 4:
+			self.logger.warning(message)
+		else:
+			self.logger.error(message)
+
+
+cpplint_logger = None
+def get_cpplint_logger(fmt):
+	global cpplint_logger
+	if cpplint_logger:
+		return cpplint_logger
+	cpplint_logger = logging.getLogger('cpplint')
+	hdlr = cpplint_handler()
+	hdlr.setFormatter(cpplint_formatter(fmt))
+	cpplint_logger.addHandler(hdlr)
+	cpplint_logger.setLevel(logging.DEBUG)
+	return cpplint_logger
+
+
+class cpplint(Task.Task):
+	color = 'PINK'
+
+	def __init__(self, *k, **kw):
+		super(cpplint, self).__init__(*k, **kw)
+
+	def run(self):
+		global critical_errors
+		with cpplint_wrapper(get_cpplint_logger(self.env.CPPLINT_OUTPUT), self.env.CPPLINT_BREAK, self.env.CPPLINT_OUTPUT):
+			params = {key: str(self.env[key]) for key in self.env if 'CPPLINT_' in key}
+			if params['CPPLINT_OUTPUT'] == 'waf':
+				params['CPPLINT_OUTPUT'] = 'emacs'
+			params['CPPLINT'] = self.env.get_flat('CPPLINT')
+			cmd = Utils.subst_vars(CPPLINT_STR, params)
+			env = self.env.env or None
+			Utils.subprocess.check_output(cmd + self.inputs[0].abspath(),
+										  stderr=Utils.subprocess.STDOUT,
+										  env=env, shell=True)
+		return critical_errors
+
+@TaskGen.extension('.h', '.hh', '.hpp', '.hxx')
+def cpplint_includes(self, node):
+	pass
+
+@TaskGen.feature('cpplint')
+@TaskGen.before_method('process_source')
+def post_cpplint(self):
+	if not self.env.CPPLINT_INITIALIZED:
+		for key, value in Options.options.__dict__.items():
+			if not key.startswith('CPPLINT_') or self.env[key]:
+				continue
+			self.env[key] = value
+		self.env.CPPLINT_INITIALIZED = True
+
+	if self.env.CPPLINT_SKIP:
+		return
+
+	if not self.env.CPPLINT_OUTPUT in CPPLINT_RE:
+		return
+
+	for src in self.to_list(getattr(self, 'source', [])):
+		if isinstance(src, Node.Node):
+			node = src
+		else:
+			node = self.path.find_or_declare(src)
+		if not node:
+			self.bld.fatal('Could not find %r' % src)
+		self.create_task('cpplint', node)
diff -Nru lilv-0.24.4~dfsg0/waflib/extras/cross_gnu.py lilv-0.24.6/waflib/extras/cross_gnu.py
--- lilv-0.24.4~dfsg0/waflib/extras/cross_gnu.py	1970-01-01 00:00:00.000000000 +0000
+++ lilv-0.24.6/waflib/extras/cross_gnu.py	2019-06-06 20:19:08.000000000 +0000
@@ -0,0 +1,227 @@
+#!/usr/bin/python
+# -*- coding: utf-8 vi:ts=4:noexpandtab
+# Tool to provide dedicated variables for cross-compilation
+
+__author__ = __maintainer__ = "Jérôme Carretero "
+__copyright__ = "Jérôme Carretero, 2014"
+
+"""
+This tool allows using environment variables to define cross-compilation
+variables intended for build variants.
+
+The variables are obtained from the environment in 3 ways:
+
+1. By defining CHOST, they can be derived as ${CHOST}-${TOOL}
+2. By defining HOST_x
+3. By defining ${CHOST//-/_}_x
+
+Otherwise, one can set ``cfg.env.CHOST`` in ``wscript`` before loading ``cross_gnu``.
+
+Usage:
+
+- In your build script::
+
+	def configure(cfg):
+		...
+		for variant in x_variants:
+			setenv(variant)
+			conf.load('cross_gnu')
+			conf.xcheck_host_envar('POUET')
+			...
+ + +- Then:: + + CHOST=arm-hardfloat-linux-gnueabi waf configure + env arm-hardfloat-linux-gnueabi-CC="clang -..." waf configure + CFLAGS=... CHOST=arm-hardfloat-linux-gnueabi HOST_CFLAGS=-g waf configure + HOST_CC="clang -..." waf configure + +This example ``wscript`` compiles to Microchip PIC (xc16-gcc-xyz must be in PATH): + +.. code:: python + + from waflib import Configure + + #from https://gist.github.com/rpuntaie/2bddfb5d7b77db26415ee14371289971 + import waf_variants + + variants='pc fw/variant1 fw/variant2'.split() + + top = "." + out = "../build" + + PIC = '33FJ128GP804' #dsPICxxx + + @Configure.conf + def gcc_modifier_xc16(cfg): + v = cfg.env + v.cprogram_PATTERN = '%s.elf' + v.LINKFLAGS_cprogram = ','.join(['-Wl','','','--defsym=__MPLAB_BUILD=0','','--script=p'+PIC+'.gld', + '--stack=16','--check-sections','--data-init','--pack-data','--handles','--isr','--no-gc-sections', + '--fill-upper=0','--stackguard=16','--no-force-link','--smart-io']) #,'--report-mem']) + v.CFLAGS_cprogram=['-mcpu='+PIC,'-omf=elf','-mlarge-code','-msmart-io=1', + '-msfr-warn=off','-mno-override-inline','-finline','-Winline'] + + def configure(cfg): + if 'fw' in cfg.variant: #firmware + cfg.env.DEST_OS = 'xc16' #cfg.env.CHOST = 'xc16' #works too + cfg.load('c cross_gnu') #cfg.env.CHOST becomes ['xc16'] + ... + else: #configure for pc SW + ... + + def build(bld): + if 'fw' in bld.variant: #firmware + bld.program(source='maintst.c', target='maintst'); + bld(source='maintst.elf', target='maintst.hex', rule="xc16-bin2hex ${SRC} -a -omf=elf") + else: #build for pc SW + ... + +""" + +import os +from waflib import Utils, Configure +from waflib.Tools import ccroot, gcc + +try: + from shlex import quote +except ImportError: + from pipes import quote + +def get_chost_stuff(conf): + """ + Get the CHOST environment variable contents + """ + chost = None + chost_envar = None + if conf.env.CHOST: + chost = conf.env.CHOST[0] + chost_envar = chost.replace('-', '_') + return chost, chost_envar + + +@Configure.conf +def xcheck_var(conf, name, wafname=None, cross=False): + wafname = wafname or name + + if wafname in conf.env: + value = conf.env[wafname] + if isinstance(value, str): + value = [value] + else: + envar = os.environ.get(name) + if not envar: + return + value = Utils.to_list(envar) if envar != '' else [envar] + + conf.env[wafname] = value + if cross: + pretty = 'cross-compilation %s' % wafname + else: + pretty = wafname + conf.msg('Will use %s' % pretty, " ".join(quote(x) for x in value)) + +@Configure.conf +def xcheck_host_prog(conf, name, tool, wafname=None): + wafname = wafname or name + + chost, chost_envar = get_chost_stuff(conf) + + specific = None + if chost: + specific = os.environ.get('%s_%s' % (chost_envar, name)) + + if specific: + value = Utils.to_list(specific) + conf.env[wafname] += value + conf.msg('Will use cross-compilation %s from %s_%s' % (name, chost_envar, name), + " ".join(quote(x) for x in value)) + return + else: + envar = os.environ.get('HOST_%s' % name) + if envar is not None: + value = Utils.to_list(envar) + conf.env[wafname] = value + conf.msg('Will use cross-compilation %s from HOST_%s' % (name, name), + " ".join(quote(x) for x in value)) + return + + if conf.env[wafname]: + return + + value = None + if chost: + value = '%s-%s' % (chost, tool) + + if value: + conf.env[wafname] = value + conf.msg('Will use cross-compilation %s from CHOST' % wafname, value) + +@Configure.conf +def xcheck_host_envar(conf, name, wafname=None): + wafname = wafname or name + + chost, chost_envar = 
get_chost_stuff(conf) + + specific = None + if chost: + specific = os.environ.get('%s_%s' % (chost_envar, name)) + + if specific: + value = Utils.to_list(specific) + conf.env[wafname] += value + conf.msg('Will use cross-compilation %s from %s_%s' \ + % (name, chost_envar, name), + " ".join(quote(x) for x in value)) + return + + + envar = os.environ.get('HOST_%s' % name) + if envar is None: + return + + value = Utils.to_list(envar) if envar != '' else [envar] + + conf.env[wafname] = value + conf.msg('Will use cross-compilation %s from HOST_%s' % (name, name), + " ".join(quote(x) for x in value)) + + +@Configure.conf +def xcheck_host(conf): + conf.xcheck_var('CHOST', cross=True) + conf.env.CHOST = conf.env.CHOST or [conf.env.DEST_OS] + conf.env.DEST_OS = conf.env.CHOST[0].replace('-','_') + conf.xcheck_host_prog('CC', 'gcc') + conf.xcheck_host_prog('CXX', 'g++') + conf.xcheck_host_prog('LINK_CC', 'gcc') + conf.xcheck_host_prog('LINK_CXX', 'g++') + conf.xcheck_host_prog('AR', 'ar') + conf.xcheck_host_prog('AS', 'as') + conf.xcheck_host_prog('LD', 'ld') + conf.xcheck_host_envar('CFLAGS') + conf.xcheck_host_envar('CXXFLAGS') + conf.xcheck_host_envar('LDFLAGS', 'LINKFLAGS') + conf.xcheck_host_envar('LIB') + conf.xcheck_host_envar('PKG_CONFIG_LIBDIR') + conf.xcheck_host_envar('PKG_CONFIG_PATH') + + if not conf.env.env: + conf.env.env = {} + conf.env.env.update(os.environ) + if conf.env.PKG_CONFIG_LIBDIR: + conf.env.env['PKG_CONFIG_LIBDIR'] = conf.env.PKG_CONFIG_LIBDIR[0] + if conf.env.PKG_CONFIG_PATH: + conf.env.env['PKG_CONFIG_PATH'] = conf.env.PKG_CONFIG_PATH[0] + +def configure(conf): + """ + Configuration example for gcc, it will not work for g++/clang/clang++ + """ + conf.xcheck_host() + conf.gcc_common_flags() + conf.gcc_modifier_platform() + conf.cc_load_tools() + conf.cc_add_flags() + conf.link_add_flags() diff -Nru lilv-0.24.4~dfsg0/waflib/extras/cython.py lilv-0.24.6/waflib/extras/cython.py --- lilv-0.24.4~dfsg0/waflib/extras/cython.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/extras/cython.py 2019-06-06 20:19:08.000000000 +0000 @@ -0,0 +1,147 @@ +#! /usr/bin/env python +# encoding: utf-8 +# Thomas Nagy, 2010-2015 + +import re +from waflib import Task, Logs +from waflib.TaskGen import extension + +cy_api_pat = re.compile(r'\s*?cdef\s*?(public|api)\w*') +re_cyt = re.compile(r""" + ^\s* # must begin with some whitespace characters + (?:from\s+(\w+)(?:\.\w+)*\s+)? # optionally match "from foo(.baz)" and capture foo + c?import\s(\w+|[*]) # require "import bar" and capture bar + """, re.M | re.VERBOSE) + +@extension('.pyx') +def add_cython_file(self, node): + """ + Process a *.pyx* file given in the list of source files. No additional + feature is required:: + + def build(bld): + bld(features='c cshlib pyext', source='main.c foo.pyx', target='app') + """ + ext = '.c' + if 'cxx' in self.features: + self.env.append_unique('CYTHONFLAGS', '--cplus') + ext = '.cc' + + for x in getattr(self, 'cython_includes', []): + # TODO re-use these nodes in "scan" below + d = self.path.find_dir(x) + if d: + self.env.append_unique('CYTHONFLAGS', '-I%s' % d.abspath()) + + tsk = self.create_task('cython', node, node.change_ext(ext)) + self.source += tsk.outputs + +class cython(Task.Task): + run_str = '${CYTHON} ${CYTHONFLAGS} -o ${TGT[0].abspath()} ${SRC}' + color = 'GREEN' + + vars = ['INCLUDES'] + """ + Rebuild whenever the INCLUDES change. The variables such as CYTHONFLAGS will be appended + by the metaclass. 
+ """ + + ext_out = ['.h'] + """ + The creation of a .h file is known only after the build has begun, so it is not + possible to compute a build order just by looking at the task inputs/outputs. + """ + + def runnable_status(self): + """ + Perform a double-check to add the headers created by cython + to the output nodes. The scanner is executed only when the cython task + must be executed (optimization). + """ + ret = super(cython, self).runnable_status() + if ret == Task.ASK_LATER: + return ret + for x in self.generator.bld.raw_deps[self.uid()]: + if x.startswith('header:'): + self.outputs.append(self.inputs[0].parent.find_or_declare(x.replace('header:', ''))) + return super(cython, self).runnable_status() + + def post_run(self): + for x in self.outputs: + if x.name.endswith('.h'): + if not x.exists(): + if Logs.verbose: + Logs.warn('Expected %r', x.abspath()) + x.write('') + return Task.Task.post_run(self) + + def scan(self): + """ + Return the dependent files (.pxd) by looking in the include folders. + Put the headers to generate in the custom list "bld.raw_deps". + To inspect the scanne results use:: + + $ waf clean build --zones=deps + """ + node = self.inputs[0] + txt = node.read() + + mods = set() + for m in re_cyt.finditer(txt): + if m.group(1): # matches "from foo import bar" + mods.add(m.group(1)) + else: + mods.add(m.group(2)) + + Logs.debug('cython: mods %r', mods) + incs = getattr(self.generator, 'cython_includes', []) + incs = [self.generator.path.find_dir(x) for x in incs] + incs.append(node.parent) + + found = [] + missing = [] + for x in sorted(mods): + for y in incs: + k = y.find_resource(x + '.pxd') + if k: + found.append(k) + break + else: + missing.append(x) + + # the cython file implicitly depends on a pxd file that might be present + implicit = node.parent.find_resource(node.name[:-3] + 'pxd') + if implicit: + found.append(implicit) + + Logs.debug('cython: found %r', found) + + # Now the .h created - store them in bld.raw_deps for later use + has_api = False + has_public = False + for l in txt.splitlines(): + if cy_api_pat.match(l): + if ' api ' in l: + has_api = True + if ' public ' in l: + has_public = True + name = node.name.replace('.pyx', '') + if has_api: + missing.append('header:%s_api.h' % name) + if has_public: + missing.append('header:%s.h' % name) + + return (found, missing) + +def options(ctx): + ctx.add_option('--cython-flags', action='store', default='', help='space separated list of flags to pass to cython') + +def configure(ctx): + if not ctx.env.CC and not ctx.env.CXX: + ctx.fatal('Load a C/C++ compiler first') + if not ctx.env.PYTHON: + ctx.fatal('Load the python tool first!') + ctx.find_program('cython', var='CYTHON') + if hasattr(ctx.options, 'cython_flags'): + ctx.env.CYTHONFLAGS = ctx.options.cython_flags + diff -Nru lilv-0.24.4~dfsg0/waflib/extras/dcc.py lilv-0.24.6/waflib/extras/dcc.py --- lilv-0.24.4~dfsg0/waflib/extras/dcc.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/extras/dcc.py 2019-06-06 20:19:08.000000000 +0000 @@ -0,0 +1,72 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Jérôme Carretero, 2011 (zougloub) + +from waflib import Options +from waflib.Tools import ccroot +from waflib.Configure import conf + +@conf +def find_dcc(conf): + conf.find_program(['dcc'], var='CC', path_list=getattr(Options.options, 'diabbindir', "")) + conf.env.CC_NAME = 'dcc' + +@conf +def find_dld(conf): + conf.find_program(['dld'], var='LINK_CC', path_list=getattr(Options.options, 'diabbindir', "")) + conf.env.LINK_CC_NAME = 'dld' + +@conf +def 
find_dar(conf): + conf.find_program(['dar'], var='AR', path_list=getattr(Options.options, 'diabbindir', "")) + conf.env.AR_NAME = 'dar' + conf.env.ARFLAGS = 'rcs' + +@conf +def find_ddump(conf): + conf.find_program(['ddump'], var='DDUMP', path_list=getattr(Options.options, 'diabbindir', "")) + +@conf +def dcc_common_flags(conf): + v = conf.env + v['CC_SRC_F'] = [] + v['CC_TGT_F'] = ['-c', '-o'] + + # linker + if not v['LINK_CC']: + v['LINK_CC'] = v['CC'] + v['CCLNK_SRC_F'] = [] + v['CCLNK_TGT_F'] = ['-o'] + v['CPPPATH_ST'] = '-I%s' + v['DEFINES_ST'] = '-D%s' + + v['LIB_ST'] = '-l:%s' # template for adding libs + v['LIBPATH_ST'] = '-L%s' # template for adding libpaths + v['STLIB_ST'] = '-l:%s' + v['STLIBPATH_ST'] = '-L%s' + v['RPATH_ST'] = '-Wl,-rpath,%s' + #v['STLIB_MARKER'] = '-Wl,-Bstatic' + + # program + v['cprogram_PATTERN'] = '%s.elf' + + # static lib + v['LINKFLAGS_cstlib'] = ['-Wl,-Bstatic'] + v['cstlib_PATTERN'] = 'lib%s.a' + +def configure(conf): + conf.find_dcc() + conf.find_dar() + conf.find_dld() + conf.find_ddump() + conf.dcc_common_flags() + conf.cc_load_tools() + conf.cc_add_flags() + conf.link_add_flags() + +def options(opt): + """ + Add the ``--with-diab-bindir`` command-line options. + """ + opt.add_option('--with-diab-bindir', type='string', dest='diabbindir', help = 'Specify alternate diab bin folder', default="") + diff -Nru lilv-0.24.4~dfsg0/waflib/extras/distnet.py lilv-0.24.6/waflib/extras/distnet.py --- lilv-0.24.4~dfsg0/waflib/extras/distnet.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/extras/distnet.py 2019-06-06 20:19:08.000000000 +0000 @@ -0,0 +1,430 @@ +#! /usr/bin/env python +# encoding: utf-8 + +""" +waf-powered distributed network builds, with a network cache. + +Caching files from a server has advantages over a NFS/Samba shared folder: + +- builds are much faster because they use local files +- builds just continue to work in case of a network glitch +- permissions are much simpler to manage +""" + +import os, urllib, tarfile, re, shutil, tempfile, sys +from collections import OrderedDict +from waflib import Context, Utils, Logs + +try: + from urllib.parse import urlencode +except ImportError: + urlencode = urllib.urlencode + +def safe_urlencode(data): + x = urlencode(data) + try: + x = x.encode('utf-8') + except Exception: + pass + return x + +try: + from urllib.error import URLError +except ImportError: + from urllib2 import URLError + +try: + from urllib.request import Request, urlopen +except ImportError: + from urllib2 import Request, urlopen + +DISTNETCACHE = os.environ.get('DISTNETCACHE', '/tmp/distnetcache') +DISTNETSERVER = os.environ.get('DISTNETSERVER', 'http://localhost:8000/cgi-bin/') +TARFORMAT = 'w:bz2' +TIMEOUT = 60 +REQUIRES = 'requires.txt' + +re_com = re.compile(r'\s*#.*', re.M) + +def total_version_order(num): + lst = num.split('.') + template = '%10s' * len(lst) + ret = template % tuple(lst) + return ret + +def get_distnet_cache(): + return getattr(Context.g_module, 'DISTNETCACHE', DISTNETCACHE) + +def get_server_url(): + return getattr(Context.g_module, 'DISTNETSERVER', DISTNETSERVER) + +def get_download_url(): + return '%s/download.py' % get_server_url() + +def get_upload_url(): + return '%s/upload.py' % get_server_url() + +def get_resolve_url(): + return '%s/resolve.py' % get_server_url() + +def send_package_name(): + out = getattr(Context.g_module, 'out', 'build') + pkgfile = '%s/package_to_upload.tarfile' % out + return pkgfile + +class package(Context.Context): + fun = 'package' + cmd = 'package' + + def 
execute(self): + try: + files = self.files + except AttributeError: + files = self.files = [] + + Context.Context.execute(self) + pkgfile = send_package_name() + if not pkgfile in files: + if not REQUIRES in files: + files.append(REQUIRES) + self.make_tarfile(pkgfile, files, add_to_package=False) + + def make_tarfile(self, filename, files, **kw): + if kw.get('add_to_package', True): + self.files.append(filename) + + with tarfile.open(filename, TARFORMAT) as tar: + endname = os.path.split(filename)[-1] + endname = endname.split('.')[0] + '/' + for x in files: + tarinfo = tar.gettarinfo(x, x) + tarinfo.uid = tarinfo.gid = 0 + tarinfo.uname = tarinfo.gname = 'root' + tarinfo.size = os.stat(x).st_size + + # TODO - more archive creation options? + if kw.get('bare', True): + tarinfo.name = os.path.split(x)[1] + else: + tarinfo.name = endname + x # todo, if tuple, then.. + Logs.debug('distnet: adding %r to %s', tarinfo.name, filename) + with open(x, 'rb') as f: + tar.addfile(tarinfo, f) + Logs.info('Created %s', filename) + +class publish(Context.Context): + fun = 'publish' + cmd = 'publish' + def execute(self): + if hasattr(Context.g_module, 'publish'): + Context.Context.execute(self) + mod = Context.g_module + + rfile = getattr(self, 'rfile', send_package_name()) + if not os.path.isfile(rfile): + self.fatal('Create the release file with "waf release" first! %r' % rfile) + + fdata = Utils.readf(rfile, m='rb') + data = safe_urlencode([('pkgdata', fdata), ('pkgname', mod.APPNAME), ('pkgver', mod.VERSION)]) + + req = Request(get_upload_url(), data) + response = urlopen(req, timeout=TIMEOUT) + data = response.read().strip() + + if sys.hexversion>0x300000f: + data = data.decode('utf-8') + + if data != 'ok': + self.fatal('Could not publish the package %r' % data) + +class constraint(object): + def __init__(self, line=''): + self.required_line = line + self.info = [] + + line = line.strip() + if not line: + return + + lst = line.split(',') + if lst: + self.pkgname = lst[0] + self.required_version = lst[1] + for k in lst: + a, b, c = k.partition('=') + if a and c: + self.info.append((a, c)) + def __str__(self): + buf = [] + buf.append(self.pkgname) + buf.append(self.required_version) + for k in self.info: + buf.append('%s=%s' % k) + return ','.join(buf) + + def __repr__(self): + return "requires %s-%s" % (self.pkgname, self.required_version) + + def human_display(self, pkgname, pkgver): + return '%s-%s requires %s-%s' % (pkgname, pkgver, self.pkgname, self.required_version) + + def why(self): + ret = [] + for x in self.info: + if x[0] == 'reason': + ret.append(x[1]) + return ret + + def add_reason(self, reason): + self.info.append(('reason', reason)) + +def parse_constraints(text): + assert(text is not None) + constraints = [] + text = re.sub(re_com, '', text) + lines = text.splitlines() + for line in lines: + line = line.strip() + if not line: + continue + constraints.append(constraint(line)) + return constraints + +def list_package_versions(cachedir, pkgname): + pkgdir = os.path.join(cachedir, pkgname) + try: + versions = os.listdir(pkgdir) + except OSError: + return [] + versions.sort(key=total_version_order) + versions.reverse() + return versions + +class package_reader(Context.Context): + cmd = 'solver' + fun = 'solver' + + def __init__(self, **kw): + Context.Context.__init__(self, **kw) + + self.myproject = getattr(Context.g_module, 'APPNAME', 'project') + self.myversion = getattr(Context.g_module, 'VERSION', '1.0') + self.cache_constraints = {} + self.constraints = [] + + def 
compute_dependencies(self, filename=REQUIRES): + text = Utils.readf(filename) + data = safe_urlencode([('text', text)]) + + if '--offline' in sys.argv: + self.constraints = self.local_resolve(text) + else: + req = Request(get_resolve_url(), data) + try: + response = urlopen(req, timeout=TIMEOUT) + except URLError as e: + Logs.warn('The package server is down! %r', e) + self.constraints = self.local_resolve(text) + else: + ret = response.read() + try: + ret = ret.decode('utf-8') + except Exception: + pass + self.trace(ret) + self.constraints = parse_constraints(ret) + self.check_errors() + + def check_errors(self): + errors = False + for c in self.constraints: + if not c.required_version: + errors = True + + reasons = c.why() + if len(reasons) == 1: + Logs.error('%s but no matching package could be found in this repository', reasons[0]) + else: + Logs.error('Conflicts on package %r:', c.pkgname) + for r in reasons: + Logs.error(' %s', r) + if errors: + self.fatal('The package requirements cannot be satisfied!') + + def load_constraints(self, pkgname, pkgver, requires=REQUIRES): + try: + return self.cache_constraints[(pkgname, pkgver)] + except KeyError: + text = Utils.readf(os.path.join(get_distnet_cache(), pkgname, pkgver, requires)) + ret = parse_constraints(text) + self.cache_constraints[(pkgname, pkgver)] = ret + return ret + + def apply_constraint(self, domain, constraint): + vname = constraint.required_version.replace('*', '.*') + rev = re.compile(vname, re.M) + ret = [x for x in domain if rev.match(x)] + return ret + + def trace(self, *k): + if getattr(self, 'debug', None): + Logs.error(*k) + + def solve(self, packages_to_versions={}, packages_to_constraints={}, pkgname='', pkgver='', todo=[], done=[]): + # breadth first search + n_packages_to_versions = dict(packages_to_versions) + n_packages_to_constraints = dict(packages_to_constraints) + + self.trace("calling solve with %r %r %r" % (packages_to_versions, todo, done)) + done = done + [pkgname] + + constraints = self.load_constraints(pkgname, pkgver) + self.trace("constraints %r" % constraints) + + for k in constraints: + try: + domain = n_packages_to_versions[k.pkgname] + except KeyError: + domain = list_package_versions(get_distnet_cache(), k.pkgname) + + + self.trace("constraints?") + if not k.pkgname in done: + todo = todo + [k.pkgname] + + self.trace("domain before %s -> %s, %r" % (pkgname, k.pkgname, domain)) + + # apply the constraint + domain = self.apply_constraint(domain, k) + + self.trace("domain after %s -> %s, %r" % (pkgname, k.pkgname, domain)) + + n_packages_to_versions[k.pkgname] = domain + + # then store the constraint applied + constraints = list(packages_to_constraints.get(k.pkgname, [])) + constraints.append((pkgname, pkgver, k)) + n_packages_to_constraints[k.pkgname] = constraints + + if not domain: + self.trace("no domain while processing constraint %r from %r %r" % (domain, pkgname, pkgver)) + return (n_packages_to_versions, n_packages_to_constraints) + + # next package on the todo list + if not todo: + return (n_packages_to_versions, n_packages_to_constraints) + + n_pkgname = todo[0] + n_pkgver = n_packages_to_versions[n_pkgname][0] + tmp = dict(n_packages_to_versions) + tmp[n_pkgname] = [n_pkgver] + + self.trace("fixed point %s" % n_pkgname) + + return self.solve(tmp, n_packages_to_constraints, n_pkgname, n_pkgver, todo[1:], done) + + def get_results(self): + return '\n'.join([str(c) for c in self.constraints]) + + def solution_to_constraints(self, versions, constraints): + solution = [] + for p in 
versions:
+			c = constraint()
+			solution.append(c)
+
+			c.pkgname = p
+			if versions[p]:
+				c.required_version = versions[p][0]
+			else:
+				c.required_version = ''
+			for (from_pkgname, from_pkgver, c2) in constraints.get(p, ''):
+				c.add_reason(c2.human_display(from_pkgname, from_pkgver))
+		return solution
+
+	def local_resolve(self, text):
+		self.cache_constraints[(self.myproject, self.myversion)] = parse_constraints(text)
+		p2v = OrderedDict({self.myproject: [self.myversion]})
+		(versions, constraints) = self.solve(p2v, {}, self.myproject, self.myversion, [])
+		return self.solution_to_constraints(versions, constraints)
+
+	def download_to_file(self, pkgname, pkgver, subdir, tmp):
+		data = safe_urlencode([('pkgname', pkgname), ('pkgver', pkgver), ('pkgfile', subdir)])
+		req = urlopen(get_download_url(), data, timeout=TIMEOUT)
+		with open(tmp, 'wb') as f:
+			while True:
+				buf = req.read(8192)
+				if not buf:
+					break
+				f.write(buf)
+
+	def extract_tar(self, subdir, pkgdir, tmpfile):
+		with tarfile.open(tmpfile) as f:
+			temp = tempfile.mkdtemp(dir=pkgdir)
+			try:
+				f.extractall(temp)
+				os.rename(temp, os.path.join(pkgdir, subdir))
+			finally:
+				try:
+					shutil.rmtree(temp)
+				except Exception:
+					pass
+
+	def get_pkg_dir(self, pkgname, pkgver, subdir):
+		pkgdir = os.path.join(get_distnet_cache(), pkgname, pkgver)
+		if not os.path.isdir(pkgdir):
+			os.makedirs(pkgdir)
+
+		target = os.path.join(pkgdir, subdir)
+
+		if os.path.exists(target):
+			return target
+
+		(fd, tmp) = tempfile.mkstemp(dir=pkgdir)
+		try:
+			os.close(fd)
+			self.download_to_file(pkgname, pkgver, subdir, tmp)
+			if subdir == REQUIRES:
+				os.rename(tmp, target)
+			else:
+				self.extract_tar(subdir, pkgdir, tmp)
+		finally:
+			try:
+				os.remove(tmp)
+			except OSError:
+				pass
+
+		return target
+
+	def __iter__(self):
+		if not self.constraints:
+			self.compute_dependencies()
+		for x in self.constraints:
+			if x.pkgname == self.myproject:
+				continue
+			yield x
+
+	def execute(self):
+		self.compute_dependencies()
+
+packages = package_reader()
+
+def load_tools(ctx, extra):
+	global packages
+	for c in packages:
+		packages.get_pkg_dir(c.pkgname, c.required_version, extra)
+		noarchdir = packages.get_pkg_dir(c.pkgname, c.required_version, 'noarch')
+		for x in os.listdir(noarchdir):
+			if x.startswith('waf_') and x.endswith('.py'):
+				ctx.load([x[:-len('.py')]], tooldir=[noarchdir])
+
+def options(opt):
+	opt.add_option('--offline', action='store_true')
+	packages.execute()
+	load_tools(opt, REQUIRES)
+
+def configure(conf):
+	load_tools(conf, conf.variant)
+
+def build(bld):
+	load_tools(bld, bld.variant)
+
diff -Nru lilv-0.24.4~dfsg0/waflib/extras/doxygen.py lilv-0.24.6/waflib/extras/doxygen.py
--- lilv-0.24.4~dfsg0/waflib/extras/doxygen.py	2018-06-27 05:22:05.000000000 +0000
+++ lilv-0.24.6/waflib/extras/doxygen.py	2019-10-19 17:59:11.000000000 +0000
@@ -1,40 +1,75 @@
 #! /usr/bin/env python
-# encoding: utf-8
-# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file +# encoding: UTF-8 +# Thomas Nagy 2008-2010 (ita) -import os,os.path,re -from waflib import Task,Utils,Node +""" + +Doxygen support + +Variables passed to bld(): +* doxyfile -- the Doxyfile to use +* doxy_tar -- destination archive for generated documentation (if desired) +* install_path -- where to install the documentation +* pars -- dictionary overriding doxygen configuration settings + +When using this tool, the wscript will look like: + + def options(opt): + opt.load('doxygen') + + def configure(conf): + conf.load('doxygen') + # check conf.env.DOXYGEN, if it is mandatory + + def build(bld): + if bld.env.DOXYGEN: + bld(features="doxygen", doxyfile='Doxyfile', ...) +""" + +import os, os.path, re +from collections import OrderedDict +from waflib import Task, Utils, Node from waflib.TaskGen import feature -DOXY_STR='"${DOXYGEN}" - ' -DOXY_FMTS='html latex man rft xml'.split() -DOXY_FILE_PATTERNS='*.'+' *.'.join(''' + +DOXY_STR = '"${DOXYGEN}" - ' +DOXY_FMTS = 'html latex man rft xml'.split() +DOXY_FILE_PATTERNS = '*.' + ' *.'.join(''' c cc cxx cpp c++ java ii ixx ipp i++ inl h hh hxx hpp h++ idl odl cs php php3 inc m mm py f90c cc cxx cpp c++ java ii ixx ipp i++ inl h hh hxx '''.split()) -re_rl=re.compile('\\\\\r*\n',re.MULTILINE) -re_nl=re.compile('\r*\n',re.M) + +re_rl = re.compile('\\\\\r*\n', re.MULTILINE) +re_nl = re.compile('\r*\n', re.M) def parse_doxy(txt): - tbl={} - txt=re_rl.sub('',txt) - lines=re_nl.split(txt) + ''' + Parses a doxygen file. + Returns an ordered dictionary. We cannot return a default dictionary, as the + order in which the entries are reported does matter, especially for the + '@INCLUDE' lines. + ''' + tbl = OrderedDict() + txt = re_rl.sub('', txt) + lines = re_nl.split(txt) for x in lines: - x=x.strip() - if not x or x.startswith('#')or x.find('=')<0: + x = x.strip() + if not x or x.startswith('#') or x.find('=') < 0: continue - if x.find('+=')>=0: - tmp=x.split('+=') - key=tmp[0].strip() + if x.find('+=') >= 0: + tmp = x.split('+=') + key = tmp[0].strip() if key in tbl: - tbl[key]+=' '+'+='.join(tmp[1:]).strip() + tbl[key] += ' ' + '+='.join(tmp[1:]).strip() else: - tbl[key]='+='.join(tmp[1:]).strip() + tbl[key] = '+='.join(tmp[1:]).strip() else: - tmp=x.split('=') - tbl[tmp[0].strip()]='='.join(tmp[1:]).strip() + tmp = x.split('=') + tbl[tmp[0].strip()] = '='.join(tmp[1:]).strip() return tbl + class doxygen(Task.Task): - vars=['DOXYGEN','DOXYFLAGS'] - color='BLUE' + vars = ['DOXYGEN', 'DOXYFLAGS'] + color = 'BLUE' + def runnable_status(self): ''' self.pars are populated in runnable_status - because this function is being @@ -42,119 +77,151 @@ set output_dir (node) for the output ''' + for x in self.run_after: if not x.hasrun: return Task.ASK_LATER - if not getattr(self,'pars',None): - txt=self.inputs[0].read() - self.pars=parse_doxy(txt) + + if not getattr(self, 'pars', None): + txt = self.inputs[0].read() + self.pars = parse_doxy(txt) + + # Override with any parameters passed to the task generator + if getattr(self.generator, 'pars', None): + for k, v in self.generator.pars.items(): + self.pars[k] = v + if self.pars.get('OUTPUT_DIRECTORY'): - output_node=self.inputs[0].parent.get_bld().make_node(self.pars['OUTPUT_DIRECTORY']) + # Use the path parsed from the Doxyfile as an absolute path + output_node = self.inputs[0].parent.get_bld().make_node(self.pars['OUTPUT_DIRECTORY']) else: - output_node=self.inputs[0].parent.get_bld().make_node(self.inputs[0].name+'.doxy') + # If no OUTPUT_PATH was specified in 
the Doxyfile, build path from the Doxyfile name + '.doxy' + output_node = self.inputs[0].parent.get_bld().make_node(self.inputs[0].name + '.doxy') output_node.mkdir() - self.pars['OUTPUT_DIRECTORY']=output_node.abspath() - if getattr(self.generator,'pars',None): - for k,v in self.generator.pars.items(): - self.pars[k]=v - self.doxy_inputs=getattr(self,'doxy_inputs',[]) + self.pars['OUTPUT_DIRECTORY'] = output_node.abspath() + + self.doxy_inputs = getattr(self, 'doxy_inputs', []) if not self.pars.get('INPUT'): self.doxy_inputs.append(self.inputs[0].parent) else: for i in self.pars.get('INPUT').split(): if os.path.isabs(i): - node=self.generator.bld.root.find_node(i) + node = self.generator.bld.root.find_node(i) else: - node=self.inputs[0].parent.find_node(i) + node = self.inputs[0].parent.find_node(i) if not node: - self.generator.bld.fatal('Could not find the doxygen input %r'%i) + self.generator.bld.fatal('Could not find the doxygen input %r' % i) self.doxy_inputs.append(node) - if not getattr(self,'output_dir',None): - bld=self.generator.bld - self.output_dir=bld.root.find_dir(self.pars['OUTPUT_DIRECTORY']) + + if not getattr(self, 'output_dir', None): + bld = self.generator.bld + # Output path is always an absolute path as it was transformed above. + self.output_dir = bld.root.find_dir(self.pars['OUTPUT_DIRECTORY']) + self.signature() - ret=Task.Task.runnable_status(self) - if ret==Task.SKIP_ME: + ret = Task.Task.runnable_status(self) + if ret == Task.SKIP_ME: + # in case the files were removed self.add_install() return ret + def scan(self): - exclude_patterns=self.pars.get('EXCLUDE_PATTERNS','').split() - exclude_patterns=[pattern.replace('*/','**/')for pattern in exclude_patterns] - file_patterns=self.pars.get('FILE_PATTERNS','').split() + exclude_patterns = self.pars.get('EXCLUDE_PATTERNS','').split() + exclude_patterns = [pattern.replace('*/', '**/') for pattern in exclude_patterns] + file_patterns = self.pars.get('FILE_PATTERNS','').split() if not file_patterns: - file_patterns=DOXY_FILE_PATTERNS.split() - if self.pars.get('RECURSIVE')=='YES': - file_patterns=["**/%s"%pattern for pattern in file_patterns] - nodes=[] - names=[] + file_patterns = DOXY_FILE_PATTERNS.split() + if self.pars.get('RECURSIVE') == 'YES': + file_patterns = ["**/%s" % pattern for pattern in file_patterns] + nodes = [] + names = [] for node in self.doxy_inputs: if os.path.isdir(node.abspath()): - for m in node.ant_glob(incl=file_patterns,excl=exclude_patterns): + for m in node.ant_glob(incl=file_patterns, excl=exclude_patterns): nodes.append(m) else: nodes.append(node) - return(nodes,names) + return (nodes, names) + def run(self): - dct=self.pars.copy() - code='\n'.join(['%s = %s'%(x,dct[x])for x in self.pars]) - code=code.encode() - cmd=Utils.subst_vars(DOXY_STR,self.env) - env=self.env.env or None - proc=Utils.subprocess.Popen(cmd,shell=True,stdin=Utils.subprocess.PIPE,env=env,cwd=self.inputs[0].parent.abspath()) + dct = self.pars.copy() + code = '\n'.join(['%s = %s' % (x, dct[x]) for x in self.pars]) + code = code.encode() # for python 3 + #fmt = DOXY_STR % (self.inputs[0].parent.abspath()) + cmd = Utils.subst_vars(DOXY_STR, self.env) + env = self.env.env or None + proc = Utils.subprocess.Popen(cmd, shell=True, stdin=Utils.subprocess.PIPE, env=env, cwd=self.inputs[0].parent.abspath()) proc.communicate(code) return proc.returncode + def post_run(self): - nodes=self.output_dir.ant_glob('**/*',quiet=True) + nodes = self.output_dir.ant_glob('**/*', quiet=True) for x in nodes: - 
self.generator.bld.node_sigs[x]=self.uid() + self.generator.bld.node_sigs[x] = self.uid() self.add_install() return Task.Task.post_run(self) + def add_install(self): - nodes=self.output_dir.ant_glob('**/*',quiet=True) - self.outputs+=nodes - if getattr(self.generator,'install_path',None): - if not getattr(self.generator,'doxy_tar',None): - self.generator.add_install_files(install_to=self.generator.install_path,install_from=self.outputs,postpone=False,cwd=self.output_dir,relative_trick=True) + nodes = self.output_dir.ant_glob('**/*', quiet=True) + self.outputs += nodes + if getattr(self.generator, 'install_path', None): + if not getattr(self.generator, 'doxy_tar', None): + self.generator.add_install_files(install_to=self.generator.install_path, + install_from=self.outputs, + postpone=False, + cwd=self.output_dir, + relative_trick=True) + class tar(Task.Task): - run_str='${TAR} ${TAROPTS} ${TGT} ${SRC}' - color='RED' - after=['doxygen'] + "quick tar creation" + run_str = '${TAR} ${TAROPTS} ${TGT} ${SRC}' + color = 'RED' + after = ['doxygen'] def runnable_status(self): - for x in getattr(self,'input_tasks',[]): + for x in getattr(self, 'input_tasks', []): if not x.hasrun: return Task.ASK_LATER - if not getattr(self,'tar_done_adding',None): - self.tar_done_adding=True - for x in getattr(self,'input_tasks',[]): + + if not getattr(self, 'tar_done_adding', None): + # execute this only once + self.tar_done_adding = True + for x in getattr(self, 'input_tasks', []): self.set_inputs(x.outputs) if not self.inputs: return Task.SKIP_ME return Task.Task.runnable_status(self) + def __str__(self): - tgt_str=' '.join([a.path_from(a.ctx.launch_node())for a in self.outputs]) - return'%s: %s\n'%(self.__class__.__name__,tgt_str) + tgt_str = ' '.join([a.path_from(a.ctx.launch_node()) for a in self.outputs]) + return '%s: %s\n' % (self.__class__.__name__, tgt_str) + @feature('doxygen') def process_doxy(self): - if not getattr(self,'doxyfile',None): + if not getattr(self, 'doxyfile', None): self.bld.fatal('no doxyfile variable specified??') - node=self.doxyfile - if not isinstance(node,Node.Node): - node=self.path.find_resource(node) + + node = self.doxyfile + if not isinstance(node, Node.Node): + node = self.path.find_resource(node) if not node: - self.bld.fatal('doxygen file %s not found'%self.doxyfile) - dsk=self.create_task('doxygen',node) - if getattr(self,'doxy_tar',None): - tsk=self.create_task('tar') - tsk.input_tasks=[dsk] + self.bld.fatal('doxygen file %s not found' % self.doxyfile) + + # the task instance + dsk = self.create_task('doxygen', node) + + if getattr(self, 'doxy_tar', None): + tsk = self.create_task('tar') + tsk.input_tasks = [dsk] tsk.set_outputs(self.path.find_or_declare(self.doxy_tar)) if self.doxy_tar.endswith('bz2'): - tsk.env['TAROPTS']=['cjf'] + tsk.env['TAROPTS'] = ['cjf'] elif self.doxy_tar.endswith('gz'): - tsk.env['TAROPTS']=['czf'] + tsk.env['TAROPTS'] = ['czf'] else: - tsk.env['TAROPTS']=['cf'] - if getattr(self,'install_path',None): - self.add_install_files(install_to=self.install_path,install_from=tsk.outputs) + tsk.env['TAROPTS'] = ['cf'] + if getattr(self, 'install_path', None): + self.add_install_files(install_to=self.install_path, install_from=tsk.outputs) + def configure(conf): ''' Check if doxygen and tar commands are present in the system @@ -163,5 +230,6 @@ variables will be set. Detection can be controlled by setting DOXYGEN and TAR environmental variables. 
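 
 For instance, a doxygen binary installed outside of PATH can be selected at
 configuration time (the path below is purely illustrative):
 
     DOXYGEN=/opt/doxygen/bin/doxygen waf configure
 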
'''
-	conf.find_program('doxygen',var='DOXYGEN',mandatory=False)
-	conf.find_program('tar',var='TAR',mandatory=False)
+
+	conf.find_program('doxygen', var='DOXYGEN', mandatory=False)
+	conf.find_program('tar', var='TAR', mandatory=False)
diff -Nru lilv-0.24.4~dfsg0/waflib/extras/dpapi.py lilv-0.24.6/waflib/extras/dpapi.py
--- lilv-0.24.4~dfsg0/waflib/extras/dpapi.py	1970-01-01 00:00:00.000000000 +0000
+++ lilv-0.24.6/waflib/extras/dpapi.py	2019-06-06 20:19:08.000000000 +0000
@@ -0,0 +1,87 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# Matt Clarkson, 2012
+
+'''
+DPAPI access library (http://msdn.microsoft.com/en-us/library/ms995355.aspx)
+This file uses code originally created by Crusher Joe:
+http://article.gmane.org/gmane.comp.python.ctypes/420
+And modified by Wayne Koorts:
+http://stackoverflow.com/questions/463832/using-dpapi-with-python
+'''
+
+from ctypes import windll, byref, cdll, Structure, POINTER, c_char, c_buffer
+from ctypes.wintypes import DWORD
+from waflib.Configure import conf
+
+LocalFree = windll.kernel32.LocalFree
+memcpy = cdll.msvcrt.memcpy
+CryptProtectData = windll.crypt32.CryptProtectData
+CryptUnprotectData = windll.crypt32.CryptUnprotectData
+CRYPTPROTECT_UI_FORBIDDEN = 0x01
+try:
+	extra_entropy = 'cl;ad13 \0al;323kjd #(adl;k$#ajsd'.encode('ascii')
+except AttributeError:
+	extra_entropy = 'cl;ad13 \0al;323kjd #(adl;k$#ajsd'
+
+class DATA_BLOB(Structure):
+	_fields_ = [
+		('cbData', DWORD),
+		('pbData', POINTER(c_char))
+	]
+
+def get_data(blob_out):
+	cbData = int(blob_out.cbData)
+	pbData = blob_out.pbData
+	buffer = c_buffer(cbData)
+	memcpy(buffer, pbData, cbData)
+	LocalFree(pbData)
+	return buffer.raw
+
+@conf
+def dpapi_encrypt_data(self, input_bytes, entropy = extra_entropy):
+	'''
+	Encrypts data and returns byte string
+
+	:param input_bytes: The data to be encrypted
+	:type input_bytes: String or Bytes
+	:param entropy: Extra entropy to add to the encryption process (optional)
+	:type entropy: String or Bytes
+	'''
+	if not isinstance(input_bytes, bytes) or not isinstance(entropy, bytes):
+		self.fatal('The inputs to dpapi must be bytes')
+	buffer_in = c_buffer(input_bytes, len(input_bytes))
+	buffer_entropy = c_buffer(entropy, len(entropy))
+	blob_in = DATA_BLOB(len(input_bytes), buffer_in)
+	blob_entropy = DATA_BLOB(len(entropy), buffer_entropy)
+	blob_out = DATA_BLOB()
+
+	if CryptProtectData(byref(blob_in), 'python_data', byref(blob_entropy),
+		None, None, CRYPTPROTECT_UI_FORBIDDEN, byref(blob_out)):
+		return get_data(blob_out)
+	else:
+		self.fatal('Failed to encrypt data')
+
+@conf
+def dpapi_decrypt_data(self, encrypted_bytes, entropy = extra_entropy):
+	'''
+	Decrypts data and returns byte string
+
+	:param encrypted_bytes: The encrypted data
+	:type encrypted_bytes: Bytes
+	:param entropy: Extra entropy to add to the decryption process (optional)
+	:type entropy: String or Bytes
+	'''
+	if not isinstance(encrypted_bytes, bytes) or not isinstance(entropy, bytes):
+		self.fatal('The inputs to dpapi must be bytes')
+	buffer_in = c_buffer(encrypted_bytes, len(encrypted_bytes))
+	buffer_entropy = c_buffer(entropy, len(entropy))
+	blob_in = DATA_BLOB(len(encrypted_bytes), buffer_in)
+	blob_entropy = DATA_BLOB(len(entropy), buffer_entropy)
+	blob_out = DATA_BLOB()
+	if CryptUnprotectData(byref(blob_in), None, byref(blob_entropy), None,
+		None, CRYPTPROTECT_UI_FORBIDDEN, byref(blob_out)):
+		return get_data(blob_out)
+	else:
+		self.fatal('Failed to decrypt data')
+
diff -Nru lilv-0.24.4~dfsg0/waflib/extras/eclipse.py 
lilv-0.24.6/waflib/extras/eclipse.py --- lilv-0.24.4~dfsg0/waflib/extras/eclipse.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/extras/eclipse.py 2019-06-06 20:19:08.000000000 +0000 @@ -0,0 +1,431 @@ +#! /usr/bin/env python +# encoding: utf-8 +# Eclipse CDT 5.0 generator for Waf +# Richard Quirk 2009-2011 (New BSD License) +# Thomas Nagy 2011 (ported to Waf 1.6) + +""" +Usage: + +def options(opt): + opt.load('eclipse') + +$ waf configure eclipse +""" + +import sys, os +from waflib import Utils, Logs, Context, Build, TaskGen, Scripting, Errors, Node +from xml.dom.minidom import Document + +STANDARD_INCLUDES = [ '/usr/local/include', '/usr/include' ] + +oe_cdt = 'org.eclipse.cdt' +cdt_mk = oe_cdt + '.make.core' +cdt_core = oe_cdt + '.core' +cdt_bld = oe_cdt + '.build.core' +extbuilder_dir = '.externalToolBuilders' +extbuilder_name = 'Waf_Builder.launch' + +class eclipse(Build.BuildContext): + cmd = 'eclipse' + fun = Scripting.default_cmd + + def execute(self): + """ + Entry point + """ + self.restore() + if not self.all_envs: + self.load_envs() + self.recurse([self.run_dir]) + + appname = getattr(Context.g_module, Context.APPNAME, os.path.basename(self.srcnode.abspath())) + self.create_cproject(appname, pythonpath=self.env['ECLIPSE_PYTHON_PATH']) + + # Helper to write the XML document to a file with UTF-8 encoding + def write_conf_to_xml(self, filename, document): + self.srcnode.make_node(filename).write(document.toprettyxml(encoding='UTF-8'), flags='wb') + + def create_cproject(self, appname, workspace_includes=[], pythonpath=[]): + """ + Create the Eclipse CDT .project and .cproject files + @param appname The name that will appear in the Project Explorer + @param workspace_includes Optional project includes to prevent + "Unresolved Inclusion" errors in the Eclipse editor + @param pythonpath Optional project specific python paths + """ + hasc = hasjava = haspython = False + source_dirs = [] + cpppath = self.env['CPPPATH'] + javasrcpath = [] + javalibpath = [] + includes = STANDARD_INCLUDES + if sys.platform != 'win32': + cc = self.env.CC or self.env.CXX + if cc: + cmd = cc + ['-xc++', '-E', '-Wp,-v', '-'] + try: + gccout = self.cmd_and_log(cmd, output=Context.STDERR, quiet=Context.BOTH, input='\n'.encode()).splitlines() + except Errors.WafError: + pass + else: + includes = [] + for ipath in gccout: + if ipath.startswith(' /'): + includes.append(ipath[1:]) + cpppath += includes + Logs.warn('Generating Eclipse CDT project files') + + for g in self.groups: + for tg in g: + if not isinstance(tg, TaskGen.task_gen): + continue + + tg.post() + + # Add local Python modules paths to configuration so object resolving will work in IDE + # This may also contain generated files (ie. pyqt5 or protoc) that get picked from build + if 'py' in tg.features: + pypath = tg.path.relpath() + py_installfrom = getattr(tg, 'install_from', None) + if isinstance(py_installfrom, Node.Node): + pypath = py_installfrom.path_from(self.root.make_node(self.top_dir)) + if pypath not in pythonpath: + pythonpath.append(pypath) + haspython = True + + # Add Java source directories so object resolving works in IDE + # This may also contain generated files (ie.
protoc) that get picked from build + if 'javac' in tg.features: + java_src = tg.path.relpath() + java_srcdir = getattr(tg.javac_task, 'srcdir', None) + if java_srcdir: + if isinstance(java_srcdir, Node.Node): + java_srcdir = [java_srcdir] + for x in Utils.to_list(java_srcdir): + x = x.path_from(self.root.make_node(self.top_dir)) + if x not in javasrcpath: + javasrcpath.append(x) + else: + if java_src not in javasrcpath: + javasrcpath.append(java_src) + hasjava = True + + # Check if there are external dependencies and add them as external jar so they will be resolved by Eclipse + usedlibs=getattr(tg, 'use', []) + for x in Utils.to_list(usedlibs): + for cl in Utils.to_list(tg.env['CLASSPATH_'+x]): + if cl not in javalibpath: + javalibpath.append(cl) + + if not getattr(tg, 'link_task', None): + continue + + features = Utils.to_list(getattr(tg, 'features', '')) + + is_cc = 'c' in features or 'cxx' in features + + incnodes = tg.to_incnodes(tg.to_list(getattr(tg, 'includes', [])) + tg.env['INCLUDES']) + for p in incnodes: + path = p.path_from(self.srcnode) + + if (path.startswith("/")): + cpppath.append(path) + else: + workspace_includes.append(path) + + if is_cc and path not in source_dirs: + source_dirs.append(path) + + hasc = True + + waf_executable = os.path.abspath(sys.argv[0]) + project = self.impl_create_project(sys.executable, appname, hasc, hasjava, haspython, waf_executable) + self.write_conf_to_xml('.project', project) + + if hasc: + project = self.impl_create_cproject(sys.executable, waf_executable, appname, workspace_includes, cpppath, source_dirs) + self.write_conf_to_xml('.cproject', project) + + if haspython: + project = self.impl_create_pydevproject(sys.path, pythonpath) + self.write_conf_to_xml('.pydevproject', project) + + if hasjava: + project = self.impl_create_javaproject(javasrcpath, javalibpath) + self.write_conf_to_xml('.classpath', project) + + def impl_create_project(self, executable, appname, hasc, hasjava, haspython, waf_executable): + doc = Document() + projectDescription = doc.createElement('projectDescription') + self.add(doc, projectDescription, 'name', appname) + self.add(doc, projectDescription, 'comment') + self.add(doc, projectDescription, 'projects') + buildSpec = self.add(doc, projectDescription, 'buildSpec') + buildCommand = self.add(doc, buildSpec, 'buildCommand') + self.add(doc, buildCommand, 'triggers', 'clean,full,incremental,') + arguments = self.add(doc, buildCommand, 'arguments') + dictionaries = {} + + # If CDT is present, instruct this one to call waf as it is more flexible (separate build/clean ...) 
+ if hasc: + self.add(doc, buildCommand, 'name', oe_cdt + '.managedbuilder.core.genmakebuilder') + # the default make-style targets are overwritten by the .cproject values + dictionaries = { + cdt_mk + '.contents': cdt_mk + '.activeConfigSettings', + cdt_mk + '.enableAutoBuild': 'false', + cdt_mk + '.enableCleanBuild': 'true', + cdt_mk + '.enableFullBuild': 'true', + } + else: + # Otherwise for Java/Python an external builder tool is created that will call waf build + self.add(doc, buildCommand, 'name', 'org.eclipse.ui.externaltools.ExternalToolBuilder') + dictionaries = { + 'LaunchConfigHandle': '/%s/%s'%(extbuilder_dir, extbuilder_name), + } + # The definition is in a separate directory XML file + try: + os.mkdir(extbuilder_dir) + except OSError: + pass # Ignore error if already exists + + # Populate here the external builder XML calling waf + builder = Document() + launchConfiguration = doc.createElement('launchConfiguration') + launchConfiguration.setAttribute('type', 'org.eclipse.ui.externaltools.ProgramBuilderLaunchConfigurationType') + self.add(doc, launchConfiguration, 'booleanAttribute', {'key': 'org.eclipse.debug.ui.ATTR_LAUNCH_IN_BACKGROUND', 'value': 'false'}) + self.add(doc, launchConfiguration, 'booleanAttribute', {'key': 'org.eclipse.ui.externaltools.ATTR_TRIGGERS_CONFIGURED', 'value': 'true'}) + self.add(doc, launchConfiguration, 'stringAttribute', {'key': 'org.eclipse.ui.externaltools.ATTR_LOCATION', 'value': waf_executable}) + self.add(doc, launchConfiguration, 'stringAttribute', {'key': 'org.eclipse.ui.externaltools.ATTR_RUN_BUILD_KINDS', 'value': 'full,incremental,'}) + self.add(doc, launchConfiguration, 'stringAttribute', {'key': 'org.eclipse.ui.externaltools.ATTR_TOOL_ARGUMENTS', 'value': 'build'}) + self.add(doc, launchConfiguration, 'stringAttribute', {'key': 'org.eclipse.ui.externaltools.ATTR_WORKING_DIRECTORY', 'value': '${project_loc}'}) + builder.appendChild(launchConfiguration) + # And write the XML to the file references before + self.write_conf_to_xml('%s%s%s'%(extbuilder_dir, os.path.sep, extbuilder_name), builder) + + + for k, v in dictionaries.items(): + self.addDictionary(doc, arguments, k, v) + + natures = self.add(doc, projectDescription, 'natures') + + if hasc: + nature_list = """ + core.ccnature + managedbuilder.core.ScannerConfigNature + managedbuilder.core.managedBuildNature + core.cnature + """.split() + for n in nature_list: + self.add(doc, natures, 'nature', oe_cdt + '.' 
+ n) + + if haspython: + self.add(doc, natures, 'nature', 'org.python.pydev.pythonNature') + if hasjava: + self.add(doc, natures, 'nature', 'org.eclipse.jdt.core.javanature') + + doc.appendChild(projectDescription) + return doc + + def impl_create_cproject(self, executable, waf_executable, appname, workspace_includes, cpppath, source_dirs=[]): + doc = Document() + doc.appendChild(doc.createProcessingInstruction('fileVersion', '4.0.0')) + cconf_id = cdt_core + '.default.config.1' + cproject = doc.createElement('cproject') + storageModule = self.add(doc, cproject, 'storageModule', + {'moduleId': cdt_core + '.settings'}) + cconf = self.add(doc, storageModule, 'cconfiguration', {'id':cconf_id}) + + storageModule = self.add(doc, cconf, 'storageModule', + {'buildSystemId': oe_cdt + '.managedbuilder.core.configurationDataProvider', + 'id': cconf_id, + 'moduleId': cdt_core + '.settings', + 'name': 'Default'}) + + self.add(doc, storageModule, 'externalSettings') + + extensions = self.add(doc, storageModule, 'extensions') + extension_list = """ + VCErrorParser + MakeErrorParser + GCCErrorParser + GASErrorParser + GLDErrorParser + """.split() + self.add(doc, extensions, 'extension', {'id': cdt_core + '.ELF', 'point':cdt_core + '.BinaryParser'}) + for e in extension_list: + self.add(doc, extensions, 'extension', {'id': cdt_core + '.' + e, 'point':cdt_core + '.ErrorParser'}) + + storageModule = self.add(doc, cconf, 'storageModule', + {'moduleId': 'cdtBuildSystem', 'version': '4.0.0'}) + config = self.add(doc, storageModule, 'configuration', + {'artifactName': appname, + 'id': cconf_id, + 'name': 'Default', + 'parent': cdt_bld + '.prefbase.cfg'}) + folderInfo = self.add(doc, config, 'folderInfo', + {'id': cconf_id+'.', 'name': '/', 'resourcePath': ''}) + + toolChain = self.add(doc, folderInfo, 'toolChain', + {'id': cdt_bld + '.prefbase.toolchain.1', + 'name': 'No ToolChain', + 'resourceTypeBasedDiscovery': 'false', + 'superClass': cdt_bld + '.prefbase.toolchain'}) + + self.add(doc, toolChain, 'targetPlatform', {'binaryParser': 'org.eclipse.cdt.core.ELF', 'id': cdt_bld + '.prefbase.toolchain.1', 'name': ''}) + + waf_build = '"%s" %s'%(waf_executable, eclipse.fun) + waf_clean = '"%s" clean'%(waf_executable) + self.add(doc, toolChain, 'builder', + {'autoBuildTarget': waf_build, + 'command': executable, + 'enableAutoBuild': 'false', + 'cleanBuildTarget': waf_clean, + 'enableIncrementalBuild': 'true', + 'id': cdt_bld + '.settings.default.builder.1', + 'incrementalBuildTarget': waf_build, + 'managedBuildOn': 'false', + 'name': 'Gnu Make Builder', + 'superClass': cdt_bld + '.settings.default.builder'}) + + tool_index = 1; + for tool_name in ("Assembly", "GNU C++", "GNU C"): + tool = self.add(doc, toolChain, 'tool', + {'id': cdt_bld + '.settings.holder.' + str(tool_index), + 'name': tool_name, + 'superClass': cdt_bld + '.settings.holder'}) + if cpppath or workspace_includes: + incpaths = cdt_bld + '.settings.holder.incpaths' + option = self.add(doc, tool, 'option', + {'id': incpaths + '.' + str(tool_index), + 'name': 'Include Paths', + 'superClass': incpaths, + 'valueType': 'includePath'}) + for i in workspace_includes: + self.add(doc, option, 'listOptionValue', + {'builtIn': 'false', + 'value': '"${workspace_loc:/%s/%s}"'%(appname, i)}) + for i in cpppath: + self.add(doc, option, 'listOptionValue', + {'builtIn': 'false', + 'value': '"%s"'%(i)}) + if tool_name == "GNU C++" or tool_name == "GNU C": + self.add(doc,tool,'inputType',{ 'id':'org.eclipse.cdt.build.core.settings.holder.inType.' 
+ str(tool_index), \ + 'languageId':'org.eclipse.cdt.core.gcc' if tool_name == "GNU C" else 'org.eclipse.cdt.core.g++','languageName':tool_name, \ + 'sourceContentType':'org.eclipse.cdt.core.cSource,org.eclipse.cdt.core.cHeader', \ + 'superClass':'org.eclipse.cdt.build.core.settings.holder.inType' }) + tool_index += 1 + + if source_dirs: + sourceEntries = self.add(doc, config, 'sourceEntries') + for i in source_dirs: + self.add(doc, sourceEntries, 'entry', + {'excluding': i, + 'flags': 'VALUE_WORKSPACE_PATH|RESOLVED', + 'kind': 'sourcePath', + 'name': ''}) + self.add(doc, sourceEntries, 'entry', + { + 'flags': 'VALUE_WORKSPACE_PATH|RESOLVED', + 'kind': 'sourcePath', + 'name': i}) + + storageModule = self.add(doc, cconf, 'storageModule', + {'moduleId': cdt_mk + '.buildtargets'}) + buildTargets = self.add(doc, storageModule, 'buildTargets') + def addTargetWrap(name, runAll): + return self.addTarget(doc, buildTargets, executable, name, + '"%s" %s'%(waf_executable, name), runAll) + addTargetWrap('configure', True) + addTargetWrap('dist', False) + addTargetWrap('install', False) + addTargetWrap('check', False) + + storageModule = self.add(doc, cproject, 'storageModule', + {'moduleId': 'cdtBuildSystem', + 'version': '4.0.0'}) + + self.add(doc, storageModule, 'project', {'id': '%s.null.1'%appname, 'name': appname}) + + doc.appendChild(cproject) + return doc + + def impl_create_pydevproject(self, system_path, user_path): + # create a pydevproject file + doc = Document() + doc.appendChild(doc.createProcessingInstruction('eclipse-pydev', 'version="1.0"')) + pydevproject = doc.createElement('pydev_project') + prop = self.add(doc, pydevproject, + 'pydev_property', + 'python %d.%d'%(sys.version_info[0], sys.version_info[1])) + prop.setAttribute('name', 'org.python.pydev.PYTHON_PROJECT_VERSION') + prop = self.add(doc, pydevproject, 'pydev_property', 'Default') + prop.setAttribute('name', 'org.python.pydev.PYTHON_PROJECT_INTERPRETER') + # add waf's paths + wafadmin = [p for p in system_path if p.find('wafadmin') != -1] + if wafadmin: + prop = self.add(doc, pydevproject, 'pydev_pathproperty', + {'name':'org.python.pydev.PROJECT_EXTERNAL_SOURCE_PATH'}) + for i in wafadmin: + self.add(doc, prop, 'path', i) + if user_path: + prop = self.add(doc, pydevproject, 'pydev_pathproperty', + {'name':'org.python.pydev.PROJECT_SOURCE_PATH'}) + for i in user_path: + self.add(doc, prop, 'path', '/${PROJECT_DIR_NAME}/'+i) + + doc.appendChild(pydevproject) + return doc + + def impl_create_javaproject(self, javasrcpath, javalibpath): + # create a .classpath file for java usage + doc = Document() + javaproject = doc.createElement('classpath') + if javasrcpath: + for i in javasrcpath: + self.add(doc, javaproject, 'classpathentry', + {'kind': 'src', 'path': i}) + + if javalibpath: + for i in javalibpath: + self.add(doc, javaproject, 'classpathentry', + {'kind': 'lib', 'path': i}) + + self.add(doc, javaproject, 'classpathentry', {'kind': 'con', 'path': 'org.eclipse.jdt.launching.JRE_CONTAINER'}) + self.add(doc, javaproject, 'classpathentry', {'kind': 'output', 'path': self.bldnode.name }) + doc.appendChild(javaproject) + return doc + + def addDictionary(self, doc, parent, k, v): + dictionary = self.add(doc, parent, 'dictionary') + self.add(doc, dictionary, 'key', k) + self.add(doc, dictionary, 'value', v) + return dictionary + + def addTarget(self, doc, buildTargets, executable, name, buildTarget, runAllBuilders=True): + target = self.add(doc, buildTargets, 'target', + {'name': name, + 'path': '', + 'targetID': oe_cdt + 
'.build.MakeTargetBuilder'}) + self.add(doc, target, 'buildCommand', executable) + self.add(doc, target, 'buildArguments', None) + self.add(doc, target, 'buildTarget', buildTarget) + self.add(doc, target, 'stopOnError', 'true') + self.add(doc, target, 'useDefaultCommand', 'false') + self.add(doc, target, 'runAllBuilders', str(runAllBuilders).lower()) + + def add(self, doc, parent, tag, value = None): + el = doc.createElement(tag) + if (value): + if type(value) == type(str()): + el.appendChild(doc.createTextNode(value)) + elif type(value) == type(dict()): + self.setAttributes(el, value) + parent.appendChild(el) + return el + + def setAttributes(self, node, attrs): + for k, v in attrs.items(): + node.setAttribute(k, v) + diff -Nru lilv-0.24.4~dfsg0/waflib/extras/erlang.py lilv-0.24.6/waflib/extras/erlang.py --- lilv-0.24.4~dfsg0/waflib/extras/erlang.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/extras/erlang.py 2019-06-06 20:19:08.000000000 +0000 @@ -0,0 +1,110 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Thomas Nagy, 2010 (ita) +# Przemyslaw Rzepecki, 2016 + +""" +Erlang support +""" + +import re +from waflib import Task, TaskGen +from waflib.TaskGen import feature, after_method, before_method +# to load the method "to_incnodes" below +from waflib.Tools import ccroot + +# Those flags are required by the Erlang VM to execute/evaluate code in +# non-interactive mode. It is used in this tool to create Erlang modules +# documentation and run unit tests. The user can pass additional arguments to the +# 'erl' command with ERL_FLAGS environment variable. +EXEC_NON_INTERACTIVE = ['-noshell', '-noinput', '-eval'] + +def configure(conf): + conf.find_program('erlc', var='ERLC') + conf.find_program('erl', var='ERL') + conf.add_os_flags('ERLC_FLAGS') + conf.add_os_flags('ERL_FLAGS') + conf.env.ERLC_DEF_PATTERN = '-D%s' + conf.env.ERLC_INC_PATTERN = '-I%s' + +@TaskGen.extension('.erl') +def process_erl_node(self, node): + tsk = self.create_task('erl', node, node.change_ext('.beam')) + tsk.erlc_incnodes = [tsk.outputs[0].parent] + self.to_incnodes(self.includes) + tsk.env.append_value('ERLC_INCPATHS', [x.abspath() for x in tsk.erlc_incnodes]) + tsk.env.append_value('ERLC_DEFINES', self.to_list(getattr(self, 'defines', []))) + tsk.env.append_value('ERLC_FLAGS', self.to_list(getattr(self, 'flags', []))) + tsk.cwd = tsk.outputs[0].parent + +class erl(Task.Task): + color = 'GREEN' + run_str = '${ERLC} ${ERL_FLAGS} ${ERLC_INC_PATTERN:ERLC_INCPATHS} ${ERLC_DEF_PATTERN:ERLC_DEFINES} ${SRC}' + + def scan(task): + node = task.inputs[0] + + deps = [] + scanned = set([]) + nodes_to_scan = [node] + + for n in nodes_to_scan: + if n.abspath() in scanned: + continue + + for i in re.findall(r'-include\("(.*)"\)\.', n.read()): + for d in task.erlc_incnodes: + r = d.find_node(i) + if r: + deps.append(r) + nodes_to_scan.append(r) + break + scanned.add(n.abspath()) + + return (deps, []) + +@TaskGen.extension('.beam') +def process(self, node): + pass + + +class erl_test(Task.Task): + color = 'BLUE' + run_str = '${ERL} ${ERL_FLAGS} ${ERL_TEST_FLAGS}' + +@feature('eunit') +@after_method('process_source') +def add_erl_test_run(self): + test_modules = [t.outputs[0] for t in self.tasks] + test_task = self.create_task('erl_test') + test_task.set_inputs(self.source + test_modules) + test_task.cwd = test_modules[0].parent + + test_task.env.append_value('ERL_FLAGS', self.to_list(getattr(self, 'flags', []))) + + test_list = ", ".join([m.change_ext("").path_from(test_task.cwd)+":test()" for m in test_modules]) + 
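For the Eclipse project generator above (eclipse.py), a configuration sketch; the ECLIPSE_PYTHON_PATH value seeds the optional .pydevproject and is an assumption::

    def options(opt):
        opt.load('eclipse')

    def configure(conf):
        conf.load('compiler_c')
        conf.env.ECLIPSE_PYTHON_PATH = ['scripts']  # extra source folders for .pydevproject

    # then regenerate the project files with: waf configure eclipse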
test_flag = 'halt(case lists:all(fun(Elem) -> Elem == ok end, [%s]) of true -> 0; false -> 1 end).' % test_list + test_task.env.append_value('ERL_TEST_FLAGS', EXEC_NON_INTERACTIVE) + test_task.env.append_value('ERL_TEST_FLAGS', test_flag) + + +class edoc(Task.Task): + color = 'BLUE' + run_str = "${ERL} ${ERL_FLAGS} ${ERL_DOC_FLAGS}" + def keyword(self): + return 'Generating edoc' + +@feature('edoc') +@before_method('process_source') +def add_edoc_task(self): + # do not process source, it would create double erl->beam task + self.meths.remove('process_source') + e = self.path.find_resource(self.source) + t = e.change_ext('.html') + png = t.parent.make_node('erlang.png') + css = t.parent.make_node('stylesheet.css') + tsk = self.create_task('edoc', e, [t, png, css]) + tsk.cwd = tsk.outputs[0].parent + tsk.env.append_value('ERL_DOC_FLAGS', EXEC_NON_INTERACTIVE) + tsk.env.append_value('ERL_DOC_FLAGS', 'edoc:files(["%s"]), halt(0).' % tsk.inputs[0].abspath()) + # TODO the above can break if a file path contains '"' + diff -Nru lilv-0.24.4~dfsg0/waflib/extras/fast_partial.py lilv-0.24.6/waflib/extras/fast_partial.py --- lilv-0.24.4~dfsg0/waflib/extras/fast_partial.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/extras/fast_partial.py 2019-10-19 17:59:11.000000000 +0000 @@ -0,0 +1,531 @@ +#! /usr/bin/env python +# encoding: utf-8 +# Thomas Nagy, 2017-2018 (ita) + +""" +A system for fast partial rebuilds + +Creating a large amount of task objects up front can take some time. +By making a few assumptions, it is possible to avoid posting creating +task objects for targets that are already up-to-date. + +On a silly benchmark the gain observed for 1M tasks can be 5m->10s +for a single file change. + +Usage:: + + def options(opt): + opt.load('fast_partial') + +Assumptions: +* Start with a clean build (run "waf distclean" after enabling) +* Mostly for C/C++/Fortran targets with link tasks (object-only targets are not handled) + try it in the folder generated by utils/genbench.py +* For full project builds: no --targets and no pruning from subfolders +* The installation phase is ignored +* `use=` dependencies are specified up front even across build groups +* Task generator source files are not obtained from globs + +Implementation details: +* The first layer obtains file timestamps to recalculate file hashes only + when necessary (similar to md5_tstamp); the timestamps are then stored + in a dedicated pickle file +* A second layer associates each task generator to a file set to help + detecting changes. Task generators are to create their tasks only when + the related files have been modified. A specific db file is created + to store such data (5m -> 1m10) +* A third layer binds build context proxies onto task generators, replacing + the default context. 
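A usage sketch for the Erlang tool above; module names are assumptions. The eunit feature runs mod:test() for every compiled module, and edoc renders HTML for a single source file::

    def configure(conf):
        conf.load('erlang')

    def build(bld):
        # compile the modules and execute their eunit tests
        bld(features='eunit', source='foo.erl foo_tests.erl', includes='include')
        # generate the edoc HTML for one module
        bld(features='edoc', source='foo.erl')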
While loading data for the full build uses more memory + (4GB -> 9GB), partial builds are then much faster (1m10 -> 13s) +* A fourth layer enables a 2-level cache on file signatures to + reduce the size of the main pickle file (13s -> 10s) +""" + +import os +from waflib import Build, Context, Errors, Logs, Task, TaskGen, Utils +from waflib.TaskGen import feature, after_method, taskgen_method +import waflib.Node + +DONE = 0 +DIRTY = 1 +NEEDED = 2 + +SKIPPABLE = ['cshlib', 'cxxshlib', 'cstlib', 'cxxstlib', 'cprogram', 'cxxprogram'] + +TSTAMP_DB = '.wafpickle_tstamp_db_file' + +SAVED_ATTRS = 'root node_sigs task_sigs imp_sigs raw_deps node_deps'.split() + +class bld_proxy(object): + def __init__(self, bld): + object.__setattr__(self, 'bld', bld) + + object.__setattr__(self, 'node_class', type('Nod3', (waflib.Node.Node,), {})) + self.node_class.__module__ = 'waflib.Node' + self.node_class.ctx = self + + object.__setattr__(self, 'root', self.node_class('', None)) + for x in SAVED_ATTRS: + if x != 'root': + object.__setattr__(self, x, {}) + + self.fix_nodes() + + def __setattr__(self, name, value): + bld = object.__getattribute__(self, 'bld') + setattr(bld, name, value) + + def __delattr__(self, name): + bld = object.__getattribute__(self, 'bld') + delattr(bld, name) + + def __getattribute__(self, name): + try: + return object.__getattribute__(self, name) + except AttributeError: + bld = object.__getattribute__(self, 'bld') + return getattr(bld, name) + + def __call__(self, *k, **kw): + return self.bld(*k, **kw) + + def fix_nodes(self): + for x in ('srcnode', 'path', 'bldnode'): + node = self.root.find_dir(getattr(self.bld, x).abspath()) + object.__setattr__(self, x, node) + + def set_key(self, store_key): + object.__setattr__(self, 'store_key', store_key) + + def fix_tg_path(self, *tgs): + # changing Node objects on task generators is possible + # yet, all Node objects must belong to the same parent + for tg in tgs: + tg.path = self.root.make_node(tg.path.abspath()) + + def restore(self): + dbfn = os.path.join(self.variant_dir, Context.DBFILE + self.store_key) + Logs.debug('rev_use: reading %s', dbfn) + try: + data = Utils.readf(dbfn, 'rb') + except (EnvironmentError, EOFError): + # handle missing file/empty file + Logs.debug('rev_use: Could not load the build cache %s (missing)', dbfn) + else: + try: + waflib.Node.pickle_lock.acquire() + waflib.Node.Nod3 = self.node_class + try: + data = Build.cPickle.loads(data) + except Exception as e: + Logs.debug('rev_use: Could not pickle the build cache %s: %r', dbfn, e) + else: + for x in SAVED_ATTRS: + object.__setattr__(self, x, data.get(x, {})) + finally: + waflib.Node.pickle_lock.release() + self.fix_nodes() + + def store(self): + data = {} + for x in Build.SAVED_ATTRS: + data[x] = getattr(self, x) + db = os.path.join(self.variant_dir, Context.DBFILE + self.store_key) + + with waflib.Node.pickle_lock: + waflib.Node.Nod3 = self.node_class + try: + x = Build.cPickle.dumps(data, Build.PROTOCOL) + except Build.cPickle.PicklingError: + root = data['root'] + for node_deps in data['node_deps'].values(): + for idx, node in enumerate(node_deps): + # there may be more cross-context Node objects to fix, + # but this should be the main source + node_deps[idx] = root.find_node(node.abspath()) + x = Build.cPickle.dumps(data, Build.PROTOCOL) + + Logs.debug('rev_use: storing %s', db) + Utils.writef(db + '.tmp', x, m='wb') + try: + st = os.stat(db) + os.remove(db) + if not Utils.is_win32: + os.chown(db + '.tmp', st.st_uid, st.st_gid) + except (AttributeError, 
OSError): + pass + os.rename(db + '.tmp', db) + +class bld(Build.BuildContext): + def __init__(self, **kw): + super(bld, self).__init__(**kw) + self.hashes_md5_tstamp = {} + + def __call__(self, *k, **kw): + # this is one way of doing it, one could use a task generator method too + bld = kw['bld'] = bld_proxy(self) + ret = TaskGen.task_gen(*k, **kw) + self.task_gen_cache_names = {} + self.add_to_group(ret, group=kw.get('group')) + ret.bld = bld + bld.set_key(ret.path.abspath().replace(os.sep, '') + str(ret.idx)) + return ret + + def is_dirty(self): + return True + + def store_tstamps(self): + # Called after a build is finished + # For each task generator, record all files involved in task objects + # optimization: done only if there was something built + do_store = False + try: + f_deps = self.f_deps + except AttributeError: + f_deps = self.f_deps = {} + self.f_tstamps = {} + + allfiles = set() + for g in self.groups: + for tg in g: + try: + staleness = tg.staleness + except AttributeError: + staleness = DIRTY + + if staleness != DIRTY: + # DONE case: there was nothing built + # NEEDED case: the tg was brought in because of 'use' propagation + # but nothing really changed for them, there may be incomplete + # tasks (object files) and in this case it is best to let the next build + # figure out if an input/output file changed + continue + + do_cache = False + for tsk in tg.tasks: + if tsk.hasrun == Task.SUCCESS: + do_cache = True + pass + elif tsk.hasrun == Task.SKIPPED: + pass + else: + # one failed task, clear the cache for this tg + try: + del f_deps[(tg.path.abspath(), tg.idx)] + except KeyError: + pass + else: + # just store the new state because there is a change + do_store = True + + # skip the rest because there is no valid cache possible + break + else: + if not do_cache: + # all skipped, but is there anything in cache? + try: + f_deps[(tg.path.abspath(), tg.idx)] + except KeyError: + # probably cleared because a wscript file changed + # store it + do_cache = True + + if do_cache: + + # there was a rebuild, store the data structure too + tg.bld.store() + + # all tasks skipped but no cache + # or a successful task build + do_store = True + st = set() + for tsk in tg.tasks: + st.update(tsk.inputs) + st.update(self.node_deps.get(tsk.uid(), [])) + + # TODO do last/when loading the tgs? 
+ lst = [] + for k in ('wscript', 'wscript_build'): + n = tg.path.find_node(k) + if n: + n.get_bld_sig() + lst.append(n.abspath()) + + lst.extend(sorted(x.abspath() for x in st)) + allfiles.update(lst) + f_deps[(tg.path.abspath(), tg.idx)] = lst + + for x in allfiles: + # f_tstamps has everything, while md5_tstamp can be relatively empty on partial builds + self.f_tstamps[x] = self.hashes_md5_tstamp[x][0] + + if do_store: + dbfn = os.path.join(self.variant_dir, TSTAMP_DB) + Logs.debug('rev_use: storing %s', dbfn) + dbfn_tmp = dbfn + '.tmp' + x = Build.cPickle.dumps([self.f_tstamps, f_deps], Build.PROTOCOL) + Utils.writef(dbfn_tmp, x, m='wb') + os.rename(dbfn_tmp, dbfn) + Logs.debug('rev_use: stored %s', dbfn) + + def store(self): + self.store_tstamps() + if self.producer.dirty: + Build.BuildContext.store(self) + + def compute_needed_tgs(self): + # assume the 'use' keys are not modified during the build phase + + dbfn = os.path.join(self.variant_dir, TSTAMP_DB) + Logs.debug('rev_use: Loading %s', dbfn) + try: + data = Utils.readf(dbfn, 'rb') + except (EnvironmentError, EOFError): + Logs.debug('rev_use: Could not load the build cache %s (missing)', dbfn) + self.f_deps = {} + self.f_tstamps = {} + else: + try: + self.f_tstamps, self.f_deps = Build.cPickle.loads(data) + except Exception as e: + Logs.debug('rev_use: Could not pickle the build cache %s: %r', dbfn, e) + self.f_deps = {} + self.f_tstamps = {} + else: + Logs.debug('rev_use: Loaded %s', dbfn) + + + # 1. obtain task generators that contain rebuilds + # 2. obtain the 'use' graph and its dual + stales = set() + reverse_use_map = Utils.defaultdict(list) + use_map = Utils.defaultdict(list) + + for g in self.groups: + for tg in g: + if tg.is_stale(): + stales.add(tg) + + try: + lst = tg.use = Utils.to_list(tg.use) + except AttributeError: + pass + else: + for x in lst: + try: + xtg = self.get_tgen_by_name(x) + except Errors.WafError: + pass + else: + use_map[tg].append(xtg) + reverse_use_map[xtg].append(tg) + + Logs.debug('rev_use: found %r stale tgs', len(stales)) + + # 3. dfs to post downstream tg as stale + visited = set() + def mark_down(tg): + if tg in visited: + return + visited.add(tg) + Logs.debug('rev_use: marking down %r as stale', tg.name) + tg.staleness = DIRTY + for x in reverse_use_map[tg]: + mark_down(x) + for tg in stales: + mark_down(tg) + + # 4. 
dfs to find ancestor tgs to mark as needed + self.needed_tgs = needed_tgs = set() + def mark_needed(tg): + if tg in needed_tgs: + return + needed_tgs.add(tg) + if tg.staleness == DONE: + Logs.debug('rev_use: marking up %r as needed', tg.name) + tg.staleness = NEEDED + for x in use_map[tg]: + mark_needed(x) + for xx in visited: + mark_needed(xx) + + # so we have the whole tg trees to post in the set "needed" + # load their build trees + for tg in needed_tgs: + tg.bld.restore() + tg.bld.fix_tg_path(tg) + + # the stale ones should be fully built, while the needed ones + # may skip a few tasks, see create_compiled_task and apply_link_after below + Logs.debug('rev_use: amount of needed task gens: %r', len(needed_tgs)) + + def post_group(self): + # assumption: we can ignore the folder/subfolders cuts + def tgpost(tg): + try: + f = tg.post + except AttributeError: + pass + else: + f() + + if not self.targets or self.targets == '*': + for tg in self.groups[self.current_group]: + # this can cut quite a lot of tg objects + if tg in self.needed_tgs: + tgpost(tg) + else: + # default implementation + return Build.BuildContext.post_group(self) + + def get_build_iterator(self): + if not self.targets or self.targets == '*': + self.compute_needed_tgs() + return Build.BuildContext.get_build_iterator(self) + +@taskgen_method +def is_stale(self): + # assume no globs + self.staleness = DIRTY + + # 1. the case of always stale targets + if getattr(self, 'always_stale', False): + return True + + # 2. check if the db file exists + db = os.path.join(self.bld.variant_dir, Context.DBFILE) + try: + dbstat = os.stat(db).st_mtime + except OSError: + Logs.debug('rev_use: must post %r because this is a clean build', self.name) + return True + + # 3.a check if the configuration exists + cache_node = self.bld.bldnode.find_node('c4che/build.config.py') + if not cache_node: + return True + + # 3.b check if the configuration changed + if os.stat(cache_node.abspath()).st_mtime > dbstat: + Logs.debug('rev_use: must post %r because the configuration has changed', self.name) + return True + + # 3.c any tstamp data? + try: + f_deps = self.bld.f_deps + except AttributeError: + Logs.debug('rev_use: must post %r because there is no f_deps', self.name) + return True + + # 4. check if this is the first build (no cache) + try: + lst = f_deps[(self.path.abspath(), self.idx)] + except KeyError: + Logs.debug('rev_use: must post %r because it has no cached data', self.name) + return True + + try: + cache = self.bld.cache_tstamp_rev_use + except AttributeError: + cache = self.bld.cache_tstamp_rev_use = {} + + # 5.
check the timestamp of each dependency files listed is unchanged + f_tstamps = self.bld.f_tstamps + for x in lst: + try: + old_ts = f_tstamps[x] + except KeyError: + Logs.debug('rev_use: must post %r because %r is not in cache', self.name, x) + return True + + try: + try: + ts = cache[x] + except KeyError: + ts = cache[x] = os.stat(x).st_mtime + except OSError: + del f_deps[(self.path.abspath(), self.idx)] + Logs.debug('rev_use: must post %r because %r does not exist anymore', self.name, x) + return True + else: + if ts != old_ts: + Logs.debug('rev_use: must post %r because the timestamp on %r changed %r %r', self.name, x, old_ts, ts) + return True + + self.staleness = DONE + return False + +@taskgen_method +def create_compiled_task(self, name, node): + # skip the creation of object files + # assumption: object-only targets are not skippable + if self.staleness == NEEDED: + # only libraries/programs can skip object files + for x in SKIPPABLE: + if x in self.features: + return None + + out = '%s.%d.o' % (node.name, self.idx) + task = self.create_task(name, node, node.parent.find_or_declare(out)) + try: + self.compiled_tasks.append(task) + except AttributeError: + self.compiled_tasks = [task] + return task + +@feature(*SKIPPABLE) +@after_method('apply_link') +def apply_link_after(self): + # cprogram/cxxprogram might be unnecessary + if self.staleness != NEEDED: + return + for tsk in self.tasks: + tsk.hasrun = Task.SKIPPED + +def path_from(self, node): + # handle nodes of distinct types + if node.ctx is not self.ctx: + node = self.ctx.root.make_node(node.abspath()) + return self.default_path_from(node) +waflib.Node.Node.default_path_from = waflib.Node.Node.path_from +waflib.Node.Node.path_from = path_from + +def h_file(self): + # similar to md5_tstamp.py, but with 2-layer cache + # global_cache for the build context common for all task generators + # local_cache for the build context proxy (one by task generator) + # + # the global cache is not persistent + # the local cache is persistent and meant for partial builds + # + # assume all calls are made from a single thread + # + filename = self.abspath() + st = os.stat(filename) + + global_cache = self.ctx.bld.hashes_md5_tstamp + local_cache = self.ctx.hashes_md5_tstamp + + if filename in global_cache: + # value already calculated in this build + cval = global_cache[filename] + + # the value in global cache is assumed to be calculated once + # reverifying it could cause task generators + # to get distinct tstamp values, thus missing rebuilds + local_cache[filename] = cval + return cval[1] + + if filename in local_cache: + cval = local_cache[filename] + if cval[0] == st.st_mtime: + # correct value from a previous build + # put it in the global cache + global_cache[filename] = cval + return cval[1] + + ret = Utils.h_file(filename) + local_cache[filename] = global_cache[filename] = (st.st_mtime, ret) + return ret +waflib.Node.Node.h_file = h_file + diff -Nru lilv-0.24.4~dfsg0/waflib/extras/fc_bgxlf.py lilv-0.24.6/waflib/extras/fc_bgxlf.py --- lilv-0.24.4~dfsg0/waflib/extras/fc_bgxlf.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/extras/fc_bgxlf.py 2019-06-06 20:19:08.000000000 +0000 @@ -0,0 +1,32 @@ +#! 
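Tying together the fast_partial machinery above, a usage sketch that follows the tool's stated assumptions (start from a clean build, spell out use= dependencies up front); the target names are placeholders::

    def options(opt):
        opt.load('fast_partial')   # then run "waf distclean" once after enabling

    def configure(conf):
        conf.load('compiler_c')

    def build(bld):
        bld.stlib(source='lib.c', target='mylib')
        bld.program(source='main.c', target='app', use='mylib')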
/usr/bin/env python +# encoding: utf-8 +# harald at klimachs.de + +from waflib.Tools import fc, fc_config, fc_scan +from waflib.Configure import conf + +from waflib.Tools.compiler_fc import fc_compiler +fc_compiler['linux'].insert(0, 'fc_bgxlf') + +@conf +def find_bgxlf(conf): + fc = conf.find_program(['bgxlf2003_r','bgxlf2003'], var='FC') + conf.get_xlf_version(fc) + conf.env.FC_NAME = 'BGXLF' + +@conf +def bg_flags(self): + self.env.SONAME_ST = '' + self.env.FCSHLIB_MARKER = '' + self.env.FCSTLIB_MARKER = '' + self.env.FCFLAGS_fcshlib = ['-fPIC'] + self.env.LINKFLAGS_fcshlib = ['-G', '-Wl,-bexpfull'] + +def configure(conf): + conf.find_bgxlf() + conf.find_ar() + conf.fc_flags() + conf.fc_add_flags() + conf.xlf_flags() + conf.bg_flags() + diff -Nru lilv-0.24.4~dfsg0/waflib/extras/fc_cray.py lilv-0.24.6/waflib/extras/fc_cray.py --- lilv-0.24.4~dfsg0/waflib/extras/fc_cray.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/extras/fc_cray.py 2019-06-06 20:19:08.000000000 +0000 @@ -0,0 +1,51 @@ +#! /usr/bin/env python +# encoding: utf-8 +# harald at klimachs.de + +import re +from waflib.Tools import fc, fc_config, fc_scan +from waflib.Configure import conf + +from waflib.Tools.compiler_fc import fc_compiler +fc_compiler['linux'].append('fc_cray') + +@conf +def find_crayftn(conf): + """Find the Cray fortran compiler (will look in the environment variable 'FC')""" + fc = conf.find_program(['crayftn'], var='FC') + conf.get_crayftn_version(fc) + conf.env.FC_NAME = 'CRAY' + conf.env.FC_MOD_CAPITALIZATION = 'UPPER.mod' + +@conf +def crayftn_flags(conf): + v = conf.env + v['_FCMODOUTFLAGS'] = ['-em', '-J.'] # enable module files and put them in the current directory + v['FCFLAGS_DEBUG'] = ['-m1'] # more verbose compiler warnings + v['FCFLAGS_fcshlib'] = ['-h pic'] + v['LINKFLAGS_fcshlib'] = ['-h shared'] + + v['FCSTLIB_MARKER'] = '-h static' + v['FCSHLIB_MARKER'] = '-h dynamic' + +@conf +def get_crayftn_version(conf, fc): + version_re = re.compile(r"Cray Fortran\s*:\s*Version\s*(?P<major>\d*)\.(?P<minor>\d*)", re.I).search + cmd = fc + ['-V'] + out,err = fc_config.getoutput(conf, cmd, stdin=False) + if out: + match = version_re(out) + else: + match = version_re(err) + if not match: + conf.fatal('Could not determine the Cray Fortran compiler version.') + k = match.groupdict() + conf.env['FC_VERSION'] = (k['major'], k['minor']) + +def configure(conf): + conf.find_crayftn() + conf.find_ar() + conf.fc_flags() + conf.fc_add_flags() + conf.crayftn_flags() + diff -Nru lilv-0.24.4~dfsg0/waflib/extras/fc_nag.py lilv-0.24.6/waflib/extras/fc_nag.py --- lilv-0.24.4~dfsg0/waflib/extras/fc_nag.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/extras/fc_nag.py 2019-06-06 20:19:08.000000000 +0000 @@ -0,0 +1,61 @@ +#!
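The fc_* detection modules in this series (fc_bgxlf, fc_cray, fc_nag and the rest below) all follow the same pattern: they register themselves in compiler_fc's fc_compiler table and provide find/flags/version helpers. A sketch of using one directly; the source name is an assumption::

    def configure(conf):
        conf.load('fc_cray')   # or conf.load('compiler_fc') to try the registered names in order

    def build(bld):
        bld(features='fc fcprogram', source='main.f90', target='app')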
/usr/bin/env python +# encoding: utf-8 +# harald at klimachs.de + +import re +from waflib import Utils +from waflib.Tools import fc,fc_config,fc_scan +from waflib.Configure import conf + +from waflib.Tools.compiler_fc import fc_compiler +fc_compiler['linux'].insert(0, 'fc_nag') + +@conf +def find_nag(conf): + """Find the NAG Fortran Compiler (will look in the environment variable 'FC')""" + + fc = conf.find_program(['nagfor'], var='FC') + conf.get_nag_version(fc) + conf.env.FC_NAME = 'NAG' + conf.env.FC_MOD_CAPITALIZATION = 'lower' + +@conf +def nag_flags(conf): + v = conf.env + v.FCFLAGS_DEBUG = ['-C=all'] + v.FCLNK_TGT_F = ['-o', ''] + v.FC_TGT_F = ['-c', '-o', ''] + +@conf +def nag_modifier_platform(conf): + dest_os = conf.env['DEST_OS'] or Utils.unversioned_sys_platform() + nag_modifier_func = getattr(conf, 'nag_modifier_' + dest_os, None) + if nag_modifier_func: + nag_modifier_func() + +@conf +def get_nag_version(conf, fc): + """Get the NAG compiler version""" + + version_re = re.compile(r"^NAG Fortran Compiler *Release *(?P<major>\d*)\.(?P<minor>\d*)", re.M).search + cmd = fc + ['-V'] + + out, err = fc_config.getoutput(conf,cmd,stdin=False) + if out: + match = version_re(out) + if not match: + match = version_re(err) + else: match = version_re(err) + if not match: + conf.fatal('Could not determine the NAG version.') + k = match.groupdict() + conf.env['FC_VERSION'] = (k['major'], k['minor']) + +def configure(conf): + conf.find_nag() + conf.find_ar() + conf.fc_flags() + conf.fc_add_flags() + conf.nag_flags() + conf.nag_modifier_platform() + diff -Nru lilv-0.24.4~dfsg0/waflib/extras/fc_nec.py lilv-0.24.6/waflib/extras/fc_nec.py --- lilv-0.24.4~dfsg0/waflib/extras/fc_nec.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/extras/fc_nec.py 2019-06-06 20:19:08.000000000 +0000 @@ -0,0 +1,60 @@ +#!
/usr/bin/env python +# encoding: utf-8 +# harald at klimachs.de + +import re +from waflib.Tools import fc, fc_config, fc_scan +from waflib.Configure import conf + +from waflib.Tools.compiler_fc import fc_compiler +fc_compiler['linux'].append('fc_nec') + +@conf +def find_sxfc(conf): + """Find the NEC fortran compiler (will look in the environment variable 'FC')""" + fc = conf.find_program(['sxf90','sxf03'], var='FC') + conf.get_sxfc_version(fc) + conf.env.FC_NAME = 'NEC' + conf.env.FC_MOD_CAPITALIZATION = 'lower' + +@conf +def sxfc_flags(conf): + v = conf.env + v['_FCMODOUTFLAGS'] = [] # enable module files and put them in the current directory + v['FCFLAGS_DEBUG'] = [] # more verbose compiler warnings + v['FCFLAGS_fcshlib'] = [] + v['LINKFLAGS_fcshlib'] = [] + + v['FCSTLIB_MARKER'] = '' + v['FCSHLIB_MARKER'] = '' + +@conf +def get_sxfc_version(conf, fc): + version_re = re.compile(r"FORTRAN90/SX\s*Version\s*(?P<major>\d*)\.(?P<minor>\d*)", re.I).search + cmd = fc + ['-V'] + out,err = fc_config.getoutput(conf, cmd, stdin=False) + if out: + match = version_re(out) + else: + match = version_re(err) + if not match: + version_re=re.compile(r"NEC Fortran 2003 Compiler for\s*(?P<major>\S*)\s*\(c\)\s*(?P<minor>\d*)",re.I).search + if out: + match = version_re(out) + else: + match = version_re(err) + if not match: + conf.fatal('Could not determine the NEC Fortran compiler version.') + k = match.groupdict() + conf.env['FC_VERSION'] = (k['major'], k['minor']) + +def configure(conf): + conf.find_sxfc() + conf.find_program('sxar',var='AR') + conf.add_os_flags('ARFLAGS') + if not conf.env.ARFLAGS: + conf.env.ARFLAGS=['rcs'] + + conf.fc_flags() + conf.fc_add_flags() + conf.sxfc_flags() diff -Nru lilv-0.24.4~dfsg0/waflib/extras/fc_nfort.py lilv-0.24.6/waflib/extras/fc_nfort.py --- lilv-0.24.4~dfsg0/waflib/extras/fc_nfort.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/extras/fc_nfort.py 2019-06-06 20:19:08.000000000 +0000 @@ -0,0 +1,52 @@ +#!
/usr/bin/env python +# encoding: utf-8 +# Detection of the NEC Fortran compiler for Aurora Tsubasa + +import re +from waflib.Tools import fc,fc_config,fc_scan +from waflib.Configure import conf +from waflib.Tools.compiler_fc import fc_compiler +fc_compiler['linux'].append('fc_nfort') + +@conf +def find_nfort(conf): + fc=conf.find_program(['nfort'],var='FC') + conf.get_nfort_version(fc) + conf.env.FC_NAME='NFORT' + conf.env.FC_MOD_CAPITALIZATION='lower' + +@conf +def nfort_flags(conf): + v=conf.env + v['_FCMODOUTFLAGS']=[] + v['FCFLAGS_DEBUG']=[] + v['FCFLAGS_fcshlib']=[] + v['LINKFLAGS_fcshlib']=[] + v['FCSTLIB_MARKER']='' + v['FCSHLIB_MARKER']='' + +@conf +def get_nfort_version(conf,fc): + version_re=re.compile(r"nfort\s*\(NFORT\)\s*(?P<major>\d+)\.(?P<minor>\d+)\.",re.I).search + cmd=fc+['--version'] + out,err=fc_config.getoutput(conf,cmd,stdin=False) + if out: + match=version_re(out) + else: + match=version_re(err) + if not match: + conf.fatal('Could not determine the NEC NFORT Fortran compiler version.') + else: + k=match.groupdict() + conf.env['FC_VERSION']=(k['major'],k['minor']) + +def configure(conf): + conf.find_nfort() + conf.find_program('nar',var='AR') + conf.add_os_flags('ARFLAGS') + if not conf.env.ARFLAGS: + conf.env.ARFLAGS=['rcs'] + conf.fc_flags() + conf.fc_add_flags() + conf.nfort_flags() diff -Nru lilv-0.24.4~dfsg0/waflib/extras/fc_open64.py lilv-0.24.6/waflib/extras/fc_open64.py --- lilv-0.24.4~dfsg0/waflib/extras/fc_open64.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/extras/fc_open64.py 2019-06-06 20:19:08.000000000 +0000 @@ -0,0 +1,58 @@ +#! /usr/bin/env python +# encoding: utf-8 +# harald at klimachs.de + +import re +from waflib import Utils +from waflib.Tools import fc,fc_config,fc_scan +from waflib.Configure import conf + +from waflib.Tools.compiler_fc import fc_compiler +fc_compiler['linux'].insert(0, 'fc_open64') + +@conf +def find_openf95(conf): + """Find the Open64 Fortran Compiler (will look in the environment variable 'FC')""" + + fc = conf.find_program(['openf95', 'openf90'], var='FC') + conf.get_open64_version(fc) + conf.env.FC_NAME = 'OPEN64' + conf.env.FC_MOD_CAPITALIZATION = 'UPPER.mod' + +@conf +def openf95_flags(conf): + v = conf.env + v['FCFLAGS_DEBUG'] = ['-fullwarn'] + +@conf +def openf95_modifier_platform(conf): + dest_os = conf.env['DEST_OS'] or Utils.unversioned_sys_platform() + openf95_modifier_func = getattr(conf, 'openf95_modifier_' + dest_os, None) + if openf95_modifier_func: + openf95_modifier_func() + +@conf +def get_open64_version(conf, fc): + """Get the Open64 compiler version""" + + version_re = re.compile(r"Open64 Compiler Suite: *Version *(?P<major>\d*)\.(?P<minor>\d*)", re.I).search + cmd = fc + ['-version'] + + out, err = fc_config.getoutput(conf,cmd,stdin=False) + if out: + match = version_re(out) + else: + match = version_re(err) + if not match: + conf.fatal('Could not determine the Open64 version.') + k = match.groupdict() + conf.env['FC_VERSION'] = (k['major'], k['minor']) + +def configure(conf): + conf.find_openf95() + conf.find_ar() + conf.fc_flags() + conf.fc_add_flags() + conf.openf95_flags() + conf.openf95_modifier_platform() + diff -Nru lilv-0.24.4~dfsg0/waflib/extras/fc_pgfortran.py lilv-0.24.6/waflib/extras/fc_pgfortran.py --- lilv-0.24.4~dfsg0/waflib/extras/fc_pgfortran.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/extras/fc_pgfortran.py 2019-06-06 20:19:08.000000000 +0000 @@ -0,0 +1,68 @@ +#!
/usr/bin/env python +# encoding: utf-8 +# harald at klimachs.de + +import re +from waflib.Tools import fc, fc_config, fc_scan +from waflib.Configure import conf + +from waflib.Tools.compiler_fc import fc_compiler +fc_compiler['linux'].append('fc_pgfortran') + +@conf +def find_pgfortran(conf): + """Find the PGI fortran compiler (will look in the environment variable 'FC')""" + fc = conf.find_program(['pgfortran', 'pgf95', 'pgf90'], var='FC') + conf.get_pgfortran_version(fc) + conf.env.FC_NAME = 'PGFC' + +@conf +def pgfortran_flags(conf): + v = conf.env + v['FCFLAGS_fcshlib'] = ['-shared'] + v['FCFLAGS_DEBUG'] = ['-Minform=inform', '-Mstandard'] # why not + v['FCSTLIB_MARKER'] = '-Bstatic' + v['FCSHLIB_MARKER'] = '-Bdynamic' + v['SONAME_ST'] = '-soname %s' + +@conf +def get_pgfortran_version(conf,fc): + version_re = re.compile(r"The Portland Group", re.I).search + cmd = fc + ['-V'] + out,err = fc_config.getoutput(conf, cmd, stdin=False) + if out: + match = version_re(out) + else: + match = version_re(err) + if not match: + conf.fatal('Could not verify PGI signature') + cmd = fc + ['-help=variable'] + out,err = fc_config.getoutput(conf, cmd, stdin=False) + if out.find('COMPVER')<0: + conf.fatal('Could not determine the compiler type') + k = {} + prevk = '' + out = out.splitlines() + for line in out: + lst = line.partition('=') + if lst[1] == '=': + key = lst[0].rstrip() + if key == '': + key = prevk + val = lst[2].rstrip() + k[key] = val + else: + prevk = line.partition(' ')[0] + def isD(var): + return var in k + def isT(var): + return var in k and k[var]!='0' + conf.env['FC_VERSION'] = (k['COMPVER'].split('.')) + +def configure(conf): + conf.find_pgfortran() + conf.find_ar() + conf.fc_flags() + conf.fc_add_flags() + conf.pgfortran_flags() + diff -Nru lilv-0.24.4~dfsg0/waflib/extras/fc_solstudio.py lilv-0.24.6/waflib/extras/fc_solstudio.py --- lilv-0.24.4~dfsg0/waflib/extras/fc_solstudio.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/extras/fc_solstudio.py 2019-06-06 20:19:08.000000000 +0000 @@ -0,0 +1,62 @@ +#! 
/usr/bin/env python +# encoding: utf-8 +# harald at klimachs.de + +import re +from waflib import Utils +from waflib.Tools import fc,fc_config,fc_scan +from waflib.Configure import conf + +from waflib.Tools.compiler_fc import fc_compiler +fc_compiler['linux'].append('fc_solstudio') + +@conf +def find_solstudio(conf): + """Find the Solaris Studio compiler (will look in the environment variable 'FC')""" + + fc = conf.find_program(['sunf95', 'f95', 'sunf90', 'f90'], var='FC') + conf.get_solstudio_version(fc) + conf.env.FC_NAME = 'SOL' + +@conf +def solstudio_flags(conf): + v = conf.env + v['FCFLAGS_fcshlib'] = ['-Kpic'] + v['FCFLAGS_DEBUG'] = ['-w3'] + v['LINKFLAGS_fcshlib'] = ['-G'] + v['FCSTLIB_MARKER'] = '-Bstatic' + v['FCSHLIB_MARKER'] = '-Bdynamic' + v['SONAME_ST'] = '-h %s' + +@conf +def solstudio_modifier_platform(conf): + dest_os = conf.env['DEST_OS'] or Utils.unversioned_sys_platform() + solstudio_modifier_func = getattr(conf, 'solstudio_modifier_' + dest_os, None) + if solstudio_modifier_func: + solstudio_modifier_func() + +@conf +def get_solstudio_version(conf, fc): + """Get the compiler version""" + + version_re = re.compile(r"Sun Fortran 95 *(?P<major>\d*)\.(?P<minor>\d*)", re.I).search + cmd = fc + ['-V'] + + out, err = fc_config.getoutput(conf,cmd,stdin=False) + if out: + match = version_re(out) + else: + match = version_re(err) + if not match: + conf.fatal('Could not determine the Sun Studio Fortran version.') + k = match.groupdict() + conf.env['FC_VERSION'] = (k['major'], k['minor']) + +def configure(conf): + conf.find_solstudio() + conf.find_ar() + conf.fc_flags() + conf.fc_add_flags() + conf.solstudio_flags() + conf.solstudio_modifier_platform() + diff -Nru lilv-0.24.4~dfsg0/waflib/extras/fc_xlf.py lilv-0.24.6/waflib/extras/fc_xlf.py --- lilv-0.24.4~dfsg0/waflib/extras/fc_xlf.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/extras/fc_xlf.py 2019-06-06 20:19:08.000000000 +0000 @@ -0,0 +1,63 @@ +#!
/usr/bin/env python +# encoding: utf-8 +# harald at klimachs.de + +import re +from waflib import Utils,Errors +from waflib.Tools import fc,fc_config,fc_scan +from waflib.Configure import conf + +from waflib.Tools.compiler_fc import fc_compiler +fc_compiler['aix'].insert(0, 'fc_xlf') + +@conf +def find_xlf(conf): + """Find the xlf program (will look in the environment variable 'FC')""" + + fc = conf.find_program(['xlf2003_r', 'xlf2003', 'xlf95_r', 'xlf95', 'xlf90_r', 'xlf90', 'xlf_r', 'xlf'], var='FC') + conf.get_xlf_version(fc) + conf.env.FC_NAME='XLF' + +@conf +def xlf_flags(conf): + v = conf.env + v['FCDEFINES_ST'] = '-WF,-D%s' + v['FCFLAGS_fcshlib'] = ['-qpic=small'] + v['FCFLAGS_DEBUG'] = ['-qhalt=w'] + v['LINKFLAGS_fcshlib'] = ['-Wl,-shared'] + +@conf +def xlf_modifier_platform(conf): + dest_os = conf.env['DEST_OS'] or Utils.unversioned_sys_platform() + xlf_modifier_func = getattr(conf, 'xlf_modifier_' + dest_os, None) + if xlf_modifier_func: + xlf_modifier_func() + +@conf +def get_xlf_version(conf, fc): + """Get the compiler version""" + + cmd = fc + ['-qversion'] + try: + out, err = conf.cmd_and_log(cmd, output=0) + except Errors.WafError: + conf.fatal('Could not find xlf %r' % cmd) + + for v in (r"IBM XL Fortran.* V(?P<major>\d*)\.(?P<minor>\d*)",): + version_re = re.compile(v, re.I).search + match = version_re(out or err) + if match: + k = match.groupdict() + conf.env['FC_VERSION'] = (k['major'], k['minor']) + break + else: + conf.fatal('Could not determine the XLF version.') + +def configure(conf): + conf.find_xlf() + conf.find_ar() + conf.fc_flags() + conf.fc_add_flags() + conf.xlf_flags() + conf.xlf_modifier_platform() + diff -Nru lilv-0.24.4~dfsg0/waflib/extras/file_to_object.py lilv-0.24.6/waflib/extras/file_to_object.py --- lilv-0.24.4~dfsg0/waflib/extras/file_to_object.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/extras/file_to_object.py 2019-06-06 20:19:08.000000000 +0000 @@ -0,0 +1,137 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- +# Tool to embed file into objects + +__author__ = __maintainer__ = "Jérôme Carretero " +__copyright__ = "Jérôme Carretero, 2014" + +""" + +This tool allows embedding file contents in object files (.o). +It is not exactly portable, and the file contents are reachable +in various non-portable ways. +The goal here is to provide a functional interface to the embedding +of file data in objects. +See the ``playground/embedded_resources`` example for an example. + +Usage:: + + bld( + name='pipeline', + # ^ Reference this in use="..." for things using the generated code + features='file_to_object', + source='some.file', + # ^ Name of the file to embed in binary section. + ) + +Known issues: + +- Destination is named like source, with extension renamed to .o + eg.
some.file -> some.o + +""" + +import os +from waflib import Task, TaskGen, Errors + +def filename_c_escape(x): + return x.replace("\\", "\\\\") + +class file_to_object_s(Task.Task): + color = 'CYAN' + vars = ['DEST_CPU', 'DEST_BINFMT'] + + def run(self): + name = [] + for i, x in enumerate(self.inputs[0].name): + if x.isalnum(): + name.append(x) + else: + name.append('_') + file = self.inputs[0].abspath() + size = os.path.getsize(file) + if self.env.DEST_CPU in ('x86_64', 'ia', 'aarch64'): + unit = 'quad' + align = 8 + elif self.env.DEST_CPU in ('x86','arm', 'thumb', 'm68k'): + unit = 'long' + align = 4 + else: + raise Errors.WafError("Unsupported DEST_CPU, please report bug!") + + file = filename_c_escape(file) + name = "_binary_" + "".join(name) + rodata = ".section .rodata" + if self.env.DEST_BINFMT == "mac-o": + name = "_" + name + rodata = ".section __TEXT,__const" + + with open(self.outputs[0].abspath(), 'w') as f: + f.write(\ +""" + .global %(name)s_start + .global %(name)s_end + .global %(name)s_size + %(rodata)s +%(name)s_start: + .incbin "%(file)s" +%(name)s_end: + .align %(align)d +%(name)s_size: + .%(unit)s 0x%(size)x +""" % locals()) + +class file_to_object_c(Task.Task): + color = 'CYAN' + def run(self): + name = [] + for i, x in enumerate(self.inputs[0].name): + if x.isalnum(): + name.append(x) + else: + name.append('_') + file = self.inputs[0].abspath() + size = os.path.getsize(file) + + name = "_binary_" + "".join(name) + + data = self.inputs[0].read('rb') + lines, line = [], [] + for idx_byte, byte in enumerate(data): + line.append(byte) + if len(line) > 15 or idx_byte == size-1: + lines.append(", ".join(("0x%02x" % (x if isinstance(x, int) else ord(x))) for x in line)) + line = [] + data = ",\n ".join(lines) + + self.outputs[0].write(\ +""" +unsigned long %(name)s_size = %(size)dL; +char const %(name)s_start[] = { + %(data)s +}; +char const %(name)s_end[] = {}; +""" % locals()) + +@TaskGen.feature('file_to_object') +@TaskGen.before_method('process_source') +def tg_file_to_object(self): + bld = self.bld + sources = self.to_nodes(self.source) + targets = [] + for src in sources: + if bld.env.F2O_METHOD == ["asm"]: + tgt = src.parent.find_or_declare(src.name + '.f2o.s') + tsk = self.create_task('file_to_object_s', src, tgt) + tsk.cwd = src.parent.abspath() # verify + else: + tgt = src.parent.find_or_declare(src.name + '.f2o.c') + tsk = self.create_task('file_to_object_c', src, tgt) + tsk.cwd = src.parent.abspath() # verify + targets.append(tgt) + self.source = targets + +def configure(conf): + conf.load('gas') + conf.env.F2O_METHOD = ["c"] + diff -Nru lilv-0.24.4~dfsg0/waflib/extras/fluid.py lilv-0.24.6/waflib/extras/fluid.py --- lilv-0.24.4~dfsg0/waflib/extras/fluid.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/extras/fluid.py 2019-06-06 20:19:08.000000000 +0000 @@ -0,0 +1,30 @@ +#!/usr/bin/python +# encoding: utf-8 +# Grygoriy Fuchedzhy 2009 + +""" +Compile fluid files (fltk graphic library). Use the 'fluid' feature in conjunction with the 'cxx' feature.
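Following the file_to_object docstring above, a sketch that links the embedded data into a program; main.c is an assumption, and the symbol names follow the _binary_<sanitized name>_start/_end/_size scheme generated by the tasks above::

    def configure(conf):
        conf.load('file_to_object')
        # conf.env.F2O_METHOD = ['asm']  # optional: use the .incbin assembly variant

    def build(bld):
        bld(name='pipeline', features='file_to_object', source='some.file')
        # C code can then reference _binary_some_file_start/_end/_size
        bld.program(source='main.c', target='app', use='pipeline')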
+""" + +from waflib import Task +from waflib.TaskGen import extension + +class fluid(Task.Task): + color = 'BLUE' + ext_out = ['.h'] + run_str = '${FLUID} -c -o ${TGT[0].abspath()} -h ${TGT[1].abspath()} ${SRC}' + +@extension('.fl') +def process_fluid(self, node): + """add the .fl to the source list; the cxx file generated will be compiled when possible""" + cpp = node.change_ext('.cpp') + hpp = node.change_ext('.hpp') + self.create_task('fluid', node, [cpp, hpp]) + + if 'cxx' in self.features: + self.source.append(cpp) + +def configure(conf): + conf.find_program('fluid', var='FLUID') + conf.check_cfg(path='fltk-config', package='', args='--cxxflags --ldflags', uselib_store='FLTK', mandatory=True) + diff -Nru lilv-0.24.4~dfsg0/waflib/extras/freeimage.py lilv-0.24.6/waflib/extras/freeimage.py --- lilv-0.24.4~dfsg0/waflib/extras/freeimage.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/extras/freeimage.py 2019-06-06 20:19:08.000000000 +0000 @@ -0,0 +1,74 @@ +#!/usr/bin/env python +# encoding: utf-8 +# +# written by Sylvain Rouquette, 2011 + +''' +To add the freeimage tool to the waf file: +$ ./waf-light --tools=compat15,freeimage + or, if you have waf >= 1.6.2 +$ ./waf update --files=freeimage + +The wscript will look like: + +def options(opt): + opt.load('compiler_cxx freeimage') + +def configure(conf): + conf.load('compiler_cxx freeimage') + + # you can call check_freeimage with some parameters. + # It's optional on Linux, it's 'mandatory' on Windows if + # you didn't use --fi-path on the command-line + + # conf.check_freeimage(path='FreeImage/Dist', fip=True) + +def build(bld): + bld(source='main.cpp', target='app', use='FREEIMAGE') +''' + +from waflib import Utils +from waflib.Configure import conf + + +def options(opt): + opt.add_option('--fi-path', type='string', default='', dest='fi_path', + help='''path to the FreeImage directory \ + where the files are e.g. /FreeImage/Dist''') + opt.add_option('--fip', action='store_true', default=False, dest='fip', + help='link with FreeImagePlus') + opt.add_option('--fi-static', action='store_true', + default=False, dest='fi_static', + help="link as shared libraries") + + +@conf +def check_freeimage(self, path=None, fip=False): + self.start_msg('Checking FreeImage') + if not self.env['CXX']: + self.fatal('you must load compiler_cxx before loading freeimage') + prefix = self.options.fi_static and 'ST' or '' + platform = Utils.unversioned_sys_platform() + if platform == 'win32': + if not path: + self.fatal('you must specify the path to FreeImage. \ + use --fi-path=/FreeImage/Dist') + else: + self.env['INCLUDES_FREEIMAGE'] = path + self.env['%sLIBPATH_FREEIMAGE' % prefix] = path + libs = ['FreeImage'] + if self.options.fip: + libs.append('FreeImagePlus') + if platform == 'win32': + self.env['%sLIB_FREEIMAGE' % prefix] = libs + else: + self.env['%sLIB_FREEIMAGE' % prefix] = [i.lower() for i in libs] + self.end_msg('ok') + + +def configure(conf): + platform = Utils.unversioned_sys_platform() + if platform == 'win32' and not conf.options.fi_path: + return + conf.check_freeimage(conf.options.fi_path, conf.options.fip) + diff -Nru lilv-0.24.4~dfsg0/waflib/extras/fsb.py lilv-0.24.6/waflib/extras/fsb.py --- lilv-0.24.4~dfsg0/waflib/extras/fsb.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/extras/fsb.py 2019-06-06 20:19:08.000000000 +0000 @@ -0,0 +1,31 @@ +#! 
/usr/bin/env python +# encoding: utf-8 +# Thomas Nagy, 2011 (ita) + +""" +Fully sequential builds + +The previous tasks from task generators are re-processed, and this may lead to speed issues +Yet, if you are using this, speed is probably a minor concern +""" + +from waflib import Build + +def options(opt): + pass + +def configure(conf): + pass + +class FSBContext(Build.BuildContext): + def __call__(self, *k, **kw): + ret = Build.BuildContext.__call__(self, *k, **kw) + + # evaluate the results immediately + Build.BuildContext.compile(self) + + return ret + + def compile(self): + pass + diff -Nru lilv-0.24.4~dfsg0/waflib/extras/fsc.py lilv-0.24.6/waflib/extras/fsc.py --- lilv-0.24.4~dfsg0/waflib/extras/fsc.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/extras/fsc.py 2019-06-06 20:19:08.000000000 +0000 @@ -0,0 +1,64 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Thomas Nagy, 2011 (ita) + +""" +Experimental F# stuff + +FSC="mono /path/to/fsc.exe" waf configure build +""" + +from waflib import Utils, Task +from waflib.TaskGen import before_method, after_method, feature +from waflib.Tools import ccroot, cs + +ccroot.USELIB_VARS['fsc'] = set(['CSFLAGS', 'ASSEMBLIES', 'RESOURCES']) + +@feature('fs') +@before_method('process_source') +def apply_fsc(self): + cs_nodes = [] + no_nodes = [] + for x in self.to_nodes(self.source): + if x.name.endswith('.fs'): + cs_nodes.append(x) + else: + no_nodes.append(x) + self.source = no_nodes + + bintype = getattr(self, 'type', self.gen.endswith('.dll') and 'library' or 'exe') + self.cs_task = tsk = self.create_task('fsc', cs_nodes, self.path.find_or_declare(self.gen)) + tsk.env.CSTYPE = '/target:%s' % bintype + tsk.env.OUT = '/out:%s' % tsk.outputs[0].abspath() + + inst_to = getattr(self, 'install_path', bintype=='exe' and '${BINDIR}' or '${LIBDIR}') + if inst_to: + # note: we are making a copy, so the files added to cs_task.outputs won't be installed automatically + mod = getattr(self, 'chmod', bintype=='exe' and Utils.O755 or Utils.O644) + self.install_task = self.add_install_files(install_to=inst_to, install_from=self.cs_task.outputs[:], chmod=mod) + +feature('fs')(cs.use_cs) +after_method('apply_fsc')(cs.use_cs) + +feature('fs')(cs.debug_cs) +after_method('apply_fsc', 'use_cs')(cs.debug_cs) + +class fsc(Task.Task): + """ + Compile F# files + """ + color = 'YELLOW' + run_str = '${FSC} ${CSTYPE} ${CSFLAGS} ${ASS_ST:ASSEMBLIES} ${RES_ST:RESOURCES} ${OUT} ${SRC}' + +def configure(conf): + """ + Find a F# compiler, set the variable FSC for the compiler and FS_NAME (mono or fsc) + """ + conf.find_program(['fsc.exe', 'fsharpc'], var='FSC') + conf.env.ASS_ST = '/r:%s' + conf.env.RES_ST = '/resource:%s' + + conf.env.FS_NAME = 'fsc' + if str(conf.env.FSC).lower().find('fsharpc') > -1: + conf.env.FS_NAME = 'mono' + diff -Nru lilv-0.24.4~dfsg0/waflib/extras/gccdeps.py lilv-0.24.6/waflib/extras/gccdeps.py --- lilv-0.24.4~dfsg0/waflib/extras/gccdeps.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/extras/gccdeps.py 2019-06-06 20:19:08.000000000 +0000 @@ -0,0 +1,214 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Thomas Nagy, 2008-2010 (ita) + +""" +Execute the tasks with gcc -MD, read the dependencies from the .d file +and prepare the dependency calculation for the next run. +This affects the cxx class, so make sure to load Qt5 after this tool. 
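+
+For reference, the .d file read back by this tool is a small Makefile
+fragment emitted by the compiler, for example (illustrative paths)::
+
+	main.o: main.c \
+	 include/foo.h \
+	 include/bar.h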
+
+Usage::
+
+	def options(opt):
+		opt.load('compiler_cxx')
+	def configure(conf):
+		conf.load('compiler_cxx gccdeps')
+"""
+
+import os, re, threading
+from waflib import Task, Logs, Utils, Errors
+from waflib.Tools import c_preproc
+from waflib.TaskGen import before_method, feature
+
+lock = threading.Lock()
+
+gccdeps_flags = ['-MD']
+if not c_preproc.go_absolute:
+	gccdeps_flags = ['-MMD']
+
+# Third-party tools are allowed to add extra names in here with append()
+supported_compilers = ['gcc', 'icc', 'clang']
+
+def scan(self):
+	if not self.__class__.__name__ in self.env.ENABLE_GCCDEPS:
+		return super(self.derived_gccdeps, self).scan()
+	nodes = self.generator.bld.node_deps.get(self.uid(), [])
+	names = []
+	return (nodes, names)
+
+re_o = re.compile(r"\.o$")
+re_splitter = re.compile(r'(?<!\\)\s+') # split by space, except when spaces are escaped
+
+def remove_makefile_rule_lhs(line):
+	# Splitting on a plain colon would accidentally match the drive letter
+	# of a Windows absolute path, so search for a colon followed by a space
+	sep_idx = line.find(': ')
+	if sep_idx >= 0:
+		return line[sep_idx + 2:]
+	else:
+		return line
+
+def path_to_node(base_node, path, cached_nodes):
+	# Take the base node and the path and return a node
+	# Results are cached because searching the node tree is expensive
+	# The following code is executed by threads, it is not safe, so a lock is needed...
+	if getattr(path, '__hash__'):
+		node_lookup_key = (base_node, path)
+	else:
+		# Not hashable, assume it is a list and join into a string
+		node_lookup_key = (base_node, os.path.sep.join(path))
+	try:
+		lock.acquire()
+		node = cached_nodes[node_lookup_key]
+	except KeyError:
+		node = base_node.find_resource(path)
+		cached_nodes[node_lookup_key] = node
+	finally:
+		lock.release()
+	return node
+
+def post_run(self):
+	if not self.__class__.__name__ in self.env.ENABLE_GCCDEPS:
+		return super(self.derived_gccdeps, self).post_run()
+
+	name = self.outputs[0].abspath()
+	name = re_o.sub('.d', name)
+	try:
+		txt = Utils.readf(name)
+	except EnvironmentError:
+		Logs.error('Could not find a .d dependency file, are cflags/cxxflags overwritten?')
+		raise
+	#os.remove(name)
+
+	# Compilers have the choice to either output the file's dependencies
+	# as one large Makefile rule:
+	#
+	#   /path/to/file.o: /path/to/dep1.h \
+	#                    /path/to/dep2.h \
+	#                    /path/to/dep3.h \
+	#                    ...
+	#
+	# or as many individual rules:
+	#
+	#   /path/to/file.o: /path/to/dep1.h
+	#   /path/to/file.o: /path/to/dep2.h
+	#   /path/to/file.o: /path/to/dep3.h
+	#   ...
+	#
+	# So the first step is to sanitize the input by stripping out the left-
+	# hand side of all these lines. After that, whatever remains are the
+	# implicit dependencies of task.outputs[0]
+	txt = '\n'.join([remove_makefile_rule_lhs(line) for line in txt.splitlines()])
+
+	# Now join all the lines together
+	txt = txt.replace('\\\n', '')
+
+	val = txt.strip()
+	val = [x.replace('\\ ', ' ') for x in re_splitter.split(val) if x]
+
+	nodes = []
+	bld = self.generator.bld
+
+	# Dynamically bind to the cache
+	try:
+		cached_nodes = bld.cached_nodes
+	except AttributeError:
+		cached_nodes = bld.cached_nodes = {}
+
+	for x in val:
+
+		node = None
+		if os.path.isabs(x):
+			node = path_to_node(bld.root, x, cached_nodes)
+		else:
+			# TODO waf 1.9 - single cwd value
+			path = getattr(bld, 'cwdx', bld.bldnode)
+			# when calling find_resource, make sure the path does not contain '..'
+			x = [k for k in Utils.split_path(x) if k and k != '.']
+			while '..'
in x: + idx = x.index('..') + if idx == 0: + x = x[1:] + path = path.parent + else: + del x[idx] + del x[idx-1] + + node = path_to_node(path, x, cached_nodes) + + if not node: + raise ValueError('could not find %r for %r' % (x, self)) + if id(node) == id(self.inputs[0]): + # ignore the source file, it is already in the dependencies + # this way, successful config tests may be retrieved from the cache + continue + nodes.append(node) + + Logs.debug('deps: gccdeps for %s returned %s', self, nodes) + + bld.node_deps[self.uid()] = nodes + bld.raw_deps[self.uid()] = [] + + try: + del self.cache_sig + except AttributeError: + pass + + Task.Task.post_run(self) + +def sig_implicit_deps(self): + if not self.__class__.__name__ in self.env.ENABLE_GCCDEPS: + return super(self.derived_gccdeps, self).sig_implicit_deps() + try: + return Task.Task.sig_implicit_deps(self) + except Errors.WafError: + return Utils.SIG_NIL + +def wrap_compiled_task(classname): + derived_class = type(classname, (Task.classes[classname],), {}) + derived_class.derived_gccdeps = derived_class + derived_class.post_run = post_run + derived_class.scan = scan + derived_class.sig_implicit_deps = sig_implicit_deps + +for k in ('c', 'cxx'): + if k in Task.classes: + wrap_compiled_task(k) + +@before_method('process_source') +@feature('force_gccdeps') +def force_gccdeps(self): + self.env.ENABLE_GCCDEPS = ['c', 'cxx'] + +def configure(conf): + # in case someone provides a --enable-gccdeps command-line option + if not getattr(conf.options, 'enable_gccdeps', True): + return + + global gccdeps_flags + flags = conf.env.GCCDEPS_FLAGS or gccdeps_flags + if conf.env.CC_NAME in supported_compilers: + try: + conf.check(fragment='int main() { return 0; }', features='c force_gccdeps', cflags=flags, msg='Checking for c flags %r' % ''.join(flags)) + except Errors.ConfigurationError: + pass + else: + conf.env.append_value('CFLAGS', flags) + conf.env.append_unique('ENABLE_GCCDEPS', 'c') + + if conf.env.CXX_NAME in supported_compilers: + try: + conf.check(fragment='int main() { return 0; }', features='cxx force_gccdeps', cxxflags=flags, msg='Checking for cxx flags %r' % ''.join(flags)) + except Errors.ConfigurationError: + pass + else: + conf.env.append_value('CXXFLAGS', flags) + conf.env.append_unique('ENABLE_GCCDEPS', 'cxx') + +def options(opt): + raise ValueError('Do not load gccdeps options') + diff -Nru lilv-0.24.4~dfsg0/waflib/extras/gdbus.py lilv-0.24.6/waflib/extras/gdbus.py --- lilv-0.24.4~dfsg0/waflib/extras/gdbus.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/extras/gdbus.py 2019-06-06 20:19:08.000000000 +0000 @@ -0,0 +1,87 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Copyright Garmin International or its subsidiaries, 2018 +# +# Heavily based on dbus.py + +""" +Compiles dbus files with **gdbus-codegen** +Typical usage:: + def options(opt): + opt.load('compiler_c gdbus') + def configure(conf): + conf.load('compiler_c gdbus') + def build(bld): + tg = bld.program( + includes = '.', + source = bld.path.ant_glob('*.c'), + target = 'gnome-hello') + tg.add_gdbus_file('test.xml', 'com.example.example.', 'Example') +""" + +from waflib import Task, Errors, Utils +from waflib.TaskGen import taskgen_method, before_method + +@taskgen_method +def add_gdbus_file(self, filename, prefix, namespace, export=False): + """ + Adds a dbus file to the list of dbus files to process. Store them in the attribute *dbus_lst*. 
+ :param filename: xml file to compile + :type filename: string + :param prefix: interface prefix (--interface-prefix=prefix) + :type prefix: string + :param mode: C namespace (--c-namespace=namespace) + :type mode: string + :param export: Export Headers? + :type export: boolean + """ + if not hasattr(self, 'gdbus_lst'): + self.gdbus_lst = [] + if not 'process_gdbus' in self.meths: + self.meths.append('process_gdbus') + self.gdbus_lst.append([filename, prefix, namespace, export]) + +@before_method('process_source') +def process_gdbus(self): + """ + Processes the dbus files stored in the attribute *gdbus_lst* to create :py:class:`gdbus_binding_tool` instances. + """ + output_node = self.path.get_bld().make_node(['gdbus', self.get_name()]) + sources = [] + + for filename, prefix, namespace, export in getattr(self, 'gdbus_lst', []): + node = self.path.find_resource(filename) + if not node: + raise Errors.WafError('file not found ' + filename) + c_file = output_node.find_or_declare(node.change_ext('.c').name) + h_file = output_node.find_or_declare(node.change_ext('.h').name) + tsk = self.create_task('gdbus_binding_tool', node, [c_file, h_file]) + tsk.cwd = output_node.abspath() + + tsk.env.GDBUS_CODEGEN_INTERFACE_PREFIX = prefix + tsk.env.GDBUS_CODEGEN_NAMESPACE = namespace + tsk.env.GDBUS_CODEGEN_OUTPUT = node.change_ext('').name + sources.append(c_file) + + if sources: + output_node.mkdir() + self.source = Utils.to_list(self.source) + sources + self.includes = [output_node] + self.to_incnodes(getattr(self, 'includes', [])) + if export: + self.export_includes = [output_node] + self.to_incnodes(getattr(self, 'export_includes', [])) + +class gdbus_binding_tool(Task.Task): + """ + Compiles a dbus file + """ + color = 'BLUE' + ext_out = ['.h', '.c'] + run_str = '${GDBUS_CODEGEN} --interface-prefix ${GDBUS_CODEGEN_INTERFACE_PREFIX} --generate-c-code ${GDBUS_CODEGEN_OUTPUT} --c-namespace ${GDBUS_CODEGEN_NAMESPACE} --c-generate-object-manager ${SRC[0].abspath()}' + shell = True + +def configure(conf): + """ + Detects the program gdbus-codegen and sets ``conf.env.GDBUS_CODEGEN`` + """ + conf.find_program('gdbus-codegen', var='GDBUS_CODEGEN') + diff -Nru lilv-0.24.4~dfsg0/waflib/extras/genpybind.py lilv-0.24.6/waflib/extras/genpybind.py --- lilv-0.24.4~dfsg0/waflib/extras/genpybind.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/extras/genpybind.py 2019-10-19 17:59:11.000000000 +0000 @@ -0,0 +1,194 @@ +import os +import pipes +import subprocess +import sys + +from waflib import Logs, Task, Context +from waflib.Tools.c_preproc import scan as scan_impl +# ^-- Note: waflib.extras.gccdeps.scan does not work for us, +# due to its current implementation: +# The -MD flag is injected into the {C,CXX}FLAGS environment variable and +# dependencies are read out in a separate step after compiling by reading +# the .d file saved alongside the object file. +# As the genpybind task refers to a header file that is never compiled itself, +# gccdeps will not be able to extract the list of dependencies. 
+ +from waflib.TaskGen import feature, before_method + + +def join_args(args): + return " ".join(pipes.quote(arg) for arg in args) + + +def configure(cfg): + cfg.load("compiler_cxx") + cfg.load("python") + cfg.check_python_version(minver=(2, 7)) + if not cfg.env.LLVM_CONFIG: + cfg.find_program("llvm-config", var="LLVM_CONFIG") + if not cfg.env.GENPYBIND: + cfg.find_program("genpybind", var="GENPYBIND") + + # find clang reasource dir for builtin headers + cfg.env.GENPYBIND_RESOURCE_DIR = os.path.join( + cfg.cmd_and_log(cfg.env.LLVM_CONFIG + ["--libdir"]).strip(), + "clang", + cfg.cmd_and_log(cfg.env.LLVM_CONFIG + ["--version"]).strip()) + if os.path.exists(cfg.env.GENPYBIND_RESOURCE_DIR): + cfg.msg("Checking clang resource dir", cfg.env.GENPYBIND_RESOURCE_DIR) + else: + cfg.fatal("Clang resource dir not found") + + +@feature("genpybind") +@before_method("process_source") +def generate_genpybind_source(self): + """ + Run genpybind on the headers provided in `source` and compile/link the + generated code instead. This works by generating the code on the fly and + swapping the source node before `process_source` is run. + """ + # name of module defaults to name of target + module = getattr(self, "module", self.target) + + # create temporary source file in build directory to hold generated code + out = "genpybind-%s.%d.cpp" % (module, self.idx) + out = self.path.get_bld().find_or_declare(out) + + task = self.create_task("genpybind", self.to_nodes(self.source), out) + # used to detect whether CFLAGS or CXXFLAGS should be passed to genpybind + task.features = self.features + task.module = module + # can be used to select definitions to include in the current module + # (when header files are shared by more than one module) + task.genpybind_tags = self.to_list(getattr(self, "genpybind_tags", [])) + # additional include directories + task.includes = self.to_list(getattr(self, "includes", [])) + task.genpybind = self.env.GENPYBIND + + # Tell waf to compile/link the generated code instead of the headers + # originally passed-in via the `source` parameter. (see `process_source`) + self.source = [out] + + +class genpybind(Task.Task): # pylint: disable=invalid-name + """ + Runs genpybind on headers provided as input to this task. + Generated code will be written to the first (and only) output node. 
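+
+	A wscript sketch for the 'genpybind' feature (names are illustrative;
+	the generated code is compiled as a Python extension, hence the extra
+	features)::
+
+		def build(bld):
+			bld(features='genpybind cxx cxxshlib pyext',
+			    source='bindings.h',
+			    target='mymodule')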
+ """ + quiet = True + color = "PINK" + scan = scan_impl + + @staticmethod + def keyword(): + return "Analyzing" + + def run(self): + if not self.inputs: + return + + args = self.find_genpybind() + self._arguments( + resource_dir=self.env.GENPYBIND_RESOURCE_DIR) + + output = self.run_genpybind(args) + + # For debugging / log output + pasteable_command = join_args(args) + + # write generated code to file in build directory + # (will be compiled during process_source stage) + (output_node,) = self.outputs + output_node.write("// {}\n{}\n".format( + pasteable_command.replace("\n", "\n// "), output)) + + def find_genpybind(self): + return self.genpybind + + def run_genpybind(self, args): + bld = self.generator.bld + + kwargs = dict(cwd=bld.variant_dir) + if hasattr(bld, "log_command"): + bld.log_command(args, kwargs) + else: + Logs.debug("runner: {!r}".format(args)) + proc = subprocess.Popen( + args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, **kwargs) + stdout, stderr = proc.communicate() + + if not isinstance(stdout, str): + stdout = stdout.decode(sys.stdout.encoding, errors="replace") + if not isinstance(stderr, str): + stderr = stderr.decode(sys.stderr.encoding, errors="replace") + + if proc.returncode != 0: + bld.fatal( + "genpybind returned {code} during the following call:" + "\n{command}\n\n{stdout}\n\n{stderr}".format( + code=proc.returncode, + command=join_args(args), + stdout=stdout, + stderr=stderr, + )) + + if stderr.strip(): + Logs.debug("non-fatal warnings during genpybind run:\n{}".format(stderr)) + + return stdout + + def _include_paths(self): + return self.generator.to_incnodes(self.includes + self.env.INCLUDES) + + def _inputs_as_relative_includes(self): + include_paths = self._include_paths() + relative_includes = [] + for node in self.inputs: + for inc in include_paths: + if node.is_child_of(inc): + relative_includes.append(node.path_from(inc)) + break + else: + self.generator.bld.fatal("could not resolve {}".format(node)) + return relative_includes + + def _arguments(self, genpybind_parse=None, resource_dir=None): + args = [] + relative_includes = self._inputs_as_relative_includes() + is_cxx = "cxx" in self.features + + # options for genpybind + args.extend(["--genpybind-module", self.module]) + if self.genpybind_tags: + args.extend(["--genpybind-tag"] + self.genpybind_tags) + if relative_includes: + args.extend(["--genpybind-include"] + relative_includes) + if genpybind_parse: + args.extend(["--genpybind-parse", genpybind_parse]) + + args.append("--") + + # headers to be processed by genpybind + args.extend(node.abspath() for node in self.inputs) + + args.append("--") + + # options for clang/genpybind-parse + args.append("-D__GENPYBIND__") + args.append("-xc++" if is_cxx else "-xc") + has_std_argument = False + for flag in self.env["CXXFLAGS" if is_cxx else "CFLAGS"]: + flag = flag.replace("-std=gnu", "-std=c") + if flag.startswith("-std=c"): + has_std_argument = True + args.append(flag) + if not has_std_argument: + args.append("-std=c++14") + args.extend("-I{}".format(n.abspath()) for n in self._include_paths()) + args.extend("-D{}".format(p) for p in self.env.DEFINES) + + # point to clang resource dir, if specified + if resource_dir: + args.append("-resource-dir={}".format(resource_dir)) + + return args diff -Nru lilv-0.24.4~dfsg0/waflib/extras/gob2.py lilv-0.24.6/waflib/extras/gob2.py --- lilv-0.24.4~dfsg0/waflib/extras/gob2.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/extras/gob2.py 2019-06-06 20:19:08.000000000 +0000 @@ -0,0 +1,17 @@ 
+#!/usr/bin/env python +# encoding: utf-8 +# Ali Sabil, 2007 + +from waflib import TaskGen + +TaskGen.declare_chain( + name = 'gob2', + rule = '${GOB2} -o ${TGT[0].bld_dir()} ${GOB2FLAGS} ${SRC}', + ext_in = '.gob', + ext_out = '.c' +) + +def configure(conf): + conf.find_program('gob2', var='GOB2') + conf.env['GOB2FLAGS'] = '' + diff -Nru lilv-0.24.4~dfsg0/waflib/extras/halide.py lilv-0.24.6/waflib/extras/halide.py --- lilv-0.24.4~dfsg0/waflib/extras/halide.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/extras/halide.py 2019-06-06 20:19:08.000000000 +0000 @@ -0,0 +1,151 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- +# Halide code generation tool + +__author__ = __maintainer__ = "Jérôme Carretero " +__copyright__ = "Jérôme Carretero, 2014" + +""" + +Tool to run `Halide `_ code generators. + +Usage:: + + bld( + name='pipeline', + # ^ Reference this in use="..." for things using the generated code + #target=['pipeline.o', 'pipeline.h'] + # ^ by default, name.{o,h} is added, but you can set the outputs here + features='halide', + halide_env="HL_TRACE=1 HL_TARGET=host-opencl-gpu_debug", + # ^ Environment passed to the generator, + # can be a dict, k/v list, or string. + args=[], + # ^ Command-line arguments to the generator (optional), + # eg. to give parameters to the scheduling + source='pipeline_gen', + # ^ Name of the source executable + ) + + +Known issues: + + +- Currently only supports Linux (no ".exe") + +- Doesn't rerun on input modification when input is part of a build + chain, and has been modified externally. + +""" + +import os +from waflib import Task, Utils, Options, TaskGen, Errors + +class run_halide_gen(Task.Task): + color = 'CYAN' + vars = ['HALIDE_ENV', 'HALIDE_ARGS'] + run_str = "${SRC[0].abspath()} ${HALIDE_ARGS}" + def __str__(self): + stuff = "halide" + stuff += ("[%s]" % (",".join( + ('%s=%s' % (k,v)) for k, v in sorted(self.env.env.items())))) + return Task.Task.__str__(self).replace(self.__class__.__name__, + stuff) + +@TaskGen.feature('halide') +@TaskGen.before_method('process_source') +def halide(self): + Utils.def_attrs(self, + args=[], + halide_env={}, + ) + + bld = self.bld + + env = self.halide_env + try: + if isinstance(env, str): + env = dict(x.split('=') for x in env.split()) + elif isinstance(env, list): + env = dict(x.split('=') for x in env) + assert isinstance(env, dict) + except Exception as e: + if not isinstance(e, ValueError) \ + and not isinstance(e, AssertionError): + raise + raise Errors.WafError( + "halide_env must be under the form" \ + " {'HL_x':'a', 'HL_y':'b'}" \ + " or ['HL_x=y', 'HL_y=b']" \ + " or 'HL_x=y HL_y=b'") + + src = self.to_nodes(self.source) + assert len(src) == 1, "Only one source expected" + src = src[0] + + args = Utils.to_list(self.args) + + def change_ext(src, ext): + # Return a node with a new extension, in an appropriate folder + name = src.name + xpos = src.name.rfind('.') + if xpos == -1: + xpos = len(src.name) + newname = name[:xpos] + ext + if src.is_child_of(bld.bldnode): + node = src.get_src().parent.find_or_declare(newname) + else: + node = bld.bldnode.find_or_declare(newname) + return node + + def to_nodes(self, lst, path=None): + tmp = [] + path = path or self.path + find = path.find_or_declare + + if isinstance(lst, self.path.__class__): + lst = [lst] + + for x in Utils.to_list(lst): + if isinstance(x, str): + node = find(x) + else: + node = x + tmp.append(node) + return tmp + + tgt = to_nodes(self, self.target) + if not tgt: + tgt = [change_ext(src, '.o'), change_ext(src, '.h')] + cwd = 
tgt[0].parent.abspath() + task = self.create_task('run_halide_gen', src, tgt, cwd=cwd) + task.env.append_unique('HALIDE_ARGS', args) + if task.env.env == []: + task.env.env = {} + task.env.env.update(env) + task.env.HALIDE_ENV = " ".join(("%s=%s" % (k,v)) for (k,v) in sorted(env.items())) + task.env.HALIDE_ARGS = args + + try: + self.compiled_tasks.append(task) + except AttributeError: + self.compiled_tasks = [task] + self.source = [] + +def configure(conf): + if Options.options.halide_root is None: + conf.check_cfg(package='Halide', args='--cflags --libs') + else: + halide_root = Options.options.halide_root + conf.env.INCLUDES_HALIDE = [ os.path.join(halide_root, "include") ] + conf.env.LIBPATH_HALIDE = [ os.path.join(halide_root, "lib") ] + conf.env.LIB_HALIDE = ["Halide"] + + # You might want to add this, while upstream doesn't fix it + #conf.env.LIB_HALIDE += ['ncurses', 'dl', 'pthread'] + +def options(opt): + opt.add_option('--halide-root', + help="path to Halide include and lib files", + ) + diff -Nru lilv-0.24.4~dfsg0/waflib/extras/__init__.py lilv-0.24.6/waflib/extras/__init__.py --- lilv-0.24.4~dfsg0/waflib/extras/__init__.py 2018-06-22 09:25:51.000000000 +0000 +++ lilv-0.24.6/waflib/extras/__init__.py 2019-06-06 20:19:08.000000000 +0000 @@ -1,4 +1,3 @@ -#! /usr/bin/env python +#!/usr/bin/env python # encoding: utf-8 -# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file - +# Thomas Nagy, 2005-2010 (ita) diff -Nru lilv-0.24.4~dfsg0/waflib/extras/javatest.py lilv-0.24.6/waflib/extras/javatest.py --- lilv-0.24.4~dfsg0/waflib/extras/javatest.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/extras/javatest.py 2019-06-06 20:19:08.000000000 +0000 @@ -0,0 +1,118 @@ +#! /usr/bin/env python +# encoding: utf-8 +# Federico Pellegrin, 2017 (fedepell) + +""" +Provides Java Unit test support using :py:class:`waflib.Tools.waf_unit_test.utest` +task via the **javatest** feature. + +This gives the possibility to run unit test and have them integrated into the +standard waf unit test environment. It has been tested with TestNG and JUnit +but should be easily expandable to other frameworks given the flexibility of +ut_str provided by the standard waf unit test environment. + +Example usage: + +def options(opt): + opt.load('java waf_unit_test javatest') + +def configure(conf): + conf.load('java javatest') + +def build(bld): + + [ ... mainprog is built here ... ] + + bld(features = 'javac javatest', + srcdir = 'test/', + outdir = 'test', + sourcepath = ['test'], + classpath = [ 'src' ], + basedir = 'test', + use = ['JAVATEST', 'mainprog'], # mainprog is the program being tested in src/ + ut_str = 'java -cp ${CLASSPATH} ${JTRUNNER} ${SRC}', + jtest_source = bld.path.ant_glob('test/*.xml'), + ) + + +At command line the CLASSPATH where to find the testing environment and the +test runner (default TestNG) that will then be seen in the environment as +CLASSPATH_JAVATEST (then used for use) and JTRUNNER and can be used for +dependencies and ut_str generation. + +Example configure for TestNG: + waf configure --jtpath=/tmp/testng-6.12.jar:/tmp/jcommander-1.71.jar --jtrunner=org.testng.TestNG + or as default runner is TestNG: + waf configure --jtpath=/tmp/testng-6.12.jar:/tmp/jcommander-1.71.jar + +Example configure for JUnit: + waf configure --jtpath=/tmp/junit.jar --jtrunner=org.junit.runner.JUnitCore + +The runner class presence on the system is checked for at configuration stage. 
+ +""" + +import os +from waflib import Task, TaskGen, Options + +@TaskGen.feature('javatest') +@TaskGen.after_method('apply_java', 'use_javac_files', 'set_classpath') +def make_javatest(self): + """ + Creates a ``utest`` task with a populated environment for Java Unit test execution + + """ + tsk = self.create_task('utest') + tsk.set_run_after(self.javac_task) + + # Put test input files as waf_unit_test relies on that for some prints and log generation + # If jtest_source is there, this is specially useful for passing XML for TestNG + # that contain test specification, use that as inputs, otherwise test sources + if getattr(self, 'jtest_source', None): + tsk.inputs = self.to_nodes(self.jtest_source) + else: + if self.javac_task.srcdir[0].exists(): + tsk.inputs = self.javac_task.srcdir[0].ant_glob('**/*.java', remove=False) + + if getattr(self, 'ut_str', None): + self.ut_run, lst = Task.compile_fun(self.ut_str, shell=getattr(self, 'ut_shell', False)) + tsk.vars = lst + tsk.vars + + if getattr(self, 'ut_cwd', None): + if isinstance(self.ut_cwd, str): + # we want a Node instance + if os.path.isabs(self.ut_cwd): + self.ut_cwd = self.bld.root.make_node(self.ut_cwd) + else: + self.ut_cwd = self.path.make_node(self.ut_cwd) + else: + self.ut_cwd = self.bld.bldnode + + # Get parent CLASSPATH and add output dir of test, we run from wscript dir + # We have to change it from list to the standard java -cp format (: separated) + tsk.env.CLASSPATH = ':'.join(self.env.CLASSPATH) + ':' + self.outdir.abspath() + + if not self.ut_cwd.exists(): + self.ut_cwd.mkdir() + + if not hasattr(self, 'ut_env'): + self.ut_env = dict(os.environ) + +def configure(ctx): + cp = ctx.env.CLASSPATH or '.' + if getattr(Options.options, 'jtpath', None): + ctx.env.CLASSPATH_JAVATEST = getattr(Options.options, 'jtpath').split(':') + cp += ':' + getattr(Options.options, 'jtpath') + + if getattr(Options.options, 'jtrunner', None): + ctx.env.JTRUNNER = getattr(Options.options, 'jtrunner') + + if ctx.check_java_class(ctx.env.JTRUNNER, with_classpath=cp): + ctx.fatal('Could not run test class %r' % ctx.env.JTRUNNER) + +def options(opt): + opt.add_option('--jtpath', action='store', default='', dest='jtpath', + help='Path to jar(s) needed for javatest execution, colon separated, if not in the system CLASSPATH') + opt.add_option('--jtrunner', action='store', default='org.testng.TestNG', dest='jtrunner', + help='Class to run javatest test [default: org.testng.TestNG]') + diff -Nru lilv-0.24.4~dfsg0/waflib/extras/kde4.py lilv-0.24.6/waflib/extras/kde4.py --- lilv-0.24.4~dfsg0/waflib/extras/kde4.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/extras/kde4.py 2019-06-06 20:19:08.000000000 +0000 @@ -0,0 +1,93 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Thomas Nagy, 2006-2010 (ita) + +""" +Support for the KDE4 libraries and msgfmt +""" + +import os, re +from waflib import Task, Utils +from waflib.TaskGen import feature + +@feature('msgfmt') +def apply_msgfmt(self): + """ + Process all languages to create .mo files and to install them:: + + def build(bld): + bld(features='msgfmt', langs='es de fr', appname='myapp', install_path='${KDE4_LOCALE_INSTALL_DIR}') + """ + for lang in self.to_list(self.langs): + node = self.path.find_resource(lang+'.po') + task = self.create_task('msgfmt', node, node.change_ext('.mo')) + + langname = lang.split('/') + langname = langname[-1] + + inst = getattr(self, 'install_path', '${KDE4_LOCALE_INSTALL_DIR}') + + self.add_install_as( + inst_to = inst + os.sep + langname + os.sep + 'LC_MESSAGES' + 
os.sep + getattr(self, 'appname', 'set_your_appname') + '.mo', + inst_from = task.outputs[0], + chmod = getattr(self, 'chmod', Utils.O644)) + +class msgfmt(Task.Task): + """ + Transform .po files into .mo files + """ + color = 'BLUE' + run_str = '${MSGFMT} ${SRC} -o ${TGT}' + +def configure(self): + """ + Detect kde4-config and set various variables for the *use* system:: + + def options(opt): + opt.load('compiler_cxx kde4') + def configure(conf): + conf.load('compiler_cxx kde4') + def build(bld): + bld.program(source='main.c', target='app', use='KDECORE KIO KHTML') + """ + kdeconfig = self.find_program('kde4-config') + prefix = self.cmd_and_log(kdeconfig + ['--prefix']).strip() + fname = '%s/share/apps/cmake/modules/KDELibsDependencies.cmake' % prefix + try: + os.stat(fname) + except OSError: + fname = '%s/share/kde4/apps/cmake/modules/KDELibsDependencies.cmake' % prefix + try: + os.stat(fname) + except OSError: + self.fatal('could not open %s' % fname) + + try: + txt = Utils.readf(fname) + except EnvironmentError: + self.fatal('could not read %s' % fname) + + txt = txt.replace('\\\n', '\n') + fu = re.compile('#(.*)\n') + txt = fu.sub('', txt) + + setregexp = re.compile(r'([sS][eE][tT]\s*\()\s*([^\s]+)\s+\"([^"]+)\"\)') + found = setregexp.findall(txt) + + for (_, key, val) in found: + #print key, val + self.env[key] = val + + # well well, i could just write an interpreter for cmake files + self.env['LIB_KDECORE']= ['kdecore'] + self.env['LIB_KDEUI'] = ['kdeui'] + self.env['LIB_KIO'] = ['kio'] + self.env['LIB_KHTML'] = ['khtml'] + self.env['LIB_KPARTS'] = ['kparts'] + + self.env['LIBPATH_KDECORE'] = [os.path.join(self.env.KDE4_LIB_INSTALL_DIR, 'kde4', 'devel'), self.env.KDE4_LIB_INSTALL_DIR] + self.env['INCLUDES_KDECORE'] = [self.env['KDE4_INCLUDE_INSTALL_DIR']] + self.env.append_value('INCLUDES_KDECORE', [self.env['KDE4_INCLUDE_INSTALL_DIR']+ os.sep + 'KDE']) + + self.find_program('msgfmt', var='MSGFMT') + diff -Nru lilv-0.24.4~dfsg0/waflib/extras/local_rpath.py lilv-0.24.6/waflib/extras/local_rpath.py --- lilv-0.24.4~dfsg0/waflib/extras/local_rpath.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/extras/local_rpath.py 2019-10-19 17:59:11.000000000 +0000 @@ -0,0 +1,21 @@ +#! 
/usr/bin/env python +# encoding: utf-8 +# Thomas Nagy, 2011 (ita) + +import copy +from waflib.TaskGen import after_method, feature + +@after_method('propagate_uselib_vars') +@feature('cprogram', 'cshlib', 'cxxprogram', 'cxxshlib', 'fcprogram', 'fcshlib') +def add_rpath_stuff(self): + all = copy.copy(self.to_list(getattr(self, 'use', []))) + while all: + name = all.pop() + try: + tg = self.bld.get_tgen_by_name(name) + except: + continue + if hasattr(tg, 'link_task'): + self.env.append_value('RPATH', tg.link_task.outputs[0].parent.abspath()) + all.extend(self.to_list(getattr(tg, 'use', []))) + diff -Nru lilv-0.24.4~dfsg0/waflib/extras/lv2.py lilv-0.24.6/waflib/extras/lv2.py --- lilv-0.24.4~dfsg0/waflib/extras/lv2.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/extras/lv2.py 2019-06-06 20:19:08.000000000 +0000 @@ -0,0 +1,75 @@ +import os +import sys + +from waflib import Logs +from waflib import Options + +def options(opt): + conf_opts = opt.get_option_group('Configuration options') + conf_opts.add_option('--lv2-user', action='store_true', default=False, dest='lv2_user', + help='install LV2 bundles to user location') + conf_opts.add_option('--lv2dir', type='string', + help='LV2 bundles [Default: LIBDIR/lv2]') + +def register_lv2_path(conf, path): + """Return the default LV2_PATH to use for this system""" + if 'LV2_PATH' not in conf.run_env and 'LV2_PATH' not in os.environ: + conf.run_env['LV2_PATH'] = [conf.env['LV2DIR']] + + conf.run_env.append_unique('LV2_PATH', path) + +def default_lv2_path(conf): + """Return the default LV2_PATH for the build target as a list""" + if conf.env.DEST_OS == 'darwin': + return ['~/Library/Audio/Plug-Ins/LV2', + '~/.lv2', + '/usr/local/lib/lv2', + '/usr/lib/lv2', + '/Library/Audio/Plug-Ins/LV2'] + elif conf.env.DEST_OS == 'haiku': + return ['~/.lv2', + '/boot/common/add-ons/lv2'] + elif conf.env.DEST_OS == 'win32': + return ['%APPDATA%\\\\LV2', + '%COMMONPROGRAMFILES%\\\\LV2'] + else: + libdirname = os.path.basename(conf.env.LIBDIR) + return ['~/.lv2', + '/usr/%s/lv2' % libdirname, + '/usr/local/%s/lv2' % libdirname] + +def configure(conf): + def env_path(parent_dir_var, name): + parent = os.getenv(parent_dir_var) + if parent: + return os.path.join(parent, name) + else: + Logs.warn('Environment variable %s unset, using LIBDIR\n' % parent_dir_var) + return os.path.join(conf.env['LIBDIR'], name) + + def normpath(path): + if sys.platform == 'win32': + return os.path.normpath(path).replace('\\', '/') + else: + return os.path.normpath(path) + + if Options.options.lv2dir: + conf.env['LV2DIR'] = Options.options.lv2dir + elif Options.options.lv2_user: + if conf.env.DEST_OS == 'darwin': + conf.env['LV2DIR'] = env_path('HOME', 'Library/Audio/Plug-Ins/LV2') + elif conf.env.DEST_OS == 'win32': + conf.env['LV2DIR'] = env_path('APPDATA', 'LV2') + else: + conf.env['LV2DIR'] = env_path('HOME', '.lv2') + else: + if conf.env.DEST_OS == 'darwin': + conf.env['LV2DIR'] = '/Library/Audio/Plug-Ins/LV2' + elif conf.env.DEST_OS == 'win32': + conf.env['LV2DIR'] = env_path('COMMONPROGRAMFILES', 'LV2') + else: + conf.env['LV2DIR'] = os.path.join(conf.env['LIBDIR'], 'lv2') + + # Add default LV2_PATH to runtime environment for tests that use plugins + if 'LV2_PATH' not in os.environ: + conf.run_env['LV2_PATH'] = default_lv2_path(conf) diff -Nru lilv-0.24.4~dfsg0/waflib/extras/make.py lilv-0.24.6/waflib/extras/make.py --- lilv-0.24.4~dfsg0/waflib/extras/make.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/extras/make.py 2019-06-06 20:19:08.000000000 +0000 @@ -0,0 
+1,142 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Thomas Nagy, 2011 (ita) + +""" +A make-like way of executing the build, following the relationships between inputs/outputs + +This algorithm will lead to slower builds, will not be as flexible as "waf build", but +it might be useful for building data files (?) + +It is likely to break in the following cases: +- files are created dynamically (no inputs or outputs) +- headers +- building two files from different groups +""" + +import re +from waflib import Options, Task +from waflib.Build import BuildContext + +class MakeContext(BuildContext): + '''executes tasks in a step-by-step manner, following dependencies between inputs/outputs''' + cmd = 'make' + fun = 'build' + + def __init__(self, **kw): + super(MakeContext, self).__init__(**kw) + self.files = Options.options.files + + def get_build_iterator(self): + if not self.files: + while 1: + yield super(MakeContext, self).get_build_iterator() + + for g in self.groups: + for tg in g: + try: + f = tg.post + except AttributeError: + pass + else: + f() + + provides = {} + uses = {} + all_tasks = [] + tasks = [] + for pat in self.files.split(','): + matcher = self.get_matcher(pat) + for tg in g: + if isinstance(tg, Task.Task): + lst = [tg] + else: + lst = tg.tasks + for tsk in lst: + all_tasks.append(tsk) + + do_exec = False + for node in tsk.inputs: + try: + uses[node].append(tsk) + except: + uses[node] = [tsk] + + if matcher(node, output=False): + do_exec = True + break + + for node in tsk.outputs: + try: + provides[node].append(tsk) + except: + provides[node] = [tsk] + + if matcher(node, output=True): + do_exec = True + break + if do_exec: + tasks.append(tsk) + + # so we have the tasks that we need to process, the list of all tasks, + # the map of the tasks providing nodes, and the map of tasks using nodes + + if not tasks: + # if there are no tasks matching, return everything in the current group + result = all_tasks + else: + # this is like a big filter... + result = set() + seen = set() + cur = set(tasks) + while cur: + result |= cur + tosee = set() + for tsk in cur: + for node in tsk.inputs: + if node in seen: + continue + seen.add(node) + tosee |= set(provides.get(node, [])) + cur = tosee + result = list(result) + + Task.set_file_constraints(result) + Task.set_precedence_constraints(result) + yield result + + while 1: + yield [] + + def get_matcher(self, pat): + # this returns a function + inn = True + out = True + if pat.startswith('in:'): + out = False + pat = pat.replace('in:', '') + elif pat.startswith('out:'): + inn = False + pat = pat.replace('out:', '') + + anode = self.root.find_node(pat) + pattern = None + if not anode: + if not pat.startswith('^'): + pat = '^.+?%s' % pat + if not pat.endswith('$'): + pat = '%s$' % pat + pattern = re.compile(pat) + + def match(node, output): + if output and not out: + return False + if not output and not inn: + return False + + if anode: + return anode == node + else: + return pattern.match(node.abspath()) + return match + diff -Nru lilv-0.24.4~dfsg0/waflib/extras/midl.py lilv-0.24.6/waflib/extras/midl.py --- lilv-0.24.4~dfsg0/waflib/extras/midl.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/extras/midl.py 2019-06-06 20:19:08.000000000 +0000 @@ -0,0 +1,69 @@ +#!/usr/bin/env python +# Issue 1185 ultrix gmail com + +""" +Microsoft Interface Definition Language support. 
Given ComObject.idl, this tool +will generate ComObject.tlb ComObject_i.h ComObject_i.c ComObject_p.c and dlldata.c + +To declare targets using midl:: + + def configure(conf): + conf.load('msvc') + conf.load('midl') + + def build(bld): + bld( + features='c cshlib', + # Note: ComObject_i.c is generated from ComObject.idl + source = 'main.c ComObject.idl ComObject_i.c', + target = 'ComObject.dll') +""" + +from waflib import Task, Utils +from waflib.TaskGen import feature, before_method +import os + +def configure(conf): + conf.find_program(['midl'], var='MIDL') + + conf.env.MIDLFLAGS = [ + '/nologo', + '/D', + '_DEBUG', + '/W1', + '/char', + 'signed', + '/Oicf', + ] + +@feature('c', 'cxx') +@before_method('process_source') +def idl_file(self): + # Do this before process_source so that the generated header can be resolved + # when scanning source dependencies. + idl_nodes = [] + src_nodes = [] + for node in Utils.to_list(self.source): + if str(node).endswith('.idl'): + idl_nodes.append(node) + else: + src_nodes.append(node) + + for node in self.to_nodes(idl_nodes): + t = node.change_ext('.tlb') + h = node.change_ext('_i.h') + c = node.change_ext('_i.c') + p = node.change_ext('_p.c') + d = node.parent.find_or_declare('dlldata.c') + self.create_task('midl', node, [t, h, c, p, d]) + + self.source = src_nodes + +class midl(Task.Task): + """ + Compile idl files + """ + color = 'YELLOW' + run_str = '${MIDL} ${MIDLFLAGS} ${CPPPATH_ST:INCLUDES} /tlb ${TGT[0].bldpath()} /header ${TGT[1].bldpath()} /iid ${TGT[2].bldpath()} /proxy ${TGT[3].bldpath()} /dlldata ${TGT[4].bldpath()} ${SRC}' + before = ['winrc'] + diff -Nru lilv-0.24.4~dfsg0/waflib/extras/msvcdeps.py lilv-0.24.6/waflib/extras/msvcdeps.py --- lilv-0.24.4~dfsg0/waflib/extras/msvcdeps.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/extras/msvcdeps.py 2019-10-19 17:59:11.000000000 +0000 @@ -0,0 +1,251 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Copyright Garmin International or its subsidiaries, 2012-2013 + +''' +Off-load dependency scanning from Python code to MSVC compiler + +This tool is safe to load in any environment; it will only activate the +MSVC exploits when it finds that a particular taskgen uses MSVC to +compile. + +Empirical testing shows about a 10% execution time savings from using +this tool as compared to c_preproc. + +The technique of gutting scan() and pushing the dependency calculation +down to post_run() is cribbed from gccdeps.py. + +This affects the cxx class, so make sure to load Qt5 after this tool. 
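+
+With /showIncludes, cl.exe reports every header opened by the preprocessor
+on a separate line, for example (illustrative path)::
+
+	Note: including file: C:\project\include\foo.h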
+ +Usage:: + + def options(opt): + opt.load('compiler_cxx') + def configure(conf): + conf.load('compiler_cxx msvcdeps') +''' + +import os, sys, tempfile, threading + +from waflib import Context, Errors, Logs, Task, Utils +from waflib.Tools import c_preproc, c, cxx, msvc +from waflib.TaskGen import feature, before_method + +lock = threading.Lock() +nodes = {} # Cache the path -> Node lookup + +PREPROCESSOR_FLAG = '/showIncludes' +INCLUDE_PATTERN = 'Note: including file:' + +# Extensible by outside tools +supported_compilers = ['msvc'] + +@feature('c', 'cxx') +@before_method('process_source') +def apply_msvcdeps_flags(taskgen): + if taskgen.env.CC_NAME not in supported_compilers: + return + + for flag in ('CFLAGS', 'CXXFLAGS'): + if taskgen.env.get_flat(flag).find(PREPROCESSOR_FLAG) < 0: + taskgen.env.append_value(flag, PREPROCESSOR_FLAG) + +def path_to_node(base_node, path, cached_nodes): + ''' + Take the base node and the path and return a node + Results are cached because searching the node tree is expensive + The following code is executed by threads, it is not safe, so a lock is needed... + ''' + # normalize the path because ant_glob() does not understand + # parent path components (..) + path = os.path.normpath(path) + + # normalize the path case to increase likelihood of a cache hit + path = os.path.normcase(path) + + # ant_glob interprets [] and () characters, so those must be replaced + path = path.replace('[', '?').replace(']', '?').replace('(', '[(]').replace(')', '[)]') + + node_lookup_key = (base_node, path) + + try: + node = cached_nodes[node_lookup_key] + except KeyError: + # retry with lock on cache miss + with lock: + try: + node = cached_nodes[node_lookup_key] + except KeyError: + node_list = base_node.ant_glob([path], ignorecase=True, remove=False, quiet=True, regex=False) + node = cached_nodes[node_lookup_key] = node_list[0] if node_list else None + + return node + +def post_run(self): + if self.env.CC_NAME not in supported_compilers: + return super(self.derived_msvcdeps, self).post_run() + + # TODO this is unlikely to work with netcache + if getattr(self, 'cached', None): + return Task.Task.post_run(self) + + bld = self.generator.bld + unresolved_names = [] + resolved_nodes = [] + + # Dynamically bind to the cache + try: + cached_nodes = bld.cached_nodes + except AttributeError: + cached_nodes = bld.cached_nodes = {} + + for path in self.msvcdeps_paths: + node = None + if os.path.isabs(path): + node = path_to_node(bld.root, path, cached_nodes) + else: + # when calling find_resource, make sure the path does not begin with '..' 
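+			# e.g. a path such as '..\\foo\\bar.h': every leading '..' moves
+			# base_node one directory up before the remainder is resolved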
+ base_node = bld.bldnode + path = [k for k in Utils.split_path(path) if k and k != '.'] + while path[0] == '..': + path.pop(0) + base_node = base_node.parent + path = os.sep.join(path) + + node = path_to_node(base_node, path, cached_nodes) + + if not node: + raise ValueError('could not find %r for %r' % (path, self)) + else: + if not c_preproc.go_absolute: + if not (node.is_child_of(bld.srcnode) or node.is_child_of(bld.bldnode)): + # System library + Logs.debug('msvcdeps: Ignoring system include %r', node) + continue + + if id(node) == id(self.inputs[0]): + # Self-dependency + continue + + resolved_nodes.append(node) + + bld.node_deps[self.uid()] = resolved_nodes + bld.raw_deps[self.uid()] = unresolved_names + + try: + del self.cache_sig + except AttributeError: + pass + + Task.Task.post_run(self) + +def scan(self): + if self.env.CC_NAME not in supported_compilers: + return super(self.derived_msvcdeps, self).scan() + + resolved_nodes = self.generator.bld.node_deps.get(self.uid(), []) + unresolved_names = [] + return (resolved_nodes, unresolved_names) + +def sig_implicit_deps(self): + if self.env.CC_NAME not in supported_compilers: + return super(self.derived_msvcdeps, self).sig_implicit_deps() + + try: + return Task.Task.sig_implicit_deps(self) + except Errors.WafError: + return Utils.SIG_NIL + +def exec_command(self, cmd, **kw): + if self.env.CC_NAME not in supported_compilers: + return super(self.derived_msvcdeps, self).exec_command(cmd, **kw) + + if not 'cwd' in kw: + kw['cwd'] = self.get_cwd() + + if self.env.PATH: + env = kw['env'] = dict(kw.get('env') or self.env.env or os.environ) + env['PATH'] = self.env.PATH if isinstance(self.env.PATH, str) else os.pathsep.join(self.env.PATH) + + # The Visual Studio IDE adds an environment variable that causes + # the MS compiler to send its textual output directly to the + # debugging window rather than normal stdout/stderr. + # + # This is unrecoverably bad for this tool because it will cause + # all the dependency scanning to see an empty stdout stream and + # assume that the file being compiled uses no headers. + # + # See http://blogs.msdn.com/b/freik/archive/2006/04/05/569025.aspx + # + # Attempting to repair the situation by deleting the offending + # envvar at this point in tool execution will not be good enough-- + # its presence poisons the 'waf configure' step earlier. We just + # want to put a sanity check here in order to help developers + # quickly diagnose the issue if an otherwise-good Waf tree + # is then executed inside the MSVS IDE. 
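+	# The assert below makes that situation fail fast instead of quietly
+	# producing builds with empty dependency lists.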
+ assert 'VS_UNICODE_OUTPUT' not in kw['env'] + + cmd, args = self.split_argfile(cmd) + try: + (fd, tmp) = tempfile.mkstemp() + os.write(fd, '\r\n'.join(args).encode()) + os.close(fd) + + self.msvcdeps_paths = [] + kw['env'] = kw.get('env', os.environ.copy()) + kw['cwd'] = kw.get('cwd', os.getcwd()) + kw['quiet'] = Context.STDOUT + kw['output'] = Context.STDOUT + + out = [] + if Logs.verbose: + Logs.debug('argfile: @%r -> %r', tmp, args) + try: + raw_out = self.generator.bld.cmd_and_log(cmd + ['@' + tmp], **kw) + ret = 0 + except Errors.WafError as e: + # Use e.msg if e.stdout is not set + raw_out = getattr(e, 'stdout', e.msg) + + # Return non-zero error code even if we didn't + # get one from the exception object + ret = getattr(e, 'returncode', 1) + + for line in raw_out.splitlines(): + if line.startswith(INCLUDE_PATTERN): + inc_path = line[len(INCLUDE_PATTERN):].strip() + Logs.debug('msvcdeps: Regex matched %s', inc_path) + self.msvcdeps_paths.append(inc_path) + else: + out.append(line) + + # Pipe through the remaining stdout content (not related to /showIncludes) + if self.generator.bld.logger: + self.generator.bld.logger.debug('out: %s' % os.linesep.join(out)) + else: + sys.stdout.write(os.linesep.join(out) + os.linesep) + + return ret + finally: + try: + os.remove(tmp) + except OSError: + # anti-virus and indexers can keep files open -_- + pass + + +def wrap_compiled_task(classname): + derived_class = type(classname, (Task.classes[classname],), {}) + derived_class.derived_msvcdeps = derived_class + derived_class.post_run = post_run + derived_class.scan = scan + derived_class.sig_implicit_deps = sig_implicit_deps + derived_class.exec_command = exec_command + +for k in ('c', 'cxx'): + if k in Task.classes: + wrap_compiled_task(k) + +def options(opt): + raise ValueError('Do not load msvcdeps options') + diff -Nru lilv-0.24.4~dfsg0/waflib/extras/msvs.py lilv-0.24.6/waflib/extras/msvs.py --- lilv-0.24.4~dfsg0/waflib/extras/msvs.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/extras/msvs.py 2019-06-06 20:19:08.000000000 +0000 @@ -0,0 +1,1048 @@ +#! /usr/bin/env python +# encoding: utf-8 +# Avalanche Studios 2009-2011 +# Thomas Nagy 2011 + +""" +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions +are met: + +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +3. The name of the author may not be used to endorse or promote products + derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR +IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, +INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, +STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING +IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. 
+""" + +""" +To add this tool to your project: +def options(conf): + opt.load('msvs') + +It can be a good idea to add the sync_exec tool too. + +To generate solution files: +$ waf configure msvs + +To customize the outputs, provide subclasses in your wscript files:: + + from waflib.extras import msvs + class vsnode_target(msvs.vsnode_target): + def get_build_command(self, props): + # likely to be required + return "waf.bat build" + def collect_source(self): + # likely to be required + ... + class msvs_bar(msvs.msvs_generator): + def init(self): + msvs.msvs_generator.init(self) + self.vsnode_target = vsnode_target + +The msvs class re-uses the same build() function for reading the targets (task generators), +you may therefore specify msvs settings on the context object:: + + def build(bld): + bld.solution_name = 'foo.sln' + bld.waf_command = 'waf.bat' + bld.projects_dir = bld.srcnode.make_node('.depproj') + bld.projects_dir.mkdir() + +For visual studio 2008, the command is called 'msvs2008', and the classes +such as vsnode_target are wrapped by a decorator class 'wrap_2008' to +provide special functionality. + +To customize platform toolsets, pass additional parameters, for example:: + + class msvs_2013(msvs.msvs_generator): + cmd = 'msvs2013' + numver = '13.00' + vsver = '2013' + platform_toolset_ver = 'v120' + +ASSUMPTIONS: +* a project can be either a directory or a target, vcxproj files are written only for targets that have source files +* each project is a vcxproj file, therefore the project uuid needs only to be a hash of the absolute path +""" + +import os, re, sys +import uuid # requires python 2.5 +from waflib.Build import BuildContext +from waflib import Utils, TaskGen, Logs, Task, Context, Node, Options + +HEADERS_GLOB = '**/(*.h|*.hpp|*.H|*.inl)' + +PROJECT_TEMPLATE = r''' + + + + ${for b in project.build_properties} + + ${b.configuration} + ${b.platform} + + ${endfor} + + + + {${project.uuid}} + MakeFileProj + ${project.name} + + + + ${for b in project.build_properties} + + Makefile + ${b.outdir} + ${project.platform_toolset_ver} + + ${endfor} + + + + + + ${for b in project.build_properties} + + + + ${endfor} + + ${for b in project.build_properties} + + ${xml:project.get_build_command(b)} + ${xml:project.get_rebuild_command(b)} + ${xml:project.get_clean_command(b)} + ${xml:b.includes_search_path} + ${xml:b.preprocessor_definitions};$(NMakePreprocessorDefinitions) + ${xml:b.includes_search_path} + $(ExecutablePath) + + ${if getattr(b, 'output_file', None)} + ${xml:b.output_file} + ${endif} + ${if getattr(b, 'deploy_dir', None)} + ${xml:b.deploy_dir} + ${endif} + + ${endfor} + + ${for b in project.build_properties} + ${if getattr(b, 'deploy_dir', None)} + + + CopyToHardDrive + + + ${endif} + ${endfor} + + + ${for x in project.source} + <${project.get_key(x)} Include='${x.win32path()}' /> + ${endfor} + + + + + +''' + +FILTER_TEMPLATE = ''' + + + ${for x in project.source} + <${project.get_key(x)} Include="${x.win32path()}"> + ${project.get_filter_name(x.parent)} + + ${endfor} + + + ${for x in project.dirs()} + + {${project.make_uuid(x.win32path())}} + + ${endfor} + + +''' + +PROJECT_2008_TEMPLATE = r''' + + + ${if project.build_properties} + ${for b in project.build_properties} + + ${endfor} + ${else} + + ${endif} + + + + + ${if project.build_properties} + ${for b in project.build_properties} + + + + ${endfor} + ${else} + + + ${endif} + + + + +${project.display_filter()} + + +''' + +SOLUTION_TEMPLATE = '''Microsoft Visual Studio Solution File, Format Version ${project.numver} +# 
Visual Studio ${project.vsver}
+${for p in project.all_projects}
+Project("{${p.ptype()}}") = "${p.name}", "${p.title}", "{${p.uuid}}"
+EndProject${endfor}
+Global
+	GlobalSection(SolutionConfigurationPlatforms) = preSolution
+		${if project.all_projects}
+		${for (configuration, platform) in project.all_projects[0].ctx.project_configurations()}
+		${configuration}|${platform} = ${configuration}|${platform}
+		${endfor}
+		${endif}
+	EndGlobalSection
+	GlobalSection(ProjectConfigurationPlatforms) = postSolution
+		${for p in project.all_projects}
+		${if hasattr(p, 'source')}
+		${for b in p.build_properties}
+		{${p.uuid}}.${b.configuration}|${b.platform}.ActiveCfg = ${b.configuration}|${b.platform}
+		${if getattr(p, 'is_active', None)}
+		{${p.uuid}}.${b.configuration}|${b.platform}.Build.0 = ${b.configuration}|${b.platform}
+		${endif}
+		${if getattr(p, 'is_deploy', None)}
+		{${p.uuid}}.${b.configuration}|${b.platform}.Deploy.0 = ${b.configuration}|${b.platform}
+		${endif}
+		${endfor}
+		${endif}
+		${endfor}
+	EndGlobalSection
+	GlobalSection(SolutionProperties) = preSolution
+		HideSolutionNode = FALSE
+	EndGlobalSection
+	GlobalSection(NestedProjects) = preSolution
+		${for p in project.all_projects}
+		${if p.parent}
+		{${p.uuid}} = {${p.parent.uuid}}
+		${endif}
+		${endfor}
+	EndGlobalSection
+EndGlobal
+'''
+
+COMPILE_TEMPLATE = '''def f(project):
+	lst = []
+	def xml_escape(value):
+		return value.replace("&", "&amp;").replace('"', "&quot;").replace("'", "&apos;").replace("<", "&lt;").replace(">", "&gt;")
+
+	%s
+
+	#f = open('cmd.txt', 'w')
+	#f.write(str(lst))
+	#f.close()
+	return ''.join(lst)
+'''
+reg_act = re.compile(r"(?P<backslash>\\)|(?P<dollar>\$\$)|(?P<subst>\$\{(?P<code>[^}]*?)\})", re.M)
+def compile_template(line):
+	"""
+	Compile a template expression into a python function (like jsps, but way shorter)
+	"""
+	extr = []
+	def repl(match):
+		g = match.group
+		if g('dollar'):
+			return "$"
+		elif g('backslash'):
+			return "\\"
+		elif g('subst'):
+			extr.append(g('code'))
+			return "<<|@|>>"
+		return None
+
+	line2 = reg_act.sub(repl, line)
+	params = line2.split('<<|@|>>')
+	assert(extr)
+
+
+	indent = 0
+	buf = []
+	app = buf.append
+
+	def app(txt):
+		buf.append(indent * '\t' + txt)
+
+	for x in range(len(extr)):
+		if params[x]:
+			app("lst.append(%r)" % params[x])
+
+		f = extr[x]
+		if f.startswith(('if', 'for')):
+			app(f + ':')
+			indent += 1
+		elif f.startswith('py:'):
+			app(f[3:])
+		elif f.startswith(('endif', 'endfor')):
+			indent -= 1
+		elif f.startswith(('else', 'elif')):
+			indent -= 1
+			app(f + ':')
+			indent += 1
+		elif f.startswith('xml:'):
+			app('lst.append(xml_escape(%s))' % f[4:])
+		else:
+			#app('lst.append((%s) or "cannot find %s")' % (f, f))
+			app('lst.append(%s)' % f)
+
+	if extr:
+		if params[-1]:
+			app("lst.append(%r)" % params[-1])
+
+	fun = COMPILE_TEMPLATE % "\n\t".join(buf)
+	#print(fun)
+	return Task.funex(fun)
+
+
+re_blank = re.compile('(\n|\r|\\s)*\n', re.M)
+def rm_blank_lines(txt):
+	txt = re_blank.sub('\r\n', txt)
+	return txt
+
+BOM = '\xef\xbb\xbf'
+try:
+	BOM = bytes(BOM, 'latin-1') # python 3
+except TypeError:
+	pass
+
+def stealth_write(self, data, flags='wb'):
+	try:
+		unicode
+	except NameError:
+		data = data.encode('utf-8') # python 3
+	else:
+		data = data.decode(sys.getfilesystemencoding(), 'replace')
+		data = data.encode('utf-8')
+
+	if self.name.endswith(('.vcproj', '.vcxproj')):
+		data = BOM + data
+
+	try:
+		txt = self.read(flags='rb')
+		if txt != data:
+			raise ValueError('must write')
+	except (IOError, ValueError):
+		self.write(data, flags=flags)
+	else:
+		Logs.debug('msvs: skipping %s', self.win32path())
+Node.Node.stealth_write = stealth_write
+
+re_win32 = re.compile(r'^([/\\]cygdrive)?[/\\]([a-z])([^a-z0-9_-].*)', re.I)
+def win32path(self):
+	p = self.abspath()
+	m = re_win32.match(p)
+	if m:
+		return "%s:%s" % (m.group(2).upper(), m.group(3))
+	return p
+Node.Node.win32path = win32path
+
+re_quote = re.compile("[^a-zA-Z0-9-]")
+def quote(s):
+	return re_quote.sub("_", s)
+
+def xml_escape(value):
+	return value.replace("&", "&amp;").replace('"', "&quot;").replace("'", "&apos;").replace("<", "&lt;").replace(">", "&gt;")
+
+def make_uuid(v, prefix = None):
+	"""
+	simple utility function
+	"""
+	if isinstance(v, dict):
+		keys = list(v.keys())
+		keys.sort()
+		tmp = str([(k, v[k]) for k in keys])
+	else:
+		tmp = str(v)
+	d = Utils.md5(tmp.encode()).hexdigest().upper()
+	if prefix:
+		d = '%s%s' % (prefix, d[8:])
+	gid = uuid.UUID(d, version = 4)
+	return str(gid).upper()
+
+def diff(node, fromnode):
+	# difference between two nodes, but with "(..)" instead of ".."
+	c1 = node
+	c2 = fromnode
+
+	c1h = c1.height()
+	c2h = c2.height()
+
+	lst = []
+	up = 0
+
+	while c1h > c2h:
+		lst.append(c1.name)
+		c1 = c1.parent
+		c1h -= 1
+
+	while c2h > c1h:
+		up += 1
+		c2 = c2.parent
+		c2h -= 1
+
+	while id(c1) != id(c2):
+		lst.append(c1.name)
+		up += 1
+
+		c1 = c1.parent
+		c2 = c2.parent
+
+	for i in range(up):
+		lst.append('(..)')
+	lst.reverse()
+	return tuple(lst)
+
+class build_property(object):
+	pass
+
+class vsnode(object):
+	"""
+	Abstract class representing visual studio elements
+	We assume that all visual studio nodes have a uuid and a parent
+	"""
+	def __init__(self, ctx):
+		self.ctx = ctx # msvs context
+		self.name = '' # string, mandatory
+		self.vspath = '' # path in visual studio (name for dirs, absolute path for projects)
+		self.uuid = '' # string, mandatory
+		self.parent = None # parent node for visual studio nesting
+
+	def get_waf(self):
+		"""
+		Override in subclasses...
+		"""
+		return 'cd /d "%s" & %s' % (self.ctx.srcnode.win32path(), getattr(self.ctx, 'waf_command', 'waf.bat'))
+
+	def ptype(self):
+		"""
+		Return a special uuid for projects written in the solution file
+		"""
+		pass
+
+	def write(self):
+		"""
+		Write the project file, by default, do nothing
+		"""
+		pass
+
+	def make_uuid(self, val):
+		"""
+		Alias for creating uuid values easily (the templates cannot access global variables)
+		"""
+		return make_uuid(val)
+
+class vsnode_vsdir(vsnode):
+	"""
+	Nodes representing visual studio folders (which do not match the filesystem tree!)
+ """ + VS_GUID_SOLUTIONFOLDER = "2150E333-8FDC-42A3-9474-1A3956D46DE8" + def __init__(self, ctx, uuid, name, vspath=''): + vsnode.__init__(self, ctx) + self.title = self.name = name + self.uuid = uuid + self.vspath = vspath or name + + def ptype(self): + return self.VS_GUID_SOLUTIONFOLDER + +class vsnode_project(vsnode): + """ + Abstract class representing visual studio project elements + A project is assumed to be writable, and has a node representing the file to write to + """ + VS_GUID_VCPROJ = "8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942" + def ptype(self): + return self.VS_GUID_VCPROJ + + def __init__(self, ctx, node): + vsnode.__init__(self, ctx) + self.path = node + self.uuid = make_uuid(node.win32path()) + self.name = node.name + self.platform_toolset_ver = getattr(ctx, 'platform_toolset_ver', None) + self.title = self.path.win32path() + self.source = [] # list of node objects + self.build_properties = [] # list of properties (nmake commands, output dir, etc) + + def dirs(self): + """ + Get the list of parent folders of the source files (header files included) + for writing the filters + """ + lst = [] + def add(x): + if x.height() > self.tg.path.height() and x not in lst: + lst.append(x) + add(x.parent) + for x in self.source: + add(x.parent) + return lst + + def write(self): + Logs.debug('msvs: creating %r', self.path) + + # first write the project file + template1 = compile_template(PROJECT_TEMPLATE) + proj_str = template1(self) + proj_str = rm_blank_lines(proj_str) + self.path.stealth_write(proj_str) + + # then write the filter + template2 = compile_template(FILTER_TEMPLATE) + filter_str = template2(self) + filter_str = rm_blank_lines(filter_str) + tmp = self.path.parent.make_node(self.path.name + '.filters') + tmp.stealth_write(filter_str) + + def get_key(self, node): + """ + required for writing the source files + """ + name = node.name + if name.endswith(('.cpp', '.c')): + return 'ClCompile' + return 'ClInclude' + + def collect_properties(self): + """ + Returns a list of triplet (configuration, platform, output_directory) + """ + ret = [] + for c in self.ctx.configurations: + for p in self.ctx.platforms: + x = build_property() + x.outdir = '' + + x.configuration = c + x.platform = p + + x.preprocessor_definitions = '' + x.includes_search_path = '' + + # can specify "deploy_dir" too + ret.append(x) + self.build_properties = ret + + def get_build_params(self, props): + opt = '--execsolution=%s' % self.ctx.get_solution_node().win32path() + return (self.get_waf(), opt) + + def get_build_command(self, props): + return "%s build %s" % self.get_build_params(props) + + def get_clean_command(self, props): + return "%s clean %s" % self.get_build_params(props) + + def get_rebuild_command(self, props): + return "%s clean build %s" % self.get_build_params(props) + + def get_filter_name(self, node): + lst = diff(node, self.tg.path) + return '\\'.join(lst) or '.' 
+ +class vsnode_alias(vsnode_project): + def __init__(self, ctx, node, name): + vsnode_project.__init__(self, ctx, node) + self.name = name + self.output_file = '' + +class vsnode_build_all(vsnode_alias): + """ + Fake target used to emulate the behaviour of "make all" (starting one process by target is slow) + This is the only alias enabled by default + """ + def __init__(self, ctx, node, name='build_all_projects'): + vsnode_alias.__init__(self, ctx, node, name) + self.is_active = True + +class vsnode_install_all(vsnode_alias): + """ + Fake target used to emulate the behaviour of "make install" + """ + def __init__(self, ctx, node, name='install_all_projects'): + vsnode_alias.__init__(self, ctx, node, name) + + def get_build_command(self, props): + return "%s build install %s" % self.get_build_params(props) + + def get_clean_command(self, props): + return "%s clean %s" % self.get_build_params(props) + + def get_rebuild_command(self, props): + return "%s clean build install %s" % self.get_build_params(props) + +class vsnode_project_view(vsnode_alias): + """ + Fake target used to emulate a file system view + """ + def __init__(self, ctx, node, name='project_view'): + vsnode_alias.__init__(self, ctx, node, name) + self.tg = self.ctx() # fake one, cannot remove + self.exclude_files = Node.exclude_regs + ''' +waf-2* +waf3-2*/** +.waf-2* +.waf3-2*/** +**/*.sdf +**/*.suo +**/*.ncb +**/%s + ''' % Options.lockfile + + def collect_source(self): + # this is likely to be slow + self.source = self.ctx.srcnode.ant_glob('**', excl=self.exclude_files) + + def get_build_command(self, props): + params = self.get_build_params(props) + (self.ctx.cmd,) + return "%s %s %s" % params + + def get_clean_command(self, props): + return "" + + def get_rebuild_command(self, props): + return self.get_build_command(props) + +class vsnode_target(vsnode_project): + """ + Visual studio project representing a targets (programs, libraries, etc) and bound + to a task generator + """ + def __init__(self, ctx, tg): + """ + A project is more or less equivalent to a file/folder + """ + base = getattr(ctx, 'projects_dir', None) or tg.path + node = base.make_node(quote(tg.name) + ctx.project_extension) # the project file as a Node + vsnode_project.__init__(self, ctx, node) + self.name = quote(tg.name) + self.tg = tg # task generator + + def get_build_params(self, props): + """ + Override the default to add the target name + """ + opt = '--execsolution=%s' % self.ctx.get_solution_node().win32path() + if getattr(self, 'tg', None): + opt += " --targets=%s" % self.tg.name + return (self.get_waf(), opt) + + def collect_source(self): + tg = self.tg + source_files = tg.to_nodes(getattr(tg, 'source', [])) + include_dirs = Utils.to_list(getattr(tg, 'msvs_includes', [])) + include_files = [] + for x in include_dirs: + if isinstance(x, str): + x = tg.path.find_node(x) + if x: + lst = [y for y in x.ant_glob(HEADERS_GLOB, flat=False)] + include_files.extend(lst) + + # remove duplicates + self.source.extend(list(set(source_files + include_files))) + self.source.sort(key=lambda x: x.win32path()) + + def collect_properties(self): + """ + Visual studio projects are associated with platforms and configurations (for building especially) + """ + super(vsnode_target, self).collect_properties() + for x in self.build_properties: + x.outdir = self.path.parent.win32path() + x.preprocessor_definitions = '' + x.includes_search_path = '' + + try: + tsk = self.tg.link_task + except AttributeError: + pass + else: + x.output_file = tsk.outputs[0].win32path() + 
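+				# The two fields below feed the NMake IntelliSense entries of
+				# PROJECT_TEMPLATE (includes_search_path / preprocessor_definitions);
+				# they are semicolon-separated, which is presumably what Visual
+				# Studio expects for these properties.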
x.preprocessor_definitions = ';'.join(tsk.env.DEFINES) + x.includes_search_path = ';'.join(self.tg.env.INCPATHS) + +class msvs_generator(BuildContext): + '''generates a visual studio 2010 solution''' + cmd = 'msvs' + fun = 'build' + numver = '11.00' # Visual Studio Version Number + vsver = '2010' # Visual Studio Version Year + platform_toolset_ver = 'v110' # Platform Toolset Version Number + + def init(self): + """ + Some data that needs to be present + """ + if not getattr(self, 'configurations', None): + self.configurations = ['Release'] # LocalRelease, RemoteDebug, etc + if not getattr(self, 'platforms', None): + self.platforms = ['Win32'] + if not getattr(self, 'all_projects', None): + self.all_projects = [] + if not getattr(self, 'project_extension', None): + self.project_extension = '.vcxproj' + if not getattr(self, 'projects_dir', None): + self.projects_dir = self.srcnode.make_node('.depproj') + self.projects_dir.mkdir() + + # bind the classes to the object, so that subclass can provide custom generators + if not getattr(self, 'vsnode_vsdir', None): + self.vsnode_vsdir = vsnode_vsdir + if not getattr(self, 'vsnode_target', None): + self.vsnode_target = vsnode_target + if not getattr(self, 'vsnode_build_all', None): + self.vsnode_build_all = vsnode_build_all + if not getattr(self, 'vsnode_install_all', None): + self.vsnode_install_all = vsnode_install_all + if not getattr(self, 'vsnode_project_view', None): + self.vsnode_project_view = vsnode_project_view + + self.numver = self.__class__.numver + self.vsver = self.__class__.vsver + self.platform_toolset_ver = self.__class__.platform_toolset_ver + + def execute(self): + """ + Entry point + """ + self.restore() + if not self.all_envs: + self.load_envs() + self.recurse([self.run_dir]) + + # user initialization + self.init() + + # two phases for creating the solution + self.collect_projects() # add project objects into "self.all_projects" + self.write_files() # write the corresponding project and solution files + + def collect_projects(self): + """ + Fill the list self.all_projects with project objects + Fill the list of build targets + """ + self.collect_targets() + self.add_aliases() + self.collect_dirs() + default_project = getattr(self, 'default_project', None) + def sortfun(x): + if x.name == default_project: + return '' + return getattr(x, 'path', None) and x.path.win32path() or x.name + self.all_projects.sort(key=sortfun) + + def write_files(self): + """ + Write the project and solution files from the data collected + so far. 
It is unlikely that you will want to change this + """ + for p in self.all_projects: + p.write() + + # and finally write the solution file + node = self.get_solution_node() + node.parent.mkdir() + Logs.warn('Creating %r', node) + template1 = compile_template(SOLUTION_TEMPLATE) + sln_str = template1(self) + sln_str = rm_blank_lines(sln_str) + node.stealth_write(sln_str) + + def get_solution_node(self): + """ + The solution filename is required when writing the .vcproj files + return self.solution_node and if it does not exist, make one + """ + try: + return self.solution_node + except AttributeError: + pass + + solution_name = getattr(self, 'solution_name', None) + if not solution_name: + solution_name = getattr(Context.g_module, Context.APPNAME, 'project') + '.sln' + if os.path.isabs(solution_name): + self.solution_node = self.root.make_node(solution_name) + else: + self.solution_node = self.srcnode.make_node(solution_name) + return self.solution_node + + def project_configurations(self): + """ + Helper that returns all the pairs (config,platform) + """ + ret = [] + for c in self.configurations: + for p in self.platforms: + ret.append((c, p)) + return ret + + def collect_targets(self): + """ + Process the list of task generators + """ + for g in self.groups: + for tg in g: + if not isinstance(tg, TaskGen.task_gen): + continue + + if not hasattr(tg, 'msvs_includes'): + tg.msvs_includes = tg.to_list(getattr(tg, 'includes', [])) + tg.to_list(getattr(tg, 'export_includes', [])) + tg.post() + if not getattr(tg, 'link_task', None): + continue + + p = self.vsnode_target(self, tg) + p.collect_source() # delegate this processing + p.collect_properties() + self.all_projects.append(p) + + def add_aliases(self): + """ + Add a specific target that emulates the "make all" necessary for Visual studio when pressing F7 + We also add an alias for "make install" (disabled by default) + """ + base = getattr(self, 'projects_dir', None) or self.tg.path + + node_project = base.make_node('build_all_projects' + self.project_extension) # Node + p_build = self.vsnode_build_all(self, node_project) + p_build.collect_properties() + self.all_projects.append(p_build) + + node_project = base.make_node('install_all_projects' + self.project_extension) # Node + p_install = self.vsnode_install_all(self, node_project) + p_install.collect_properties() + self.all_projects.append(p_install) + + node_project = base.make_node('project_view' + self.project_extension) # Node + p_view = self.vsnode_project_view(self, node_project) + p_view.collect_source() + p_view.collect_properties() + self.all_projects.append(p_view) + + n = self.vsnode_vsdir(self, make_uuid(self.srcnode.win32path() + 'build_aliases'), "build_aliases") + p_build.parent = p_install.parent = p_view.parent = n + self.all_projects.append(n) + + def collect_dirs(self): + """ + Create the folder structure in the Visual studio project view + """ + seen = {} + def make_parents(proj): + # look at a project, try to make a parent + if getattr(proj, 'parent', None): + # aliases already have parents + return + x = proj.iter_path + if x in seen: + proj.parent = seen[x] + return + + # There is not vsnode_vsdir for x. 
+ # So create a project representing the folder "x" + n = proj.parent = seen[x] = self.vsnode_vsdir(self, make_uuid(x.win32path()), x.name) + n.iter_path = x.parent + self.all_projects.append(n) + + # recurse up to the project directory + if x.height() > self.srcnode.height() + 1: + make_parents(n) + + for p in self.all_projects[:]: # iterate over a copy of all projects + if not getattr(p, 'tg', None): + # but only projects that have a task generator + continue + + # make a folder for each task generator + p.iter_path = p.tg.path + make_parents(p) + +def wrap_2008(cls): + class dec(cls): + def __init__(self, *k, **kw): + cls.__init__(self, *k, **kw) + self.project_template = PROJECT_2008_TEMPLATE + + def display_filter(self): + + root = build_property() + root.subfilters = [] + root.sourcefiles = [] + root.source = [] + root.name = '' + + @Utils.run_once + def add_path(lst): + if not lst: + return root + child = build_property() + child.subfilters = [] + child.sourcefiles = [] + child.source = [] + child.name = lst[-1] + + par = add_path(lst[:-1]) + par.subfilters.append(child) + return child + + for x in self.source: + # this crap is for enabling subclasses to override get_filter_name + tmp = self.get_filter_name(x.parent) + tmp = tmp != '.' and tuple(tmp.split('\\')) or () + par = add_path(tmp) + par.source.append(x) + + def display(n): + buf = [] + for x in n.source: + buf.append('\n' % (xml_escape(x.win32path()), self.get_key(x))) + for x in n.subfilters: + buf.append('' % xml_escape(x.name)) + buf.append(display(x)) + buf.append('') + return '\n'.join(buf) + + return display(root) + + def get_key(self, node): + """ + If you do not want to let visual studio use the default file extensions, + override this method to return a value: + 0: C/C++ Code, 1: C++ Class, 2: C++ Header File, 3: C++ Form, + 4: C++ Control, 5: Text File, 6: DEF File, 7: IDL File, + 8: Makefile, 9: RGS File, 10: RC File, 11: RES File, 12: XSD File, + 13: XML File, 14: HTML File, 15: CSS File, 16: Bitmap, 17: Icon, + 18: Resx File, 19: BSC File, 20: XSX File, 21: C++ Web Service, + 22: ASAX File, 23: Asp Page, 24: Document, 25: Discovery File, + 26: C# File, 27: eFileTypeClassDiagram, 28: MHTML Document, + 29: Property Sheet, 30: Cursor, 31: Manifest, 32: eFileTypeRDLC + """ + return '' + + def write(self): + Logs.debug('msvs: creating %r', self.path) + template1 = compile_template(self.project_template) + proj_str = template1(self) + proj_str = rm_blank_lines(proj_str) + self.path.stealth_write(proj_str) + + return dec + +class msvs_2008_generator(msvs_generator): + '''generates a visual studio 2008 solution''' + cmd = 'msvs2008' + fun = msvs_generator.fun + numver = '10.00' + vsver = '2008' + + def init(self): + if not getattr(self, 'project_extension', None): + self.project_extension = '_2008.vcproj' + if not getattr(self, 'solution_name', None): + self.solution_name = getattr(Context.g_module, Context.APPNAME, 'project') + '_2008.sln' + + if not getattr(self, 'vsnode_target', None): + self.vsnode_target = wrap_2008(vsnode_target) + if not getattr(self, 'vsnode_build_all', None): + self.vsnode_build_all = wrap_2008(vsnode_build_all) + if not getattr(self, 'vsnode_install_all', None): + self.vsnode_install_all = wrap_2008(vsnode_install_all) + if not getattr(self, 'vsnode_project_view', None): + self.vsnode_project_view = wrap_2008(vsnode_project_view) + + msvs_generator.init(self) + +def options(ctx): + """ + If the msvs option is used, try to detect if the build is made from visual studio + """ + 
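+	# Hedged note on the wrapper below: the build-state files are derived from
+	# the solution name (foo.sln -> foo.lastbuildstate / foo.unsuccessfulbuild);
+	# the marker is flipped to .unsuccessfulbuild before waf runs and back to
+	# .lastbuildstate only after a successful build, so a crashed or aborted
+	# build should leave Visual Studio considering the project out of date.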
ctx.add_option('--execsolution', action='store', help='when building with visual studio, use a build state file') + + old = BuildContext.execute + def override_build_state(ctx): + def lock(rm, add): + uns = ctx.options.execsolution.replace('.sln', rm) + uns = ctx.root.make_node(uns) + try: + uns.delete() + except OSError: + pass + + uns = ctx.options.execsolution.replace('.sln', add) + uns = ctx.root.make_node(uns) + try: + uns.write('') + except EnvironmentError: + pass + + if ctx.options.execsolution: + ctx.launch_dir = Context.top_dir # force a build for the whole project (invalid cwd when called by visual studio) + lock('.lastbuildstate', '.unsuccessfulbuild') + old(ctx) + lock('.unsuccessfulbuild', '.lastbuildstate') + else: + old(ctx) + BuildContext.execute = override_build_state + diff -Nru lilv-0.24.4~dfsg0/waflib/extras/netcache_client.py lilv-0.24.6/waflib/extras/netcache_client.py --- lilv-0.24.4~dfsg0/waflib/extras/netcache_client.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/extras/netcache_client.py 2019-06-06 20:19:08.000000000 +0000 @@ -0,0 +1,390 @@ +#! /usr/bin/env python +# encoding: utf-8 +# Thomas Nagy, 2011-2015 (ita) + +""" +A client for the network cache (playground/netcache/). Launch the server with: +./netcache_server, then use it for the builds by adding the following: + + def build(bld): + bld.load('netcache_client') + +The parameters should be present in the environment in the form: + NETCACHE=host:port waf configure build + +Or in a more detailed way: + NETCACHE_PUSH=host:port NETCACHE_PULL=host:port waf configure build + +where: + host: host where the server resides, by default localhost + port: by default push on 11001 and pull on 12001 + +Use the server provided in playground/netcache/Netcache.java +""" + +import os, socket, time, atexit, sys +from waflib import Task, Logs, Utils, Build, Runner +from waflib.Configure import conf + +BUF = 8192 * 16 +HEADER_SIZE = 128 +MODES = ['PUSH', 'PULL', 'PUSH_PULL'] +STALE_TIME = 30 # seconds + +GET = 'GET' +PUT = 'PUT' +LST = 'LST' +BYE = 'BYE' + +all_sigs_in_cache = (0.0, []) + +def put_data(conn, data): + if sys.hexversion > 0x3000000: + data = data.encode('latin-1') + cnt = 0 + while cnt < len(data): + sent = conn.send(data[cnt:]) + if sent == 0: + raise RuntimeError('connection ended') + cnt += sent + +push_connections = Runner.Queue(0) +pull_connections = Runner.Queue(0) +def get_connection(push=False): + # return a new connection... do not forget to release it! 
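+	# Sketch of the intended call pattern (illustration only; the real call
+	# sites are can_retrieve_cache() and put_files_cache() further down):
+	#
+	#   conn = get_connection(push=True)
+	#   try:
+	#       sock_send(conn, ssig, 0, node.abspath())
+	#   finally:
+	#       release_connection(conn, push=True)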
+ try: + if push: + ret = push_connections.get(block=False) + else: + ret = pull_connections.get(block=False) + except Exception: + ret = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + if push: + ret.connect(Task.push_addr) + else: + ret.connect(Task.pull_addr) + return ret + +def release_connection(conn, msg='', push=False): + if conn: + if push: + push_connections.put(conn) + else: + pull_connections.put(conn) + +def close_connection(conn, msg=''): + if conn: + data = '%s,%s' % (BYE, msg) + try: + put_data(conn, data.ljust(HEADER_SIZE)) + except: + pass + try: + conn.close() + except: + pass + +def close_all(): + for q in (push_connections, pull_connections): + while q.qsize(): + conn = q.get() + try: + close_connection(conn) + except: + # ignore errors when cleaning up + pass +atexit.register(close_all) + +def read_header(conn): + cnt = 0 + buf = [] + while cnt < HEADER_SIZE: + data = conn.recv(HEADER_SIZE - cnt) + if not data: + #import traceback + #traceback.print_stack() + raise ValueError('connection ended when reading a header %r' % buf) + buf.append(data) + cnt += len(data) + if sys.hexversion > 0x3000000: + ret = ''.encode('latin-1').join(buf) + ret = ret.decode('latin-1') + else: + ret = ''.join(buf) + return ret + +def check_cache(conn, ssig): + """ + List the files on the server, this is an optimization because it assumes that + concurrent builds are rare + """ + global all_sigs_in_cache + if not STALE_TIME: + return + if time.time() - all_sigs_in_cache[0] > STALE_TIME: + + params = (LST,'') + put_data(conn, ','.join(params).ljust(HEADER_SIZE)) + + # read what is coming back + ret = read_header(conn) + size = int(ret.split(',')[0]) + + buf = [] + cnt = 0 + while cnt < size: + data = conn.recv(min(BUF, size-cnt)) + if not data: + raise ValueError('connection ended %r %r' % (cnt, size)) + buf.append(data) + cnt += len(data) + + if sys.hexversion > 0x3000000: + ret = ''.encode('latin-1').join(buf) + ret = ret.decode('latin-1') + else: + ret = ''.join(buf) + + all_sigs_in_cache = (time.time(), ret.splitlines()) + Logs.debug('netcache: server cache has %r entries', len(all_sigs_in_cache[1])) + + if not ssig in all_sigs_in_cache[1]: + raise ValueError('no file %s in cache' % ssig) + +class MissingFile(Exception): + pass + +def recv_file(conn, ssig, count, p): + check_cache(conn, ssig) + + params = (GET, ssig, str(count)) + put_data(conn, ','.join(params).ljust(HEADER_SIZE)) + data = read_header(conn) + + size = int(data.split(',')[0]) + + if size == -1: + raise MissingFile('no file %s - %s in cache' % (ssig, count)) + + # get the file, writing immediately + # TODO a tmp file would be better + f = open(p, 'wb') + cnt = 0 + while cnt < size: + data = conn.recv(min(BUF, size-cnt)) + if not data: + raise ValueError('connection ended %r %r' % (cnt, size)) + f.write(data) + cnt += len(data) + f.close() + +def sock_send(conn, ssig, cnt, p): + #print "pushing %r %r %r" % (ssig, cnt, p) + size = os.stat(p).st_size + params = (PUT, ssig, str(cnt), str(size)) + put_data(conn, ','.join(params).ljust(HEADER_SIZE)) + f = open(p, 'rb') + cnt = 0 + while cnt < size: + r = f.read(min(BUF, size-cnt)) + while r: + k = conn.send(r) + if not k: + raise ValueError('connection ended') + cnt += k + r = r[k:] + +def can_retrieve_cache(self): + if not Task.pull_addr: + return False + if not self.outputs: + return False + self.cached = False + + cnt = 0 + sig = self.signature() + ssig = Utils.to_hex(self.uid() + sig) + + conn = None + err = False + try: + try: + conn = get_connection() + for node in 
self.outputs: + p = node.abspath() + recv_file(conn, ssig, cnt, p) + cnt += 1 + except MissingFile as e: + Logs.debug('netcache: file is not in the cache %r', e) + err = True + except Exception as e: + Logs.debug('netcache: could not get the files %r', self.outputs) + if Logs.verbose > 1: + Logs.debug('netcache: exception %r', e) + err = True + + # broken connection? remove this one + close_connection(conn) + conn = None + else: + Logs.debug('netcache: obtained %r from cache', self.outputs) + + finally: + release_connection(conn) + if err: + return False + + self.cached = True + return True + +@Utils.run_once +def put_files_cache(self): + if not Task.push_addr: + return + if not self.outputs: + return + if getattr(self, 'cached', None): + return + + #print "called put_files_cache", id(self) + bld = self.generator.bld + sig = self.signature() + ssig = Utils.to_hex(self.uid() + sig) + + conn = None + cnt = 0 + try: + for node in self.outputs: + # We could re-create the signature of the task with the signature of the outputs + # in practice, this means hashing the output files + # this is unnecessary + try: + if not conn: + conn = get_connection(push=True) + sock_send(conn, ssig, cnt, node.abspath()) + Logs.debug('netcache: sent %r', node) + except Exception as e: + Logs.debug('netcache: could not push the files %r', e) + + # broken connection? remove this one + close_connection(conn) + conn = None + cnt += 1 + finally: + release_connection(conn, push=True) + + bld.task_sigs[self.uid()] = self.cache_sig + +def hash_env_vars(self, env, vars_lst): + # reimplement so that the resulting hash does not depend on local paths + if not env.table: + env = env.parent + if not env: + return Utils.SIG_NIL + + idx = str(id(env)) + str(vars_lst) + try: + cache = self.cache_env + except AttributeError: + cache = self.cache_env = {} + else: + try: + return self.cache_env[idx] + except KeyError: + pass + + v = str([env[a] for a in vars_lst]) + v = v.replace(self.srcnode.abspath().__repr__()[:-1], '') + m = Utils.md5() + m.update(v.encode()) + ret = m.digest() + + Logs.debug('envhash: %r %r', ret, v) + + cache[idx] = ret + + return ret + +def uid(self): + # reimplement so that the signature does not depend on local paths + try: + return self.uid_ + except AttributeError: + m = Utils.md5() + src = self.generator.bld.srcnode + up = m.update + up(self.__class__.__name__.encode()) + for x in self.inputs + self.outputs: + up(x.path_from(src).encode()) + self.uid_ = m.digest() + return self.uid_ + + +def make_cached(cls): + if getattr(cls, 'nocache', None): + return + + m1 = cls.run + def run(self): + if getattr(self, 'nocache', False): + return m1(self) + if self.can_retrieve_cache(): + return 0 + return m1(self) + cls.run = run + + m2 = cls.post_run + def post_run(self): + if getattr(self, 'nocache', False): + return m2(self) + bld = self.generator.bld + ret = m2(self) + if bld.cache_global: + self.put_files_cache() + if hasattr(self, 'chmod'): + for node in self.outputs: + os.chmod(node.abspath(), self.chmod) + return ret + cls.post_run = post_run + +@conf +def setup_netcache(ctx, push_addr, pull_addr): + Task.Task.can_retrieve_cache = can_retrieve_cache + Task.Task.put_files_cache = put_files_cache + Task.Task.uid = uid + Task.push_addr = push_addr + Task.pull_addr = pull_addr + Build.BuildContext.hash_env_vars = hash_env_vars + ctx.cache_global = True + + for x in Task.classes.values(): + make_cached(x) + +def build(bld): + if not 'NETCACHE' in os.environ and not 'NETCACHE_PULL' in os.environ and not 
'NETCACHE_PUSH' in os.environ:
+		Logs.warn('Setting NETCACHE_PULL=127.0.0.1:12001 and NETCACHE_PUSH=127.0.0.1:11001')
+		os.environ['NETCACHE_PULL'] = '127.0.0.1:12001'
+		os.environ['NETCACHE_PUSH'] = '127.0.0.1:11001'
+
+	if 'NETCACHE' in os.environ:
+		if not 'NETCACHE_PUSH' in os.environ:
+			os.environ['NETCACHE_PUSH'] = os.environ['NETCACHE']
+		if not 'NETCACHE_PULL' in os.environ:
+			os.environ['NETCACHE_PULL'] = os.environ['NETCACHE']
+
+	v = os.environ['NETCACHE_PULL']
+	if v:
+		h, p = v.split(':')
+		pull_addr = (h, int(p))
+	else:
+		pull_addr = None
+
+	v = os.environ['NETCACHE_PUSH']
+	if v:
+		h, p = v.split(':')
+		push_addr = (h, int(p))
+	else:
+		push_addr = None
+
+	setup_netcache(bld, push_addr, pull_addr)
+
diff -Nru lilv-0.24.4~dfsg0/waflib/extras/objcopy.py lilv-0.24.6/waflib/extras/objcopy.py
--- lilv-0.24.4~dfsg0/waflib/extras/objcopy.py	1970-01-01 00:00:00.000000000 +0000
+++ lilv-0.24.6/waflib/extras/objcopy.py	2019-10-19 17:59:11.000000000 +0000
@@ -0,0 +1,53 @@
+#!/usr/bin/python
+# Grygoriy Fuchedzhy 2010
+
+"""
+Support for converting linked targets to ihex, srec or binary files using
+objcopy. Use the 'objcopy' feature in conjunction with the 'cc' or 'cxx'
+feature. The 'objcopy' feature uses the following attributes:
+
+objcopy_bfdname		Target object format name (eg. ihex, srec, binary).
+			Defaults to ihex.
+objcopy_target		File name used for objcopy output. This defaults to the
+			target name with objcopy_bfdname as extension.
+objcopy_install_path	Install path for objcopy_target file. Defaults to ${PREFIX}/firmware.
+objcopy_flags		Additional flags passed to objcopy.
+"""
+
+from waflib.Utils import def_attrs
+from waflib import Task, Options
+from waflib.TaskGen import feature, after_method
+
+class objcopy(Task.Task):
+	run_str = '${OBJCOPY} -O ${TARGET_BFDNAME} ${OBJCOPYFLAGS} ${SRC} ${TGT}'
+	color = 'CYAN'
+
+@feature('objcopy')
+@after_method('apply_link')
+def map_objcopy(self):
+	def_attrs(self,
+		objcopy_bfdname = 'ihex',
+		objcopy_target = None,
+		objcopy_install_path = "${PREFIX}/firmware",
+		objcopy_flags = '')
+
+	link_output = self.link_task.outputs[0]
+	if not self.objcopy_target:
+		self.objcopy_target = link_output.change_ext('.'
+ self.objcopy_bfdname).name + task = self.create_task('objcopy', src=link_output, tgt=self.path.find_or_declare(self.objcopy_target)) + + task.env.append_unique('TARGET_BFDNAME', self.objcopy_bfdname) + try: + task.env.append_unique('OBJCOPYFLAGS', getattr(self, 'objcopy_flags')) + except AttributeError: + pass + + if self.objcopy_install_path: + self.add_install_files(install_to=self.objcopy_install_path, install_from=task.outputs[0]) + +def configure(ctx): + program_name = 'objcopy' + prefix = getattr(Options.options, 'cross_prefix', None) + if prefix: + program_name = '{}-{}'.format(prefix, program_name) + ctx.find_program(program_name, var='OBJCOPY', mandatory=True) diff -Nru lilv-0.24.4~dfsg0/waflib/extras/ocaml.py lilv-0.24.6/waflib/extras/ocaml.py --- lilv-0.24.4~dfsg0/waflib/extras/ocaml.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/extras/ocaml.py 2019-06-06 20:19:08.000000000 +0000 @@ -0,0 +1,348 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Thomas Nagy, 2006-2010 (ita) + +"ocaml support" + +import os, re +from waflib import Utils, Task +from waflib.Logs import error +from waflib.TaskGen import feature, before_method, after_method, extension + +EXT_MLL = ['.mll'] +EXT_MLY = ['.mly'] +EXT_MLI = ['.mli'] +EXT_MLC = ['.c'] +EXT_ML = ['.ml'] + +open_re = re.compile(r'^\s*open\s+([a-zA-Z]+)(;;){0,1}$', re.M) +foo = re.compile(r"""(\(\*)|(\*\))|("(\\.|[^"\\])*"|'(\\.|[^'\\])*'|.[^()*"'\\]*)""", re.M) +def filter_comments(txt): + meh = [0] + def repl(m): + if m.group(1): + meh[0] += 1 + elif m.group(2): + meh[0] -= 1 + elif not meh[0]: + return m.group() + return '' + return foo.sub(repl, txt) + +def scan(self): + node = self.inputs[0] + code = filter_comments(node.read()) + + global open_re + names = [] + import_iterator = open_re.finditer(code) + if import_iterator: + for import_match in import_iterator: + names.append(import_match.group(1)) + found_lst = [] + raw_lst = [] + for name in names: + nd = None + for x in self.incpaths: + nd = x.find_resource(name.lower()+'.ml') + if not nd: + nd = x.find_resource(name+'.ml') + if nd: + found_lst.append(nd) + break + else: + raw_lst.append(name) + + return (found_lst, raw_lst) + +native_lst=['native', 'all', 'c_object'] +bytecode_lst=['bytecode', 'all'] + +@feature('ocaml') +def init_ml(self): + Utils.def_attrs(self, + type = 'all', + incpaths_lst = [], + bld_incpaths_lst = [], + mlltasks = [], + mlytasks = [], + mlitasks = [], + native_tasks = [], + bytecode_tasks = [], + linktasks = [], + bytecode_env = None, + native_env = None, + compiled_tasks = [], + includes = '', + uselib = '', + are_deps_set = 0) + +@feature('ocaml') +@after_method('init_ml') +def init_envs_ml(self): + + self.islibrary = getattr(self, 'islibrary', False) + + global native_lst, bytecode_lst + self.native_env = None + if self.type in native_lst: + self.native_env = self.env.derive() + if self.islibrary: + self.native_env['OCALINKFLAGS'] = '-a' + + self.bytecode_env = None + if self.type in bytecode_lst: + self.bytecode_env = self.env.derive() + if self.islibrary: + self.bytecode_env['OCALINKFLAGS'] = '-a' + + if self.type == 'c_object': + self.native_env.append_unique('OCALINKFLAGS_OPT', '-output-obj') + +@feature('ocaml') +@before_method('apply_vars_ml') +@after_method('init_envs_ml') +def apply_incpaths_ml(self): + inc_lst = self.includes.split() + lst = self.incpaths_lst + for dir in inc_lst: + node = self.path.find_dir(dir) + if not node: + error("node not found: " + str(dir)) + continue + if not node in lst: + lst.append(node) + 
self.bld_incpaths_lst.append(node) + # now the nodes are added to self.incpaths_lst + +@feature('ocaml') +@before_method('process_source') +def apply_vars_ml(self): + for i in self.incpaths_lst: + if self.bytecode_env: + app = self.bytecode_env.append_value + app('OCAMLPATH', ['-I', i.bldpath(), '-I', i.srcpath()]) + + if self.native_env: + app = self.native_env.append_value + app('OCAMLPATH', ['-I', i.bldpath(), '-I', i.srcpath()]) + + varnames = ['INCLUDES', 'OCAMLFLAGS', 'OCALINKFLAGS', 'OCALINKFLAGS_OPT'] + for name in self.uselib.split(): + for vname in varnames: + cnt = self.env[vname+'_'+name] + if cnt: + if self.bytecode_env: + self.bytecode_env.append_value(vname, cnt) + if self.native_env: + self.native_env.append_value(vname, cnt) + +@feature('ocaml') +@after_method('process_source') +def apply_link_ml(self): + + if self.bytecode_env: + ext = self.islibrary and '.cma' or '.run' + + linktask = self.create_task('ocalink') + linktask.bytecode = 1 + linktask.set_outputs(self.path.find_or_declare(self.target + ext)) + linktask.env = self.bytecode_env + self.linktasks.append(linktask) + + if self.native_env: + if self.type == 'c_object': + ext = '.o' + elif self.islibrary: + ext = '.cmxa' + else: + ext = '' + + linktask = self.create_task('ocalinkx') + linktask.set_outputs(self.path.find_or_declare(self.target + ext)) + linktask.env = self.native_env + self.linktasks.append(linktask) + + # we produce a .o file to be used by gcc + self.compiled_tasks.append(linktask) + +@extension(*EXT_MLL) +def mll_hook(self, node): + mll_task = self.create_task('ocamllex', node, node.change_ext('.ml')) + mll_task.env = self.native_env.derive() + self.mlltasks.append(mll_task) + + self.source.append(mll_task.outputs[0]) + +@extension(*EXT_MLY) +def mly_hook(self, node): + mly_task = self.create_task('ocamlyacc', node, [node.change_ext('.ml'), node.change_ext('.mli')]) + mly_task.env = self.native_env.derive() + self.mlytasks.append(mly_task) + self.source.append(mly_task.outputs[0]) + + task = self.create_task('ocamlcmi', mly_task.outputs[1], mly_task.outputs[1].change_ext('.cmi')) + task.env = self.native_env.derive() + +@extension(*EXT_MLI) +def mli_hook(self, node): + task = self.create_task('ocamlcmi', node, node.change_ext('.cmi')) + task.env = self.native_env.derive() + self.mlitasks.append(task) + +@extension(*EXT_MLC) +def mlc_hook(self, node): + task = self.create_task('ocamlcc', node, node.change_ext('.o')) + task.env = self.native_env.derive() + self.compiled_tasks.append(task) + +@extension(*EXT_ML) +def ml_hook(self, node): + if self.native_env: + task = self.create_task('ocamlx', node, node.change_ext('.cmx')) + task.env = self.native_env.derive() + task.incpaths = self.bld_incpaths_lst + self.native_tasks.append(task) + + if self.bytecode_env: + task = self.create_task('ocaml', node, node.change_ext('.cmo')) + task.env = self.bytecode_env.derive() + task.bytecode = 1 + task.incpaths = self.bld_incpaths_lst + self.bytecode_tasks.append(task) + +def compile_may_start(self): + + if not getattr(self, 'flag_deps', ''): + self.flag_deps = 1 + + # the evil part is that we can only compute the dependencies after the + # source files can be read (this means actually producing the source files) + if getattr(self, 'bytecode', ''): + alltasks = self.generator.bytecode_tasks + else: + alltasks = self.generator.native_tasks + + self.signature() # ensure that files are scanned - unfortunately + tree = self.generator.bld + for node in self.inputs: + lst = tree.node_deps[self.uid()] + for depnode in 
lst: + for t in alltasks: + if t == self: + continue + if depnode in t.inputs: + self.set_run_after(t) + + # TODO necessary to get the signature right - for now + delattr(self, 'cache_sig') + self.signature() + + return Task.Task.runnable_status(self) + +class ocamlx(Task.Task): + """native caml compilation""" + color = 'GREEN' + run_str = '${OCAMLOPT} ${OCAMLPATH} ${OCAMLFLAGS} ${OCAMLINCLUDES} -c -o ${TGT} ${SRC}' + scan = scan + runnable_status = compile_may_start + +class ocaml(Task.Task): + """bytecode caml compilation""" + color = 'GREEN' + run_str = '${OCAMLC} ${OCAMLPATH} ${OCAMLFLAGS} ${OCAMLINCLUDES} -c -o ${TGT} ${SRC}' + scan = scan + runnable_status = compile_may_start + +class ocamlcmi(Task.Task): + """interface generator (the .i files?)""" + color = 'BLUE' + run_str = '${OCAMLC} ${OCAMLPATH} ${OCAMLINCLUDES} -o ${TGT} -c ${SRC}' + before = ['ocamlcc', 'ocaml', 'ocamlcc'] + +class ocamlcc(Task.Task): + """ocaml to c interfaces""" + color = 'GREEN' + run_str = 'cd ${TGT[0].bld_dir()} && ${OCAMLOPT} ${OCAMLFLAGS} ${OCAMLPATH} ${OCAMLINCLUDES} -c ${SRC[0].abspath()}' + +class ocamllex(Task.Task): + """lexical generator""" + color = 'BLUE' + run_str = '${OCAMLLEX} ${SRC} -o ${TGT}' + before = ['ocamlcmi', 'ocaml', 'ocamlcc'] + +class ocamlyacc(Task.Task): + """parser generator""" + color = 'BLUE' + run_str = '${OCAMLYACC} -b ${tsk.base()} ${SRC}' + before = ['ocamlcmi', 'ocaml', 'ocamlcc'] + + def base(self): + node = self.outputs[0] + s = os.path.splitext(node.name)[0] + return node.bld_dir() + os.sep + s + +def link_may_start(self): + + if getattr(self, 'bytecode', 0): + alltasks = self.generator.bytecode_tasks + else: + alltasks = self.generator.native_tasks + + for x in alltasks: + if not x.hasrun: + return Task.ASK_LATER + + if not getattr(self, 'order', ''): + + # now reorder the inputs given the task dependencies + # this part is difficult, we do not have a total order on the tasks + # if the dependencies are wrong, this may not stop + seen = [] + pendant = []+alltasks + while pendant: + task = pendant.pop(0) + if task in seen: + continue + for x in task.run_after: + if not x in seen: + pendant.append(task) + break + else: + seen.append(task) + self.inputs = [x.outputs[0] for x in seen] + self.order = 1 + return Task.Task.runnable_status(self) + +class ocalink(Task.Task): + """bytecode caml link""" + color = 'YELLOW' + run_str = '${OCAMLC} -o ${TGT} ${OCAMLINCLUDES} ${OCALINKFLAGS} ${SRC}' + runnable_status = link_may_start + after = ['ocaml', 'ocamlcc'] + +class ocalinkx(Task.Task): + """native caml link""" + color = 'YELLOW' + run_str = '${OCAMLOPT} -o ${TGT} ${OCAMLINCLUDES} ${OCALINKFLAGS_OPT} ${SRC}' + runnable_status = link_may_start + after = ['ocamlx', 'ocamlcc'] + +def configure(conf): + opt = conf.find_program('ocamlopt', var='OCAMLOPT', mandatory=False) + occ = conf.find_program('ocamlc', var='OCAMLC', mandatory=False) + if (not opt) or (not occ): + conf.fatal('The objective caml compiler was not found:\ninstall it or make it available in your PATH') + + v = conf.env + v['OCAMLC'] = occ + v['OCAMLOPT'] = opt + v['OCAMLLEX'] = conf.find_program('ocamllex', var='OCAMLLEX', mandatory=False) + v['OCAMLYACC'] = conf.find_program('ocamlyacc', var='OCAMLYACC', mandatory=False) + v['OCAMLFLAGS'] = '' + where = conf.cmd_and_log(conf.env.OCAMLC + ['-where']).strip()+os.sep + v['OCAMLLIB'] = where + v['LIBPATH_OCAML'] = where + v['INCLUDES_OCAML'] = where + v['LIB_OCAML'] = 'camlrun' + diff -Nru lilv-0.24.4~dfsg0/waflib/extras/package.py 
lilv-0.24.6/waflib/extras/package.py
--- lilv-0.24.4~dfsg0/waflib/extras/package.py	1970-01-01 00:00:00.000000000 +0000
+++ lilv-0.24.6/waflib/extras/package.py	2019-06-06 20:19:08.000000000 +0000
@@ -0,0 +1,76 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2011
+
+"""
+Obtain packages, unpack them in a location, and add associated uselib variables
+(CFLAGS_pkgname, LIBPATH_pkgname, etc).
+
+The default is to use a Dependencies.txt file in the source directory.
+
+This is a work in progress.
+
+Usage:
+
+def options(opt):
+	opt.load('package')
+
+def configure(conf):
+	conf.load_packages()
+"""
+
+from waflib import Logs
+from waflib.Configure import conf
+
+try:
+	from urllib import request
+except ImportError:
+	from urllib import urlopen
+else:
+	urlopen = request.urlopen
+
+
+CACHEVAR = 'WAFCACHE_PACKAGE'
+
+@conf
+def get_package_cache_dir(self):
+	cache = None
+	if CACHEVAR in self.environ:
+		cache = self.environ[CACHEVAR]
+		cache = self.root.make_node(cache)
+	elif self.env[CACHEVAR]:
+		cache = self.env[CACHEVAR]
+		cache = self.root.make_node(cache)
+	else:
+		cache = self.srcnode.make_node('.wafcache_package')
+	cache.mkdir()
+	return cache
+
+@conf
+def download_archive(self, src, dst):
+	for x in self.env.PACKAGE_REPO:
+		url = '/'.join((x, src))
+		try:
+			web = urlopen(url)
+			try:
+				if web.getcode() != 200:
+					continue
+			except AttributeError:
+				pass
+		except Exception:
+			# on python3 urlopen throws an exception
+			# python 2.3 does not have getcode and throws an exception to fail
+			continue
+		else:
+			tmp = self.root.make_node(dst)
+			tmp.write(web.read())
+			Logs.warn('Downloaded %s from %s', tmp.abspath(), url)
+			break
+	else:
+		self.fatal('Could not get the package %s' % src)
+
+@conf
+def load_packages(self):
+	self.get_package_cache_dir()
+	# read the dependencies, get the archives, ..
+
diff -Nru lilv-0.24.4~dfsg0/waflib/extras/parallel_debug.py lilv-0.24.6/waflib/extras/parallel_debug.py
--- lilv-0.24.4~dfsg0/waflib/extras/parallel_debug.py	1970-01-01 00:00:00.000000000 +0000
+++ lilv-0.24.6/waflib/extras/parallel_debug.py	2019-06-06 20:19:08.000000000 +0000
@@ -0,0 +1,462 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2007-2010 (ita)
+
+"""
+Debugging helper for parallel compilation.
+
+Copy it to your project and load it with::
+
+	def options(opt):
+		opt.load('parallel_debug', tooldir='.')
+	def build(bld):
+		...
+
+The build will then output a file named pdebug.svg in the source directory.
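+
+The rendering can be tuned through the options registered in options() at the
+bottom of this file: --dband (band height), --dwidth (diagram width), --dtime
+(recording interval), --dmaxtime (fixed time scale for comparing runs),
+--dtitle (diagram title) and --dnotooltip (disable tooltips).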
+""" + +import re, sys, threading, time, traceback +try: + from Queue import Queue +except: + from queue import Queue +from waflib import Runner, Options, Task, Logs, Errors + +SVG_TEMPLATE = """ + + + + + + + + + + +${if project.title} + ${project.title} +${endif} + + +${for cls in project.groups} + + ${for rect in cls.rects} + + ${endfor} + +${endfor} + +${for info in project.infos} + + + ${info.text} + +${endfor} + +${if project.tooltip} + + + + +${endif} + + +""" + +COMPILE_TEMPLATE = '''def f(project): + lst = [] + def xml_escape(value): + return value.replace("&", "&").replace('"', """).replace("'", "'").replace("<", "<").replace(">", ">") + + %s + return ''.join(lst) +''' +reg_act = re.compile(r"(?P\\)|(?P\$\$)|(?P\$\{(?P[^}]*?)\})", re.M) +def compile_template(line): + + extr = [] + def repl(match): + g = match.group + if g('dollar'): + return "$" + elif g('backslash'): + return "\\" + elif g('subst'): + extr.append(g('code')) + return "<<|@|>>" + return None + + line2 = reg_act.sub(repl, line) + params = line2.split('<<|@|>>') + assert(extr) + + + indent = 0 + buf = [] + app = buf.append + + def app(txt): + buf.append(indent * '\t' + txt) + + for x in range(len(extr)): + if params[x]: + app("lst.append(%r)" % params[x]) + + f = extr[x] + if f.startswith(('if', 'for')): + app(f + ':') + indent += 1 + elif f.startswith('py:'): + app(f[3:]) + elif f.startswith(('endif', 'endfor')): + indent -= 1 + elif f.startswith(('else', 'elif')): + indent -= 1 + app(f + ':') + indent += 1 + elif f.startswith('xml:'): + app('lst.append(xml_escape(%s))' % f[4:]) + else: + #app('lst.append((%s) or "cannot find %s")' % (f, f)) + app('lst.append(str(%s))' % f) + + if extr: + if params[-1]: + app("lst.append(%r)" % params[-1]) + + fun = COMPILE_TEMPLATE % "\n\t".join(buf) + # uncomment the following to debug the template + #for i, x in enumerate(fun.splitlines()): + # print i, x + return Task.funex(fun) + +# red #ff4d4d +# green #4da74d +# lila #a751ff + +color2code = { + 'GREEN' : '#4da74d', + 'YELLOW' : '#fefe44', + 'PINK' : '#a751ff', + 'RED' : '#cc1d1d', + 'BLUE' : '#6687bb', + 'CYAN' : '#34e2e2', +} + +mp = {} +info = [] # list of (text,color) + +def map_to_color(name): + if name in mp: + return mp[name] + try: + cls = Task.classes[name] + except KeyError: + return color2code['RED'] + if cls.color in mp: + return mp[cls.color] + if cls.color in color2code: + return color2code[cls.color] + return color2code['RED'] + +def process(self): + m = self.generator.bld.producer + try: + # TODO another place for this? 
+ del self.generator.bld.task_sigs[self.uid()] + except KeyError: + pass + + self.generator.bld.producer.set_running(1, self) + + try: + ret = self.run() + except Exception: + self.err_msg = traceback.format_exc() + self.hasrun = Task.EXCEPTION + + # TODO cleanup + m.error_handler(self) + return + + if ret: + self.err_code = ret + self.hasrun = Task.CRASHED + else: + try: + self.post_run() + except Errors.WafError: + pass + except Exception: + self.err_msg = traceback.format_exc() + self.hasrun = Task.EXCEPTION + else: + self.hasrun = Task.SUCCESS + if self.hasrun != Task.SUCCESS: + m.error_handler(self) + + self.generator.bld.producer.set_running(-1, self) + +Task.Task.process_back = Task.Task.process +Task.Task.process = process + +old_start = Runner.Parallel.start +def do_start(self): + try: + Options.options.dband + except AttributeError: + self.bld.fatal('use def options(opt): opt.load("parallel_debug")!') + + self.taskinfo = Queue() + old_start(self) + if self.dirty: + make_picture(self) +Runner.Parallel.start = do_start + +lock_running = threading.Lock() +def set_running(self, by, tsk): + with lock_running: + try: + cache = self.lock_cache + except AttributeError: + cache = self.lock_cache = {} + + i = 0 + if by > 0: + vals = cache.values() + for i in range(self.numjobs): + if i not in vals: + cache[tsk] = i + break + else: + i = cache[tsk] + del cache[tsk] + + self.taskinfo.put( (i, id(tsk), time.time(), tsk.__class__.__name__, self.processed, self.count, by, ",".join(map(str, tsk.outputs))) ) +Runner.Parallel.set_running = set_running + +def name2class(name): + return name.replace(' ', '_').replace('.', '_') + +def make_picture(producer): + # first, cast the parameters + if not hasattr(producer.bld, 'path'): + return + + tmp = [] + try: + while True: + tup = producer.taskinfo.get(False) + tmp.append(list(tup)) + except: + pass + + try: + ini = float(tmp[0][2]) + except: + return + + if not info: + seen = [] + for x in tmp: + name = x[3] + if not name in seen: + seen.append(name) + else: + continue + + info.append((name, map_to_color(name))) + info.sort(key=lambda x: x[0]) + + thread_count = 0 + acc = [] + for x in tmp: + thread_count += x[6] + acc.append("%d %d %f %r %d %d %d %s" % (x[0], x[1], x[2] - ini, x[3], x[4], x[5], thread_count, x[7])) + + data_node = producer.bld.path.make_node('pdebug.dat') + data_node.write('\n'.join(acc)) + + tmp = [lst[:2] + [float(lst[2]) - ini] + lst[3:] for lst in tmp] + + st = {} + for l in tmp: + if not l[0] in st: + st[l[0]] = len(st.keys()) + tmp = [ [st[lst[0]]] + lst[1:] for lst in tmp ] + THREAD_AMOUNT = len(st.keys()) + + st = {} + for l in tmp: + if not l[1] in st: + st[l[1]] = len(st.keys()) + tmp = [ [lst[0]] + [st[lst[1]]] + lst[2:] for lst in tmp ] + + + BAND = Options.options.dband + + seen = {} + acc = [] + for x in range(len(tmp)): + line = tmp[x] + id = line[1] + + if id in seen: + continue + seen[id] = True + + begin = line[2] + thread_id = line[0] + for y in range(x + 1, len(tmp)): + line = tmp[y] + if line[1] == id: + end = line[2] + #print id, thread_id, begin, end + #acc.append( ( 10*thread_id, 10*(thread_id+1), 10*begin, 10*end ) ) + acc.append( (BAND * begin, BAND*thread_id, BAND*end - BAND*begin, BAND, line[3], line[7]) ) + break + + if Options.options.dmaxtime < 0.1: + gwidth = 1 + for x in tmp: + m = BAND * x[2] + if m > gwidth: + gwidth = m + else: + gwidth = BAND * Options.options.dmaxtime + + ratio = float(Options.options.dwidth) / gwidth + gwidth = Options.options.dwidth + gheight = BAND * (THREAD_AMOUNT + len(info) 
+ 1.5) + + + # simple data model for our template + class tobject(object): + pass + + model = tobject() + model.x = 0 + model.y = 0 + model.width = gwidth + 4 + model.height = gheight + 4 + + model.tooltip = not Options.options.dnotooltip + + model.title = Options.options.dtitle + model.title_x = gwidth / 2 + model.title_y = gheight + - 5 + + groups = {} + for (x, y, w, h, clsname, name) in acc: + try: + groups[clsname].append((x, y, w, h, name)) + except: + groups[clsname] = [(x, y, w, h, name)] + + # groups of rectangles (else js highlighting is slow) + model.groups = [] + for cls in groups: + g = tobject() + model.groups.append(g) + g.classname = name2class(cls) + g.rects = [] + for (x, y, w, h, name) in groups[cls]: + r = tobject() + g.rects.append(r) + r.x = 2 + x * ratio + r.y = 2 + y + r.width = w * ratio + r.height = h + r.name = name + r.color = map_to_color(cls) + + cnt = THREAD_AMOUNT + + # caption + model.infos = [] + for (text, color) in info: + inf = tobject() + model.infos.append(inf) + inf.classname = name2class(text) + inf.x = 2 + BAND + inf.y = 5 + (cnt + 0.5) * BAND + inf.width = BAND/2 + inf.height = BAND/2 + inf.color = color + + inf.text = text + inf.text_x = 2 + 2 * BAND + inf.text_y = 5 + (cnt + 0.5) * BAND + 10 + + cnt += 1 + + # write the file... + template1 = compile_template(SVG_TEMPLATE) + txt = template1(model) + + node = producer.bld.path.make_node('pdebug.svg') + node.write(txt) + Logs.warn('Created the diagram %r', node) + +def options(opt): + opt.add_option('--dtitle', action='store', default='Parallel build representation for %r' % ' '.join(sys.argv), + help='title for the svg diagram', dest='dtitle') + opt.add_option('--dwidth', action='store', type='int', help='diagram width', default=800, dest='dwidth') + opt.add_option('--dtime', action='store', type='float', help='recording interval in seconds', default=0.009, dest='dtime') + opt.add_option('--dband', action='store', type='int', help='band width', default=22, dest='dband') + opt.add_option('--dmaxtime', action='store', type='float', help='maximum time, for drawing fair comparisons', default=0, dest='dmaxtime') + opt.add_option('--dnotooltip', action='store_true', help='disable tooltips', default=False, dest='dnotooltip') + diff -Nru lilv-0.24.4~dfsg0/waflib/extras/pch.py lilv-0.24.6/waflib/extras/pch.py --- lilv-0.24.4~dfsg0/waflib/extras/pch.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/extras/pch.py 2019-06-06 20:19:08.000000000 +0000 @@ -0,0 +1,148 @@ +#! /usr/bin/env python +# encoding: utf-8 +# Alexander Afanasyev (UCLA), 2014 + +""" +Enable precompiled C++ header support (currently only clang++ and g++ are supported) + +To use this tool, wscript should look like: + + def options(opt): + opt.load('pch') + # This will add `--with-pch` configure option. + # Unless --with-pch during configure stage specified, the precompiled header support is disabled + + def configure(conf): + conf.load('pch') + # this will set conf.env.WITH_PCH if --with-pch is specified and the supported compiler is used + # Unless conf.env.WITH_PCH is set, the precompiled header support is disabled + + def build(bld): + bld(features='cxx pch', + target='precompiled-headers', + name='precompiled-headers', + headers='a.h b.h c.h', # headers to pre-compile into `precompiled-headers` + + # Other parameters to compile precompiled headers + # includes=..., + # export_includes=..., + # use=..., + # ... 
+ + # Exported parameters will be propagated even if precompiled headers are disabled + ) + + bld( + target='test', + features='cxx cxxprogram', + source='a.cpp b.cpp d.cpp main.cpp', + use='precompiled-headers', + ) + + # or + + bld( + target='test', + features='pch cxx cxxprogram', + source='a.cpp b.cpp d.cpp main.cpp', + headers='a.h b.h c.h', + ) + +Note that precompiled header must have multiple inclusion guards. If the guards are missing, any benefit of precompiled header will be voided and compilation may fail in some cases. +""" + +import os +from waflib import Task, TaskGen, Utils +from waflib.Tools import c_preproc, cxx + + +PCH_COMPILER_OPTIONS = { + 'clang++': [['-include'], '.pch', ['-x', 'c++-header']], + 'g++': [['-include'], '.gch', ['-x', 'c++-header']], +} + + +def options(opt): + opt.add_option('--without-pch', action='store_false', default=True, dest='with_pch', help='''Try to use precompiled header to speed up compilation (only g++ and clang++)''') + +def configure(conf): + if (conf.options.with_pch and conf.env['COMPILER_CXX'] in PCH_COMPILER_OPTIONS.keys()): + conf.env.WITH_PCH = True + flags = PCH_COMPILER_OPTIONS[conf.env['COMPILER_CXX']] + conf.env.CXXPCH_F = flags[0] + conf.env.CXXPCH_EXT = flags[1] + conf.env.CXXPCH_FLAGS = flags[2] + + +@TaskGen.feature('pch') +@TaskGen.before('process_source') +def apply_pch(self): + if not self.env.WITH_PCH: + return + + if getattr(self.bld, 'pch_tasks', None) is None: + self.bld.pch_tasks = {} + + if getattr(self, 'headers', None) is None: + return + + self.headers = self.to_nodes(self.headers) + + if getattr(self, 'name', None): + try: + task = self.bld.pch_tasks["%s.%s" % (self.name, self.idx)] + self.bld.fatal("Duplicated 'pch' task with name %r" % "%s.%s" % (self.name, self.idx)) + except KeyError: + pass + + out = '%s.%d%s' % (self.target, self.idx, self.env['CXXPCH_EXT']) + out = self.path.find_or_declare(out) + task = self.create_task('gchx', self.headers, out) + + # target should be an absolute path of `out`, but without precompiled header extension + task.target = out.abspath()[:-len(out.suffix())] + + self.pch_task = task + if getattr(self, 'name', None): + self.bld.pch_tasks["%s.%s" % (self.name, self.idx)] = task + +@TaskGen.feature('cxx') +@TaskGen.after_method('process_source', 'propagate_uselib_vars') +def add_pch(self): + if not (self.env['WITH_PCH'] and getattr(self, 'use', None) and getattr(self, 'compiled_tasks', None) and getattr(self.bld, 'pch_tasks', None)): + return + + pch = None + # find pch task, if any + + if getattr(self, 'pch_task', None): + pch = self.pch_task + else: + for use in Utils.to_list(self.use): + try: + pch = self.bld.pch_tasks[use] + except KeyError: + pass + + if pch: + for x in self.compiled_tasks: + x.env.append_value('CXXFLAGS', self.env['CXXPCH_F'] + [pch.target]) + +class gchx(Task.Task): + run_str = '${CXX} ${ARCH_ST:ARCH} ${CXXFLAGS} ${CXXPCH_FLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${CXXPCH_F:SRC} ${CXX_SRC_F}${SRC[0].abspath()} ${CXX_TGT_F}${TGT[0].abspath()} ${CPPFLAGS}' + scan = c_preproc.scan + color = 'BLUE' + ext_out=['.h'] + + def runnable_status(self): + try: + node_deps = self.generator.bld.node_deps[self.uid()] + except KeyError: + node_deps = [] + ret = Task.Task.runnable_status(self) + if ret == Task.SKIP_ME and self.env.CXX_NAME == 'clang': + t = os.stat(self.outputs[0].abspath()).st_mtime + for n in self.inputs + node_deps: + if os.stat(n.abspath()).st_mtime > t: + return Task.RUN_ME + return ret diff -Nru 
lilv-0.24.4~dfsg0/waflib/extras/pep8.py lilv-0.24.6/waflib/extras/pep8.py --- lilv-0.24.4~dfsg0/waflib/extras/pep8.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/extras/pep8.py 2019-06-06 20:19:08.000000000 +0000 @@ -0,0 +1,106 @@ +#! /usr/bin/env python +# encoding: utf-8 +# +# written by Sylvain Rouquette, 2011 + +''' +Install pep8 module: +$ easy_install pep8 + or +$ pip install pep8 + +To add the pep8 tool to the waf file: +$ ./waf-light --tools=compat15,pep8 + or, if you have waf >= 1.6.2 +$ ./waf update --files=pep8 + + +Then add this to your wscript: + +[at]extension('.py', 'wscript') +def run_pep8(self, node): + self.create_task('Pep8', node) + +''' + +import threading +from waflib import Task, Options + +pep8 = __import__('pep8') + + +class Pep8(Task.Task): + color = 'PINK' + lock = threading.Lock() + + def check_options(self): + if pep8.options: + return + pep8.options = Options.options + pep8.options.prog = 'pep8' + excl = pep8.options.exclude.split(',') + pep8.options.exclude = [s.rstrip('/') for s in excl] + if pep8.options.filename: + pep8.options.filename = pep8.options.filename.split(',') + if pep8.options.select: + pep8.options.select = pep8.options.select.split(',') + else: + pep8.options.select = [] + if pep8.options.ignore: + pep8.options.ignore = pep8.options.ignore.split(',') + elif pep8.options.select: + # Ignore all checks which are not explicitly selected + pep8.options.ignore = [''] + elif pep8.options.testsuite or pep8.options.doctest: + # For doctest and testsuite, all checks are required + pep8.options.ignore = [] + else: + # The default choice: ignore controversial checks + pep8.options.ignore = pep8.DEFAULT_IGNORE.split(',') + pep8.options.physical_checks = pep8.find_checks('physical_line') + pep8.options.logical_checks = pep8.find_checks('logical_line') + pep8.options.counters = dict.fromkeys(pep8.BENCHMARK_KEYS, 0) + pep8.options.messages = {} + + def run(self): + with Pep8.lock: + self.check_options() + pep8.input_file(self.inputs[0].abspath()) + return 0 if not pep8.get_count() else -1 + + +def options(opt): + opt.add_option('-q', '--quiet', default=0, action='count', + help="report only file names, or nothing with -qq") + opt.add_option('-r', '--repeat', action='store_true', + help="show all occurrences of the same error") + opt.add_option('--exclude', metavar='patterns', + default=pep8.DEFAULT_EXCLUDE, + help="exclude files or directories which match these " + "comma separated patterns (default: %s)" % + pep8.DEFAULT_EXCLUDE, + dest='exclude') + opt.add_option('--filename', metavar='patterns', default='*.py', + help="when parsing directories, only check filenames " + "matching these comma separated patterns (default: " + "*.py)") + opt.add_option('--select', metavar='errors', default='', + help="select errors and warnings (e.g. E,W6)") + opt.add_option('--ignore', metavar='errors', default='', + help="skip errors and warnings (e.g. 
E4,W)") + opt.add_option('--show-source', action='store_true', + help="show source code for each error") + opt.add_option('--show-pep8', action='store_true', + help="show text of PEP 8 for each error") + opt.add_option('--statistics', action='store_true', + help="count errors and warnings") + opt.add_option('--count', action='store_true', + help="print total number of errors and warnings " + "to standard error and set exit code to 1 if " + "total is not null") + opt.add_option('--benchmark', action='store_true', + help="measure processing speed") + opt.add_option('--testsuite', metavar='dir', + help="run regression tests from dir") + opt.add_option('--doctest', action='store_true', + help="run doctest on myself") diff -Nru lilv-0.24.4~dfsg0/waflib/extras/pgicc.py lilv-0.24.6/waflib/extras/pgicc.py --- lilv-0.24.4~dfsg0/waflib/extras/pgicc.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/extras/pgicc.py 2019-06-06 20:19:08.000000000 +0000 @@ -0,0 +1,75 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Antoine Dechaume 2011 + +""" +Detect the PGI C compiler +""" + +import sys, re +from waflib import Errors +from waflib.Configure import conf +from waflib.Tools.compiler_c import c_compiler +c_compiler['linux'].append('pgicc') + +@conf +def find_pgi_compiler(conf, var, name): + """ + Find the program name, and execute it to ensure it really is itself. + """ + if sys.platform == 'cygwin': + conf.fatal('The PGI compiler does not work on Cygwin') + + v = conf.env + cc = None + if v[var]: + cc = v[var] + elif var in conf.environ: + cc = conf.environ[var] + if not cc: + cc = conf.find_program(name, var=var) + if not cc: + conf.fatal('PGI Compiler (%s) was not found' % name) + + v[var + '_VERSION'] = conf.get_pgi_version(cc) + v[var] = cc + v[var + '_NAME'] = 'pgi' + +@conf +def get_pgi_version(conf, cc): + """Find the version of a pgi compiler.""" + version_re = re.compile(r"The Portland Group", re.I).search + cmd = cc + ['-V', '-E'] # Issue 1078, prevent wrappers from linking + + try: + out, err = conf.cmd_and_log(cmd, output=0) + except Errors.WafError: + conf.fatal('Could not find pgi compiler %r' % cmd) + + if out: + match = version_re(out) + else: + match = version_re(err) + + if not match: + conf.fatal('Could not verify PGI signature') + + cmd = cc + ['-help=variable'] + try: + out, err = conf.cmd_and_log(cmd, output=0) + except Errors.WafError: + conf.fatal('Could not find pgi compiler %r' % cmd) + + version = re.findall(r'^COMPVER\s*=(.*)', out, re.M) + if len(version) != 1: + conf.fatal('Could not determine the compiler version') + return version[0] + +def configure(conf): + conf.find_pgi_compiler('CC', 'pgcc') + conf.find_ar() + conf.gcc_common_flags() + conf.cc_load_tools() + conf.cc_add_flags() + conf.link_add_flags() + diff -Nru lilv-0.24.4~dfsg0/waflib/extras/pgicxx.py lilv-0.24.6/waflib/extras/pgicxx.py --- lilv-0.24.4~dfsg0/waflib/extras/pgicxx.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/extras/pgicxx.py 2019-06-06 20:19:08.000000000 +0000 @@ -0,0 +1,20 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Antoine Dechaume 2011 + +""" +Detect the PGI C++ compiler +""" + +from waflib.Tools.compiler_cxx import cxx_compiler +cxx_compiler['linux'].append('pgicxx') + +from waflib.extras import pgicc + +def configure(conf): + conf.find_pgi_compiler('CXX', 'pgCC') + conf.find_ar() + conf.gxx_common_flags() + conf.cxx_load_tools() + conf.cxx_add_flags() + conf.link_add_flags() diff -Nru lilv-0.24.4~dfsg0/waflib/extras/proc.py lilv-0.24.6/waflib/extras/proc.py --- 
lilv-0.24.4~dfsg0/waflib/extras/proc.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/extras/proc.py 2019-06-06 20:19:08.000000000 +0000 @@ -0,0 +1,54 @@ +#! /usr/bin/env python +# per rosengren 2011 + +from os import environ, path +from waflib import TaskGen, Utils + +def options(opt): + grp = opt.add_option_group('Oracle ProC Options') + grp.add_option('--oracle_home', action='store', default=environ.get('PROC_ORACLE'), help='Path to Oracle installation home (has bin/lib)') + grp.add_option('--tns_admin', action='store', default=environ.get('TNS_ADMIN'), help='Directory containing server list (TNS_NAMES.ORA)') + grp.add_option('--connection', action='store', default='dummy-user/dummy-password@dummy-server', help='Format: user/password@server') + +def configure(cnf): + env = cnf.env + if not env.PROC_ORACLE: + env.PROC_ORACLE = cnf.options.oracle_home + if not env.PROC_TNS_ADMIN: + env.PROC_TNS_ADMIN = cnf.options.tns_admin + if not env.PROC_CONNECTION: + env.PROC_CONNECTION = cnf.options.connection + cnf.find_program('proc', var='PROC', path_list=env.PROC_ORACLE + path.sep + 'bin') + +def proc(tsk): + env = tsk.env + gen = tsk.generator + inc_nodes = gen.to_incnodes(Utils.to_list(getattr(gen,'includes',[])) + env['INCLUDES']) + + cmd = ( + [env.PROC] + + ['SQLCHECK=SEMANTICS'] + + (['SYS_INCLUDE=(' + ','.join(env.PROC_INCLUDES) + ')'] + if env.PROC_INCLUDES else []) + + ['INCLUDE=(' + ','.join( + [i.bldpath() for i in inc_nodes] + ) + ')'] + + ['userid=' + env.PROC_CONNECTION] + + ['INAME=' + tsk.inputs[0].bldpath()] + + ['ONAME=' + tsk.outputs[0].bldpath()] + ) + exec_env = { + 'ORACLE_HOME': env.PROC_ORACLE, + 'LD_LIBRARY_PATH': env.PROC_ORACLE + path.sep + 'lib', + } + if env.PROC_TNS_ADMIN: + exec_env['TNS_ADMIN'] = env.PROC_TNS_ADMIN + return tsk.exec_command(cmd, env=exec_env) + +TaskGen.declare_chain( + name = 'proc', + rule = proc, + ext_in = '.pc', + ext_out = '.c', +) + diff -Nru lilv-0.24.4~dfsg0/waflib/extras/protoc.py lilv-0.24.6/waflib/extras/protoc.py --- lilv-0.24.4~dfsg0/waflib/extras/protoc.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/extras/protoc.py 2019-10-19 17:59:11.000000000 +0000 @@ -0,0 +1,224 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Philipp Bender, 2012 +# Matt Clarkson, 2012 + +import re, os +from waflib.Task import Task +from waflib.TaskGen import extension +from waflib import Errors, Context, Logs + +""" +A simple tool to integrate protocol buffers into your build system. + +Example for C++: + + def configure(conf): + conf.load('compiler_cxx cxx protoc') + + def build(bld): + bld( + features = 'cxx cxxprogram' + source = 'main.cpp file1.proto proto/file2.proto', + includes = '. 
proto', + target = 'executable') + +Example for Python: + + def configure(conf): + conf.load('python protoc') + + def build(bld): + bld( + features = 'py' + source = 'main.py file1.proto proto/file2.proto', + protoc_includes = 'proto') + +Example for both Python and C++ at same time: + + def configure(conf): + conf.load('cxx python protoc') + + def build(bld): + bld( + features = 'cxx py' + source = 'file1.proto proto/file2.proto', + protoc_includes = 'proto') # or includes + + +Example for Java: + + def options(opt): + opt.load('java') + + def configure(conf): + conf.load('python java protoc') + # Here you have to point to your protobuf-java JAR and have it in classpath + conf.env.CLASSPATH_PROTOBUF = ['protobuf-java-2.5.0.jar'] + + def build(bld): + bld( + features = 'javac protoc', + name = 'pbjava', + srcdir = 'inc/ src', # directories used by javac + source = ['inc/message_inc.proto', 'inc/message.proto'], + # source is used by protoc for .proto files + use = 'PROTOBUF', + protoc_includes = ['inc']) # for protoc to search dependencies + + +Protoc includes passed via protoc_includes are either relative to the taskgen +or to the project and are searched in this order. + +Include directories external to the waf project can also be passed to the +extra by using protoc_extincludes + + protoc_extincludes = ['/usr/include/pblib'] + + +Notes when using this tool: + +- protoc command line parsing is tricky. + + The generated files can be put in subfolders which depend on + the order of the include paths. + + Try to be simple when creating task generators + containing protoc stuff. + +""" + +class protoc(Task): + run_str = '${PROTOC} ${PROTOC_FL:PROTOC_FLAGS} ${PROTOC_ST:INCPATHS} ${PROTOC_ST:PROTOC_INCPATHS} ${PROTOC_ST:PROTOC_EXTINCPATHS} ${SRC[0].bldpath()}' + color = 'BLUE' + ext_out = ['.h', 'pb.cc', '.py', '.java'] + def scan(self): + """ + Scan .proto dependencies + """ + node = self.inputs[0] + + nodes = [] + names = [] + seen = [] + search_nodes = [] + + if not node: + return (nodes, names) + + if 'cxx' in self.generator.features: + search_nodes = self.generator.includes_nodes + + if 'py' in self.generator.features or 'javac' in self.generator.features: + for incpath in getattr(self.generator, 'protoc_includes', []): + incpath_node = self.generator.path.find_node(incpath) + if incpath_node: + search_nodes.append(incpath_node) + else: + # Check if relative to top-level for extra tg dependencies + incpath_node = self.generator.bld.path.find_node(incpath) + if incpath_node: + search_nodes.append(incpath_node) + else: + raise Errors.WafError('protoc: include path %r does not exist' % incpath) + + + def parse_node(node): + if node in seen: + return + seen.append(node) + code = node.read().splitlines() + for line in code: + m = re.search(r'^import\s+"(.*)";.*(//)?.*', line) + if m: + dep = m.groups()[0] + for incnode in search_nodes: + found = incnode.find_resource(dep) + if found: + nodes.append(found) + parse_node(found) + else: + names.append(dep) + + parse_node(node) + # Add also dependencies path to INCPATHS so protoc will find the included file + for deppath in nodes: + self.env.append_unique('INCPATHS', deppath.parent.bldpath()) + return (nodes, names) + +@extension('.proto') +def process_protoc(self, node): + incdirs = [] + out_nodes = [] + protoc_flags = [] + + # ensure PROTOC_FLAGS is a list; a copy is used below anyway + self.env.PROTOC_FLAGS = self.to_list(self.env.PROTOC_FLAGS) + + if 'cxx' in self.features: + cpp_node = node.change_ext('.pb.cc') + hpp_node = 
node.change_ext('.pb.h') + self.source.append(cpp_node) + out_nodes.append(cpp_node) + out_nodes.append(hpp_node) + protoc_flags.append('--cpp_out=%s' % node.parent.get_bld().bldpath()) + + if 'py' in self.features: + py_node = node.change_ext('_pb2.py') + self.source.append(py_node) + out_nodes.append(py_node) + protoc_flags.append('--python_out=%s' % node.parent.get_bld().bldpath()) + + if 'javac' in self.features: + # Make javac get also pick java code generated in build + if not node.parent.get_bld() in self.javac_task.srcdir: + self.javac_task.srcdir.append(node.parent.get_bld()) + + protoc_flags.append('--java_out=%s' % node.parent.get_bld().bldpath()) + node.parent.get_bld().mkdir() + + tsk = self.create_task('protoc', node, out_nodes) + tsk.env.append_value('PROTOC_FLAGS', protoc_flags) + + if 'javac' in self.features: + self.javac_task.set_run_after(tsk) + + # Instruct protoc where to search for .proto included files. + # For C++ standard include files dirs are used, + # but this doesn't apply to Python for example + for incpath in getattr(self, 'protoc_includes', []): + incpath_node = self.path.find_node(incpath) + if incpath_node: + incdirs.append(incpath_node.bldpath()) + else: + # Check if relative to top-level for extra tg dependencies + incpath_node = self.bld.path.find_node(incpath) + if incpath_node: + incdirs.append(incpath_node.bldpath()) + else: + raise Errors.WafError('protoc: include path %r does not exist' % incpath) + + tsk.env.PROTOC_INCPATHS = incdirs + + # Include paths external to the waf project (ie. shared pb repositories) + tsk.env.PROTOC_EXTINCPATHS = getattr(self, 'protoc_extincludes', []) + + # PR2115: protoc generates output of .proto files in nested + # directories by canonicalizing paths. To avoid this we have to pass + # as first include the full directory file of the .proto file + tsk.env.prepend_value('INCPATHS', node.parent.bldpath()) + + use = getattr(self, 'use', '') + if not 'PROTOBUF' in use: + self.use = self.to_list(use) + ['PROTOBUF'] + +def configure(conf): + conf.check_cfg(package='protobuf', uselib_store='PROTOBUF', args=['--cflags', '--libs']) + conf.find_program('protoc', var='PROTOC') + conf.start_msg('Checking for protoc version') + protocver = conf.cmd_and_log(conf.env.PROTOC + ['--version'], output=Context.BOTH) + protocver = ''.join(protocver).strip()[protocver[0].rfind(' ')+1:] + conf.end_msg(protocver) + conf.env.PROTOC_MAJOR = protocver[:protocver.find('.')] + conf.env.PROTOC_ST = '-I%s' + conf.env.PROTOC_FL = '%s' diff -Nru lilv-0.24.4~dfsg0/waflib/extras/pyqt5.py lilv-0.24.6/waflib/extras/pyqt5.py --- lilv-0.24.4~dfsg0/waflib/extras/pyqt5.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/extras/pyqt5.py 2019-10-19 17:59:11.000000000 +0000 @@ -0,0 +1,246 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Federico Pellegrin, 2016-2019 (fedepell) adapted for Python + +""" +This tool helps with finding Python Qt5 tools and libraries, +and provides translation from QT5 files to Python code. + +The following snippet illustrates the tool usage:: + + def options(opt): + opt.load('py pyqt5') + + def configure(conf): + conf.load('py pyqt5') + + def build(bld): + bld( + features = 'py pyqt5', + source = 'main.py textures.qrc aboutDialog.ui', + ) + +Here, the UI description and resource files will be processed +to generate code. + +Usage +===== + +Load the "pyqt5" tool. 
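+
+The tool search order can be forced at configure time with the
+command-line options defined at the end of this file; for example,
+to force the PySide2 tools (a usage sketch)::
+
+    $ ./waf configure --pyqt5-pyside2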
+ +Add into the sources list also the qrc resources files or ui5 +definition files and they will be translated into python code +with the system tools (PyQt5, PySide2, PyQt4 are searched in this +order) and then compiled +""" + +try: + from xml.sax import make_parser + from xml.sax.handler import ContentHandler +except ImportError: + has_xml = False + ContentHandler = object +else: + has_xml = True + +import os +from waflib.Tools import python +from waflib import Task, Options +from waflib.TaskGen import feature, extension +from waflib.Configure import conf +from waflib import Logs + +EXT_RCC = ['.qrc'] +""" +File extension for the resource (.qrc) files +""" + +EXT_UI = ['.ui'] +""" +File extension for the user interface (.ui) files +""" + + +class XMLHandler(ContentHandler): + """ + Parses ``.qrc`` files + """ + def __init__(self): + self.buf = [] + self.files = [] + def startElement(self, name, attrs): + if name == 'file': + self.buf = [] + def endElement(self, name): + if name == 'file': + self.files.append(str(''.join(self.buf))) + def characters(self, cars): + self.buf.append(cars) + +@extension(*EXT_RCC) +def create_pyrcc_task(self, node): + "Creates rcc and py task for ``.qrc`` files" + rcnode = node.change_ext('.py') + self.create_task('pyrcc', node, rcnode) + if getattr(self, 'install_from', None): + self.install_from = self.install_from.get_bld() + else: + self.install_from = self.path.get_bld() + self.install_path = getattr(self, 'install_path', '${PYTHONDIR}') + self.process_py(rcnode) + +@extension(*EXT_UI) +def create_pyuic_task(self, node): + "Create uic tasks and py for user interface ``.ui`` definition files" + uinode = node.change_ext('.py') + self.create_task('ui5py', node, uinode) + if getattr(self, 'install_from', None): + self.install_from = self.install_from.get_bld() + else: + self.install_from = self.path.get_bld() + self.install_path = getattr(self, 'install_path', '${PYTHONDIR}') + self.process_py(uinode) + +@extension('.ts') +def add_pylang(self, node): + """Adds all the .ts file into ``self.lang``""" + self.lang = self.to_list(getattr(self, 'lang', [])) + [node] + +@feature('pyqt5') +def apply_pyqt5(self): + """ + The additional parameters are: + + :param lang: list of translation files (\\*.ts) to process + :type lang: list of :py:class:`waflib.Node.Node` or string without the .ts extension + :param langname: if given, transform the \\*.ts files into a .qrc files to include in the binary file + :type langname: :py:class:`waflib.Node.Node` or string without the .qrc extension + """ + if getattr(self, 'lang', None): + qmtasks = [] + for x in self.to_list(self.lang): + if isinstance(x, str): + x = self.path.find_resource(x + '.ts') + qmtasks.append(self.create_task('ts2qm', x, x.change_ext('.qm'))) + + + if getattr(self, 'langname', None): + qmnodes = [k.outputs[0] for k in qmtasks] + rcnode = self.langname + if isinstance(rcnode, str): + rcnode = self.path.find_or_declare(rcnode + '.qrc') + t = self.create_task('qm2rcc', qmnodes, rcnode) + create_pyrcc_task(self, t.outputs[0]) + +class pyrcc(Task.Task): + """ + Processes ``.qrc`` files + """ + color = 'BLUE' + run_str = '${QT_PYRCC} ${SRC} -o ${TGT}' + ext_out = ['.py'] + + def rcname(self): + return os.path.splitext(self.inputs[0].name)[0] + + def scan(self): + """Parse the *.qrc* files""" + if not has_xml: + Logs.error('No xml.sax support was found, rcc dependencies will be incomplete!') + return ([], []) + + parser = make_parser() + curHandler = XMLHandler() + parser.setContentHandler(curHandler) + fi = 
open(self.inputs[0].abspath(), 'r') + try: + parser.parse(fi) + finally: + fi.close() + + nodes = [] + names = [] + root = self.inputs[0].parent + for x in curHandler.files: + nd = root.find_resource(x) + if nd: + nodes.append(nd) + else: + names.append(x) + return (nodes, names) + + +class ui5py(Task.Task): + """ + Processes ``.ui`` files for python + """ + color = 'BLUE' + run_str = '${QT_PYUIC} ${SRC} -o ${TGT}' + ext_out = ['.py'] + +class ts2qm(Task.Task): + """ + Generates ``.qm`` files from ``.ts`` files + """ + color = 'BLUE' + run_str = '${QT_LRELEASE} ${QT_LRELEASE_FLAGS} ${SRC} -qm ${TGT}' + +class qm2rcc(Task.Task): + """ + Generates ``.qrc`` files from ``.qm`` files + """ + color = 'BLUE' + after = 'ts2qm' + def run(self): + """Create a qrc file including the inputs""" + txt = '\n'.join(['%s' % k.path_from(self.outputs[0].parent) for k in self.inputs]) + code = '\n\n%s\n\n' % txt + self.outputs[0].write(code) + +def configure(self): + self.find_pyqt5_binaries() + + # warn about this during the configuration too + if not has_xml: + Logs.error('No xml.sax support was found, rcc dependencies will be incomplete!') + +@conf +def find_pyqt5_binaries(self): + """ + Detects PyQt5 or PySide2 programs such as pyuic5/pyside2-uic, pyrcc5/pyside2-rcc + """ + env = self.env + + if getattr(Options.options, 'want_pyqt5', True): + self.find_program(['pyuic5'], var='QT_PYUIC') + self.find_program(['pyrcc5'], var='QT_PYRCC') + self.find_program(['pylupdate5'], var='QT_PYLUPDATE') + elif getattr(Options.options, 'want_pyside2', True): + self.find_program(['pyside2-uic'], var='QT_PYUIC') + self.find_program(['pyside2-rcc'], var='QT_PYRCC') + self.find_program(['pyside2-lupdate'], var='QT_PYLUPDATE') + elif getattr(Options.options, 'want_pyqt4', True): + self.find_program(['pyuic4'], var='QT_PYUIC') + self.find_program(['pyrcc4'], var='QT_PYRCC') + self.find_program(['pylupdate4'], var='QT_PYLUPDATE') + else: + self.find_program(['pyuic5','pyside2-uic','pyuic4'], var='QT_PYUIC') + self.find_program(['pyrcc5','pyside2-rcc','pyrcc4'], var='QT_PYRCC') + self.find_program(['pylupdate5', 'pyside2-lupdate','pylupdate4'], var='QT_PYLUPDATE') + + if not env.QT_PYUIC: + self.fatal('cannot find the uic compiler for python for qt5') + + if not env.QT_PYRCC: + self.fatal('cannot find the rcc compiler for python for qt5') + + self.find_program(['lrelease-qt5', 'lrelease'], var='QT_LRELEASE') + +def options(opt): + """ + Command-line options + """ + pyqt5opt=opt.add_option_group("Python QT5 Options") + pyqt5opt.add_option('--pyqt5-pyqt5', action='store_true', default=False, dest='want_pyqt5', help='use PyQt5 bindings as python QT5 bindings (default PyQt5 is searched first, PySide2 after, PyQt4 last)') + pyqt5opt.add_option('--pyqt5-pyside2', action='store_true', default=False, dest='want_pyside2', help='use PySide2 bindings as python QT5 bindings (default PyQt5 is searched first, PySide2 after, PyQt4 last)') + pyqt5opt.add_option('--pyqt5-pyqt4', action='store_true', default=False, dest='want_pyqt4', help='use PyQt4 bindings as python QT5 bindings (default PyQt5 is searched first, PySide2 after, PyQt4 last)') diff -Nru lilv-0.24.4~dfsg0/waflib/extras/pytest.py lilv-0.24.6/waflib/extras/pytest.py --- lilv-0.24.4~dfsg0/waflib/extras/pytest.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/extras/pytest.py 2019-06-06 20:19:08.000000000 +0000 @@ -0,0 +1,225 @@ +#! 
/usr/bin/env python +# encoding: utf-8 +# Calle Rosenquist, 2016-2018 (xbreak) + +""" +Provides Python unit test support using :py:class:`waflib.Tools.waf_unit_test.utest` +task via the **pytest** feature. + +To use pytest the following is needed: + +1. Load `pytest` and the dependency `waf_unit_test` tools. +2. Create a task generator with feature `pytest` (not `test`) and customize behaviour with + the following attributes: + + - `pytest_source`: Test input files. + - `ut_str`: Test runner command, e.g. ``${PYTHON} -B -m unittest discover`` or + if nose is used: ``${NOSETESTS} --no-byte-compile ${SRC}``. + - `ut_shell`: Determines if ``ut_str`` is executed in a shell. Default: False. + - `ut_cwd`: Working directory for test runner. Defaults to directory of + first ``pytest_source`` file. + + Additionally the following `pytest` specific attributes are used in dependent taskgens: + + - `pytest_path`: Node or string list of additional Python paths. + - `pytest_libpath`: Node or string list of additional library paths. + +The `use` dependencies are used for both update calculation and to populate +the following environment variables for the `pytest` test runner: + +1. `PYTHONPATH` (`sys.path`) of any dependent taskgen that has the feature `py`: + + - `install_from` attribute is used to determine where the root of the Python sources + are located. If `install_from` is not specified the default is to use the taskgen path + as the root. + + - `pytest_path` attribute is used to manually specify additional Python paths. + +2. Dynamic linker search path variable (e.g. `LD_LIBRARY_PATH`) of any dependent taskgen with + non-static link_task. + + - `pytest_libpath` attribute is used to manually specify additional linker paths. + +Note: `pytest` cannot automatically determine the correct `PYTHONPATH` for `pyext` taskgens + because the extension might be part of a Python package or used standalone: + + - When used as part of another `py` package, the `PYTHONPATH` is provided by + that taskgen so no additional action is required. + + - When used as a standalone module, the user needs to specify the `PYTHONPATH` explicitly + via the `pytest_path` attribute on the `pyext` taskgen. + + For details c.f. the pytest playground examples. + + +For example:: + + # A standalone Python C extension that demonstrates unit test environment population + # of PYTHONPATH and LD_LIBRARY_PATH/PATH/DYLD_LIBRARY_PATH. + # + # Note: `pytest_path` is provided here because pytest cannot automatically determine + # if the extension is part of another Python package or is used standalone. + bld(name = 'foo_ext', + features = 'c cshlib pyext', + source = 'src/foo_ext.c', + target = 'foo_ext', + pytest_path = [ bld.path.get_bld() ]) + + # Python package under test that also depend on the Python module `foo_ext` + # + # Note: `install_from` is added automatically to `PYTHONPATH`. + bld(name = 'foo', + features = 'py', + use = 'foo_ext', + source = bld.path.ant_glob('src/foo/*.py'), + install_from = 'src') + + # Unit test example using the built in module unittest and let that discover + # any test cases. + bld(name = 'foo_test', + features = 'pytest', + use = 'foo', + pytest_source = bld.path.ant_glob('test/*.py'), + ut_str = '${PYTHON} -B -m unittest discover') + +""" + +import os +from waflib import Task, TaskGen, Errors, Utils, Logs +from waflib.Tools import ccroot + +def _process_use_rec(self, name): + """ + Recursively process ``use`` for task generator with name ``name``.. + Used by pytest_process_use. 
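+	Names that do not resolve to a known task generator are remembered
+	in ``pytest_use_not`` and skipped on later calls.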
+ """ + if name in self.pytest_use_not or name in self.pytest_use_seen: + return + try: + tg = self.bld.get_tgen_by_name(name) + except Errors.WafError: + self.pytest_use_not.add(name) + return + + self.pytest_use_seen.append(name) + tg.post() + + for n in self.to_list(getattr(tg, 'use', [])): + _process_use_rec(self, n) + + +@TaskGen.feature('pytest') +@TaskGen.after_method('process_source', 'apply_link') +def pytest_process_use(self): + """ + Process the ``use`` attribute which contains a list of task generator names and store + paths that later is used to populate the unit test runtime environment. + """ + self.pytest_use_not = set() + self.pytest_use_seen = [] + self.pytest_paths = [] # strings or Nodes + self.pytest_libpaths = [] # strings or Nodes + self.pytest_dep_nodes = [] + + names = self.to_list(getattr(self, 'use', [])) + for name in names: + _process_use_rec(self, name) + + def extend_unique(lst, varlst): + ext = [] + for x in varlst: + if x not in lst: + ext.append(x) + lst.extend(ext) + + # Collect type specific info needed to construct a valid runtime environment + # for the test. + for name in self.pytest_use_seen: + tg = self.bld.get_tgen_by_name(name) + + extend_unique(self.pytest_paths, Utils.to_list(getattr(tg, 'pytest_path', []))) + extend_unique(self.pytest_libpaths, Utils.to_list(getattr(tg, 'pytest_libpath', []))) + + if 'py' in tg.features: + # Python dependencies are added to PYTHONPATH + pypath = getattr(tg, 'install_from', tg.path) + + if 'buildcopy' in tg.features: + # Since buildcopy is used we assume that PYTHONPATH in build should be used, + # not source + extend_unique(self.pytest_paths, [pypath.get_bld().abspath()]) + + # Add buildcopy output nodes to dependencies + extend_unique(self.pytest_dep_nodes, [o for task in getattr(tg, 'tasks', []) \ + for o in getattr(task, 'outputs', [])]) + else: + # If buildcopy is not used, depend on sources instead + extend_unique(self.pytest_dep_nodes, tg.source) + extend_unique(self.pytest_paths, [pypath.abspath()]) + + if getattr(tg, 'link_task', None): + # For tasks with a link_task (C, C++, D et.c.) include their library paths: + if not isinstance(tg.link_task, ccroot.stlink_task): + extend_unique(self.pytest_dep_nodes, tg.link_task.outputs) + extend_unique(self.pytest_libpaths, tg.link_task.env.LIBPATH) + + if 'pyext' in tg.features: + # If the taskgen is extending Python we also want to add the interpreter libpath. + extend_unique(self.pytest_libpaths, tg.link_task.env.LIBPATH_PYEXT) + else: + # Only add to libpath if the link task is not a Python extension + extend_unique(self.pytest_libpaths, [tg.link_task.outputs[0].parent.abspath()]) + + +@TaskGen.feature('pytest') +@TaskGen.after_method('pytest_process_use') +def make_pytest(self): + """ + Creates a ``utest`` task with a populated environment for Python if not specified in ``ut_env``: + + - Paths in `pytest_paths` attribute are used to populate PYTHONPATH + - Paths in `pytest_libpaths` attribute are used to populate the system library path (e.g. 
LD_LIBRARY_PATH) + """ + nodes = self.to_nodes(self.pytest_source) + tsk = self.create_task('utest', nodes) + + tsk.dep_nodes.extend(self.pytest_dep_nodes) + if getattr(self, 'ut_str', None): + self.ut_run, lst = Task.compile_fun(self.ut_str, shell=getattr(self, 'ut_shell', False)) + tsk.vars = lst + tsk.vars + + if getattr(self, 'ut_cwd', None): + if isinstance(self.ut_cwd, str): + # we want a Node instance + if os.path.isabs(self.ut_cwd): + self.ut_cwd = self.bld.root.make_node(self.ut_cwd) + else: + self.ut_cwd = self.path.make_node(self.ut_cwd) + else: + if tsk.inputs: + self.ut_cwd = tsk.inputs[0].parent + else: + raise Errors.WafError("no valid input files for pytest task, check pytest_source value") + + if not self.ut_cwd.exists(): + self.ut_cwd.mkdir() + + if not hasattr(self, 'ut_env'): + self.ut_env = dict(os.environ) + def add_paths(var, lst): + # Add list of paths to a variable, lst can contain strings or nodes + lst = [ str(n) for n in lst ] + Logs.debug("ut: %s: Adding paths %s=%s", self, var, lst) + self.ut_env[var] = os.pathsep.join(lst) + os.pathsep + self.ut_env.get(var, '') + + # Prepend dependency paths to PYTHONPATH and LD_LIBRARY_PATH + add_paths('PYTHONPATH', self.pytest_paths) + + if Utils.is_win32: + add_paths('PATH', self.pytest_libpaths) + elif Utils.unversioned_sys_platform() == 'darwin': + add_paths('DYLD_LIBRARY_PATH', self.pytest_libpaths) + add_paths('LD_LIBRARY_PATH', self.pytest_libpaths) + else: + add_paths('LD_LIBRARY_PATH', self.pytest_libpaths) + diff -Nru lilv-0.24.4~dfsg0/waflib/extras/qnxnto.py lilv-0.24.6/waflib/extras/qnxnto.py --- lilv-0.24.4~dfsg0/waflib/extras/qnxnto.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/extras/qnxnto.py 2019-06-06 20:19:08.000000000 +0000 @@ -0,0 +1,72 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Jérôme Carretero 2011 (zougloub) +# QNX neutrino compatibility functions + +import sys, os +from waflib import Utils + +class Popen(object): + """ + Popen cannot work on QNX from a threaded program: + Forking in threads is not implemented in neutrino. + + Python's os.popen / spawn / fork won't work when running in threads (they will if in the main program thread) + + In waf, this happens mostly in build. + And the use cases can be replaced by os.system() calls. + """ + __slots__ = ["prog", "kw", "popen", "verbose"] + verbose = 0 + def __init__(self, prog, **kw): + try: + self.prog = prog + self.kw = kw + self.popen = None + if Popen.verbose: + sys.stdout.write("Popen created: %r, kw=%r..." % (prog, kw)) + + do_delegate = kw.get('stdout') == -1 and kw.get('stderr') == -1 + if do_delegate: + if Popen.verbose: + print("Delegating to real Popen") + self.popen = self.real_Popen(prog, **kw) + else: + if Popen.verbose: + print("Emulating") + except Exception as e: + if Popen.verbose: + print("Exception: %s" % e) + raise + + def __getattr__(self, name): + if Popen.verbose: + sys.stdout.write("Getattr: %s..." 
% name) + if name in Popen.__slots__: + return object.__getattribute__(self, name) + else: + if self.popen is not None: + if Popen.verbose: + print("from Popen") + return getattr(self.popen, name) + else: + if name == "wait": + return self.emu_wait + else: + raise Exception("subprocess emulation: not implemented: %s" % name) + + def emu_wait(self): + if Popen.verbose: + print("emulated wait (%r kw=%r)" % (self.prog, self.kw)) + if isinstance(self.prog, str): + cmd = self.prog + else: + cmd = " ".join(self.prog) + if 'cwd' in self.kw: + cmd = 'cd "%s" && %s' % (self.kw['cwd'], cmd) + return os.system(cmd) + +if sys.platform == "qnx6": + Popen.real_Popen = Utils.subprocess.Popen + Utils.subprocess.Popen = Popen + diff -Nru lilv-0.24.4~dfsg0/waflib/extras/qt4.py lilv-0.24.6/waflib/extras/qt4.py --- lilv-0.24.4~dfsg0/waflib/extras/qt4.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/extras/qt4.py 2019-06-06 20:19:08.000000000 +0000 @@ -0,0 +1,695 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Thomas Nagy, 2006-2010 (ita) + +""" + +Tool Description +================ + +This tool helps with finding Qt4 tools and libraries, +and also provides syntactic sugar for using Qt4 tools. + +The following snippet illustrates the tool usage:: + + def options(opt): + opt.load('compiler_cxx qt4') + + def configure(conf): + conf.load('compiler_cxx qt4') + + def build(bld): + bld( + features = 'qt4 cxx cxxprogram', + uselib = 'QTCORE QTGUI QTOPENGL QTSVG', + source = 'main.cpp textures.qrc aboutDialog.ui', + target = 'window', + ) + +Here, the UI description and resource files will be processed +to generate code. + +Usage +===== + +Load the "qt4" tool. + +You also need to edit your sources accordingly: + +- the normal way of doing things is to have your C++ files + include the .moc file. + This is regarded as the best practice (and provides much faster + compilations). + It also implies that the include paths have beenset properly. + +- to have the include paths added automatically, use the following:: + + from waflib.TaskGen import feature, before_method, after_method + @feature('cxx') + @after_method('process_source') + @before_method('apply_incpaths') + def add_includes_paths(self): + incs = set(self.to_list(getattr(self, 'includes', ''))) + for x in self.compiled_tasks: + incs.add(x.inputs[0].parent.path_from(self.path)) + self.includes = sorted(incs) + +Note: another tool provides Qt processing that does not require +.moc includes, see 'playground/slow_qt/'. + +A few options (--qt{dir,bin,...}) and environment variables +(QT4_{ROOT,DIR,MOC,UIC,XCOMPILE}) allow finer tuning of the tool, +tool path selection, etc; please read the source for more info. 
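+
+For example, pointing the configuration at a specific Qt4 installation
+(the path is only illustrative)::
+
+    $ ./waf configure --qtdir=/usr/local/Trolltech/Qt-4.8.6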
+ +""" + +try: + from xml.sax import make_parser + from xml.sax.handler import ContentHandler +except ImportError: + has_xml = False + ContentHandler = object +else: + has_xml = True + +import os, sys +from waflib.Tools import cxx +from waflib import Task, Utils, Options, Errors, Context +from waflib.TaskGen import feature, after_method, extension +from waflib.Configure import conf +from waflib import Logs + +MOC_H = ['.h', '.hpp', '.hxx', '.hh'] +""" +File extensions associated to the .moc files +""" + +EXT_RCC = ['.qrc'] +""" +File extension for the resource (.qrc) files +""" + +EXT_UI = ['.ui'] +""" +File extension for the user interface (.ui) files +""" + +EXT_QT4 = ['.cpp', '.cc', '.cxx', '.C'] +""" +File extensions of C++ files that may require a .moc processing +""" + +QT4_LIBS = "QtCore QtGui QtUiTools QtNetwork QtOpenGL QtSql QtSvg QtTest QtXml QtXmlPatterns QtWebKit Qt3Support QtHelp QtScript QtDeclarative QtDesigner" + +class qxx(Task.classes['cxx']): + """ + Each C++ file can have zero or several .moc files to create. + They are known only when the files are scanned (preprocessor) + To avoid scanning the c++ files each time (parsing C/C++), the results + are retrieved from the task cache (bld.node_deps/bld.raw_deps). + The moc tasks are also created *dynamically* during the build. + """ + + def __init__(self, *k, **kw): + Task.Task.__init__(self, *k, **kw) + self.moc_done = 0 + + def runnable_status(self): + """ + Compute the task signature to make sure the scanner was executed. Create the + moc tasks by using :py:meth:`waflib.Tools.qt4.qxx.add_moc_tasks` (if necessary), + then postpone the task execution (there is no need to recompute the task signature). + """ + if self.moc_done: + return Task.Task.runnable_status(self) + else: + for t in self.run_after: + if not t.hasrun: + return Task.ASK_LATER + self.add_moc_tasks() + return Task.Task.runnable_status(self) + + def create_moc_task(self, h_node, m_node): + """ + If several libraries use the same classes, it is possible that moc will run several times (Issue 1318) + It is not possible to change the file names, but we can assume that the moc transformation will be identical, + and the moc tasks can be shared in a global cache. + + The defines passed to moc will then depend on task generator order. If this is not acceptable, then + use the tool slow_qt4 instead (and enjoy the slow builds... 
:-( ) + """ + try: + moc_cache = self.generator.bld.moc_cache + except AttributeError: + moc_cache = self.generator.bld.moc_cache = {} + + try: + return moc_cache[h_node] + except KeyError: + tsk = moc_cache[h_node] = Task.classes['moc'](env=self.env, generator=self.generator) + tsk.set_inputs(h_node) + tsk.set_outputs(m_node) + + if self.generator: + self.generator.tasks.append(tsk) + + # direct injection in the build phase (safe because called from the main thread) + gen = self.generator.bld.producer + gen.outstanding.append(tsk) + gen.total += 1 + + return tsk + + def moc_h_ext(self): + ext = [] + try: + ext = Options.options.qt_header_ext.split() + except AttributeError: + pass + if not ext: + ext = MOC_H + return ext + + def add_moc_tasks(self): + """ + Create the moc tasks by looking in ``bld.raw_deps[self.uid()]`` + """ + node = self.inputs[0] + bld = self.generator.bld + + try: + # compute the signature once to know if there is a moc file to create + self.signature() + except KeyError: + # the moc file may be referenced somewhere else + pass + else: + # remove the signature, it must be recomputed with the moc task + delattr(self, 'cache_sig') + + include_nodes = [node.parent] + self.generator.includes_nodes + + moctasks = [] + mocfiles = set() + for d in bld.raw_deps.get(self.uid(), []): + if not d.endswith('.moc'): + continue + + # process that base.moc only once + if d in mocfiles: + continue + mocfiles.add(d) + + # find the source associated with the moc file + h_node = None + + base2 = d[:-4] + for x in include_nodes: + for e in self.moc_h_ext(): + h_node = x.find_node(base2 + e) + if h_node: + break + if h_node: + m_node = h_node.change_ext('.moc') + break + else: + # foo.cpp -> foo.cpp.moc + for k in EXT_QT4: + if base2.endswith(k): + for x in include_nodes: + h_node = x.find_node(base2) + if h_node: + break + if h_node: + m_node = h_node.change_ext(k + '.moc') + break + + if not h_node: + raise Errors.WafError('No source found for %r which is a moc file' % d) + + # create the moc task + task = self.create_moc_task(h_node, m_node) + moctasks.append(task) + + # simple scheduler dependency: run the moc task before others + self.run_after.update(set(moctasks)) + self.moc_done = 1 + +class trans_update(Task.Task): + """Update a .ts files from a list of C++ files""" + run_str = '${QT_LUPDATE} ${SRC} -ts ${TGT}' + color = 'BLUE' + +class XMLHandler(ContentHandler): + """ + Parser for *.qrc* files + """ + def __init__(self): + self.buf = [] + self.files = [] + def startElement(self, name, attrs): + if name == 'file': + self.buf = [] + def endElement(self, name): + if name == 'file': + self.files.append(str(''.join(self.buf))) + def characters(self, cars): + self.buf.append(cars) + +@extension(*EXT_RCC) +def create_rcc_task(self, node): + "Create rcc and cxx tasks for *.qrc* files" + rcnode = node.change_ext('_rc.cpp') + self.create_task('rcc', node, rcnode) + cpptask = self.create_task('cxx', rcnode, rcnode.change_ext('.o')) + try: + self.compiled_tasks.append(cpptask) + except AttributeError: + self.compiled_tasks = [cpptask] + return cpptask + +@extension(*EXT_UI) +def create_uic_task(self, node): + "hook for uic tasks" + uictask = self.create_task('ui4', node) + uictask.outputs = [self.path.find_or_declare(self.env['ui_PATTERN'] % node.name[:-3])] + +@extension('.ts') +def add_lang(self, node): + """add all the .ts file into self.lang""" + self.lang = self.to_list(getattr(self, 'lang', [])) + [node] + +@feature('qt4') +@after_method('apply_link') +def apply_qt4(self): + """ + Add 
MOC_FLAGS which may be necessary for moc:: + + def build(bld): + bld.program(features='qt4', source='main.cpp', target='app', use='QTCORE') + + The additional parameters are: + + :param lang: list of translation files (\\*.ts) to process + :type lang: list of :py:class:`waflib.Node.Node` or string without the .ts extension + :param update: whether to process the C++ files to update the \\*.ts files (use **waf --translate**) + :type update: bool + :param langname: if given, transform the \\*.ts files into a .qrc files to include in the binary file + :type langname: :py:class:`waflib.Node.Node` or string without the .qrc extension + """ + if getattr(self, 'lang', None): + qmtasks = [] + for x in self.to_list(self.lang): + if isinstance(x, str): + x = self.path.find_resource(x + '.ts') + qmtasks.append(self.create_task('ts2qm', x, x.change_ext('.qm'))) + + if getattr(self, 'update', None) and Options.options.trans_qt4: + cxxnodes = [a.inputs[0] for a in self.compiled_tasks] + [ + a.inputs[0] for a in self.tasks if getattr(a, 'inputs', None) and a.inputs[0].name.endswith('.ui')] + for x in qmtasks: + self.create_task('trans_update', cxxnodes, x.inputs) + + if getattr(self, 'langname', None): + qmnodes = [x.outputs[0] for x in qmtasks] + rcnode = self.langname + if isinstance(rcnode, str): + rcnode = self.path.find_or_declare(rcnode + '.qrc') + t = self.create_task('qm2rcc', qmnodes, rcnode) + k = create_rcc_task(self, t.outputs[0]) + self.link_task.inputs.append(k.outputs[0]) + + lst = [] + for flag in self.to_list(self.env['CXXFLAGS']): + if len(flag) < 2: + continue + f = flag[0:2] + if f in ('-D', '-I', '/D', '/I'): + if (f[0] == '/'): + lst.append('-' + flag[1:]) + else: + lst.append(flag) + self.env.append_value('MOC_FLAGS', lst) + +@extension(*EXT_QT4) +def cxx_hook(self, node): + """ + Re-map C++ file extensions to the :py:class:`waflib.Tools.qt4.qxx` task. 
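+	This is what allows the moc tasks to be created dynamically while
+	the build runs (see :py:class:`qxx` above).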
+ """ + return self.create_compiled_task('qxx', node) + +class rcc(Task.Task): + """ + Process *.qrc* files + """ + color = 'BLUE' + run_str = '${QT_RCC} -name ${tsk.rcname()} ${SRC[0].abspath()} ${RCC_ST} -o ${TGT}' + ext_out = ['.h'] + + def rcname(self): + return os.path.splitext(self.inputs[0].name)[0] + + def scan(self): + """Parse the *.qrc* files""" + if not has_xml: + Logs.error('no xml support was found, the rcc dependencies will be incomplete!') + return ([], []) + + parser = make_parser() + curHandler = XMLHandler() + parser.setContentHandler(curHandler) + fi = open(self.inputs[0].abspath(), 'r') + try: + parser.parse(fi) + finally: + fi.close() + + nodes = [] + names = [] + root = self.inputs[0].parent + for x in curHandler.files: + nd = root.find_resource(x) + if nd: + nodes.append(nd) + else: + names.append(x) + return (nodes, names) + +class moc(Task.Task): + """ + Create *.moc* files + """ + color = 'BLUE' + run_str = '${QT_MOC} ${MOC_FLAGS} ${MOCCPPPATH_ST:INCPATHS} ${MOCDEFINES_ST:DEFINES} ${SRC} ${MOC_ST} ${TGT}' + def keyword(self): + return "Creating" + def __str__(self): + return self.outputs[0].path_from(self.generator.bld.launch_node()) + +class ui4(Task.Task): + """ + Process *.ui* files + """ + color = 'BLUE' + run_str = '${QT_UIC} ${SRC} -o ${TGT}' + ext_out = ['.h'] + +class ts2qm(Task.Task): + """ + Create *.qm* files from *.ts* files + """ + color = 'BLUE' + run_str = '${QT_LRELEASE} ${QT_LRELEASE_FLAGS} ${SRC} -qm ${TGT}' + +class qm2rcc(Task.Task): + """ + Transform *.qm* files into *.rc* files + """ + color = 'BLUE' + after = 'ts2qm' + + def run(self): + """Create a qrc file including the inputs""" + txt = '\n'.join(['%s' % k.path_from(self.outputs[0].parent) for k in self.inputs]) + code = '\n\n%s\n\n' % txt + self.outputs[0].write(code) + +def configure(self): + """ + Besides the configuration options, the environment variable QT4_ROOT may be used + to give the location of the qt4 libraries (absolute path). 
+ + The detection will use the program *pkg-config* through :py:func:`waflib.Tools.config_c.check_cfg` + """ + self.find_qt4_binaries() + self.set_qt4_libs_to_check() + self.set_qt4_defines() + self.find_qt4_libraries() + self.add_qt4_rpath() + self.simplify_qt4_libs() + +@conf +def find_qt4_binaries(self): + env = self.env + opt = Options.options + + qtdir = getattr(opt, 'qtdir', '') + qtbin = getattr(opt, 'qtbin', '') + + paths = [] + + if qtdir: + qtbin = os.path.join(qtdir, 'bin') + + # the qt directory has been given from QT4_ROOT - deduce the qt binary path + if not qtdir: + qtdir = os.environ.get('QT4_ROOT', '') + qtbin = os.environ.get('QT4_BIN') or os.path.join(qtdir, 'bin') + + if qtbin: + paths = [qtbin] + + # no qtdir, look in the path and in /usr/local/Trolltech + if not qtdir: + paths = os.environ.get('PATH', '').split(os.pathsep) + paths.append('/usr/share/qt4/bin/') + try: + lst = Utils.listdir('/usr/local/Trolltech/') + except OSError: + pass + else: + if lst: + lst.sort() + lst.reverse() + + # keep the highest version + qtdir = '/usr/local/Trolltech/%s/' % lst[0] + qtbin = os.path.join(qtdir, 'bin') + paths.append(qtbin) + + # at the end, try to find qmake in the paths given + # keep the one with the highest version + cand = None + prev_ver = ['4', '0', '0'] + for qmk in ('qmake-qt4', 'qmake4', 'qmake'): + try: + qmake = self.find_program(qmk, path_list=paths) + except self.errors.ConfigurationError: + pass + else: + try: + version = self.cmd_and_log(qmake + ['-query', 'QT_VERSION']).strip() + except self.errors.WafError: + pass + else: + if version: + new_ver = version.split('.') + if new_ver > prev_ver: + cand = qmake + prev_ver = new_ver + if cand: + self.env.QMAKE = cand + else: + self.fatal('Could not find qmake for qt4') + + qtbin = self.cmd_and_log(self.env.QMAKE + ['-query', 'QT_INSTALL_BINS']).strip() + os.sep + + def find_bin(lst, var): + if var in env: + return + for f in lst: + try: + ret = self.find_program(f, path_list=paths) + except self.errors.ConfigurationError: + pass + else: + env[var]=ret + break + + find_bin(['uic-qt3', 'uic3'], 'QT_UIC3') + find_bin(['uic-qt4', 'uic'], 'QT_UIC') + if not env.QT_UIC: + self.fatal('cannot find the uic compiler for qt4') + + self.start_msg('Checking for uic version') + uicver = self.cmd_and_log(env.QT_UIC + ["-version"], output=Context.BOTH) + uicver = ''.join(uicver).strip() + uicver = uicver.replace('Qt User Interface Compiler ','').replace('User Interface Compiler for Qt', '') + self.end_msg(uicver) + if uicver.find(' 3.') != -1: + self.fatal('this uic compiler is for qt3, add uic for qt4 to your path') + + find_bin(['moc-qt4', 'moc'], 'QT_MOC') + find_bin(['rcc-qt4', 'rcc'], 'QT_RCC') + find_bin(['lrelease-qt4', 'lrelease'], 'QT_LRELEASE') + find_bin(['lupdate-qt4', 'lupdate'], 'QT_LUPDATE') + + env['UIC3_ST']= '%s -o %s' + env['UIC_ST'] = '%s -o %s' + env['MOC_ST'] = '-o' + env['ui_PATTERN'] = 'ui_%s.h' + env['QT_LRELEASE_FLAGS'] = ['-silent'] + env.MOCCPPPATH_ST = '-I%s' + env.MOCDEFINES_ST = '-D%s' + +@conf +def find_qt4_libraries(self): + qtlibs = getattr(Options.options, 'qtlibs', None) or os.environ.get("QT4_LIBDIR") + if not qtlibs: + try: + qtlibs = self.cmd_and_log(self.env.QMAKE + ['-query', 'QT_INSTALL_LIBS']).strip() + except Errors.WafError: + qtdir = self.cmd_and_log(self.env.QMAKE + ['-query', 'QT_INSTALL_PREFIX']).strip() + os.sep + qtlibs = os.path.join(qtdir, 'lib') + self.msg('Found the Qt4 libraries in', qtlibs) + + qtincludes = os.environ.get("QT4_INCLUDES") or self.cmd_and_log(self.env.QMAKE + 
['-query', 'QT_INSTALL_HEADERS']).strip() + env = self.env + if not 'PKG_CONFIG_PATH' in os.environ: + os.environ['PKG_CONFIG_PATH'] = '%s:%s/pkgconfig:/usr/lib/qt4/lib/pkgconfig:/opt/qt4/lib/pkgconfig:/usr/lib/qt4/lib:/opt/qt4/lib' % (qtlibs, qtlibs) + + try: + if os.environ.get("QT4_XCOMPILE"): + raise self.errors.ConfigurationError() + self.check_cfg(atleast_pkgconfig_version='0.1') + except self.errors.ConfigurationError: + for i in self.qt4_vars: + uselib = i.upper() + if Utils.unversioned_sys_platform() == "darwin": + # Since at least qt 4.7.3 each library locates in separate directory + frameworkName = i + ".framework" + qtDynamicLib = os.path.join(qtlibs, frameworkName, i) + if os.path.exists(qtDynamicLib): + env.append_unique('FRAMEWORK_' + uselib, i) + self.msg('Checking for %s' % i, qtDynamicLib, 'GREEN') + else: + self.msg('Checking for %s' % i, False, 'YELLOW') + env.append_unique('INCLUDES_' + uselib, os.path.join(qtlibs, frameworkName, 'Headers')) + elif env.DEST_OS != "win32": + qtDynamicLib = os.path.join(qtlibs, "lib" + i + ".so") + qtStaticLib = os.path.join(qtlibs, "lib" + i + ".a") + if os.path.exists(qtDynamicLib): + env.append_unique('LIB_' + uselib, i) + self.msg('Checking for %s' % i, qtDynamicLib, 'GREEN') + elif os.path.exists(qtStaticLib): + env.append_unique('LIB_' + uselib, i) + self.msg('Checking for %s' % i, qtStaticLib, 'GREEN') + else: + self.msg('Checking for %s' % i, False, 'YELLOW') + + env.append_unique('LIBPATH_' + uselib, qtlibs) + env.append_unique('INCLUDES_' + uselib, qtincludes) + env.append_unique('INCLUDES_' + uselib, os.path.join(qtincludes, i)) + else: + # Release library names are like QtCore4 + for k in ("lib%s.a", "lib%s4.a", "%s.lib", "%s4.lib"): + lib = os.path.join(qtlibs, k % i) + if os.path.exists(lib): + env.append_unique('LIB_' + uselib, i + k[k.find("%s") + 2 : k.find('.')]) + self.msg('Checking for %s' % i, lib, 'GREEN') + break + else: + self.msg('Checking for %s' % i, False, 'YELLOW') + + env.append_unique('LIBPATH_' + uselib, qtlibs) + env.append_unique('INCLUDES_' + uselib, qtincludes) + env.append_unique('INCLUDES_' + uselib, os.path.join(qtincludes, i)) + + # Debug library names are like QtCore4d + uselib = i.upper() + "_debug" + for k in ("lib%sd.a", "lib%sd4.a", "%sd.lib", "%sd4.lib"): + lib = os.path.join(qtlibs, k % i) + if os.path.exists(lib): + env.append_unique('LIB_' + uselib, i + k[k.find("%s") + 2 : k.find('.')]) + self.msg('Checking for %s' % i, lib, 'GREEN') + break + else: + self.msg('Checking for %s' % i, False, 'YELLOW') + + env.append_unique('LIBPATH_' + uselib, qtlibs) + env.append_unique('INCLUDES_' + uselib, qtincludes) + env.append_unique('INCLUDES_' + uselib, os.path.join(qtincludes, i)) + else: + for i in self.qt4_vars_debug + self.qt4_vars: + self.check_cfg(package=i, args='--cflags --libs', mandatory=False) + +@conf +def simplify_qt4_libs(self): + # the libpaths make really long command-lines + # remove the qtcore ones from qtgui, etc + env = self.env + def process_lib(vars_, coreval): + for d in vars_: + var = d.upper() + if var == 'QTCORE': + continue + + value = env['LIBPATH_'+var] + if value: + core = env[coreval] + accu = [] + for lib in value: + if lib in core: + continue + accu.append(lib) + env['LIBPATH_'+var] = accu + + process_lib(self.qt4_vars, 'LIBPATH_QTCORE') + process_lib(self.qt4_vars_debug, 'LIBPATH_QTCORE_DEBUG') + +@conf +def add_qt4_rpath(self): + # rpath if wanted + env = self.env + if getattr(Options.options, 'want_rpath', False): + def process_rpath(vars_, coreval): + for d in 
vars_: + var = d.upper() + value = env['LIBPATH_'+var] + if value: + core = env[coreval] + accu = [] + for lib in value: + if var != 'QTCORE': + if lib in core: + continue + accu.append('-Wl,--rpath='+lib) + env['RPATH_'+var] = accu + process_rpath(self.qt4_vars, 'LIBPATH_QTCORE') + process_rpath(self.qt4_vars_debug, 'LIBPATH_QTCORE_DEBUG') + +@conf +def set_qt4_libs_to_check(self): + if not hasattr(self, 'qt4_vars'): + self.qt4_vars = QT4_LIBS + self.qt4_vars = Utils.to_list(self.qt4_vars) + if not hasattr(self, 'qt4_vars_debug'): + self.qt4_vars_debug = [a + '_debug' for a in self.qt4_vars] + self.qt4_vars_debug = Utils.to_list(self.qt4_vars_debug) + +@conf +def set_qt4_defines(self): + if sys.platform != 'win32': + return + for x in self.qt4_vars: + y = x[2:].upper() + self.env.append_unique('DEFINES_%s' % x.upper(), 'QT_%s_LIB' % y) + self.env.append_unique('DEFINES_%s_DEBUG' % x.upper(), 'QT_%s_LIB' % y) + +def options(opt): + """ + Command-line options + """ + opt.add_option('--want-rpath', action='store_true', default=False, dest='want_rpath', help='enable the rpath for qt libraries') + + opt.add_option('--header-ext', + type='string', + default='', + help='header extension for moc files', + dest='qt_header_ext') + + for i in 'qtdir qtbin qtlibs'.split(): + opt.add_option('--'+i, type='string', default='', dest=i) + + opt.add_option('--translate', action="store_true", help="collect translation strings", dest="trans_qt4", default=False) + diff -Nru lilv-0.24.4~dfsg0/waflib/extras/relocation.py lilv-0.24.6/waflib/extras/relocation.py --- lilv-0.24.4~dfsg0/waflib/extras/relocation.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/extras/relocation.py 2019-06-06 20:19:08.000000000 +0000 @@ -0,0 +1,85 @@ +#! /usr/bin/env python +# encoding: utf-8 + +""" +Waf 1.6 + +Try to detect if the project directory was relocated, and if it was, +change the node representing the project directory. Just call: + + waf configure build + +Note that if the project directory name changes, the signatures for the tasks using +files in that directory will change, causing a partial build. 
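+
+A sketch of the intended workflow (paths illustrative)::
+
+    $ mv /home/user/project /tmp/project
+    $ cd /tmp/project
+    $ waf configure build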
+""" + +import os +from waflib import Build, ConfigSet, Task, Utils, Errors +from waflib.TaskGen import feature, after_method + +EXTRA_LOCK = '.old_srcdir' + +old1 = Build.BuildContext.store +def store(self): + old1(self) + db = os.path.join(self.variant_dir, EXTRA_LOCK) + env = ConfigSet.ConfigSet() + env.SRCDIR = self.srcnode.abspath() + env.store(db) +Build.BuildContext.store = store + +old2 = Build.BuildContext.init_dirs +def init_dirs(self): + + if not (os.path.isabs(self.top_dir) and os.path.isabs(self.out_dir)): + raise Errors.WafError('The project was not configured: run "waf configure" first!') + + srcdir = None + db = os.path.join(self.variant_dir, EXTRA_LOCK) + env = ConfigSet.ConfigSet() + try: + env.load(db) + srcdir = env.SRCDIR + except: + pass + + if srcdir: + d = self.root.find_node(srcdir) + if d and srcdir != self.top_dir and getattr(d, 'children', ''): + srcnode = self.root.make_node(self.top_dir) + print("relocating the source directory %r -> %r" % (srcdir, self.top_dir)) + srcnode.children = {} + + for (k, v) in d.children.items(): + srcnode.children[k] = v + v.parent = srcnode + d.children = {} + + old2(self) + +Build.BuildContext.init_dirs = init_dirs + + +def uid(self): + try: + return self.uid_ + except AttributeError: + # this is not a real hot zone, but we want to avoid surprises here + m = Utils.md5() + up = m.update + up(self.__class__.__name__.encode()) + for x in self.inputs + self.outputs: + up(x.path_from(x.ctx.srcnode).encode()) + self.uid_ = m.digest() + return self.uid_ +Task.Task.uid = uid + +@feature('c', 'cxx', 'd', 'go', 'asm', 'fc', 'includes') +@after_method('propagate_uselib_vars', 'process_source') +def apply_incpaths(self): + lst = self.to_incnodes(self.to_list(getattr(self, 'includes', [])) + self.env['INCLUDES']) + self.includes_nodes = lst + bld = self.bld + self.env['INCPATHS'] = [x.is_child_of(bld.srcnode) and x.path_from(bld.bldnode) or x.abspath() for x in lst] + + diff -Nru lilv-0.24.4~dfsg0/waflib/extras/remote.py lilv-0.24.6/waflib/extras/remote.py --- lilv-0.24.4~dfsg0/waflib/extras/remote.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/extras/remote.py 2019-06-06 20:19:08.000000000 +0000 @@ -0,0 +1,327 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Remote Builds tool using rsync+ssh + +__author__ = "Jérôme Carretero " +__copyright__ = "Jérôme Carretero, 2013" + +""" +Simple Remote Builds +******************** + +This tool is an *experimental* tool (meaning, do not even try to pollute +the waf bug tracker with bugs in here, contact me directly) providing simple +remote builds. + +It uses rsync and ssh to perform the remote builds. +It is intended for performing cross-compilation on platforms where +a cross-compiler is either unavailable (eg. MacOS, QNX) a specific product +does not exist (eg. Windows builds using Visual Studio) or simply not installed. +This tool sends the sources and the waf script to the remote host, +and commands the usual waf execution. + +There are alternatives to using this tool, such as setting up shared folders, +logging on to remote machines, and building on the shared folders. +Electing one method or another depends on the size of the program. + + +Usage +===== + +1. Set your wscript file so it includes a list of variants, + e.g.:: + + from waflib import Utils + top = '.' 
+ out = 'build' + + variants = [ + 'linux_64_debug', + 'linux_64_release', + 'linux_32_debug', + 'linux_32_release', + ] + + from waflib.extras import remote + + def options(opt): + # normal stuff from here on + opt.load('compiler_c') + + def configure(conf): + if not conf.variant: + return + # normal stuff from here on + conf.load('compiler_c') + + def build(bld): + if not bld.variant: + return + # normal stuff from here on + bld(features='c cprogram', target='app', source='main.c') + + +2. Build the waf file, so it includes this tool, and put it in the current + directory + + .. code:: bash + + ./waf-light --tools=remote + +3. Set the host names to access the hosts: + + .. code:: bash + + export REMOTE_QNX=user@kiunix + +4. Setup the ssh server and ssh keys + + The ssh key should not be protected by a password, or it will prompt for it every time. + Create the key on the client: + + .. code:: bash + + ssh-keygen -t rsa -f foo.rsa + + Then copy foo.rsa.pub to the remote machine (user@kiunix:/home/user/.ssh/authorized_keys), + and make sure the permissions are correct (chmod go-w ~ ~/.ssh ~/.ssh/authorized_keys) + + A separate key for the build processes can be set in the environment variable WAF_SSH_KEY. + The tool will then use 'ssh-keyscan' to avoid prompting for remote hosts, so + be warned to use this feature on internal networks only (MITM). + + .. code:: bash + + export WAF_SSH_KEY=~/foo.rsa + +5. Perform the build: + + .. code:: bash + + waf configure_all build_all --remote + +""" + + +import getpass, os, re, sys +from collections import OrderedDict +from waflib import Context, Options, Utils, ConfigSet + +from waflib.Build import BuildContext, CleanContext, InstallContext, UninstallContext +from waflib.Configure import ConfigurationContext + + +is_remote = False +if '--remote' in sys.argv: + is_remote = True + sys.argv.remove('--remote') + +class init(Context.Context): + """ + Generates the *_all commands + """ + cmd = 'init' + fun = 'init' + def execute(self): + for x in list(Context.g_module.variants): + self.make_variant(x) + lst = ['remote'] + for k in Options.commands: + if k.endswith('_all'): + name = k.replace('_all', '') + for x in Context.g_module.variants: + lst.append('%s_%s' % (name, x)) + else: + lst.append(k) + del Options.commands[:] + Options.commands += lst + + def make_variant(self, x): + for y in (BuildContext, CleanContext, InstallContext, UninstallContext): + name = y.__name__.replace('Context','').lower() + class tmp(y): + cmd = name + '_' + x + fun = 'build' + variant = x + class tmp(ConfigurationContext): + cmd = 'configure_' + x + fun = 'configure' + variant = x + def __init__(self, **kw): + ConfigurationContext.__init__(self, **kw) + self.setenv(x) + +class remote(BuildContext): + cmd = 'remote' + fun = 'build' + + def get_ssh_hosts(self): + lst = [] + for v in Context.g_module.variants: + self.env.HOST = self.login_to_host(self.variant_to_login(v)) + cmd = Utils.subst_vars('${SSH_KEYSCAN} -t rsa,ecdsa ${HOST}', self.env) + out, err = self.cmd_and_log(cmd, output=Context.BOTH, quiet=Context.BOTH) + lst.append(out.strip()) + return lst + + def setup_private_ssh_key(self): + """ + When WAF_SSH_KEY points to a private key, a .ssh directory will be created in the build directory + Make sure that the ssh key does not prompt for a password + """ + key = os.environ.get('WAF_SSH_KEY', '') + if not key: + return + if not os.path.isfile(key): + self.fatal('Key in WAF_SSH_KEY must point to a valid file') + self.ssh_dir = os.path.join(self.path.abspath(), 'build', 
'.ssh') + self.ssh_hosts = os.path.join(self.ssh_dir, 'known_hosts') + self.ssh_key = os.path.join(self.ssh_dir, os.path.basename(key)) + self.ssh_config = os.path.join(self.ssh_dir, 'config') + for x in self.ssh_hosts, self.ssh_key, self.ssh_config: + if not os.path.isfile(x): + if not os.path.isdir(self.ssh_dir): + os.makedirs(self.ssh_dir) + Utils.writef(self.ssh_key, Utils.readf(key), 'wb') + os.chmod(self.ssh_key, 448) + + Utils.writef(self.ssh_hosts, '\n'.join(self.get_ssh_hosts())) + os.chmod(self.ssh_key, 448) + + Utils.writef(self.ssh_config, 'UserKnownHostsFile %s' % self.ssh_hosts, 'wb') + os.chmod(self.ssh_config, 448) + self.env.SSH_OPTS = ['-F', self.ssh_config, '-i', self.ssh_key] + self.env.append_value('RSYNC_SEND_OPTS', '--exclude=build/.ssh') + + def skip_unbuildable_variant(self): + # skip variants that cannot be built on this OS + for k in Options.commands: + a, _, b = k.partition('_') + if b in Context.g_module.variants: + c, _, _ = b.partition('_') + if c != Utils.unversioned_sys_platform(): + Options.commands.remove(k) + + def login_to_host(self, login): + return re.sub(r'(\w+@)', '', login) + + def variant_to_login(self, variant): + """linux_32_debug -> search env.LINUX_32 and then env.LINUX""" + x = variant[:variant.rfind('_')] + ret = os.environ.get('REMOTE_' + x.upper(), '') + if not ret: + x = x[:x.find('_')] + ret = os.environ.get('REMOTE_' + x.upper(), '') + if not ret: + ret = '%s@localhost' % getpass.getuser() + return ret + + def execute(self): + global is_remote + if not is_remote: + self.skip_unbuildable_variant() + else: + BuildContext.execute(self) + + def restore(self): + self.top_dir = os.path.abspath(Context.g_module.top) + self.srcnode = self.root.find_node(self.top_dir) + self.path = self.srcnode + + self.out_dir = os.path.join(self.top_dir, Context.g_module.out) + self.bldnode = self.root.make_node(self.out_dir) + self.bldnode.mkdir() + + self.env = ConfigSet.ConfigSet() + + def extract_groups_of_builds(self): + """Return a dict mapping each variants to the commands to build""" + self.vgroups = {} + for x in reversed(Options.commands): + _, _, variant = x.partition('_') + if variant in Context.g_module.variants: + try: + dct = self.vgroups[variant] + except KeyError: + dct = self.vgroups[variant] = OrderedDict() + try: + dct[variant].append(x) + except KeyError: + dct[variant] = [x] + Options.commands.remove(x) + + def custom_options(self, login): + try: + return Context.g_module.host_options[login] + except (AttributeError, KeyError): + return {} + + def recurse(self, *k, **kw): + self.env.RSYNC = getattr(Context.g_module, 'rsync', 'rsync -a --chmod=u+rwx') + self.env.SSH = getattr(Context.g_module, 'ssh', 'ssh') + self.env.SSH_KEYSCAN = getattr(Context.g_module, 'ssh_keyscan', 'ssh-keyscan') + try: + self.env.WAF = getattr(Context.g_module, 'waf') + except AttributeError: + try: + os.stat('waf') + except KeyError: + self.fatal('Put a waf file in the directory (./waf-light --tools=remote)') + else: + self.env.WAF = './waf' + + self.extract_groups_of_builds() + self.setup_private_ssh_key() + for k, v in self.vgroups.items(): + task = self(rule=rsync_and_ssh, always=True) + task.env.login = self.variant_to_login(k) + + task.env.commands = [] + for opt, value in v.items(): + task.env.commands += value + task.env.variant = task.env.commands[0].partition('_')[2] + for opt, value in self.custom_options(k): + task.env[opt] = value + self.jobs = len(self.vgroups) + + def make_mkdir_command(self, task): + return Utils.subst_vars('${SSH} ${SSH_OPTS} 
${login} "rm -fr ${remote_dir} && mkdir -p ${remote_dir}"', task.env) + + def make_send_command(self, task): + return Utils.subst_vars('${RSYNC} ${RSYNC_SEND_OPTS} -e "${SSH} ${SSH_OPTS}" ${local_dir} ${login}:${remote_dir}', task.env) + + def make_exec_command(self, task): + txt = '''${SSH} ${SSH_OPTS} ${login} "cd ${remote_dir} && ${WAF} ${commands}"''' + return Utils.subst_vars(txt, task.env) + + def make_save_command(self, task): + return Utils.subst_vars('${RSYNC} ${RSYNC_SAVE_OPTS} -e "${SSH} ${SSH_OPTS}" ${login}:${remote_dir_variant} ${build_dir}', task.env) + +def rsync_and_ssh(task): + + # remove a warning + task.uid_ = id(task) + + bld = task.generator.bld + + task.env.user, _, _ = task.env.login.partition('@') + task.env.hdir = Utils.to_hex(Utils.h_list((task.generator.path.abspath(), task.env.variant))) + task.env.remote_dir = '~%s/wafremote/%s' % (task.env.user, task.env.hdir) + task.env.local_dir = bld.srcnode.abspath() + '/' + + task.env.remote_dir_variant = '%s/%s/%s' % (task.env.remote_dir, Context.g_module.out, task.env.variant) + task.env.build_dir = bld.bldnode.abspath() + + ret = task.exec_command(bld.make_mkdir_command(task)) + if ret: + return ret + ret = task.exec_command(bld.make_send_command(task)) + if ret: + return ret + ret = task.exec_command(bld.make_exec_command(task)) + if ret: + return ret + ret = task.exec_command(bld.make_save_command(task)) + if ret: + return ret + diff -Nru lilv-0.24.4~dfsg0/waflib/extras/resx.py lilv-0.24.6/waflib/extras/resx.py --- lilv-0.24.4~dfsg0/waflib/extras/resx.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/extras/resx.py 2019-06-06 20:19:08.000000000 +0000 @@ -0,0 +1,35 @@ +#! /usr/bin/env python +# encoding: utf-8 + +import os +from waflib import Task +from waflib.TaskGen import extension + +def configure(conf): + conf.find_program(['resgen'], var='RESGEN') + conf.env.RESGENFLAGS = '/useSourcePath' + +@extension('.resx') +def resx_file(self, node): + """ + Bind the .resx extension to a resgen task + """ + if not getattr(self, 'cs_task', None): + self.bld.fatal('resx_file has no link task for use %r' % self) + + # Given assembly 'Foo' and file 'Sub/Dir/File.resx', create 'Foo.Sub.Dir.File.resources' + assembly = getattr(self, 'namespace', os.path.splitext(self.gen)[0]) + res = os.path.splitext(node.path_from(self.path))[0].replace('/', '.').replace('\\', '.') + out = self.path.find_or_declare(assembly + '.' + res + '.resources') + + tsk = self.create_task('resgen', node, out) + + self.cs_task.dep_nodes.extend(tsk.outputs) # dependency + self.env.append_value('RESOURCES', tsk.outputs[0].bldpath()) + +class resgen(Task.Task): + """ + Compile C# resource files + """ + color = 'YELLOW' + run_str = '${RESGEN} ${RESGENFLAGS} ${SRC} ${TGT}' diff -Nru lilv-0.24.4~dfsg0/waflib/extras/review.py lilv-0.24.6/waflib/extras/review.py --- lilv-0.24.4~dfsg0/waflib/extras/review.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/extras/review.py 2019-06-06 20:19:08.000000000 +0000 @@ -0,0 +1,325 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Laurent Birtz, 2011 +# moved the code into a separate tool (ita) + +""" +There are several things here: +- a different command-line option management making options persistent +- the review command to display the options set + +Assumptions: +- configuration options are not always added to the right group (and do not count on the users to do it...) 
+- the options are persistent between the executions (waf options are NOT persistent by design), even for the configuration +- when the options change, the build is invalidated (forcing a reconfiguration) +""" + +import os, textwrap, shutil +from waflib import Logs, Context, ConfigSet, Options, Build, Configure + +class Odict(dict): + """Ordered dictionary""" + def __init__(self, data=None): + self._keys = [] + dict.__init__(self) + if data: + # we were provided a regular dict + if isinstance(data, dict): + self.append_from_dict(data) + + # we were provided a tuple list + elif type(data) == list: + self.append_from_plist(data) + + # we were provided invalid input + else: + raise Exception("expected a dict or a tuple list") + + def append_from_dict(self, dict): + map(self.__setitem__, dict.keys(), dict.values()) + + def append_from_plist(self, plist): + for pair in plist: + if len(pair) != 2: + raise Exception("invalid pairs list") + for (k, v) in plist: + self.__setitem__(k, v) + + def __delitem__(self, key): + if not key in self._keys: + raise KeyError(key) + dict.__delitem__(self, key) + self._keys.remove(key) + + def __setitem__(self, key, item): + dict.__setitem__(self, key, item) + if key not in self._keys: + self._keys.append(key) + + def clear(self): + dict.clear(self) + self._keys = [] + + def copy(self): + return Odict(self.plist()) + + def items(self): + return zip(self._keys, self.values()) + + def keys(self): + return list(self._keys) # return a copy of the list + + def values(self): + return map(self.get, self._keys) + + def plist(self): + p = [] + for k, v in self.items(): + p.append( (k, v) ) + return p + + def __str__(self): + buf = [] + buf.append("{ ") + for k, v in self.items(): + buf.append('%r : %r, ' % (k, v)) + buf.append("}") + return ''.join(buf) + +review_options = Odict() +""" +Ordered dictionary mapping configuration option names to their optparse option. +""" + +review_defaults = {} +""" +Dictionary mapping configuration option names to their default value. +""" + +old_review_set = None +""" +Review set containing the configuration values before parsing the command line. +""" + +new_review_set = None +""" +Review set containing the configuration values after parsing the command line. +""" + +class OptionsReview(Options.OptionsContext): + def __init__(self, **kw): + super(self.__class__, self).__init__(**kw) + + def prepare_config_review(self): + """ + Find the configuration options that are reviewable, detach + their default value from their optparse object and store them + into the review dictionaries. 
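+
+		For example, an option declared with::
+
+			opt.add_option('--foo', action='store', default='bar', dest='foo')
+
+		ends up in review_options['foo'] with review_defaults['foo'] == 'bar',
+		and has its optparse default cleared, so that a value is recorded only
+		when it is given explicitly on the command line ('--foo' is purely
+		illustrative).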
+ """ + gr = self.get_option_group('configure options') + for opt in gr.option_list: + if opt.action != 'store' or opt.dest in ("out", "top"): + continue + review_options[opt.dest] = opt + review_defaults[opt.dest] = opt.default + if gr.defaults.has_key(opt.dest): + del gr.defaults[opt.dest] + opt.default = None + + def parse_args(self): + self.prepare_config_review() + self.parser.get_option('--prefix').help = 'installation prefix' + super(OptionsReview, self).parse_args() + Context.create_context('review').refresh_review_set() + +class ReviewContext(Context.Context): + '''reviews the configuration values''' + + cmd = 'review' + + def __init__(self, **kw): + super(self.__class__, self).__init__(**kw) + + out = Options.options.out + if not out: + out = getattr(Context.g_module, Context.OUT, None) + if not out: + out = Options.lockfile.replace('.lock-waf', '') + self.build_path = (os.path.isabs(out) and self.root or self.path).make_node(out).abspath() + """Path to the build directory""" + + self.cache_path = os.path.join(self.build_path, Build.CACHE_DIR) + """Path to the cache directory""" + + self.review_path = os.path.join(self.cache_path, 'review.cache') + """Path to the review cache file""" + + def execute(self): + """ + Display and store the review set. Invalidate the cache as required. + """ + if not self.compare_review_set(old_review_set, new_review_set): + self.invalidate_cache() + self.store_review_set(new_review_set) + print(self.display_review_set(new_review_set)) + + def invalidate_cache(self): + """Invalidate the cache to prevent bad builds.""" + try: + Logs.warn("Removing the cached configuration since the options have changed") + shutil.rmtree(self.cache_path) + except: + pass + + def refresh_review_set(self): + """ + Obtain the old review set and the new review set, and import the new set. + """ + global old_review_set, new_review_set + old_review_set = self.load_review_set() + new_review_set = self.update_review_set(old_review_set) + self.import_review_set(new_review_set) + + def load_review_set(self): + """ + Load and return the review set from the cache if it exists. + Otherwise, return an empty set. + """ + if os.path.isfile(self.review_path): + return ConfigSet.ConfigSet(self.review_path) + return ConfigSet.ConfigSet() + + def store_review_set(self, review_set): + """ + Store the review set specified in the cache. + """ + if not os.path.isdir(self.cache_path): + os.makedirs(self.cache_path) + review_set.store(self.review_path) + + def update_review_set(self, old_set): + """ + Merge the options passed on the command line with those imported + from the previous review set and return the corresponding + preview set. + """ + + # Convert value to string. It's important that 'None' maps to + # the empty string. + def val_to_str(val): + if val == None or val == '': + return '' + return str(val) + + new_set = ConfigSet.ConfigSet() + opt_dict = Options.options.__dict__ + + for name in review_options.keys(): + # the option is specified explicitly on the command line + if name in opt_dict: + # if the option is the default, pretend it was never specified + if val_to_str(opt_dict[name]) != val_to_str(review_defaults[name]): + new_set[name] = opt_dict[name] + # the option was explicitly specified in a previous command + elif name in old_set: + new_set[name] = old_set[name] + + return new_set + + def import_review_set(self, review_set): + """ + Import the actual value of the reviewable options in the option + dictionary, given the current review set. 
+ """ + for name in review_options.keys(): + if name in review_set: + value = review_set[name] + else: + value = review_defaults[name] + setattr(Options.options, name, value) + + def compare_review_set(self, set1, set2): + """ + Return true if the review sets specified are equal. + """ + if len(set1.keys()) != len(set2.keys()): + return False + for key in set1.keys(): + if not key in set2 or set1[key] != set2[key]: + return False + return True + + def display_review_set(self, review_set): + """ + Return the string representing the review set specified. + """ + term_width = Logs.get_term_cols() + lines = [] + for dest in review_options.keys(): + opt = review_options[dest] + name = ", ".join(opt._short_opts + opt._long_opts) + help = opt.help + actual = None + if dest in review_set: + actual = review_set[dest] + default = review_defaults[dest] + lines.append(self.format_option(name, help, actual, default, term_width)) + return "Configuration:\n\n" + "\n\n".join(lines) + "\n" + + def format_option(self, name, help, actual, default, term_width): + """ + Return the string representing the option specified. + """ + def val_to_str(val): + if val == None or val == '': + return "(void)" + return str(val) + + max_name_len = 20 + sep_len = 2 + + w = textwrap.TextWrapper() + w.width = term_width - 1 + if w.width < 60: + w.width = 60 + + out = "" + + # format the help + out += w.fill(help) + "\n" + + # format the name + name_len = len(name) + out += Logs.colors.CYAN + name + Logs.colors.NORMAL + + # set the indentation used when the value wraps to the next line + w.subsequent_indent = " ".rjust(max_name_len + sep_len) + w.width -= (max_name_len + sep_len) + + # the name string is too long, switch to the next line + if name_len > max_name_len: + out += "\n" + w.subsequent_indent + + # fill the remaining of the line with spaces + else: + out += " ".rjust(max_name_len + sep_len - name_len) + + # format the actual value, if there is one + if actual != None: + out += Logs.colors.BOLD + w.fill(val_to_str(actual)) + Logs.colors.NORMAL + "\n" + w.subsequent_indent + + # format the default value + default_fmt = val_to_str(default) + if actual != None: + default_fmt = "default: " + default_fmt + out += Logs.colors.NORMAL + w.fill(default_fmt) + Logs.colors.NORMAL + + return out + +# Monkey-patch ConfigurationContext.execute() to have it store the review set. +old_configure_execute = Configure.ConfigurationContext.execute +def new_configure_execute(self): + old_configure_execute(self) + Context.create_context('review').store_review_set(new_review_set) +Configure.ConfigurationContext.execute = new_configure_execute + diff -Nru lilv-0.24.4~dfsg0/waflib/extras/rst.py lilv-0.24.6/waflib/extras/rst.py --- lilv-0.24.4~dfsg0/waflib/extras/rst.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/extras/rst.py 2019-06-06 20:19:08.000000000 +0000 @@ -0,0 +1,260 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Jérôme Carretero, 2013 (zougloub) + +""" +reStructuredText support (experimental) + +Example:: + + def configure(conf): + conf.load('rst') + if not conf.env.RST2HTML: + conf.fatal('The program rst2html is required') + + def build(bld): + bld( + features = 'rst', + type = 'rst2html', # rst2html, rst2pdf, ... + source = 'index.rst', # mandatory, the source + deps = 'image.png', # to give additional non-trivial dependencies + ) + +By default the tool looks for a set of programs in PATH. +The tools are defined in `rst_progs`. 
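+
+The generated tasks also honour an `options` attribute of extra command-line
+flags, and the rst2html family tracks an optional `stylesheet` as a
+dependency; for instance (attribute values purely illustrative)::
+
+	bld(
+		features = 'rst',
+		type = 'rst2html',
+		source = 'index.rst',
+		options = '--halt=2',      # extra flags appended to the rst2* command
+		stylesheet = 'style.css',  # passed as --stylesheet, scanned as a dep
+		)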
+To configure with a special program use:: + + $ RST2HTML=/path/to/rst2html waf configure + +This tool is experimental; don't hesitate to contribute to it. + +""" + +import re +from waflib import Node, Utils, Task, Errors, Logs +from waflib.TaskGen import feature, before_method + +rst_progs = "rst2html rst2xetex rst2latex rst2xml rst2pdf rst2s5 rst2man rst2odt rst2rtf".split() + +def parse_rst_node(task, node, nodes, names, seen, dirs=None): + # TODO add extensibility, to handle custom rst include tags... + if dirs is None: + dirs = (node.parent,node.get_bld().parent) + + if node in seen: + return + seen.append(node) + code = node.read() + re_rst = re.compile(r'^\s*.. ((?P\|\S+\|) )?(?Pinclude|image|figure):: (?P.*)$', re.M) + for match in re_rst.finditer(code): + ipath = match.group('file') + itype = match.group('type') + Logs.debug('rst: visiting %s: %s', itype, ipath) + found = False + for d in dirs: + Logs.debug('rst: looking for %s in %s', ipath, d.abspath()) + found = d.find_node(ipath) + if found: + Logs.debug('rst: found %s as %s', ipath, found.abspath()) + nodes.append((itype, found)) + if itype == 'include': + parse_rst_node(task, found, nodes, names, seen) + break + if not found: + names.append((itype, ipath)) + +class docutils(Task.Task): + """ + Compile a rst file. + """ + + def scan(self): + """ + A recursive regex-based scanner that finds rst dependencies. + """ + + nodes = [] + names = [] + seen = [] + + node = self.inputs[0] + + if not node: + return (nodes, names) + + parse_rst_node(self, node, nodes, names, seen) + + Logs.debug('rst: %r: found the following file deps: %r', self, nodes) + if names: + Logs.warn('rst: %r: could not find the following file deps: %r', self, names) + + return ([v for (t,v) in nodes], [v for (t,v) in names]) + + def check_status(self, msg, retcode): + """ + Check an exit status and raise an error with a particular message + + :param msg: message to display if the code is non-zero + :type msg: string + :param retcode: condition + :type retcode: boolean + """ + if retcode != 0: + raise Errors.WafError('%r command exit status %r' % (msg, retcode)) + + def run(self): + """ + Runs the rst compilation using docutils + """ + raise NotImplementedError() + +class rst2html(docutils): + color = 'BLUE' + + def __init__(self, *args, **kw): + docutils.__init__(self, *args, **kw) + self.command = self.generator.env.RST2HTML + self.attributes = ['stylesheet'] + + def scan(self): + nodes, names = docutils.scan(self) + + for attribute in self.attributes: + stylesheet = getattr(self.generator, attribute, None) + if stylesheet is not None: + ssnode = self.generator.to_nodes(stylesheet)[0] + nodes.append(ssnode) + Logs.debug('rst: adding dep to %s %s', attribute, stylesheet) + + return nodes, names + + def run(self): + cwdn = self.outputs[0].parent + src = self.inputs[0].path_from(cwdn) + dst = self.outputs[0].path_from(cwdn) + + cmd = self.command + [src, dst] + cmd += Utils.to_list(getattr(self.generator, 'options', [])) + for attribute in self.attributes: + stylesheet = getattr(self.generator, attribute, None) + if stylesheet is not None: + stylesheet = self.generator.to_nodes(stylesheet)[0] + cmd += ['--%s' % attribute, stylesheet.path_from(cwdn)] + + return self.exec_command(cmd, cwd=cwdn.abspath()) + +class rst2s5(rst2html): + def __init__(self, *args, **kw): + rst2html.__init__(self, *args, **kw) + self.command = self.generator.env.RST2S5 + self.attributes = ['stylesheet'] + +class rst2latex(rst2html): + def __init__(self, *args, **kw): + 
rst2html.__init__(self, *args, **kw) + self.command = self.generator.env.RST2LATEX + self.attributes = ['stylesheet'] + +class rst2xetex(rst2html): + def __init__(self, *args, **kw): + rst2html.__init__(self, *args, **kw) + self.command = self.generator.env.RST2XETEX + self.attributes = ['stylesheet'] + +class rst2pdf(docutils): + color = 'BLUE' + def run(self): + cwdn = self.outputs[0].parent + src = self.inputs[0].path_from(cwdn) + dst = self.outputs[0].path_from(cwdn) + + cmd = self.generator.env.RST2PDF + [src, '-o', dst] + cmd += Utils.to_list(getattr(self.generator, 'options', [])) + + return self.exec_command(cmd, cwd=cwdn.abspath()) + + +@feature('rst') +@before_method('process_source') +def apply_rst(self): + """ + Create :py:class:`rst` or other rst-related task objects + """ + + if self.target: + if isinstance(self.target, Node.Node): + tgt = self.target + elif isinstance(self.target, str): + tgt = self.path.get_bld().make_node(self.target) + else: + self.bld.fatal("rst: Don't know how to build target name %s which is not a string or Node for %s" % (self.target, self)) + else: + tgt = None + + tsk_type = getattr(self, 'type', None) + + src = self.to_nodes(self.source) + assert len(src) == 1 + src = src[0] + + if tsk_type is not None and tgt is None: + if tsk_type.startswith('rst2'): + ext = tsk_type[4:] + else: + self.bld.fatal("rst: Could not detect the output file extension for %s" % self) + tgt = src.change_ext('.%s' % ext) + elif tsk_type is None and tgt is not None: + out = tgt.name + ext = out[out.rfind('.')+1:] + self.type = 'rst2' + ext + elif tsk_type is not None and tgt is not None: + # the user knows what he wants + pass + else: + self.bld.fatal("rst: Need to indicate task type or target name for %s" % self) + + deps_lst = [] + + if getattr(self, 'deps', None): + deps = self.to_list(self.deps) + for filename in deps: + n = self.path.find_resource(filename) + if not n: + self.bld.fatal('Could not find %r for %r' % (filename, self)) + if not n in deps_lst: + deps_lst.append(n) + + try: + task = self.create_task(self.type, src, tgt) + except KeyError: + self.bld.fatal("rst: Task of type %s not implemented (created by %s)" % (self.type, self)) + + task.env = self.env + + # add the manual dependencies + if deps_lst: + try: + lst = self.bld.node_deps[task.uid()] + for n in deps_lst: + if not n in lst: + lst.append(n) + except KeyError: + self.bld.node_deps[task.uid()] = deps_lst + + inst_to = getattr(self, 'install_path', None) + if inst_to: + self.install_task = self.add_install_files(install_to=inst_to, install_from=task.outputs[:]) + + self.source = [] + +def configure(self): + """ + Try to find the rst programs. + + Do not raise any error if they are not found. + You'll have to use additional code in configure() to die + if programs were not found. + """ + for p in rst_progs: + self.find_program(p, mandatory=False) + diff -Nru lilv-0.24.4~dfsg0/waflib/extras/run_do_script.py lilv-0.24.6/waflib/extras/run_do_script.py --- lilv-0.24.4~dfsg0/waflib/extras/run_do_script.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/extras/run_do_script.py 2019-06-06 20:19:08.000000000 +0000 @@ -0,0 +1,139 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Hans-Martin von Gaudecker, 2012 + +""" +Run a Stata do-script in the directory specified by **ctx.bldnode**. The +first and only argument will be the name of the do-script (no extension), +which can be accessed inside the do-script by the local macro `1'. Useful +for keeping a log file. 
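+
+On Linux, for instance, the resulting command has the form (paths here are
+illustrative; the executable name and flags differ per platform, see below)::
+
+	stata-mp -b -q do /path/to/some_script.do some_script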
+
+The tool uses the log file that is automatically kept by Stata only
+for error-catching purposes; it will be destroyed if the task finishes
+without error. In case of an error in **some_script.do**, you can inspect
+it as **some_script.log** in the **ctx.bldnode** directory.
+
+Note that Stata will not return an error code if it exits abnormally --
+catching errors relies on parsing the log file mentioned before. Should
+the parser behave incorrectly, please send an email to hmgaudecker [at] gmail.
+
+**WARNING**
+
+	The tool will not work if multiple do-scripts of the same name---but in
+	different directories---are run at the same time! Avoid this situation.
+
+Usage::
+
+	ctx(features='run_do_script',
+		source='some_script.do',
+		target=['some_table.tex', 'some_figure.eps'],
+		deps='some_data.csv')
+"""
+
+
+import os, re, sys
+from waflib import Task, TaskGen, Logs
+
+if sys.platform == 'darwin':
+	STATA_COMMANDS = ['Stata64MP', 'StataMP',
+				'Stata64SE', 'StataSE',
+				'Stata64', 'Stata']
+	STATAFLAGS = '-e -q do'
+	STATAENCODING = 'MacRoman'
+elif sys.platform.startswith('linux'):
+	STATA_COMMANDS = ['stata-mp', 'stata-se', 'stata']
+	STATAFLAGS = '-b -q do'
+	# Not sure whether this is correct...
+	STATAENCODING = 'Latin-1'
+elif sys.platform.lower().startswith('win'):
+	STATA_COMMANDS = ['StataMP-64', 'StataMP-ia',
+				'StataMP', 'StataSE-64',
+				'StataSE-ia', 'StataSE',
+				'Stata-64', 'Stata-ia',
+				'Stata.e', 'WMPSTATA',
+				'WSESTATA', 'WSTATA']
+	STATAFLAGS = '/e do'
+	STATAENCODING = 'Latin-1'
+else:
+	raise Exception("Unknown sys.platform: %s " % sys.platform)
+
+def configure(ctx):
+	ctx.find_program(STATA_COMMANDS, var='STATACMD', errmsg="""\n
+No Stata executable found!\n\n
+If Stata is needed:\n
+	1) Check the settings of your system path.
+	2) Note we are looking for Stata executables called: %s
+	   If yours has a different name, please report to hmgaudecker [at] gmail\n
+Else:\n
+	Do not load the 'run_do_script' tool in the main wscript.\n\n""" % STATA_COMMANDS)
+	ctx.env.STATAFLAGS = STATAFLAGS
+	ctx.env.STATAENCODING = STATAENCODING
+
+class run_do_script_base(Task.Task):
+	"""Run a Stata do-script from the bldnode directory."""
+	run_str = '"${STATACMD}" ${STATAFLAGS} "${SRC[0].abspath()}" "${DOFILETRUNK}"'
+	shell = True
+
+class run_do_script(run_do_script_base):
+	"""Use the log file automatically kept by Stata for error-catching.
+	Erase it if the task finishes without error. If not, it will show
+	up as do_script.log in the bldnode directory.
+	"""
+	def run(self):
+		run_do_script_base.run(self)
+		ret, log_tail = self.check_erase_log_file()
+		if ret:
+			Logs.error("""Running Stata on %r failed with code %r.\n\nCheck the log file %s, last 10 lines\n\n%s\n\n\n""",
+				self.inputs[0], ret, self.env.LOGFILEPATH, log_tail)
+		return ret
+
+	def check_erase_log_file(self):
+		"""Parse Stata's default log file and erase it if everything okay.
+
+		Parser is based on Brendan Halpin's shell script found here:
+		http://teaching.sociology.ul.ie/bhalpin/wordpress/?p=122
+		"""
+
+		if sys.version_info.major >= 3:
+			kwargs = {'file': self.env.LOGFILEPATH, 'mode': 'r', 'encoding': self.env.STATAENCODING}
+		else:
+			kwargs = {'name': self.env.LOGFILEPATH, 'mode': 'r'}
+		with open(**kwargs) as log:
+			log_tail = log.readlines()[-10:]
+			for line in log_tail:
+				error_found = re.match(r"r\(([0-9]+)\)", line)
+				if error_found:
+					return error_found.group(1), ''.join(log_tail)
+				else:
+					pass
+		# Only end up here if the parser did not identify an error.
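+		# Stata signals an error with a line of the form "r(<number>)" in the
+		# log (matched by the regex above); reaching this point means no such
+		# line was found in the tail, so the log can safely be removed.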
+		os.remove(self.env.LOGFILEPATH)
+		return None, None
+
+
+@TaskGen.feature('run_do_script')
+@TaskGen.before_method('process_source')
+def apply_run_do_script(tg):
+	"""Task generator customising the options etc. to call Stata in batch
+	mode for running a do-script.
+	"""
+
+	# Convert sources and targets to nodes
+	src_node = tg.path.find_resource(tg.source)
+	tgt_nodes = [tg.path.find_or_declare(t) for t in tg.to_list(tg.target)]
+
+	tsk = tg.create_task('run_do_script', src=src_node, tgt=tgt_nodes)
+	tsk.env.DOFILETRUNK = os.path.splitext(src_node.name)[0]
+	tsk.env.LOGFILEPATH = os.path.join(tg.bld.bldnode.abspath(), '%s.log' % (tsk.env.DOFILETRUNK))
+
+	# dependencies (if the attribute 'deps' changes, trigger a recompilation)
+	for x in tg.to_list(getattr(tg, 'deps', [])):
+		node = tg.path.find_resource(x)
+		if not node:
+			tg.bld.fatal('Could not find dependency %r for running %r' % (x, src_node.abspath()))
+		tsk.dep_nodes.append(node)
+	Logs.debug('deps: found dependencies %r for running %r', tsk.dep_nodes, src_node.abspath())
+
+	# Bypass the execution of process_source by setting the source to an empty list
+	tg.source = []
+
diff -Nru lilv-0.24.4~dfsg0/waflib/extras/run_m_script.py lilv-0.24.6/waflib/extras/run_m_script.py
--- lilv-0.24.4~dfsg0/waflib/extras/run_m_script.py	1970-01-01 00:00:00.000000000 +0000
+++ lilv-0.24.6/waflib/extras/run_m_script.py	2019-06-06 20:19:08.000000000 +0000
@@ -0,0 +1,88 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Hans-Martin von Gaudecker, 2012
+
+"""
+Run a Matlab script.
+
+Note that the script is run in the directory where it lives -- Matlab won't
+allow it any other way.
+
+For error-catching purposes, the tool keeps its own log file, which is
+destroyed if the task finishes without error. If not, it will show up as
+mscript_[index].log in the bldnode directory.
+
+Usage::
+
+	ctx(features='run_m_script',
+		source='some_script.m',
+		target=['some_table.tex', 'some_figure.eps'],
+		deps='some_data.mat')
+"""
+
+import os, sys
+from waflib import Task, TaskGen, Logs
+
+MATLAB_COMMANDS = ['matlab']
+
+def configure(ctx):
+	ctx.find_program(MATLAB_COMMANDS, var='MATLABCMD', errmsg = """\n
+No Matlab executable found!\n\n
+If Matlab is needed:\n
+	1) Check the settings of your system path.
+	2) Note we are looking for Matlab executables called: %s
+	   If yours has a different name, please report to hmgaudecker [at] gmail\n
+Else:\n
+	Do not load the 'run_m_script' tool in the main wscript.\n\n""" % MATLAB_COMMANDS)
+	ctx.env.MATLABFLAGS = '-wait -nojvm -nosplash -minimize'
+
+class run_m_script_base(Task.Task):
+	"""Run a Matlab script."""
+	run_str = '"${MATLABCMD}" ${MATLABFLAGS} -logfile "${LOGFILEPATH}" -r "try, ${MSCRIPTTRUNK}, exit(0), catch err, disp(err.getReport()), exit(1), end"'
+	shell = True
+
+class run_m_script(run_m_script_base):
+	"""Erase the overall Matlab log file if everything went okay, else raise an
+	error and print its last 10 lines.
+	"""
+	def run(self):
+		ret = run_m_script_base.run(self)
+		logfile = self.env.LOGFILEPATH
+		if ret:
+			mode = 'r'
+			if sys.version_info.major >= 3:
+				mode = 'rb'
+			with open(logfile, mode=mode) as f:
+				tail = f.readlines()[-10:]
+			Logs.error("""Running Matlab on %r returned the error %r\n\nCheck the log file %s, last 10 lines\n\n%s\n\n\n""",
+				self.inputs[0], ret, logfile, '\n'.join(tail))
+		else:
+			os.remove(logfile)
+		return ret
+
+@TaskGen.feature('run_m_script')
+@TaskGen.before_method('process_source')
+def apply_run_m_script(tg):
+	"""Task generator customising the options etc. to call Matlab in batch
+	mode for running an m-script.
+	"""
+
+	# Convert sources and targets to nodes
+	src_node = tg.path.find_resource(tg.source)
+	tgt_nodes = [tg.path.find_or_declare(t) for t in tg.to_list(tg.target)]
+
+	tsk = tg.create_task('run_m_script', src=src_node, tgt=tgt_nodes)
+	tsk.cwd = src_node.parent.abspath()
+	tsk.env.MSCRIPTTRUNK = os.path.splitext(src_node.name)[0]
+	tsk.env.LOGFILEPATH = os.path.join(tg.bld.bldnode.abspath(), '%s_%d.log' % (tsk.env.MSCRIPTTRUNK, tg.idx))
+
+	# dependencies (if the attribute 'deps' changes, trigger a recompilation)
+	for x in tg.to_list(getattr(tg, 'deps', [])):
+		node = tg.path.find_resource(x)
+		if not node:
+			tg.bld.fatal('Could not find dependency %r for running %r' % (x, src_node.abspath()))
+		tsk.dep_nodes.append(node)
+	Logs.debug('deps: found dependencies %r for running %r', tsk.dep_nodes, src_node.abspath())
+
+	# Bypass the execution of process_source by setting the source to an empty list
+	tg.source = []
diff -Nru lilv-0.24.4~dfsg0/waflib/extras/run_py_script.py lilv-0.24.6/waflib/extras/run_py_script.py
--- lilv-0.24.4~dfsg0/waflib/extras/run_py_script.py	1970-01-01 00:00:00.000000000 +0000
+++ lilv-0.24.6/waflib/extras/run_py_script.py	2019-06-06 20:19:08.000000000 +0000
@@ -0,0 +1,104 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Hans-Martin von Gaudecker, 2012
+
+"""
+Run a Python script in the directory specified by **ctx.bldnode**.
+
+Select a Python version by specifying the **version** keyword for
+the task generator instance as integer 2 or 3. Default is 3.
+
+If the build environment has an attribute "PROJECT_PATHS" with
+a key "PROJECT_ROOT", its value will be appended to the PYTHONPATH.
+The same happens for a string passed via the optional **add_to_pythonpath**
+keyword (appended after the PROJECT_ROOT).
+
+Usage::
+
+	ctx(features='run_py_script', version=3,
+		source='some_script.py',
+		target=['some_table.tex', 'some_figure.eps'],
+		deps='some_data.csv',
+		add_to_pythonpath='src/some/library')
+"""
+
+import os, re
+from waflib import Task, TaskGen, Logs
+
+
+def configure(conf):
+	"""TODO: Might need to be updated for Windows once
+	"PEP 397":http://www.python.org/dev/peps/pep-0397/ is settled.
+	"""
+	conf.find_program('python', var='PY2CMD', mandatory=False)
+	conf.find_program('python3', var='PY3CMD', mandatory=False)
+	if not conf.env.PY2CMD and not conf.env.PY3CMD:
+		conf.fatal("No Python interpreter found!")
+
+class run_py_2_script(Task.Task):
+	"""Run a Python 2 script."""
+	run_str = '${PY2CMD} ${SRC[0].abspath()}'
+	shell=True
+
+class run_py_3_script(Task.Task):
+	"""Run a Python 3 script."""
+	run_str = '${PY3CMD} ${SRC[0].abspath()}'
+	shell=True
+
+@TaskGen.feature('run_py_script')
+@TaskGen.before_method('process_source')
+def apply_run_py_script(tg):
+	"""Task generator for running either Python 2 or Python 3 on a single
+	script.
+
+	Attributes:
+
+	* source -- A **single** source node or string. (required)
+	* target -- A single target or list of targets (nodes or strings)
+	* deps -- A single dependency or list of dependencies (nodes or strings)
+	* add_to_pythonpath -- A string that will be appended to the PYTHONPATH environment variable
+
+	If the build environment has an attribute "PROJECT_PATHS" with
+	a key "PROJECT_ROOT", its value will be appended to the PYTHONPATH.
+	"""
+
+	# Set the Python version to use, default to 3.
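+	# (the integer selects one of the run_py_2_script/run_py_3_script task
+	# classes defined above, i.e. whether PY2CMD or PY3CMD runs the script)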
+	v = getattr(tg, 'version', 3)
+	if v not in (2, 3):
+		raise ValueError("Specify the 'version' attribute for run_py_script task generator as integer 2 or 3.\n Got: %s" %v)
+
+	# Convert sources and targets to nodes
+	src_node = tg.path.find_resource(tg.source)
+	tgt_nodes = [tg.path.find_or_declare(t) for t in tg.to_list(tg.target)]
+
+	# Create the task.
+	tsk = tg.create_task('run_py_%d_script' %v, src=src_node, tgt=tgt_nodes)
+
+	# custom execution environment
+	# TODO use a list and os.sep.join(lst) at the end instead of concatenating strings
+	tsk.env.env = dict(os.environ)
+	tsk.env.env['PYTHONPATH'] = tsk.env.env.get('PYTHONPATH', '')
+	project_paths = getattr(tsk.env, 'PROJECT_PATHS', None)
+	if project_paths and 'PROJECT_ROOT' in project_paths:
+		tsk.env.env['PYTHONPATH'] += os.pathsep + project_paths['PROJECT_ROOT'].abspath()
+	if getattr(tg, 'add_to_pythonpath', None):
+		tsk.env.env['PYTHONPATH'] += os.pathsep + tg.add_to_pythonpath
+
+	# Clean up the PYTHONPATH -- replace double occurrences of path separator
+	tsk.env.env['PYTHONPATH'] = re.sub(os.pathsep + '+', os.pathsep, tsk.env.env['PYTHONPATH'])
+
+	# Clean up the PYTHONPATH -- doesn't like starting with path separator
+	if tsk.env.env['PYTHONPATH'].startswith(os.pathsep):
+		tsk.env.env['PYTHONPATH'] = tsk.env.env['PYTHONPATH'][1:]
+
+	# dependencies (if the attribute 'deps' changes, trigger a recompilation)
+	for x in tg.to_list(getattr(tg, 'deps', [])):
+		node = tg.path.find_resource(x)
+		if not node:
+			tg.bld.fatal('Could not find dependency %r for running %r' % (x, src_node.abspath()))
+		tsk.dep_nodes.append(node)
+	Logs.debug('deps: found dependencies %r for running %r', tsk.dep_nodes, src_node.abspath())
+
+	# Bypass the execution of process_source by setting the source to an empty list
+	tg.source = []
+
diff -Nru lilv-0.24.4~dfsg0/waflib/extras/run_r_script.py lilv-0.24.6/waflib/extras/run_r_script.py
--- lilv-0.24.4~dfsg0/waflib/extras/run_r_script.py	1970-01-01 00:00:00.000000000 +0000
+++ lilv-0.24.6/waflib/extras/run_r_script.py	2019-06-06 20:19:08.000000000 +0000
@@ -0,0 +1,86 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Hans-Martin von Gaudecker, 2012
+
+"""
+Run an R script in the directory specified by **ctx.bldnode**.
+
+For error-catching purposes, the tool keeps its own log file, which is
+destroyed if the task finishes without error. If not, it will show up as
+rscript_[index].log in the bldnode directory.
+
+Usage::
+
+	ctx(features='run_r_script',
+		source='some_script.r',
+		target=['some_table.tex', 'some_figure.eps'],
+		deps='some_data.csv')
+"""
+
+
+import os, sys
+from waflib import Task, TaskGen, Logs
+
+R_COMMANDS = ['RTerm', 'R', 'r']
+
+def configure(ctx):
+	ctx.find_program(R_COMMANDS, var='RCMD', errmsg = """\n
+No R executable found!\n\n
+If R is needed:\n
+	1) Check the settings of your system path.
+	2) Note we are looking for R executables called: %s
+	   If yours has a different name, please report to hmgaudecker [at] gmail\n
+Else:\n
+	Do not load the 'run_r_script' tool in the main wscript.\n\n""" % R_COMMANDS)
+	ctx.env.RFLAGS = 'CMD BATCH --slave'
+
+class run_r_script_base(Task.Task):
+	"""Run an R script."""
+	run_str = '"${RCMD}" ${RFLAGS} "${SRC[0].abspath()}" "${LOGFILEPATH}"'
+	shell = True
+
+class run_r_script(run_r_script_base):
+	"""Erase the overall R log file if everything went okay, else raise an
+	error and print its last 10 lines.
+ """ + def run(self): + ret = run_r_script_base.run(self) + logfile = self.env.LOGFILEPATH + if ret: + mode = 'r' + if sys.version_info.major >= 3: + mode = 'rb' + with open(logfile, mode=mode) as f: + tail = f.readlines()[-10:] + Logs.error("""Running R on %r returned the error %r\n\nCheck the log file %s, last 10 lines\n\n%s\n\n\n""", + self.inputs[0], ret, logfile, '\n'.join(tail)) + else: + os.remove(logfile) + return ret + + +@TaskGen.feature('run_r_script') +@TaskGen.before_method('process_source') +def apply_run_r_script(tg): + """Task generator customising the options etc. to call R in batch + mode for running a R script. + """ + + # Convert sources and targets to nodes + src_node = tg.path.find_resource(tg.source) + tgt_nodes = [tg.path.find_or_declare(t) for t in tg.to_list(tg.target)] + + tsk = tg.create_task('run_r_script', src=src_node, tgt=tgt_nodes) + tsk.env.LOGFILEPATH = os.path.join(tg.bld.bldnode.abspath(), '%s_%d.log' % (os.path.splitext(src_node.name)[0], tg.idx)) + + # dependencies (if the attribute 'deps' changes, trigger a recompilation) + for x in tg.to_list(getattr(tg, 'deps', [])): + node = tg.path.find_resource(x) + if not node: + tg.bld.fatal('Could not find dependency %r for running %r' % (x, src_node.abspath())) + tsk.dep_nodes.append(node) + Logs.debug('deps: found dependencies %r for running %r', tsk.dep_nodes, src_node.abspath()) + + # Bypass the execution of process_source by setting the source to an empty list + tg.source = [] + diff -Nru lilv-0.24.4~dfsg0/waflib/extras/sas.py lilv-0.24.6/waflib/extras/sas.py --- lilv-0.24.4~dfsg0/waflib/extras/sas.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/extras/sas.py 2019-06-06 20:19:08.000000000 +0000 @@ -0,0 +1,71 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Mark Coggeshall, 2010 + +"SAS support" + +import os +from waflib import Task, Errors, Logs +from waflib.TaskGen import feature, before_method + +sas_fun, _ = Task.compile_fun('sas -sysin ${SRCFILE} -log ${LOGFILE} -print ${LSTFILE}', shell=False) + +class sas(Task.Task): + vars = ['SAS', 'SASFLAGS'] + def run(task): + command = 'SAS' + fun = sas_fun + + node = task.inputs[0] + logfilenode = node.change_ext('.log') + lstfilenode = node.change_ext('.lst') + + # set the cwd + task.cwd = task.inputs[0].parent.get_src().abspath() + Logs.debug('runner: %r on %r', command, node) + + SASINPUTS = node.parent.get_bld().abspath() + os.pathsep + node.parent.get_src().abspath() + os.pathsep + task.env.env = {'SASINPUTS': SASINPUTS} + + task.env.SRCFILE = node.abspath() + task.env.LOGFILE = logfilenode.abspath() + task.env.LSTFILE = lstfilenode.abspath() + ret = fun(task) + if ret: + Logs.error('Running %s on %r returned a non-zero exit', command, node) + Logs.error('SRCFILE = %r', node) + Logs.error('LOGFILE = %r', logfilenode) + Logs.error('LSTFILE = %r', lstfilenode) + return ret + +@feature('sas') +@before_method('process_source') +def apply_sas(self): + if not getattr(self, 'type', None) in ('sas',): + self.type = 'sas' + + self.env['logdir'] = getattr(self, 'logdir', 'log') + self.env['lstdir'] = getattr(self, 'lstdir', 'lst') + + deps_lst = [] + + if getattr(self, 'deps', None): + deps = self.to_list(self.deps) + for filename in deps: + n = self.path.find_resource(filename) + if not n: + n = self.bld.root.find_resource(filename) + if not n: + raise Errors.WafError('cannot find input file %s for processing' % filename) + if not n in deps_lst: + deps_lst.append(n) + + for node in self.to_nodes(self.source): + if self.type == 'sas': + task = 
self.create_task('sas', src=node) + task.dep_nodes = deps_lst + self.source = [] + +def configure(self): + self.find_program('sas', var='SAS', mandatory=False) + diff -Nru lilv-0.24.4~dfsg0/waflib/extras/satellite_assembly.py lilv-0.24.6/waflib/extras/satellite_assembly.py --- lilv-0.24.4~dfsg0/waflib/extras/satellite_assembly.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/extras/satellite_assembly.py 2019-06-06 20:19:08.000000000 +0000 @@ -0,0 +1,57 @@ +#!/usr/bin/python +# encoding: utf-8 +# vim: tabstop=4 noexpandtab + +""" +Create a satellite assembly from "*.??.txt" files. ?? stands for a language code. + +The projects Resources subfolder contains resources.??.txt string files for several languages. +The build folder will hold the satellite assemblies as ./??/ExeName.resources.dll + +#gen becomes template (It is called gen because it also uses resx.py). +bld(source='Resources/resources.de.txt',gen=ExeName) +""" + +import os, re +from waflib import Task +from waflib.TaskGen import feature,before_method + +class al(Task.Task): + run_str = '${AL} ${ALFLAGS}' + +@feature('satellite_assembly') +@before_method('process_source') +def satellite_assembly(self): + if not getattr(self, 'gen', None): + self.bld.fatal('satellite_assembly needs a template assembly provided with the "gen" parameter') + res_lang = re.compile(r'(.*)\.(\w\w)\.(?:resx|txt)',flags=re.I) + + # self.source can contain node objects, so this will break in one way or another + self.source = self.to_list(self.source) + for i, x in enumerate(self.source): + #x = 'resources/resources.de.resx' + #x = 'resources/resources.de.txt' + mo = res_lang.match(x) + if mo: + template = os.path.splitext(self.gen)[0] + templatedir, templatename = os.path.split(template) + res = mo.group(1) + lang = mo.group(2) + #./Resources/resources.de.resources + resources = self.path.find_or_declare(res+ '.' 
+ lang + '.resources') + self.create_task('resgen', self.to_nodes(x), [resources]) + #./de/Exename.resources.dll + satellite = self.path.find_or_declare(os.path.join(templatedir,lang,templatename) + '.resources.dll') + tsk = self.create_task('al',[resources],[satellite]) + tsk.env.append_value('ALFLAGS','/template:'+os.path.join(self.path.relpath(),self.gen)) + tsk.env.append_value('ALFLAGS','/embed:'+resources.relpath()) + tsk.env.append_value('ALFLAGS','/culture:'+lang) + tsk.env.append_value('ALFLAGS','/out:'+satellite.relpath()) + self.source[i] = None + # remove the None elements that we just substituted + self.source = list(filter(lambda x:x, self.source)) + +def configure(ctx): + ctx.find_program('al', var='AL', mandatory=True) + ctx.load('resx') + diff -Nru lilv-0.24.4~dfsg0/waflib/extras/scala.py lilv-0.24.6/waflib/extras/scala.py --- lilv-0.24.4~dfsg0/waflib/extras/scala.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/extras/scala.py 2019-06-06 20:19:08.000000000 +0000 @@ -0,0 +1,128 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Thomas Nagy, 2010 (ita) + +""" +Scala support + +scalac outputs files a bit where it wants to +""" + +import os +from waflib import Task, Utils, Node +from waflib.TaskGen import feature, before_method, after_method + +from waflib.Tools import ccroot +ccroot.USELIB_VARS['scalac'] = set(['CLASSPATH', 'SCALACFLAGS']) + +from waflib.Tools import javaw + +@feature('scalac') +@before_method('process_source') +def apply_scalac(self): + + Utils.def_attrs(self, jarname='', classpath='', + sourcepath='.', srcdir='.', + jar_mf_attributes={}, jar_mf_classpath=[]) + + outdir = getattr(self, 'outdir', None) + if outdir: + if not isinstance(outdir, Node.Node): + outdir = self.path.get_bld().make_node(self.outdir) + else: + outdir = self.path.get_bld() + outdir.mkdir() + self.env['OUTDIR'] = outdir.abspath() + + self.scalac_task = tsk = self.create_task('scalac') + tmp = [] + + srcdir = getattr(self, 'srcdir', '') + if isinstance(srcdir, Node.Node): + srcdir = [srcdir] + for x in Utils.to_list(srcdir): + if isinstance(x, Node.Node): + y = x + else: + y = self.path.find_dir(x) + if not y: + self.bld.fatal('Could not find the folder %s from %s' % (x, self.path)) + tmp.append(y) + tsk.srcdir = tmp + +# reuse some code +feature('scalac')(javaw.use_javac_files) +after_method('apply_scalac')(javaw.use_javac_files) + +feature('scalac')(javaw.set_classpath) +after_method('apply_scalac', 'use_scalac_files')(javaw.set_classpath) + + +SOURCE_RE = '**/*.scala' +class scalac(javaw.javac): + color = 'GREEN' + vars = ['CLASSPATH', 'SCALACFLAGS', 'SCALAC', 'OUTDIR'] + + def runnable_status(self): + """ + Wait for dependent tasks to be complete, then read the file system to find the input nodes. 
+		for t in self.run_after:
+			if not t.hasrun:
+				return Task.ASK_LATER
+
+		if not self.inputs:
+			global SOURCE_RE
+			self.inputs = []
+			for x in self.srcdir:
+				self.inputs.extend(x.ant_glob(SOURCE_RE, remove=False))
+		return super(javaw.javac, self).runnable_status()
+
+	def run(self):
+		"""
+		Execute the scalac compiler
+		"""
+		env = self.env
+		gen = self.generator
+		bld = gen.bld
+		wd = bld.bldnode.abspath()
+		def to_list(xx):
+			if isinstance(xx, str):
+				return [xx]
+			return xx
+		self.last_cmd = lst = []
+		lst.extend(to_list(env['SCALAC']))
+		lst.extend(['-classpath'])
+		lst.extend(to_list(env['CLASSPATH']))
+		lst.extend(['-d'])
+		lst.extend(to_list(env['OUTDIR']))
+		lst.extend(to_list(env['SCALACFLAGS']))
+		lst.extend([a.abspath() for a in self.inputs])
+		lst = [x for x in lst if x]
+		try:
+			self.out = self.generator.bld.cmd_and_log(lst, cwd=wd, env=env.env or None, output=0, quiet=0)[1]
+		except:
+			self.generator.bld.cmd_and_log(lst, cwd=wd, env=env.env or None)
+
+def configure(self):
+	"""
+	Detect the scalac program
+	"""
+	# If SCALA_HOME is set, we prepend it to the path list
+	java_path = self.environ['PATH'].split(os.pathsep)
+	v = self.env
+
+	if 'SCALA_HOME' in self.environ:
+		java_path = [os.path.join(self.environ['SCALA_HOME'], 'bin')] + java_path
+		self.env['SCALA_HOME'] = [self.environ['SCALA_HOME']]
+
+	for x in 'scalac scala'.split():
+		self.find_program(x, var=x.upper(), path_list=java_path)
+
+	if 'CLASSPATH' in self.environ:
+		v['CLASSPATH'] = self.environ['CLASSPATH']
+
+	v.SCALACFLAGS = ['-verbose']
+	if not v['SCALAC']:
+		self.fatal('scalac is required for compiling scala classes')
+
diff -Nru lilv-0.24.4~dfsg0/waflib/extras/slow_qt4.py lilv-0.24.6/waflib/extras/slow_qt4.py
--- lilv-0.24.4~dfsg0/waflib/extras/slow_qt4.py	1970-01-01 00:00:00.000000000 +0000
+++ lilv-0.24.6/waflib/extras/slow_qt4.py	2019-06-06 20:19:08.000000000 +0000
@@ -0,0 +1,96 @@
+#! /usr/bin/env python
+# Thomas Nagy, 2011 (ita)
+
+"""
+Create _moc.cpp files
+
+Builds are 30-40% faster when the .moc files are included directly in the
+C++ sources (the default behaviour), so you should NOT use this tool. If
+you really, really want it:
+
+def configure(conf):
+	conf.load('compiler_cxx qt4')
+	conf.load('slow_qt4')
+
+See playground/slow_qt/wscript for a complete example.
+"""
+
+from waflib.TaskGen import extension
+from waflib import Task
+import waflib.Tools.qt4
+import waflib.Tools.cxx
+
+@extension(*waflib.Tools.qt4.EXT_QT4)
+def cxx_hook(self, node):
+	return self.create_compiled_task('cxx_qt', node)
+
+class cxx_qt(Task.classes['cxx']):
+	def runnable_status(self):
+		ret = Task.classes['cxx'].runnable_status(self)
+		if ret != Task.ASK_LATER and not getattr(self, 'moc_done', None):
+
+			try:
+				cache = self.generator.moc_cache
+			except AttributeError:
+				cache = self.generator.moc_cache = {}
+
+			deps = self.generator.bld.node_deps[self.uid()]
+			for x in [self.inputs[0]] + deps:
+				if x.read().find('Q_OBJECT') > 0:
+
+					# process "foo.h -> foo.moc" only if "foo.cpp" is in the sources for the current task generator
+					# this code will work because it is in the main thread (runnable_status)
+					if x.name.rfind('.') > -1: # a .h file...
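+						# strip the extension and look for a compiled source with
+						# the same basename among this generator's compiled_tasks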
+ name = x.name[:x.name.rfind('.')] + for tsk in self.generator.compiled_tasks: + if tsk.inputs and tsk.inputs[0].name.startswith(name): + break + else: + # no corresponding file, continue + continue + + # the file foo.cpp could be compiled for a static and a shared library - hence the %number in the name + cxx_node = x.parent.get_bld().make_node(x.name.replace('.', '_') + '_%d_moc.cpp' % self.generator.idx) + if cxx_node in cache: + continue + cache[cxx_node] = self + + tsk = Task.classes['moc'](env=self.env, generator=self.generator) + tsk.set_inputs(x) + tsk.set_outputs(cxx_node) + + if x.name.endswith('.cpp'): + # moc is trying to be too smart but it is too dumb: + # why forcing the #include when Q_OBJECT is in the cpp file? + gen = self.generator.bld.producer + gen.outstanding.append(tsk) + gen.total += 1 + self.set_run_after(tsk) + else: + cxxtsk = Task.classes['cxx'](env=self.env, generator=self.generator) + cxxtsk.set_inputs(tsk.outputs) + cxxtsk.set_outputs(cxx_node.change_ext('.o')) + cxxtsk.set_run_after(tsk) + + try: + self.more_tasks.extend([tsk, cxxtsk]) + except AttributeError: + self.more_tasks = [tsk, cxxtsk] + + try: + link = self.generator.link_task + except AttributeError: + pass + else: + link.set_run_after(cxxtsk) + link.inputs.extend(cxxtsk.outputs) + link.inputs.sort(key=lambda x: x.abspath()) + + self.moc_done = True + + for t in self.run_after: + if not t.hasrun: + return Task.ASK_LATER + + return ret + diff -Nru lilv-0.24.4~dfsg0/waflib/extras/softlink_libs.py lilv-0.24.6/waflib/extras/softlink_libs.py --- lilv-0.24.4~dfsg0/waflib/extras/softlink_libs.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/extras/softlink_libs.py 2019-06-06 20:19:08.000000000 +0000 @@ -0,0 +1,76 @@ +#! /usr/bin/env python +# per rosengren 2011 + +from waflib.TaskGen import feature, after_method +from waflib.Task import Task, always_run +from os.path import basename, isabs +from os import tmpfile, linesep + +def options(opt): + grp = opt.add_option_group('Softlink Libraries Options') + grp.add_option('--exclude', default='/usr/lib,/lib', help='No symbolic links are created for libs within [%default]') + +def configure(cnf): + cnf.find_program('ldd') + if not cnf.env.SOFTLINK_EXCLUDE: + cnf.env.SOFTLINK_EXCLUDE = cnf.options.exclude.split(',') + +@feature('softlink_libs') +@after_method('process_rule') +def add_finder(self): + tgt = self.path.find_or_declare(self.target) + self.create_task('sll_finder', tgt=tgt) + self.create_task('sll_installer', tgt=tgt) + always_run(sll_installer) + +class sll_finder(Task): + ext_out = 'softlink_libs' + def run(self): + bld = self.generator.bld + linked=[] + target_paths = [] + for g in bld.groups: + for tgen in g: + # FIXME it might be better to check if there is a link_task (getattr?) 
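+				# collect the build directories of all task generators plus every
+				# binary built so far; the directories feed LD_LIBRARY_PATH below
+				# so that ldd can resolve the freshly built libraries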
+				target_paths += [tgen.path.get_bld().bldpath()]
+				linked += [t.outputs[0].bldpath()
+					for t in getattr(tgen, 'tasks', [])
+					if t.__class__.__name__ in
+					['cprogram', 'cshlib', 'cxxprogram', 'cxxshlib']]
+		lib_list = []
+		if len(linked):
+			cmd = [self.env.LDD] + linked
+			# FIXME add DYLD_LIBRARY_PATH+PATH for osx+win32
+			ldd_env = {'LD_LIBRARY_PATH': ':'.join(target_paths + self.env.LIBPATH)}
+			# FIXME the with syntax will not work in python 2
+			with tmpfile() as result:
+				self.exec_command(cmd, env=ldd_env, stdout=result)
+				result.seek(0)
+				for line in result.readlines():
+					words = line.split()
+					if len(words) < 3 or words[1] != '=>':
+						continue
+					lib = words[2]
+					if lib == 'not':
+						continue
+					if any([lib.startswith(p) for p in
+						[bld.bldnode.abspath(), '('] +
+						self.env.SOFTLINK_EXCLUDE]):
+						continue
+					if not isabs(lib):
+						continue
+					lib_list.append(lib)
+			lib_list = sorted(set(lib_list))
+		self.outputs[0].write(linesep.join(lib_list + self.env.DYNAMIC_LIBS))
+		return 0
+
+class sll_installer(Task):
+	ext_in = 'softlink_libs'
+	def run(self):
+		tgt = self.outputs[0]
+		self.generator.bld.install_files('${LIBDIR}', tgt, postpone=False)
+		lib_list=tgt.read().split()
+		for lib in lib_list:
+			self.generator.bld.symlink_as('${LIBDIR}/'+basename(lib), lib, postpone=False)
+		return 0
+
diff -Nru lilv-0.24.4~dfsg0/waflib/extras/sphinx.py lilv-0.24.6/waflib/extras/sphinx.py
--- lilv-0.24.4~dfsg0/waflib/extras/sphinx.py	1970-01-01 00:00:00.000000000 +0000
+++ lilv-0.24.6/waflib/extras/sphinx.py	2019-10-19 17:59:11.000000000 +0000
@@ -0,0 +1,81 @@
+"""Support for Sphinx documentation
+
+This is a wrapper for the sphinx-build program. Please note that sphinx-build
+supports only one output format at a time, which can be passed to the build
+via the sphinx_output_format attribute. The default output format is html.
+
+Example wscript:
+
+def configure(cnf):
+    cnf.load('sphinx')
+
+def build(bld):
+    bld(
+        features='sphinx',
+        sphinx_source='sources',         # path to source directory
+        sphinx_options='-a -v',          # sphinx-build program additional options
+        sphinx_output_format='man'       # output format of sphinx documentation
+        )
+
+"""
+
+from waflib.Node import Node
+from waflib import Utils
+from waflib.Task import Task
+from waflib.TaskGen import feature, after_method
+
+
+def configure(cnf):
+    """Check if the sphinx-build program is available and load the gnu_dirs tool."""
+    cnf.find_program('sphinx-build', var='SPHINX_BUILD', mandatory=False)
+    cnf.load('gnu_dirs')
+
+
+@feature('sphinx')
+def build_sphinx(self):
+    """Builds sphinx sources.
+ """ + if not self.env.SPHINX_BUILD: + self.bld.fatal('Program SPHINX_BUILD not defined.') + if not getattr(self, 'sphinx_source', None): + self.bld.fatal('Attribute sphinx_source not defined.') + if not isinstance(self.sphinx_source, Node): + self.sphinx_source = self.path.find_node(self.sphinx_source) + if not self.sphinx_source: + self.bld.fatal('Can\'t find sphinx_source: %r' % self.sphinx_source) + + Utils.def_attrs(self, sphinx_output_format='html') + self.env.SPHINX_OUTPUT_FORMAT = self.sphinx_output_format + self.env.SPHINX_OPTIONS = getattr(self, 'sphinx_options', []) + + for source_file in self.sphinx_source.ant_glob('**/*'): + self.bld.add_manual_dependency(self.sphinx_source, source_file) + + sphinx_build_task = self.create_task('SphinxBuildingTask') + sphinx_build_task.set_inputs(self.sphinx_source) + sphinx_build_task.set_outputs(self.path.get_bld()) + + # the sphinx-build results are in directory + sphinx_output_directory = self.path.get_bld().make_node(self.env.SPHINX_OUTPUT_FORMAT) + sphinx_output_directory.mkdir() + Utils.def_attrs(self, install_path=get_install_path(self)) + self.add_install_files(install_to=self.install_path, + install_from=sphinx_output_directory.ant_glob('**/*'), + cwd=sphinx_output_directory, + relative_trick=True) + + +def get_install_path(tg): + if tg.env.SPHINX_OUTPUT_FORMAT == 'man': + return tg.env.MANDIR + elif tg.env.SPHINX_OUTPUT_FORMAT == 'info': + return tg.env.INFODIR + else: + return tg.env.DOCDIR + + +class SphinxBuildingTask(Task): + color = 'BOLD' + run_str = '${SPHINX_BUILD} -M ${SPHINX_OUTPUT_FORMAT} ${SRC} ${TGT} ${SPHINX_OPTIONS}' + + def keyword(self): + return 'Compiling (%s)' % self.env.SPHINX_OUTPUT_FORMAT diff -Nru lilv-0.24.4~dfsg0/waflib/extras/stale.py lilv-0.24.6/waflib/extras/stale.py --- lilv-0.24.4~dfsg0/waflib/extras/stale.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/extras/stale.py 2019-06-06 20:19:08.000000000 +0000 @@ -0,0 +1,98 @@ +#! /usr/bin/env python +# encoding: UTF-8 +# Thomas Nagy, 2006-2015 (ita) + +""" +Add a pre-build hook to remove build files (declared in the system) +that do not have a corresponding target + +This can be used for example to remove the targets +that have changed name without performing +a full 'waf clean' + +Of course, it will only work if there are no dynamically generated +nodes/tasks, in which case the method will have to be modified +to exclude some folders for example. + +Make sure to set bld.post_mode = waflib.Build.POST_AT_ONCE +""" + +from waflib import Logs, Build +from waflib.Runner import Parallel + +DYNAMIC_EXT = [] # add your non-cleanable files/extensions here +MOC_H_EXTS = '.cpp .cxx .hpp .hxx .h'.split() + +def can_delete(node): + """Imperfect moc cleanup which does not look for a Q_OBJECT macro in the files""" + if not node.name.endswith('.moc'): + return True + base = node.name[:-4] + p1 = node.parent.get_src() + p2 = node.parent.get_bld() + for k in MOC_H_EXTS: + h_name = base + k + n = p1.search_node(h_name) + if n: + return False + n = p2.search_node(h_name) + if n: + return False + + # foo.cpp.moc, foo.h.moc, etc. 
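+		# e.g. "foo.cpp.moc": the base name still carries a source/header
+		# extension, so the .moc output belongs to a live file and is kept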
+ if base.endswith(k): + return False + + return True + +# recursion over the nodes to find the stale files +def stale_rec(node, nodes): + if node.abspath() in node.ctx.env[Build.CFG_FILES]: + return + + if getattr(node, 'children', []): + for x in node.children.values(): + if x.name != "c4che": + stale_rec(x, nodes) + else: + for ext in DYNAMIC_EXT: + if node.name.endswith(ext): + break + else: + if node not in nodes: + if can_delete(node): + Logs.warn('Removing stale file -> %r', node) + node.delete() + +old = Parallel.refill_task_list +def refill_task_list(self): + iit = old(self) + bld = self.bld + + # execute this operation only once + if getattr(self, 'stale_done', False): + return iit + self.stale_done = True + + # this does not work in partial builds + if bld.targets != '*': + return iit + + # this does not work in dynamic builds + if getattr(bld, 'post_mode') == Build.POST_LAZY: + return iit + + # obtain the nodes to use during the build + nodes = [] + for tasks in bld.groups: + for x in tasks: + try: + nodes.extend(x.outputs) + except AttributeError: + pass + + stale_rec(bld.bldnode, nodes) + return iit + +Parallel.refill_task_list = refill_task_list + diff -Nru lilv-0.24.4~dfsg0/waflib/extras/stracedeps.py lilv-0.24.6/waflib/extras/stracedeps.py --- lilv-0.24.4~dfsg0/waflib/extras/stracedeps.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/extras/stracedeps.py 2019-06-06 20:19:08.000000000 +0000 @@ -0,0 +1,174 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Thomas Nagy, 2015 (ita) + +""" +Execute tasks through strace to obtain dependencies after the process is run. This +scheme is similar to that of the Fabricate script. + +To use:: + + def configure(conf): + conf.load('stracedeps') + +WARNING: +* This will not work when advanced scanners are needed (qt4/qt5) +* The overhead of running 'strace' is significant (56s -> 1m29s) +* It will not work on Windows :-) +""" + +import os, re, threading +from waflib import Task, Logs, Utils + +#TRACECALLS = 'trace=access,chdir,clone,creat,execve,exit_group,fork,lstat,lstat64,mkdir,open,rename,stat,stat64,symlink,vfork' +TRACECALLS = 'trace=process,file' + +BANNED = ('/tmp', '/proc', '/sys', '/dev') + +s_process = r'(?:clone|fork|vfork)\(.*?(?P<npid>\d+)' +s_file = r'(?P<call>\w+)\("(?P<path>([^"\\]|\\.)*)"(.*)' +re_lines = re.compile(r'^(?P<pid>\d+)\s+(?:(?:%s)|(?:%s))\r*$' % (s_file, s_process), re.IGNORECASE | re.MULTILINE) +strace_lock = threading.Lock() + +def configure(conf): + conf.find_program('strace') + +def task_method(func): + # Decorator function to bind/replace methods on the base Task class + # + # The methods Task.exec_command and Task.sig_implicit_deps already exist and are rarely overridden + # we thus expect that we are the only ones doing this + try: + setattr(Task.Task, 'nostrace_%s' % func.__name__, getattr(Task.Task, func.__name__)) + except AttributeError: + pass + setattr(Task.Task, func.__name__, func) + return func + +@task_method +def get_strace_file(self): + try: + return self.strace_file + except AttributeError: + pass + + if self.outputs: + ret = self.outputs[0].abspath() + '.strace' + else: + ret = '%s%s%d%s' % (self.generator.bld.bldnode.abspath(), os.sep, id(self), '.strace') + self.strace_file = ret + return ret + +@task_method +def get_strace_args(self): + return (self.env.STRACE or ['strace']) + ['-e', TRACECALLS, '-f', '-o', self.get_strace_file()] + +@task_method +def exec_command(self, cmd, **kw): + bld = self.generator.bld + if 'cwd' not in kw: + kw['cwd'] = self.get_cwd() + + args = self.get_strace_args() +
fname = self.get_strace_file() + if isinstance(cmd, list): + cmd = args + cmd + else: + cmd = '%s %s' % (' '.join(args), cmd) + + try: + ret = bld.exec_command(cmd, **kw) + finally: + if not ret: + self.parse_strace_deps(fname, kw['cwd']) + return ret + +@task_method +def sig_implicit_deps(self): + # bypass the scanner functions + return + +@task_method +def parse_strace_deps(self, path, cwd): + # uncomment the following line to disable the dependencies and force a file scan + # return + try: + cnt = Utils.readf(path) + finally: + try: + os.remove(path) + except OSError: + pass + + if not isinstance(cwd, str): + cwd = cwd.abspath() + + nodes = [] + bld = self.generator.bld + try: + cache = bld.strace_cache + except AttributeError: + cache = bld.strace_cache = {} + + # chdir and relative paths + pid_to_cwd = {} + + global BANNED + done = set() + for m in re.finditer(re_lines, cnt): + # scraping the output of strace + pid = m.group('pid') + if m.group('npid'): + npid = m.group('npid') + pid_to_cwd[npid] = pid_to_cwd.get(pid, cwd) + continue + + p = m.group('path').replace('\\"', '"') + + if p == '.' or m.group().find('= -1 ENOENT') > -1: + # just to speed it up a bit + continue + + if not os.path.isabs(p): + p = os.path.join(pid_to_cwd.get(pid, cwd), p) + + call = m.group('call') + if call == 'chdir': + pid_to_cwd[pid] = p + continue + + if p in done: + continue + done.add(p) + + for x in BANNED: + if p.startswith(x): + break + else: + if p.endswith('/') or os.path.isdir(p): + continue + + try: + node = cache[p] + except KeyError: + strace_lock.acquire() + try: + cache[p] = node = bld.root.find_node(p) + if not node: + continue + finally: + strace_lock.release() + nodes.append(node) + + # record the dependencies then force the task signature recalculation for next time + if Logs.verbose: + Logs.debug('deps: real scanner for %r returned %r', self, nodes) + bld = self.generator.bld + bld.node_deps[self.uid()] = nodes + bld.raw_deps[self.uid()] = [] + try: + del self.cache_sig + except AttributeError: + pass + self.signature() + diff -Nru lilv-0.24.4~dfsg0/waflib/extras/swig.py lilv-0.24.6/waflib/extras/swig.py --- lilv-0.24.4~dfsg0/waflib/extras/swig.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/extras/swig.py 2019-06-06 20:19:08.000000000 +0000 @@ -0,0 +1,237 @@ +#! 
/usr/bin/env python +# encoding: UTF-8 +# Petar Forai +# Thomas Nagy 2008-2010 (ita) + +import re +from waflib import Task, Logs +from waflib.TaskGen import extension, feature, after_method +from waflib.Configure import conf +from waflib.Tools import c_preproc + +""" +tasks have to be added dynamically: +- swig interface files may be created at runtime +- the module name may be unknown in advance +""" + +SWIG_EXTS = ['.swig', '.i'] + +re_module = re.compile(r'%module(?:\s*\(.*\))?\s+(.+)', re.M) + +re_1 = re.compile(r'^%module.*?\s+([\w]+)\s*?$', re.M) +re_2 = re.compile(r'[#%](?:include|import(?:\(module=".*"\))+|python(?:begin|code)) [<"](.*)[">]', re.M) + +class swig(Task.Task): + color = 'BLUE' + run_str = '${SWIG} ${SWIGFLAGS} ${SWIGPATH_ST:INCPATHS} ${SWIGDEF_ST:DEFINES} ${SRC}' + ext_out = ['.h'] # might produce .h files although it is not mandatory + vars = ['SWIG_VERSION', 'SWIGDEPS'] + + def runnable_status(self): + for t in self.run_after: + if not t.hasrun: + return Task.ASK_LATER + + if not getattr(self, 'init_outputs', None): + self.init_outputs = True + if not getattr(self, 'module', None): + # search the module name + txt = self.inputs[0].read() + m = re_module.search(txt) + if not m: + raise ValueError("could not find the swig module name") + self.module = m.group(1) + + swig_c(self) + + # add the language-specific output files as nodes + # call funs in the dict swig_langs + for x in self.env['SWIGFLAGS']: + # obtain the language + x = x[1:] + try: + fun = swig_langs[x] + except KeyError: + pass + else: + fun(self) + + return super(swig, self).runnable_status() + + def scan(self): + "scan for swig dependencies, climb the .i files" + lst_src = [] + + seen = [] + missing = [] + to_see = [self.inputs[0]] + + while to_see: + node = to_see.pop(0) + if node in seen: + continue + seen.append(node) + lst_src.append(node) + + # read the file + code = node.read() + code = c_preproc.re_nl.sub('', code) + code = c_preproc.re_cpp.sub(c_preproc.repl, code) + + # find .i files and project headers + names = re_2.findall(code) + for n in names: + for d in self.generator.includes_nodes + [node.parent]: + u = d.find_resource(n) + if u: + to_see.append(u) + break + else: + missing.append(n) + return (lst_src, missing) + +# provide additional language processing +swig_langs = {} +def swigf(fun): + swig_langs[fun.__name__.replace('swig_', '')] = fun + return fun +swig.swigf = swigf + +def swig_c(self): + ext = '.swigwrap_%d.c' % self.generator.idx + flags = self.env['SWIGFLAGS'] + if '-c++' in flags: + ext += 'xx' + out_node = self.inputs[0].parent.find_or_declare(self.module + ext) + + if '-c++' in flags: + c_tsk = self.generator.cxx_hook(out_node) + else: + c_tsk = self.generator.c_hook(out_node) + + c_tsk.set_run_after(self) + + # transfer weights from swig task to c task + if getattr(self, 'weight', None): + c_tsk.weight = self.weight + if getattr(self, 'tree_weight', None): + c_tsk.tree_weight = self.tree_weight + + try: + self.more_tasks.append(c_tsk) + except AttributeError: + self.more_tasks = [c_tsk] + + try: + ltask = self.generator.link_task + except AttributeError: + pass + else: + ltask.set_run_after(c_tsk) + # setting input nodes does not declare the build order + # because the build already started, but it sets + # the dependency to enable rebuilds + ltask.inputs.append(c_tsk.outputs[0]) + + self.outputs.append(out_node) + + if not '-o' in self.env['SWIGFLAGS']: + self.env.append_value('SWIGFLAGS', ['-o', self.outputs[0].abspath()]) + +@swigf +def swig_python(tsk): + node = 
tsk.inputs[0].parent + if tsk.outdir: + node = tsk.outdir + tsk.set_outputs(node.find_or_declare(tsk.module+'.py')) + +@swigf +def swig_ocaml(tsk): + node = tsk.inputs[0].parent + if tsk.outdir: + node = tsk.outdir + tsk.set_outputs(node.find_or_declare(tsk.module+'.ml')) + tsk.set_outputs(node.find_or_declare(tsk.module+'.mli')) + +@extension(*SWIG_EXTS) +def i_file(self, node): + # the task instance + tsk = self.create_task('swig') + tsk.set_inputs(node) + tsk.module = getattr(self, 'swig_module', None) + + flags = self.to_list(getattr(self, 'swig_flags', [])) + tsk.env.append_value('SWIGFLAGS', flags) + + tsk.outdir = None + if '-outdir' in flags: + outdir = flags[flags.index('-outdir')+1] + outdir = tsk.generator.bld.bldnode.make_node(outdir) + outdir.mkdir() + tsk.outdir = outdir + +@feature('c', 'cxx', 'd', 'fc', 'asm') +@after_method('apply_link', 'process_source') +def enforce_swig_before_link(self): + try: + link_task = self.link_task + except AttributeError: + pass + else: + for x in self.tasks: + if x.__class__.__name__ == 'swig': + link_task.run_after.add(x) + +@conf +def check_swig_version(conf, minver=None): + """ + Check if the swig tool is found matching a given minimum version. + minver should be a tuple, eg. to check for swig >= 1.3.28 pass (1,3,28) as minver. + + If successful, SWIG_VERSION is defined as 'MAJOR.MINOR' + (eg. '1.3') of the actual swig version found. + + :param minver: minimum version + :type minver: tuple of int + :return: swig version + :rtype: tuple of int + """ + assert minver is None or isinstance(minver, tuple) + swigbin = conf.env['SWIG'] + if not swigbin: + conf.fatal('could not find the swig executable') + + # Get swig version string + cmd = swigbin + ['-version'] + Logs.debug('swig: Running swig command %r', cmd) + reg_swig = re.compile(r'SWIG Version\s(.*)', re.M) + swig_out = conf.cmd_and_log(cmd) + swigver_tuple = tuple([int(s) for s in reg_swig.findall(swig_out)[0].split('.')]) + + # Compare swig version with the minimum required + result = (minver is None) or (swigver_tuple >= minver) + + if result: + # Define useful environment variables + swigver = '.'.join([str(x) for x in swigver_tuple[:2]]) + conf.env['SWIG_VERSION'] = swigver + + # Feedback + swigver_full = '.'.join(map(str, swigver_tuple[:3])) + if minver is None: + conf.msg('Checking for swig version', swigver_full) + else: + minver_str = '.'.join(map(str, minver)) + conf.msg('Checking for swig version >= %s' % (minver_str,), swigver_full, color=result and 'GREEN' or 'YELLOW') + + if not result: + conf.fatal('The swig version is too old, expecting %r' % (minver,)) + + return swigver_tuple + +def configure(conf): + conf.find_program('swig', var='SWIG') + conf.env.SWIGPATH_ST = '-I%s' + conf.env.SWIGDEF_ST = '-D%s' + diff -Nru lilv-0.24.4~dfsg0/waflib/extras/syms.py lilv-0.24.6/waflib/extras/syms.py --- lilv-0.24.4~dfsg0/waflib/extras/syms.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/extras/syms.py 2019-10-19 17:59:11.000000000 +0000 @@ -0,0 +1,84 @@ +#! /usr/bin/env python +# encoding: utf-8 + +""" +This tool supports the export_symbols_regex to export the symbols in a shared library. +by default, all symbols are exported by gcc, and nothing by msvc. +to use the tool, do something like: + +def build(ctx): + ctx(features='c cshlib syms', source='a.c b.c', export_symbols_regex='mylib_.*', target='testlib') + +only the symbols starting with 'mylib_' will be exported. 
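+
+On elf targets this is implemented with a linker version script: if a.c and
+b.c define mylib_init and mylib_run (hypothetical names), the generated
+testlib.def roughly contains:
+
+{ global:
+mylib_init;
+mylib_run;
+local: *; };
+
+On pe targets the same file is written as an EXPORTS list instead, and on
+mac-o as a plain symbol list passed via -Wl,-exported_symbols_list.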
+""" + +import re +from waflib.Context import STDOUT +from waflib.Task import Task +from waflib.Errors import WafError +from waflib.TaskGen import feature, after_method + +class gen_sym(Task): + def run(self): + obj = self.inputs[0] + kw = {} + + reg = getattr(self.generator, 'export_symbols_regex', '.+?') + if 'msvc' in (self.env.CC_NAME, self.env.CXX_NAME): + re_nm = re.compile(r'External\s+\|\s+_(?P%s)\b' % reg) + cmd = (self.env.DUMPBIN or ['dumpbin']) + ['/symbols', obj.abspath()] + else: + if self.env.DEST_BINFMT == 'pe': #gcc uses nm, and has a preceding _ on windows + re_nm = re.compile(r'(T|D)\s+_(?P%s)\b' % reg) + elif self.env.DEST_BINFMT=='mac-o': + re_nm=re.compile(r'(T|D)\s+(?P_?(%s))\b' % reg) + else: + re_nm = re.compile(r'(T|D)\s+(?P%s)\b' % reg) + cmd = (self.env.NM or ['nm']) + ['-g', obj.abspath()] + syms = [m.group('symbol') for m in re_nm.finditer(self.generator.bld.cmd_and_log(cmd, quiet=STDOUT, **kw))] + self.outputs[0].write('%r' % syms) + +class compile_sym(Task): + def run(self): + syms = {} + for x in self.inputs: + slist = eval(x.read()) + for s in slist: + syms[s] = 1 + lsyms = list(syms.keys()) + lsyms.sort() + if self.env.DEST_BINFMT == 'pe': + self.outputs[0].write('EXPORTS\n' + '\n'.join(lsyms)) + elif self.env.DEST_BINFMT == 'elf': + self.outputs[0].write('{ global:\n' + ';\n'.join(lsyms) + ";\nlocal: *; };\n") + elif self.env.DEST_BINFMT=='mac-o': + self.outputs[0].write('\n'.join(lsyms) + '\n') + else: + raise WafError('NotImplemented') + +@feature('syms') +@after_method('process_source', 'process_use', 'apply_link', 'process_uselib_local', 'propagate_uselib_vars') +def do_the_symbol_stuff(self): + def_node = self.path.find_or_declare(getattr(self, 'sym_file', self.target + '.def')) + compiled_tasks = getattr(self, 'compiled_tasks', None) + if compiled_tasks: + ins = [x.outputs[0] for x in compiled_tasks] + self.gen_sym_tasks = [self.create_task('gen_sym', x, x.change_ext('.%d.sym' % self.idx)) for x in ins] + self.create_task('compile_sym', [x.outputs[0] for x in self.gen_sym_tasks], def_node) + + link_task = getattr(self, 'link_task', None) + if link_task: + self.link_task.dep_nodes.append(def_node) + + if 'msvc' in (self.env.CC_NAME, self.env.CXX_NAME): + self.link_task.env.append_value('LINKFLAGS', ['/def:' + def_node.bldpath()]) + elif self.env.DEST_BINFMT == 'pe': + # gcc on windows takes *.def as an additional input + self.link_task.inputs.append(def_node) + elif self.env.DEST_BINFMT == 'elf': + self.link_task.env.append_value('LINKFLAGS', ['-Wl,-version-script', '-Wl,' + def_node.bldpath()]) + elif self.env.DEST_BINFMT=='mac-o': + self.link_task.env.append_value('LINKFLAGS',['-Wl,-exported_symbols_list,' + def_node.bldpath()]) + else: + raise WafError('NotImplemented') + diff -Nru lilv-0.24.4~dfsg0/waflib/extras/ticgt.py lilv-0.24.6/waflib/extras/ticgt.py --- lilv-0.24.4~dfsg0/waflib/extras/ticgt.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/extras/ticgt.py 2019-06-06 20:19:08.000000000 +0000 @@ -0,0 +1,300 @@ +#!/usr/bin/env python +# encoding: utf-8 + +# Texas Instruments code generator support (experimental) +# When reporting issues, please directly assign the bug to the maintainer. + +__author__ = __maintainer__ = "Jérôme Carretero " +__copyright__ = "Jérôme Carretero, 2012" + +""" +TI cgt6x is a compiler suite for TI DSPs. + +The toolchain does pretty weird things, and I'm sure I'm missing some of them. +But still, the tool saves time. 
+ +What this tool does is: + +- create a TI compiler environment +- create TI compiler features, to handle some specifics about this compiler + It has a few idiosyncracies, such as not giving the liberty of the .o file names +- automatically activate them when using the TI compiler +- handle the tconf tool + The tool + +TODO: + +- the set_platform_flags() function is not nice +- more tests +- broaden tool scope, if needed + +""" + +import os, re + +from waflib import Options, Utils, Task, TaskGen +from waflib.Tools import c, ccroot, c_preproc +from waflib.Configure import conf +from waflib.TaskGen import feature, before_method +from waflib.Tools.c import cprogram + +opj = os.path.join + +@conf +def find_ticc(conf): + conf.find_program(['cl6x'], var='CC', path_list=opj(getattr(Options.options, 'ti-cgt-dir', ""), 'bin')) + conf.env.CC_NAME = 'ticc' + +@conf +def find_tild(conf): + conf.find_program(['lnk6x'], var='LINK_CC', path_list=opj(getattr(Options.options, 'ti-cgt-dir', ""), 'bin')) + conf.env.LINK_CC_NAME = 'tild' + +@conf +def find_tiar(conf): + conf.find_program(['ar6x'], var='AR', path_list=opj(getattr(Options.options, 'ti-cgt-dir', ""), 'bin')) + conf.env.AR_NAME = 'tiar' + conf.env.ARFLAGS = 'qru' + +@conf +def ticc_common_flags(conf): + v = conf.env + + if not v['LINK_CC']: + v['LINK_CC'] = v['CC'] + v['CCLNK_SRC_F'] = [] + v['CCLNK_TGT_F'] = ['-o'] + v['CPPPATH_ST'] = '-I%s' + v['DEFINES_ST'] = '-d%s' + + v['LIB_ST'] = '-l%s' # template for adding libs + v['LIBPATH_ST'] = '-i%s' # template for adding libpaths + v['STLIB_ST'] = '-l=%s.lib' + v['STLIBPATH_ST'] = '-i%s' + + # program + v['cprogram_PATTERN'] = '%s.out' + + # static lib + #v['LINKFLAGS_cstlib'] = ['-Wl,-Bstatic'] + v['cstlib_PATTERN'] = '%s.lib' + +def configure(conf): + v = conf.env + v.TI_CGT_DIR = getattr(Options.options, 'ti-cgt-dir', "") + v.TI_DSPLINK_DIR = getattr(Options.options, 'ti-dsplink-dir', "") + v.TI_BIOSUTILS_DIR = getattr(Options.options, 'ti-biosutils-dir', "") + v.TI_DSPBIOS_DIR = getattr(Options.options, 'ti-dspbios-dir', "") + v.TI_XDCTOOLS_DIR = getattr(Options.options, 'ti-xdctools-dir', "") + conf.find_ticc() + conf.find_tiar() + conf.find_tild() + conf.ticc_common_flags() + conf.cc_load_tools() + conf.cc_add_flags() + conf.link_add_flags() + conf.find_program(['tconf'], var='TCONF', path_list=v.TI_XDCTOOLS_DIR) + + conf.env.TCONF_INCLUDES += [ + opj(conf.env.TI_DSPBIOS_DIR, 'packages'), + ] + + conf.env.INCLUDES += [ + opj(conf.env.TI_CGT_DIR, 'include'), + ] + + conf.env.LIBPATH += [ + opj(conf.env.TI_CGT_DIR, "lib"), + ] + + conf.env.INCLUDES_DSPBIOS += [ + opj(conf.env.TI_DSPBIOS_DIR, 'packages', 'ti', 'bios', 'include'), + ] + + conf.env.LIBPATH_DSPBIOS += [ + opj(conf.env.TI_DSPBIOS_DIR, 'packages', 'ti', 'bios', 'lib'), + ] + + conf.env.INCLUDES_DSPLINK += [ + opj(conf.env.TI_DSPLINK_DIR, 'dsplink', 'dsp', 'inc'), + ] + +@conf +def ti_set_debug(cfg, debug=1): + """ + Sets debug flags for the compiler. + + TODO: + - for each TI CFLAG/INCLUDES/LINKFLAGS/LIBPATH replace RELEASE by DEBUG + - -g --no_compress + """ + if debug: + cfg.env.CFLAGS += "-d_DEBUG -dDEBUG -dDDSP_DEBUG".split() + +@conf +def ti_dsplink_set_platform_flags(cfg, splat, dsp, dspbios_ver, board): + """ + Sets the INCLUDES, LINKFLAGS for DSPLINK and TCONF_INCLUDES + For the specific hardware. + + Assumes that DSPLINK was built in its own folder. + + :param splat: short platform name (eg. OMAPL138) + :param dsp: DSP name (eg. 674X) + :param dspbios_ver: string identifying DspBios version (eg. 
5.XX) + :param board: board name (eg. OMAPL138GEM) + + """ + d1 = opj(cfg.env.TI_DSPLINK_DIR, 'dsplink', 'dsp', 'inc', 'DspBios', dspbios_ver) + d = opj(cfg.env.TI_DSPLINK_DIR, 'dsplink', 'dsp', 'inc', 'DspBios', dspbios_ver, board) + cfg.env.TCONF_INCLUDES += [d1, d] + cfg.env.INCLUDES_DSPLINK += [ + opj(cfg.env.TI_DSPLINK_DIR, 'dsplink', 'dsp', 'inc', dsp), + d, + ] + + cfg.env.LINKFLAGS_DSPLINK += [ + opj(cfg.env.TI_DSPLINK_DIR, 'dsplink', 'dsp', 'export', 'BIN', 'DspBios', splat, board+'_0', 'RELEASE', 'dsplink%s.lib' % x) + for x in ('', 'pool', 'mpcs', 'mplist', 'msg', 'data', 'notify', 'ringio') + ] + + +def options(opt): + opt.add_option('--with-ti-cgt', type='string', dest='ti-cgt-dir', help = 'Specify alternate cgt root folder', default="") + opt.add_option('--with-ti-biosutils', type='string', dest='ti-biosutils-dir', help = 'Specify alternate biosutils folder', default="") + opt.add_option('--with-ti-dspbios', type='string', dest='ti-dspbios-dir', help = 'Specify alternate dspbios folder', default="") + opt.add_option('--with-ti-dsplink', type='string', dest='ti-dsplink-dir', help = 'Specify alternate dsplink folder', default="") + opt.add_option('--with-ti-xdctools', type='string', dest='ti-xdctools-dir', help = 'Specify alternate xdctools folder', default="") + +class ti_cprogram(cprogram): + """ + Link object files into a c program + + Changes: + + - the linked executable to have a relative path (because we can) + - put the LIBPATH first + """ + run_str = '${LINK_CC} ${LIBPATH_ST:LIBPATH} ${LIB_ST:LIB} ${LINKFLAGS} ${CCLNK_SRC_F}${SRC} ${CCLNK_TGT_F}${TGT[0].bldpath()} ${RPATH_ST:RPATH} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${FRAMEWORK_ST:FRAMEWORK} ${ARCH_ST:ARCH} ${STLIB_MARKER} ${STLIBPATH_ST:STLIBPATH} ${STLIB_ST:STLIB} ${SHLIB_MARKER} ' + +@feature("c") +@before_method('apply_link') +def use_ti_cprogram(self): + """ + Automatically uses ti_cprogram link process + """ + if 'cprogram' in self.features and self.env.CC_NAME == 'ticc': + self.features.insert(0, "ti_cprogram") + +class ti_c(Task.Task): + """ + Compile task for the TI codegen compiler + + This compiler does not allow specifying the output file name, only the output path. 
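+
+	For instance, compiling src/foo.c runs roughly (flags elided):
+
+	    cl6x foo.c -c -fr<build directory for src/>
+
+	and the object file foo.obj is declared next to the source in the
+	build tree by create_compiled_task() below, which also sets the
+	-fr output-folder option.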
+ + """ + "Compile C files into object files" + run_str = '${CC} ${ARCH_ST:ARCH} ${CFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${SRC} -c ${OUT} ${CPPFLAGS}' + vars = ['CCDEPS'] # unused variable to depend on, just in case + ext_in = ['.h'] # set the build order easily by using ext_out=['.h'] + scan = c_preproc.scan + +def create_compiled_task(self, name, node): + """ + Overrides ccroot.create_compiled_task to support ti_c + """ + out = '%s' % (node.change_ext('.obj').name) + if self.env.CC_NAME == 'ticc': + name = 'ti_c' + task = self.create_task(name, node, node.parent.find_or_declare(out)) + self.env.OUT = '-fr%s' % (node.parent.get_bld().abspath()) + try: + self.compiled_tasks.append(task) + except AttributeError: + self.compiled_tasks = [task] + return task + +@TaskGen.extension('.c') +def c_hook(self, node): + "Bind the c file extension to the creation of a :py:class:`waflib.Tools.c.c` instance" + if self.env.CC_NAME == 'ticc': + return create_compiled_task(self, 'ti_c', node) + else: + return self.create_compiled_task('c', node) + + +@feature("ti-tconf") +@before_method('process_source') +def apply_tconf(self): + sources = [x.get_src() for x in self.to_nodes(self.source, path=self.path.get_src())] + node = sources[0] + assert(sources[0].name.endswith(".tcf")) + if len(sources) > 1: + assert(sources[1].name.endswith(".cmd")) + + target = getattr(self, 'target', self.source) + target_node = node.get_bld().parent.find_or_declare(node.name) + + procid = "%d" % int(getattr(self, 'procid', 0)) + + importpaths = [] + includes = Utils.to_list(getattr(self, 'includes', [])) + for x in includes + self.env.TCONF_INCLUDES: + if x == os.path.abspath(x): + importpaths.append(x) + else: + relpath = self.path.find_node(x).path_from(target_node.parent) + importpaths.append(relpath) + + task = self.create_task('ti_tconf', sources, target_node.change_ext('.cdb')) + task.path = self.path + task.includes = includes + task.cwd = target_node.parent.abspath() + task.env = self.env.derive() + task.env["TCONFSRC"] = node.path_from(target_node.parent) + task.env["TCONFINC"] = '-Dconfig.importPath=%s' % ";".join(importpaths) + task.env['TCONFPROGNAME'] = '-Dconfig.programName=%s' % target + task.env['PROCID'] = procid + task.outputs = [ + target_node.change_ext("cfg_c.c"), + target_node.change_ext("cfg.s62"), + target_node.change_ext("cfg.cmd"), + ] + + create_compiled_task(self, 'ti_c', task.outputs[1]) + ctask = create_compiled_task(self, 'ti_c', task.outputs[0]) + ctask.env = self.env.derive() + + self.add_those_o_files(target_node.change_ext("cfg.cmd")) + if len(sources) > 1: + self.add_those_o_files(sources[1]) + self.source = [] + +re_tconf_include = re.compile(r'(?Putils\.importFile)\("(?P.*)"\)',re.M) +class ti_tconf(Task.Task): + run_str = '${TCONF} ${TCONFINC} ${TCONFPROGNAME} ${TCONFSRC} ${PROCID}' + color = 'PINK' + + def scan(self): + includes = Utils.to_list(getattr(self, 'includes', [])) + + def deps(node): + nodes, names = [], [] + if node: + code = Utils.readf(node.abspath()) + for match in re_tconf_include.finditer(code): + path = match.group('file') + if path: + for x in includes: + filename = opj(x, path) + fi = self.path.find_resource(filename) + if fi: + subnodes, subnames = deps(fi) + nodes += subnodes + names += subnames + nodes.append(fi) + names.append(path) + break + return nodes, names + return deps(self.inputs[0]) + diff -Nru lilv-0.24.4~dfsg0/waflib/extras/unity.py lilv-0.24.6/waflib/extras/unity.py --- 
lilv-0.24.4~dfsg0/waflib/extras/unity.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/extras/unity.py 2019-06-06 20:19:08.000000000 +0000 @@ -0,0 +1,108 @@ +#! /usr/bin/env python +# encoding: utf-8 + +""" +Compile whole groups of C/C++ files at once +(C and C++ files are processed independently though). + +To enable globally:: + + def options(opt): + opt.load('compiler_cxx') + def build(bld): + bld.load('compiler_cxx unity') + +To enable for specific task generators only:: + + def build(bld): + bld(features='c cprogram unity', source='main.c', ...) + +The file order is often significant in such builds, so it can be +necessary to adjust the order of source files and the batch sizes. +To control the amount of files processed in a batch per target +(the default is 50):: + + def build(bld): + bld(features='c cprogram', unity_size=20) + +""" + +from waflib import Task, Options +from waflib.Tools import c_preproc +from waflib import TaskGen + +MAX_BATCH = 50 + +EXTS_C = ('.c',) +EXTS_CXX = ('.cpp','.cc','.cxx','.C','.c++') + +def options(opt): + global MAX_BATCH + opt.add_option('--batchsize', action='store', dest='batchsize', type='int', default=MAX_BATCH, + help='default unity batch size (0 disables unity builds)') + +@TaskGen.taskgen_method +def batch_size(self): + default = getattr(Options.options, 'batchsize', MAX_BATCH) + if default < 1: + return 0 + return getattr(self, 'unity_size', default) + + +class unity(Task.Task): + color = 'BLUE' + scan = c_preproc.scan + def to_include(self, node): + ret = node.path_from(self.outputs[0].parent) + ret = ret.replace('\\', '\\\\').replace('"', '\\"') + return ret + def run(self): + lst = ['#include "%s"\n' % self.to_include(node) for node in self.inputs] + txt = ''.join(lst) + self.outputs[0].write(txt) + def __str__(self): + node = self.outputs[0] + return node.path_from(node.ctx.launch_node()) + +def bind_unity(obj, cls_name, exts): + if not 'mappings' in obj.__dict__: + obj.mappings = dict(obj.mappings) + + for j in exts: + fun = obj.mappings[j] + if fun.__name__ == 'unity_fun': + raise ValueError('Attempt to bind unity mappings multiple times %r' % j) + + def unity_fun(self, node): + cnt = self.batch_size() + if cnt <= 1: + return fun(self, node) + x = getattr(self, 'master_%s' % cls_name, None) + if not x or len(x.inputs) >= cnt: + x = self.create_task('unity') + setattr(self, 'master_%s' % cls_name, x) + + cnt_cur = getattr(self, 'cnt_%s' % cls_name, 0) + c_node = node.parent.find_or_declare('unity_%s_%d_%d.%s' % (self.idx, cnt_cur, cnt, cls_name)) + x.outputs = [c_node] + setattr(self, 'cnt_%s' % cls_name, cnt_cur + 1) + fun(self, c_node) + x.inputs.append(node) + + obj.mappings[j] = unity_fun + +@TaskGen.feature('unity') +@TaskGen.before('process_source') +def single_unity(self): + lst = self.to_list(self.features) + if 'c' in lst: + bind_unity(self, 'c', EXTS_C) + if 'cxx' in lst: + bind_unity(self, 'cxx', EXTS_CXX) + +def build(bld): + if bld.env.CC_NAME: + bind_unity(TaskGen.task_gen, 'c', EXTS_C) + if bld.env.CXX_NAME: + bind_unity(TaskGen.task_gen, 'cxx', EXTS_CXX) + diff -Nru lilv-0.24.4~dfsg0/waflib/extras/use_config.py lilv-0.24.6/waflib/extras/use_config.py --- lilv-0.24.4~dfsg0/waflib/extras/use_config.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/extras/use_config.py 2019-06-06 20:19:08.000000000 +0000 @@ -0,0 +1,185 @@ +#!/usr/bin/env python +# coding=utf-8 +# Mathieu Courtois - EDF R&D, 2013 - http://www.code-aster.org + +""" +When a project has a lot of options the 'waf configure' command line can 
be +very long and it becomes a cause of error. +This tool provides a convenient way to load a set of configuration parameters +from a local file or from a remote url. + +The configuration parameters are stored in a Python file that is imported as +an extra waf tool can be. + +Example: +$ waf configure --use-config-dir=http://www.anywhere.org --use-config=myconf1 ... + +The file 'myconf1' will be downloaded from 'http://www.anywhere.org' +(or 'http://www.anywhere.org/wafcfg'). +If the files are available locally, it could be: +$ waf configure --use-config-dir=/somewhere/myconfigurations --use-config=myconf1 ... + +The configuration of 'myconf1.py' is automatically loaded by calling +its 'configure' function. In this example, it defines environment variables and +set options: + +def configure(self): + self.env['CC'] = 'gcc-4.8' + self.env.append_value('LIBPATH', [...]) + self.options.perlbinary = '/usr/local/bin/perl' + self.options.pyc = False + +The corresponding command line should have been: +$ CC=gcc-4.8 LIBPATH=... waf configure --nopyc --with-perl-binary=/usr/local/bin/perl + + +This is an extra tool, not bundled with the default waf binary. +To add the use_config tool to the waf file: +$ ./waf-light --tools=use_config + +When using this tool, the wscript will look like: + + def options(opt): + opt.load('use_config') + + def configure(conf): + conf.load('use_config') +""" + +import sys +import os.path as osp +import os + +local_repo = '' +"""Local repository containing additional Waf tools (plugins)""" +remote_repo = 'https://gitlab.com/ita1024/waf/raw/master/' +""" +Remote directory containing downloadable waf tools. The missing tools can be downloaded by using:: + + $ waf configure --download +""" + +remote_locs = ['waflib/extras', 'waflib/Tools'] +""" +Remote directories for use with :py:const:`waflib.extras.use_config.remote_repo` +""" + + +try: + from urllib import request +except ImportError: + from urllib import urlopen +else: + urlopen = request.urlopen + + +from waflib import Errors, Context, Logs, Utils, Options, Configure + +try: + from urllib.parse import urlparse +except ImportError: + from urlparse import urlparse + + + + +DEFAULT_DIR = 'wafcfg' +# add first the current wafcfg subdirectory +sys.path.append(osp.abspath(DEFAULT_DIR)) + +def options(self): + group = self.add_option_group('configure options') + group.add_option('--download', dest='download', default=False, action='store_true', help='try to download the tools if missing') + + group.add_option('--use-config', action='store', default=None, + metavar='CFG', dest='use_config', + help='force the configuration parameters by importing ' + 'CFG.py. Several modules may be provided (comma ' + 'separated).') + group.add_option('--use-config-dir', action='store', default=DEFAULT_DIR, + metavar='CFG_DIR', dest='use_config_dir', + help='path or url where to find the configuration file') + +def download_check(node): + """ + Hook to check for the tools which are downloaded. Replace with your function if necessary. 
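+
+	For example, a digest whitelist could be hooked in along these lines
+	(a sketch; KNOWN_GOOD is a hypothetical set of hex digests)::
+
+		import hashlib
+		from waflib.extras import use_config
+
+		def checked(node):
+			if hashlib.sha256(node.read('rb')).hexdigest() not in KNOWN_GOOD:
+				raise ValueError('untrusted tool: %r' % node)
+		use_config.download_check = checked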
+ """ + pass + + +def download_tool(tool, force=False, ctx=None): + """ + Download a Waf tool from the remote repository defined in :py:const:`waflib.extras.use_config.remote_repo`:: + + $ waf configure --download + """ + for x in Utils.to_list(remote_repo): + for sub in Utils.to_list(remote_locs): + url = '/'.join((x, sub, tool + '.py')) + try: + web = urlopen(url) + try: + if web.getcode() != 200: + continue + except AttributeError: + pass + except Exception: + # on python3 urlopen throws an exception + # python 2.3 does not have getcode and throws an exception to fail + continue + else: + tmp = ctx.root.make_node(os.sep.join((Context.waf_dir, 'waflib', 'extras', tool + '.py'))) + tmp.write(web.read(), 'wb') + Logs.warn('Downloaded %s from %s', tool, url) + download_check(tmp) + try: + module = Context.load_tool(tool) + except Exception: + Logs.warn('The tool %s from %s is unusable', tool, url) + try: + tmp.delete() + except Exception: + pass + continue + return module + + raise Errors.WafError('Could not load the Waf tool') + +def load_tool(tool, tooldir=None, ctx=None, with_sys_path=True): + try: + module = Context.load_tool_default(tool, tooldir, ctx, with_sys_path) + except ImportError as e: + if not ctx or not hasattr(Options.options, 'download'): + Logs.error('Could not load %r during options phase (download unavailable at this point)' % tool) + raise + if Options.options.download: + module = download_tool(tool, ctx=ctx) + if not module: + ctx.fatal('Could not load the Waf tool %r or download a suitable replacement from the repository (sys.path %r)\n%s' % (tool, sys.path, e)) + else: + ctx.fatal('Could not load the Waf tool %r from %r (try the --download option?):\n%s' % (tool, sys.path, e)) + return module + +Context.load_tool_default = Context.load_tool +Context.load_tool = load_tool +Configure.download_tool = download_tool + +def configure(self): + opts = self.options + use_cfg = opts.use_config + if use_cfg is None: + return + url = urlparse(opts.use_config_dir) + kwargs = {} + if url.scheme: + kwargs['download'] = True + kwargs['remote_url'] = url.geturl() + # search first with the exact url, else try with +'/wafcfg' + kwargs['remote_locs'] = ['', DEFAULT_DIR] + tooldir = url.geturl() + ' ' + DEFAULT_DIR + for cfg in use_cfg.split(','): + Logs.pprint('NORMAL', "Searching configuration '%s'..." % cfg) + self.load(cfg, tooldir=tooldir, **kwargs) + self.start_msg('Checking for configuration') + self.end_msg(use_cfg) + diff -Nru lilv-0.24.4~dfsg0/waflib/extras/valadoc.py lilv-0.24.6/waflib/extras/valadoc.py --- lilv-0.24.4~dfsg0/waflib/extras/valadoc.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/extras/valadoc.py 2019-06-06 20:19:08.000000000 +0000 @@ -0,0 +1,140 @@ +#! /usr/bin/env python +# encoding: UTF-8 +# Nicolas Joseph 2009 + +""" +ported from waf 1.5: +TODO: tabs vs spaces +""" + +from waflib import Task, Utils, Errors, Logs +from waflib.TaskGen import feature + +VALADOC_STR = '${VALADOC}' + +class valadoc(Task.Task): + vars = ['VALADOC', 'VALADOCFLAGS'] + color = 'BLUE' + after = ['cprogram', 'cstlib', 'cshlib', 'cxxprogram', 'cxxstlib', 'cxxshlib'] + quiet = True # no outputs .. 
this is weird + + def __init__(self, *k, **kw): + Task.Task.__init__(self, *k, **kw) + self.output_dir = '' + self.doclet = '' + self.package_name = '' + self.package_version = '' + self.files = [] + self.vapi_dirs = [] + self.protected = True + self.private = False + self.inherit = False + self.deps = False + self.vala_defines = [] + self.vala_target_glib = None + self.enable_non_null_experimental = False + self.force = False + + def run(self): + if not self.env['VALADOCFLAGS']: + self.env['VALADOCFLAGS'] = '' + cmd = [Utils.subst_vars(VALADOC_STR, self.env)] + cmd.append ('-o %s' % self.output_dir) + if getattr(self, 'doclet', None): + cmd.append ('--doclet %s' % self.doclet) + cmd.append ('--package-name %s' % self.package_name) + if getattr(self, 'package_version', None): + cmd.append ('--package-version %s' % self.package_version) + if getattr(self, 'packages', None): + for package in self.packages: + cmd.append ('--pkg %s' % package) + if getattr(self, 'vapi_dirs', None): + for vapi_dir in self.vapi_dirs: + cmd.append ('--vapidir %s' % vapi_dir) + if not getattr(self, 'protected', None): + cmd.append ('--no-protected') + if getattr(self, 'private', None): + cmd.append ('--private') + if getattr(self, 'inherit', None): + cmd.append ('--inherit') + if getattr(self, 'deps', None): + cmd.append ('--deps') + if getattr(self, 'vala_defines', None): + for define in self.vala_defines: + cmd.append ('--define %s' % define) + if getattr(self, 'vala_target_glib', None): + cmd.append ('--target-glib=%s' % self.vala_target_glib) + if getattr(self, 'enable_non_null_experimental', None): + cmd.append ('--enable-non-null-experimental') + if getattr(self, 'force', None): + cmd.append ('--force') + cmd.append (' '.join ([x.abspath() for x in self.files])) + return self.generator.bld.exec_command(' '.join(cmd)) + +@feature('valadoc') +def process_valadoc(self): + """ + Generate API documentation from Vala source code with valadoc + + doc = bld( + features = 'valadoc', + output_dir = '../doc/html', + package_name = 'vala-gtk-example', + package_version = '1.0.0', + packages = 'gtk+-2.0', + vapi_dirs = '../vapi', + force = True + ) + + path = bld.path.find_dir ('../src') + doc.files = path.ant_glob (incl='**/*.vala') + """ + + task = self.create_task('valadoc') + if getattr(self, 'output_dir', None): + task.output_dir = self.path.find_or_declare(self.output_dir).abspath() + else: + Errors.WafError('no output directory') + if getattr(self, 'doclet', None): + task.doclet = self.doclet + else: + Errors.WafError('no doclet directory') + if getattr(self, 'package_name', None): + task.package_name = self.package_name + else: + Errors.WafError('no package name') + if getattr(self, 'package_version', None): + task.package_version = self.package_version + if getattr(self, 'packages', None): + task.packages = Utils.to_list(self.packages) + if getattr(self, 'vapi_dirs', None): + vapi_dirs = Utils.to_list(self.vapi_dirs) + for vapi_dir in vapi_dirs: + try: + task.vapi_dirs.append(self.path.find_dir(vapi_dir).abspath()) + except AttributeError: + Logs.warn('Unable to locate Vala API directory: %r', vapi_dir) + if getattr(self, 'files', None): + task.files = self.files + else: + Errors.WafError('no input file') + if getattr(self, 'protected', None): + task.protected = self.protected + if getattr(self, 'private', None): + task.private = self.private + if getattr(self, 'inherit', None): + task.inherit = self.inherit + if getattr(self, 'deps', None): + task.deps = self.deps + if getattr(self, 'vala_defines', None): + 
task.vala_defines = Utils.to_list(self.vala_defines) + if getattr(self, 'vala_target_glib', None): + task.vala_target_glib = self.vala_target_glib + if getattr(self, 'enable_non_null_experimental', None): + task.enable_non_null_experimental = self.enable_non_null_experimental + if getattr(self, 'force', None): + task.force = self.force + +def configure(conf): + conf.find_program('valadoc', errmsg='You must install valadoc for generate the API documentation') + diff -Nru lilv-0.24.4~dfsg0/waflib/extras/waf_xattr.py lilv-0.24.6/waflib/extras/waf_xattr.py --- lilv-0.24.4~dfsg0/waflib/extras/waf_xattr.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/extras/waf_xattr.py 2019-06-06 20:19:08.000000000 +0000 @@ -0,0 +1,150 @@ +#! /usr/bin/env python +# encoding: utf-8 + +""" +Use extended attributes instead of database files + +1. Input files will be made writable +2. This is only for systems providing extended filesystem attributes +3. By default, hashes are calculated only if timestamp/size change (HASH_CACHE below) +4. The module enables "deep_inputs" on all tasks by propagating task signatures +5. This module also skips task signature comparisons for task code changes due to point 4. +6. This module is for Python3/Linux only, but it could be extended to Python2/other systems + using the xattr library +7. For projects in which tasks always declare output files, it should be possible to + store the rest of build context attributes on output files (imp_sigs, raw_deps and node_deps) + but this is not done here + +On a simple C++ project benchmark, the variations before and after adding waf_xattr.py were observed: +total build time: 20s -> 22s +no-op build time: 2.4s -> 1.8s +pickle file size: 2.9MB -> 2.6MB +""" + +import os +from waflib import Logs, Node, Task, Utils, Errors +from waflib.Task import SKIP_ME, RUN_ME, CANCEL_ME, ASK_LATER, SKIPPED, MISSING + +HASH_CACHE = True +SIG_VAR = 'user.waf.sig' +SEP = ','.encode() +TEMPLATE = '%b%d,%d'.encode() + +try: + PermissionError +except NameError: + PermissionError = IOError + +def getxattr(self): + return os.getxattr(self.abspath(), SIG_VAR) + +def setxattr(self, val): + os.setxattr(self.abspath(), SIG_VAR, val) + +def h_file(self): + try: + ret = getxattr(self) + except OSError: + if HASH_CACHE: + st = os.stat(self.abspath()) + mtime = st.st_mtime + size = st.st_size + else: + if len(ret) == 16: + # for build directory files + return ret + + if HASH_CACHE: + # check if timestamp and mtime match to avoid re-hashing + st = os.stat(self.abspath()) + mtime, size = ret[16:].split(SEP) + if int(1000 * st.st_mtime) == int(mtime) and st.st_size == int(size): + return ret[:16] + + ret = Utils.h_file(self.abspath()) + if HASH_CACHE: + val = TEMPLATE % (ret, int(1000 * st.st_mtime), int(st.st_size)) + try: + setxattr(self, val) + except PermissionError: + os.chmod(self.abspath(), st.st_mode | 128) + setxattr(self, val) + return ret + +def runnable_status(self): + bld = self.generator.bld + if bld.is_install < 0: + return SKIP_ME + + for t in self.run_after: + if not t.hasrun: + return ASK_LATER + elif t.hasrun < SKIPPED: + # a dependency has an error + return CANCEL_ME + + # first compute the signature + try: + new_sig = self.signature() + except Errors.TaskNotReady: + return ASK_LATER + + if not self.outputs: + # compare the signature to a signature computed previously + # this part is only for tasks with no output files + key = self.uid() + try: + prev_sig = bld.task_sigs[key] + except KeyError: + Logs.debug('task: task %r must run: it was 
never run before or the task code changed', self) + return RUN_ME + if new_sig != prev_sig: + Logs.debug('task: task %r must run: the task signature changed', self) + return RUN_ME + + # compare the signatures of the outputs to make a decision + for node in self.outputs: + try: + sig = node.h_file() + except EnvironmentError: + Logs.debug('task: task %r must run: an output node does not exist', self) + return RUN_ME + if sig != new_sig: + Logs.debug('task: task %r must run: an output node is stale', self) + return RUN_ME + + return (self.always_run and RUN_ME) or SKIP_ME + +def post_run(self): + bld = self.generator.bld + sig = self.signature() + for node in self.outputs: + if not node.exists(): + self.hasrun = MISSING + self.err_msg = '-> missing file: %r' % node.abspath() + raise Errors.WafError(self.err_msg) + os.setxattr(node.abspath(), 'user.waf.sig', sig) + if not self.outputs: + # only for task with no outputs + bld.task_sigs[self.uid()] = sig + if not self.keep_last_cmd: + try: + del self.last_cmd + except AttributeError: + pass + +try: + os.getxattr +except AttributeError: + pass +else: + h_file.__doc__ = Node.Node.h_file.__doc__ + + # keep file hashes as file attributes + Node.Node.h_file = h_file + + # enable "deep_inputs" on all tasks + Task.Task.runnable_status = runnable_status + Task.Task.post_run = post_run + Task.Task.sig_deep_inputs = Utils.nada + diff -Nru lilv-0.24.4~dfsg0/waflib/extras/why.py lilv-0.24.6/waflib/extras/why.py --- lilv-0.24.4~dfsg0/waflib/extras/why.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/extras/why.py 2019-06-06 20:19:08.000000000 +0000 @@ -0,0 +1,78 @@ +#! /usr/bin/env python +# encoding: utf-8 +# Thomas Nagy, 2010 (ita) + +""" +This tool modifies the task signature scheme to store and obtain +information about the task execution (why it must run, etc):: + + def configure(conf): + conf.load('why') + +After adding the tool, a full rebuild is necessary: +waf clean build --zones=task +""" + +from waflib import Task, Utils, Logs, Errors + +def signature(self): + # compute the result one time, and suppose the scan_signature will give the good result + try: + return self.cache_sig + except AttributeError: + pass + + self.m = Utils.md5() + self.m.update(self.hcode) + id_sig = self.m.digest() + + # explicit deps + self.m = Utils.md5() + self.sig_explicit_deps() + exp_sig = self.m.digest() + + # env vars + self.m = Utils.md5() + self.sig_vars() + var_sig = self.m.digest() + + # implicit deps / scanner results + self.m = Utils.md5() + if self.scan: + try: + self.sig_implicit_deps() + except Errors.TaskRescan: + return self.signature() + impl_sig = self.m.digest() + + ret = self.cache_sig = impl_sig + id_sig + exp_sig + var_sig + return ret + + +Task.Task.signature = signature + +old = Task.Task.runnable_status +def runnable_status(self): + ret = old(self) + if ret == Task.RUN_ME: + try: + old_sigs = self.generator.bld.task_sigs[self.uid()] + except (KeyError, AttributeError): + Logs.debug("task: task must run as no previous signature exists") + else: + new_sigs = self.cache_sig + def v(x): + return Utils.to_hex(x) + + Logs.debug('Task %r', self) + msgs = ['* Implicit or scanner dependency', '* Task code', '* Source file, explicit or manual dependency', '* Configuration data variable'] + tmp = 'task: -> %s: %s %s' + for x in range(len(msgs)): + l = len(Utils.SIG_NIL) + a = new_sigs[x*l : (x+1)*l] + b = old_sigs[x*l : (x+1)*l] + if (a != b): + Logs.debug(tmp, msgs[x].ljust(35), v(a), v(b)) + return ret +Task.Task.runnable_status = 
runnable_status + diff -Nru lilv-0.24.4~dfsg0/waflib/extras/win32_opts.py lilv-0.24.6/waflib/extras/win32_opts.py --- lilv-0.24.4~dfsg0/waflib/extras/win32_opts.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/extras/win32_opts.py 2019-06-06 20:19:08.000000000 +0000 @@ -0,0 +1,170 @@ +#! /usr/bin/env python +# encoding: utf-8 + +""" +Windows-specific optimizations + +This module can help reducing the overhead of listing files on windows +(more than 10000 files). Python 3.5 already provides the listdir +optimization though. +""" + +import os +from waflib import Utils, Build, Node, Logs + +try: + TP = '%s\\*'.decode('ascii') +except AttributeError: + TP = '%s\\*' + +if Utils.is_win32: + from waflib.Tools import md5_tstamp + import ctypes, ctypes.wintypes + + FindFirstFile = ctypes.windll.kernel32.FindFirstFileW + FindNextFile = ctypes.windll.kernel32.FindNextFileW + FindClose = ctypes.windll.kernel32.FindClose + FILE_ATTRIBUTE_DIRECTORY = 0x10 + INVALID_HANDLE_VALUE = -1 + UPPER_FOLDERS = ('.', '..') + try: + UPPER_FOLDERS = [unicode(x) for x in UPPER_FOLDERS] + except NameError: + pass + + def cached_hash_file(self): + try: + cache = self.ctx.cache_listdir_cache_hash_file + except AttributeError: + cache = self.ctx.cache_listdir_cache_hash_file = {} + + if id(self.parent) in cache: + try: + t = cache[id(self.parent)][self.name] + except KeyError: + raise IOError('Not a file') + else: + # an opportunity to list the files and the timestamps at once + findData = ctypes.wintypes.WIN32_FIND_DATAW() + find = FindFirstFile(TP % self.parent.abspath(), ctypes.byref(findData)) + + if find == INVALID_HANDLE_VALUE: + cache[id(self.parent)] = {} + raise IOError('Not a file') + + cache[id(self.parent)] = lst_files = {} + try: + while True: + if findData.cFileName not in UPPER_FOLDERS: + thatsadir = findData.dwFileAttributes & FILE_ATTRIBUTE_DIRECTORY + if not thatsadir: + ts = findData.ftLastWriteTime + d = (ts.dwLowDateTime << 32) | ts.dwHighDateTime + lst_files[str(findData.cFileName)] = d + if not FindNextFile(find, ctypes.byref(findData)): + break + except Exception: + cache[id(self.parent)] = {} + raise IOError('Not a file') + finally: + FindClose(find) + t = lst_files[self.name] + + fname = self.abspath() + if fname in Build.hashes_md5_tstamp: + if Build.hashes_md5_tstamp[fname][0] == t: + return Build.hashes_md5_tstamp[fname][1] + + try: + fd = os.open(fname, os.O_BINARY | os.O_RDONLY | os.O_NOINHERIT) + except OSError: + raise IOError('Cannot read from %r' % fname) + f = os.fdopen(fd, 'rb') + m = Utils.md5() + rb = 1 + try: + while rb: + rb = f.read(200000) + m.update(rb) + finally: + f.close() + + # ensure that the cache is overwritten + Build.hashes_md5_tstamp[fname] = (t, m.digest()) + return m.digest() + Node.Node.cached_hash_file = cached_hash_file + + def get_bld_sig_win32(self): + try: + return self.ctx.hash_cache[id(self)] + except KeyError: + pass + except AttributeError: + self.ctx.hash_cache = {} + self.ctx.hash_cache[id(self)] = ret = Utils.h_file(self.abspath()) + return ret + Node.Node.get_bld_sig = get_bld_sig_win32 + + def isfile_cached(self): + # optimize for nt.stat calls, assuming there are many files for few folders + try: + cache = self.__class__.cache_isfile_cache + except AttributeError: + cache = self.__class__.cache_isfile_cache = {} + + try: + c1 = cache[id(self.parent)] + except KeyError: + c1 = cache[id(self.parent)] = [] + + curpath = self.parent.abspath() + findData = ctypes.wintypes.WIN32_FIND_DATAW() + find = FindFirstFile(TP % curpath, 
ctypes.byref(findData)) + + if find == INVALID_HANDLE_VALUE: + Logs.error("invalid win32 handle isfile_cached %r", self.abspath()) + return os.path.isfile(self.abspath()) + + try: + while True: + if findData.cFileName not in UPPER_FOLDERS: + thatsadir = findData.dwFileAttributes & FILE_ATTRIBUTE_DIRECTORY + if not thatsadir: + c1.append(str(findData.cFileName)) + if not FindNextFile(find, ctypes.byref(findData)): + break + except Exception as e: + Logs.error('exception while listing a folder %r %r', self.abspath(), e) + return os.path.isfile(self.abspath()) + finally: + FindClose(find) + return self.name in c1 + Node.Node.isfile_cached = isfile_cached + + def find_or_declare_win32(self, lst): + # assuming that "find_or_declare" is called before the build starts, remove the calls to os.path.isfile + if isinstance(lst, str): + lst = [x for x in Utils.split_path(lst) if x and x != '.'] + + node = self.get_bld().search_node(lst) + if node: + if not node.isfile_cached(): + try: + node.parent.mkdir() + except OSError: + pass + return node + self = self.get_src() + node = self.find_node(lst) + if node: + if not node.isfile_cached(): + try: + node.parent.mkdir() + except OSError: + pass + return node + node = self.get_bld().make_node(lst) + node.parent.mkdir() + return node + Node.Node.find_or_declare = find_or_declare_win32 + diff -Nru lilv-0.24.4~dfsg0/waflib/extras/wix.py lilv-0.24.6/waflib/extras/wix.py --- lilv-0.24.4~dfsg0/waflib/extras/wix.py 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/extras/wix.py 2019-06-06 20:19:08.000000000 +0000 @@ -0,0 +1,87 @@ +#!/usr/bin/python +# encoding: utf-8 +# vim: tabstop=4 noexpandtab + +""" +Windows Installer XML Tool (WiX) + +.wxs --- candle ---> .wxobj --- light ---> .msi + +bld(features='wix', some.wxs, gen='some.msi', candleflags=[..], lightflags=[..]) + +bld(features='wix', source=['bundle.wxs','WixBalExtension'], gen='setup.exe', candleflags=[..]) +""" + +import os, copy +from waflib import TaskGen +from waflib import Task +from waflib.Utils import winreg + +class candle(Task.Task): + run_str = '${CANDLE} -nologo ${CANDLEFLAGS} -out ${TGT} ${SRC[0].abspath()}', + +class light(Task.Task): + run_str = "${LIGHT} -nologo -b ${SRC[0].parent.abspath()} ${LIGHTFLAGS} -out ${TGT} ${SRC[0].abspath()}" + +@TaskGen.feature('wix') +@TaskGen.before_method('process_source') +def wix(self): + #X.wxs -> ${SRC} for CANDLE + #X.wxobj -> ${SRC} for LIGHT + #X.dll -> -ext X in ${LIGHTFLAGS} + #X.wxl -> wixui.wixlib -loc X.wxl in ${LIGHTFLAGS} + wxobj = [] + wxs = [] + exts = [] + wxl = [] + rest = [] + for x in self.source: + if x.endswith('.wxobj'): + wxobj.append(x) + elif x.endswith('.wxs'): + wxobj.append(self.path.find_or_declare(x[:-4]+'.wxobj')) + wxs.append(x) + elif x.endswith('.dll'): + exts.append(x[:-4]) + elif '.' 
not in x: + exts.append(x) + elif x.endswith('.wxl'): + wxl.append(x) + else: + rest.append(x) + self.source = self.to_nodes(rest) #.wxs + + cndl = self.create_task('candle', self.to_nodes(wxs), self.to_nodes(wxobj)) + lght = self.create_task('light', self.to_nodes(wxobj), self.path.find_or_declare(self.gen)) + + cndl.env.CANDLEFLAGS = copy.copy(getattr(self,'candleflags',[])) + lght.env.LIGHTFLAGS = copy.copy(getattr(self,'lightflags',[])) + + for x in wxl: + lght.env.append_value('LIGHTFLAGS','wixui.wixlib') + lght.env.append_value('LIGHTFLAGS','-loc') + lght.env.append_value('LIGHTFLAGS',x) + for x in exts: + cndl.env.append_value('CANDLEFLAGS','-ext') + cndl.env.append_value('CANDLEFLAGS',x) + lght.env.append_value('LIGHTFLAGS','-ext') + lght.env.append_value('LIGHTFLAGS',x) + +#wix_bin_path() +def wix_bin_path(): + basekey = r"SOFTWARE\Microsoft\.NETFramework\AssemblyFolders" + query = winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, basekey) + cnt=winreg.QueryInfoKey(query)[0] + thiskey = r'C:\Program Files (x86)\WiX Toolset v3.10\SDK' + for i in range(cnt-1,-1,-1): + thiskey = winreg.EnumKey(query,i) + if 'WiX' in thiskey: + break + winreg.CloseKey(query) + return os.path.normpath(winreg.QueryValue(winreg.HKEY_LOCAL_MACHINE, basekey+r'\\'+thiskey)+'..\\bin') + +def configure(ctx): + path_list=[wix_bin_path()] + ctx.find_program('candle', var='CANDLE', mandatory=True, path_list = path_list) + ctx.find_program('light', var='LIGHT', mandatory=True, path_list = path_list) + diff -Nru lilv-0.24.4~dfsg0/waflib/extras/xcode6.py lilv-0.24.6/waflib/extras/xcode6.py --- lilv-0.24.4~dfsg0/waflib/extras/xcode6.py 2018-06-27 05:22:05.000000000 +0000 +++ lilv-0.24.6/waflib/extras/xcode6.py 2019-06-06 20:19:08.000000000 +0000 @@ -1,448 +1,727 @@ #! /usr/bin/env python # encoding: utf-8 -# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file +# XCode 3/XCode 4/XCode 6/Xcode 7 generator for Waf +# Based on work by Nicolas Mercier 2011 +# Extended by Simon Warg 2015, https://github.com/mimon +# XCode project file format based on http://www.monobjc.net/xcode-project-file-format.html + +""" +See playground/xcode6/ for usage examples. 
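+
+A custom project configuration can be set up in the configure() stage before
+loading the tool; a sketch (the ARCHS values are placeholders):
+
+    def configure(conf):
+        conf.env.PROJ_CONFIGURATION = {
+            'Debug': {'ARCHS': 'x86_64'},
+            'Release': {'ARCHS': 'x86_64'},
+        }
+        conf.load('xcode6')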
+ +""" + +from waflib import Context, TaskGen, Build, Utils, Errors, Logs +import os, sys + +# FIXME too few extensions +XCODE_EXTS = ['.c', '.cpp', '.m', '.mm'] + +HEADERS_GLOB = '**/(*.h|*.hpp|*.H|*.inl)' + +MAP_EXT = { + '': "folder", + '.h' : "sourcecode.c.h", + + '.hh': "sourcecode.cpp.h", + '.inl': "sourcecode.cpp.h", + '.hpp': "sourcecode.cpp.h", + + '.c': "sourcecode.c.c", + + '.m': "sourcecode.c.objc", + + '.mm': "sourcecode.cpp.objcpp", + + '.cc': "sourcecode.cpp.cpp", + + '.cpp': "sourcecode.cpp.cpp", + '.C': "sourcecode.cpp.cpp", + '.cxx': "sourcecode.cpp.cpp", + '.c++': "sourcecode.cpp.cpp", + + '.l': "sourcecode.lex", # luthor + '.ll': "sourcecode.lex", + + '.y': "sourcecode.yacc", + '.yy': "sourcecode.yacc", + + '.plist': "text.plist.xml", + ".nib": "wrapper.nib", + ".xib": "text.xib", +} + +# Used in PBXNativeTarget elements +PRODUCT_TYPE_APPLICATION = 'com.apple.product-type.application' +PRODUCT_TYPE_FRAMEWORK = 'com.apple.product-type.framework' +PRODUCT_TYPE_EXECUTABLE = 'com.apple.product-type.tool' +PRODUCT_TYPE_LIB_STATIC = 'com.apple.product-type.library.static' +PRODUCT_TYPE_LIB_DYNAMIC = 'com.apple.product-type.library.dynamic' +PRODUCT_TYPE_EXTENSION = 'com.apple.product-type.kernel-extension' +PRODUCT_TYPE_IOKIT = 'com.apple.product-type.kernel-extension.iokit' + +# Used in PBXFileReference elements +FILE_TYPE_APPLICATION = 'wrapper.cfbundle' +FILE_TYPE_FRAMEWORK = 'wrapper.framework' +FILE_TYPE_LIB_DYNAMIC = 'compiled.mach-o.dylib' +FILE_TYPE_LIB_STATIC = 'archive.ar' +FILE_TYPE_EXECUTABLE = 'compiled.mach-o.executable' + +# Tuple packs of the above +TARGET_TYPE_FRAMEWORK = (PRODUCT_TYPE_FRAMEWORK, FILE_TYPE_FRAMEWORK, '.framework') +TARGET_TYPE_APPLICATION = (PRODUCT_TYPE_APPLICATION, FILE_TYPE_APPLICATION, '.app') +TARGET_TYPE_DYNAMIC_LIB = (PRODUCT_TYPE_LIB_DYNAMIC, FILE_TYPE_LIB_DYNAMIC, '.dylib') +TARGET_TYPE_STATIC_LIB = (PRODUCT_TYPE_LIB_STATIC, FILE_TYPE_LIB_STATIC, '.a') +TARGET_TYPE_EXECUTABLE = (PRODUCT_TYPE_EXECUTABLE, FILE_TYPE_EXECUTABLE, '') + +# Maps target type string to its data +TARGET_TYPES = { + 'framework': TARGET_TYPE_FRAMEWORK, + 'app': TARGET_TYPE_APPLICATION, + 'dylib': TARGET_TYPE_DYNAMIC_LIB, + 'stlib': TARGET_TYPE_STATIC_LIB, + 'exe' :TARGET_TYPE_EXECUTABLE, +} -from waflib import Context,TaskGen,Build,Utils,Errors,Logs -import os,sys -XCODE_EXTS=['.c','.cpp','.m','.mm'] -HEADERS_GLOB='**/(*.h|*.hpp|*.H|*.inl)' -MAP_EXT={'':"folder",'.h':"sourcecode.c.h",'.hh':"sourcecode.cpp.h",'.inl':"sourcecode.cpp.h",'.hpp':"sourcecode.cpp.h",'.c':"sourcecode.c.c",'.m':"sourcecode.c.objc",'.mm':"sourcecode.cpp.objcpp",'.cc':"sourcecode.cpp.cpp",'.cpp':"sourcecode.cpp.cpp",'.C':"sourcecode.cpp.cpp",'.cxx':"sourcecode.cpp.cpp",'.c++':"sourcecode.cpp.cpp",'.l':"sourcecode.lex",'.ll':"sourcecode.lex",'.y':"sourcecode.yacc",'.yy':"sourcecode.yacc",'.plist':"text.plist.xml",".nib":"wrapper.nib",".xib":"text.xib",} -PRODUCT_TYPE_APPLICATION='com.apple.product-type.application' -PRODUCT_TYPE_FRAMEWORK='com.apple.product-type.framework' -PRODUCT_TYPE_EXECUTABLE='com.apple.product-type.tool' -PRODUCT_TYPE_LIB_STATIC='com.apple.product-type.library.static' -PRODUCT_TYPE_LIB_DYNAMIC='com.apple.product-type.library.dynamic' -PRODUCT_TYPE_EXTENSION='com.apple.product-type.kernel-extension' -PRODUCT_TYPE_IOKIT='com.apple.product-type.kernel-extension.iokit' -FILE_TYPE_APPLICATION='wrapper.cfbundle' -FILE_TYPE_FRAMEWORK='wrapper.framework' -FILE_TYPE_LIB_DYNAMIC='compiled.mach-o.dylib' -FILE_TYPE_LIB_STATIC='archive.ar' 
-FILE_TYPE_EXECUTABLE='compiled.mach-o.executable'
-TARGET_TYPE_FRAMEWORK=(PRODUCT_TYPE_FRAMEWORK,FILE_TYPE_FRAMEWORK,'.framework')
-TARGET_TYPE_APPLICATION=(PRODUCT_TYPE_APPLICATION,FILE_TYPE_APPLICATION,'.app')
-TARGET_TYPE_DYNAMIC_LIB=(PRODUCT_TYPE_LIB_DYNAMIC,FILE_TYPE_LIB_DYNAMIC,'.dylib')
-TARGET_TYPE_STATIC_LIB=(PRODUCT_TYPE_LIB_STATIC,FILE_TYPE_LIB_STATIC,'.a')
-TARGET_TYPE_EXECUTABLE=(PRODUCT_TYPE_EXECUTABLE,FILE_TYPE_EXECUTABLE,'')
-TARGET_TYPES={'framework':TARGET_TYPE_FRAMEWORK,'app':TARGET_TYPE_APPLICATION,'dylib':TARGET_TYPE_DYNAMIC_LIB,'stlib':TARGET_TYPE_STATIC_LIB,'exe':TARGET_TYPE_EXECUTABLE,}

 def delete_invalid_values(dct):
-	for k,v in list(dct.items()):
-		if isinstance(v,dict)or isinstance(v,set):
+	""" Deletes entries that are dictionaries or sets """
+	for k, v in list(dct.items()):
+		if isinstance(v, dict) or isinstance(v, set):
 			del dct[k]
 	return dct
+
+"""
+Configuration of the global project settings. Sets an environment variable 'PROJ_CONFIGURATION',
+which maps each configuration name to a dictionary of build settings.
+E.g.:
+env.PROJ_CONFIGURATION = {
+	'Debug': {
+		'ARCHS': 'x86',
+		...
+	},
+	'Release': {
+		'ARCHS': 'x86_64',
+		...
+	}
+}
+The user can define a completely customized dictionary in the configure() stage. Otherwise a default
+Debug/Release pair is created from the current env.
+"""
 def configure(self):
 	if not self.env.PROJ_CONFIGURATION:
 		self.to_log("A default project configuration was created since no custom one was given in the configure(conf) stage. Define your custom project settings by adding PROJ_CONFIGURATION to env. The env.PROJ_CONFIGURATION must be a dictionary with at least one key, where each key is the configuration name, and the value is a dictionary of key/value settings.\n")
-	if'cfg_files'in self.env:
-		self.env.INCLUDES=Utils.to_list(self.env.INCLUDES)+[os.path.abspath(os.path.dirname(f))for f in self.env.cfg_files]
-	if'PROJ_CONFIGURATION'not in self.env:
-		defaults=delete_invalid_values(self.env.get_merged_dict())
-		self.env.PROJ_CONFIGURATION={"Debug":defaults,"Release":defaults,}
-	defaults_required=[('PRODUCT_NAME','$(TARGET_NAME)')]
+
+	# Check for any config files added by the 'c_config' tool.
+	if 'cfg_files' in self.env:
+		self.env.INCLUDES = Utils.to_list(self.env.INCLUDES) + [os.path.abspath(os.path.dirname(f)) for f in self.env.cfg_files]
+
+	# Create a default project configuration?
+	if 'PROJ_CONFIGURATION' not in self.env:
+		defaults = delete_invalid_values(self.env.get_merged_dict())
+		self.env.PROJ_CONFIGURATION = {
+			"Debug": defaults,
+			"Release": defaults,
+		}
+
+	# Some build settings are required to be present by XCode. We will supply default values
+	# if the user hasn't defined any.
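+	# For illustration only, a wscript configure() might provide a custom
+	# mapping of its own (the optimization values below are hypothetical):
+	#
+	#	conf.env.PROJ_CONFIGURATION = {
+	#		'Debug':   {'PRODUCT_NAME': '$(TARGET_NAME)', 'GCC_OPTIMIZATION_LEVEL': '0'},
+	#		'Release': {'PRODUCT_NAME': '$(TARGET_NAME)', 'GCC_OPTIMIZATION_LEVEL': 's'},
+	#	}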
+ defaults_required = [('PRODUCT_NAME', '$(TARGET_NAME)')] for cfgname,settings in self.env.PROJ_CONFIGURATION.items(): - for default_var,default_val in defaults_required: + for default_var, default_val in defaults_required: if default_var not in settings: - settings[default_var]=default_val - if not isinstance(self.env.PROJ_CONFIGURATION,dict): + settings[default_var] = default_val + + # Error check customization + if not isinstance(self.env.PROJ_CONFIGURATION, dict): raise Errors.ConfigurationError("The env.PROJ_CONFIGURATION must be a dictionary with at least one key, where each key is the configuration name, and the value is a dictionary of key/value settings.") -part1=0 -part2=10000 -part3=0 -id=562000999 + +part1 = 0 +part2 = 10000 +part3 = 0 +id = 562000999 def newid(): global id - id+=1 - return"%04X%04X%04X%012d"%(0,10000,0,id) + id += 1 + return "%04X%04X%04X%012d" % (0, 10000, 0, id) + +""" +Represents a tree node in the XCode project plist file format. +When written to a file, all attributes of XCodeNode are stringified together with +its value. However, attributes starting with an underscore _ are ignored +during that process and allows you to store arbitrary values that are not supposed +to be written out. +""" class XCodeNode(object): def __init__(self): - self._id=newid() - self._been_written=False - def tostring(self,value): - if isinstance(value,dict): - result="{\n" + self._id = newid() + self._been_written = False + + def tostring(self, value): + if isinstance(value, dict): + result = "{\n" for k,v in value.items(): - result=result+"\t\t\t%s = %s;\n"%(k,self.tostring(v)) - result=result+"\t\t}" + result = result + "\t\t\t%s = %s;\n" % (k, self.tostring(v)) + result = result + "\t\t}" return result - elif isinstance(value,str): - return"\"%s\""%value - elif isinstance(value,list): - result="(\n" + elif isinstance(value, str): + return "\"%s\"" % value + elif isinstance(value, list): + result = "(\n" for i in value: - result=result+"\t\t\t%s,\n"%self.tostring(i) - result=result+"\t\t)" + result = result + "\t\t\t%s,\n" % self.tostring(i) + result = result + "\t\t)" return result - elif isinstance(value,XCodeNode): + elif isinstance(value, XCodeNode): return value._id else: return str(value) - def write_recursive(self,value,file): - if isinstance(value,dict): + + def write_recursive(self, value, file): + if isinstance(value, dict): for k,v in value.items(): - self.write_recursive(v,file) - elif isinstance(value,list): + self.write_recursive(v, file) + elif isinstance(value, list): for i in value: - self.write_recursive(i,file) - elif isinstance(value,XCodeNode): + self.write_recursive(i, file) + elif isinstance(value, XCodeNode): value.write(file) - def write(self,file): + + def write(self, file): if not self._been_written: - self._been_written=True + self._been_written = True for attribute,value in self.__dict__.items(): - if attribute[0]!='_': - self.write_recursive(value,file) - w=file.write - w("\t%s = {\n"%self._id) - w("\t\tisa = %s;\n"%self.__class__.__name__) + if attribute[0] != '_': + self.write_recursive(value, file) + w = file.write + w("\t%s = {\n" % self._id) + w("\t\tisa = %s;\n" % self.__class__.__name__) for attribute,value in self.__dict__.items(): - if attribute[0]!='_': - w("\t\t%s = %s;\n"%(attribute,self.tostring(value))) + if attribute[0] != '_': + w("\t\t%s = %s;\n" % (attribute, self.tostring(value))) w("\t};\n\n") + +# Configurations class XCBuildConfiguration(XCodeNode): - def __init__(self,name,settings={},env=None): + def __init__(self, name, 
settings = {}, env=None):
 		XCodeNode.__init__(self)
-		self.baseConfigurationReference=""
-		self.buildSettings=settings
-		self.name=name
+		self.baseConfigurationReference = ""
+		self.buildSettings = settings
+		self.name = name
 		if env and env.ARCH:
-			settings['ARCHS']=" ".join(env.ARCH)
+			settings['ARCHS'] = " ".join(env.ARCH)
+
+
 class XCConfigurationList(XCodeNode):
-	def __init__(self,configlst):
+	def __init__(self, configlst):
+		""" :param configlst: list of XCBuildConfiguration objects """
 		XCodeNode.__init__(self)
-		self.buildConfigurations=configlst
-		self.defaultConfigurationIsVisible=0
-		self.defaultConfigurationName=configlst and configlst[0].name or""
+		self.buildConfigurations = configlst
+		self.defaultConfigurationIsVisible = 0
+		self.defaultConfigurationName = configlst and configlst[0].name or ""
+
+# Group/Files
 class PBXFileReference(XCodeNode):
-	def __init__(self,name,path,filetype='',sourcetree="SOURCE_ROOT"):
+	def __init__(self, name, path, filetype = '', sourcetree = "SOURCE_ROOT"):
+
 		XCodeNode.__init__(self)
-		self.fileEncoding=4
+		self.fileEncoding = 4
 		if not filetype:
-			_,ext=os.path.splitext(name)
-			filetype=MAP_EXT.get(ext,'text')
-		self.lastKnownFileType=filetype
-		self.explicitFileType=filetype
-		self.name=name
-		self.path=path
-		self.sourceTree=sourcetree
+			_, ext = os.path.splitext(name)
+			filetype = MAP_EXT.get(ext, 'text')
+		self.lastKnownFileType = filetype
+		self.explicitFileType = filetype
+		self.name = name
+		self.path = path
+		self.sourceTree = sourcetree
+
 	def __hash__(self):
-		return(self.path+self.name).__hash__()
-	def __eq__(self,other):
-		return(self.path,self.name)==(other.path,other.name)
+		return (self.path+self.name).__hash__()
+
+	def __eq__(self, other):
+		return (self.path, self.name) == (other.path, other.name)
+
 class PBXBuildFile(XCodeNode):
-	def __init__(self,fileRef,settings={}):
+	""" This element indicates a file reference that is used in a PBXBuildPhase (either as an include or resource). """
+	def __init__(self, fileRef, settings={}):
 		XCodeNode.__init__(self)
-		self.fileRef=fileRef
-		self.settings=settings
+
+		# fileRef is a reference to a PBXFileReference object
+		self.fileRef = fileRef
+
+		# A map of key/value pairs for additional settings.
+		self.settings = settings
+
 	def __hash__(self):
-		return(self.fileRef).__hash__()
-	def __eq__(self,other):
-		return self.fileRef==other.fileRef
+		return (self.fileRef).__hash__()
+
+	def __eq__(self, other):
+		return self.fileRef == other.fileRef
+
 class PBXGroup(XCodeNode):
-	def __init__(self,name,sourcetree='SOURCE_TREE'):
+	def __init__(self, name, sourcetree = 'SOURCE_TREE'):
 		XCodeNode.__init__(self)
-		self.children=[]
-		self.name=name
-		self.sourceTree=sourcetree
+		self.children = []
+		self.name = name
+		self.sourceTree = sourcetree
+
+		# Maintain a lookup table for all PBXFileReferences
+		# that are contained in this group.
+		self._filerefs = {}
+
+	def add(self, sources):
+		"""
+		Add a list of PBXFileReferences to this group
+
+		:param sources: list of PBXFileReference objects
+		"""
+		self._filerefs.update(dict(zip(sources, sources)))
 		self.children.extend(sources)
+
 	def get_sub_groups(self):
-		return list(filter(lambda x:isinstance(x,PBXGroup),self.children))
-	def find_fileref(self,fileref):
+		"""
+		Returns all child PBXGroup objects contained in this group
+		"""
+		return list(filter(lambda x: isinstance(x, PBXGroup), self.children))
+
+	def find_fileref(self, fileref):
+		"""
+		Recursively searches this group for an existing PBXFileReference. Returns None
+		if none is found.
+
+		The reason you'd want to reuse existing PBXFileReferences from a PBXGroup is that
+		XCode doesn't like PBXFileReferences that aren't part of a PBXGroup hierarchy.
+		If a file reference is left outside the hierarchy, certain UI features like
+		'Reveal in Finder' stop working.
+		"""
 		if fileref in self._filerefs:
 			return self._filerefs[fileref]
 		elif self.children:
 			for childgroup in self.get_sub_groups():
-				f=childgroup.find_fileref(fileref)
+				f = childgroup.find_fileref(fileref)
 				if f:
 					return f
 		return None
+
 class PBXContainerItemProxy(XCodeNode):
-	def __init__(self,containerPortal,remoteGlobalIDString,remoteInfo='',proxyType=1):
+	""" This is the element used to decorate a target item. """
+	def __init__(self, containerPortal, remoteGlobalIDString, remoteInfo='', proxyType=1):
 		XCodeNode.__init__(self)
-		self.containerPortal=containerPortal
-		self.remoteGlobalIDString=remoteGlobalIDString
-		self.remoteInfo=remoteInfo
-		self.proxyType=proxyType
+		self.containerPortal = containerPortal # PBXProject
+		self.remoteGlobalIDString = remoteGlobalIDString # PBXNativeTarget
+		self.remoteInfo = remoteInfo # Target name
+		self.proxyType = proxyType
+
 class PBXTargetDependency(XCodeNode):
-	def __init__(self,native_target,proxy):
+	""" This is the element for referencing other targets through content proxies. """
+	def __init__(self, native_target, proxy):
 		XCodeNode.__init__(self)
-		self.target=native_target
-		self.targetProxy=proxy
+		self.target = native_target
+		self.targetProxy = proxy
+
 class PBXFrameworksBuildPhase(XCodeNode):
-	def __init__(self,pbxbuildfiles):
+	""" This is the element for the framework link build phase, i.e. linking to frameworks """
+	def __init__(self, pbxbuildfiles):
 		XCodeNode.__init__(self)
-		self.buildActionMask=2147483647
-		self.runOnlyForDeploymentPostprocessing=0
-		self.files=pbxbuildfiles
+		self.buildActionMask = 2147483647
+		self.runOnlyForDeploymentPostprocessing = 0
+		self.files = pbxbuildfiles #List of PBXBuildFile (.o, .framework, .dylib)
+
 class PBXHeadersBuildPhase(XCodeNode):
-	def __init__(self,pbxbuildfiles):
+	""" This is the element for adding header files to be packaged into the .framework """
+	def __init__(self, pbxbuildfiles):
 		XCodeNode.__init__(self)
-		self.buildActionMask=2147483647
-		self.runOnlyForDeploymentPostprocessing=0
-		self.files=pbxbuildfiles
+		self.buildActionMask = 2147483647
+		self.runOnlyForDeploymentPostprocessing = 0
+		self.files = pbxbuildfiles #List of PBXBuildFile (.o, .framework, .dylib)
+
 class PBXCopyFilesBuildPhase(XCodeNode):
-	def __init__(self,pbxbuildfiles,dstpath,dstSubpathSpec=0,*args,**kwargs):
-		XCodeNode.__init__(self)
-		self.files=pbxbuildfiles
-		self.dstPath=dstpath
-		self.dstSubfolderSpec=dstSubpathSpec
+	"""
+	Represents the PBXCopyFilesBuildPhase section. PBXBuildFile
+	can be added to this node to copy files after build is done.
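+
+	A short sketch of typical use, mirroring what process_xcode() below does for
+	install paths (the destination path is illustrative)::
+
+		prodfile = PBXBuildFile(target.productReference)
+		target.add_build_phase(PBXCopyFilesBuildPhase([prodfile], '/usr/local/lib'))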
+ """ + def __init__(self, pbxbuildfiles, dstpath, dstSubpathSpec=0, *args, **kwargs): + XCodeNode.__init__(self) + self.files = pbxbuildfiles + self.dstPath = dstpath + self.dstSubfolderSpec = dstSubpathSpec + class PBXSourcesBuildPhase(XCodeNode): - def __init__(self,buildfiles): + """ Represents the 'Compile Sources' build phase in a Xcode target """ + def __init__(self, buildfiles): XCodeNode.__init__(self) - self.files=buildfiles + self.files = buildfiles # List of PBXBuildFile objects + class PBXLegacyTarget(XCodeNode): - def __init__(self,action,target=''): + def __init__(self, action, target=''): XCodeNode.__init__(self) - self.buildConfigurationList=XCConfigurationList([XCBuildConfiguration('waf',{})]) + self.buildConfigurationList = XCConfigurationList([XCBuildConfiguration('waf', {})]) if not target: - self.buildArgumentsString="%s %s"%(sys.argv[0],action) + self.buildArgumentsString = "%s %s" % (sys.argv[0], action) else: - self.buildArgumentsString="%s %s --targets=%s"%(sys.argv[0],action,target) - self.buildPhases=[] - self.buildToolPath=sys.executable - self.buildWorkingDirectory="" - self.dependencies=[] - self.name=target or action - self.productName=target or action - self.passBuildSettingsInEnvironment=0 + self.buildArgumentsString = "%s %s --targets=%s" % (sys.argv[0], action, target) + self.buildPhases = [] + self.buildToolPath = sys.executable + self.buildWorkingDirectory = "" + self.dependencies = [] + self.name = target or action + self.productName = target or action + self.passBuildSettingsInEnvironment = 0 + class PBXShellScriptBuildPhase(XCodeNode): - def __init__(self,action,target): + def __init__(self, action, target): XCodeNode.__init__(self) - self.buildActionMask=2147483647 - self.files=[] - self.inputPaths=[] - self.outputPaths=[] - self.runOnlyForDeploymentPostProcessing=0 - self.shellPath="/bin/sh" - self.shellScript="%s %s %s --targets=%s"%(sys.executable,sys.argv[0],action,target) + self.buildActionMask = 2147483647 + self.files = [] + self.inputPaths = [] + self.outputPaths = [] + self.runOnlyForDeploymentPostProcessing = 0 + self.shellPath = "/bin/sh" + self.shellScript = "%s %s %s --targets=%s" % (sys.executable, sys.argv[0], action, target) + class PBXNativeTarget(XCodeNode): - def __init__(self,target,node,target_type=TARGET_TYPE_APPLICATION,configlist=[],buildphases=[]): + """ Represents a target in XCode, e.g. App, DyLib, Framework etc. 
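+	A construction sketch (assuming a PBXProject instance ``project``, a waf
+	build context ``bld``, and illustrative names)::
+
+		node = bld.path.find_or_declare('demo' + TARGET_TYPE_APPLICATION[2])
+		target = PBXNativeTarget('demo', node, TARGET_TYPE_APPLICATION, [], [])
+		project.add_target(target)
+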
""" + def __init__(self, target, node, target_type=TARGET_TYPE_APPLICATION, configlist=[], buildphases=[]): XCodeNode.__init__(self) - product_type=target_type[0] - file_type=target_type[1] - self.buildConfigurationList=XCConfigurationList(configlist) - self.buildPhases=buildphases - self.buildRules=[] - self.dependencies=[] - self.name=target - self.productName=target - self.productType=product_type - self.productReference=PBXFileReference(node.name,node.abspath(),file_type,'') - def add_configuration(self,cf): + product_type = target_type[0] + file_type = target_type[1] + + self.buildConfigurationList = XCConfigurationList(configlist) + self.buildPhases = buildphases + self.buildRules = [] + self.dependencies = [] + self.name = target + self.productName = target + self.productType = product_type # See TARGET_TYPE_ tuples constants + self.productReference = PBXFileReference(node.name, node.abspath(), file_type, '') + + def add_configuration(self, cf): + """ :type cf: XCBuildConfiguration """ self.buildConfigurationList.buildConfigurations.append(cf) - def add_build_phase(self,phase): - if((phase.__class__==PBXFrameworksBuildPhase)or(phase.__class__==PBXSourcesBuildPhase)): + + def add_build_phase(self, phase): + # Some build phase types may appear only once. If a phase type already exists, then merge them. + if ( (phase.__class__ == PBXFrameworksBuildPhase) + or (phase.__class__ == PBXSourcesBuildPhase) ): for b in self.buildPhases: - if b.__class__==phase.__class__: + if b.__class__ == phase.__class__: b.files.extend(phase.files) return self.buildPhases.append(phase) - def add_dependency(self,depnd): + + def add_dependency(self, depnd): self.dependencies.append(depnd) + +# Root project object class PBXProject(XCodeNode): - def __init__(self,name,version,env): + def __init__(self, name, version, env): XCodeNode.__init__(self) - if not isinstance(env.PROJ_CONFIGURATION,dict): + + if not isinstance(env.PROJ_CONFIGURATION, dict): raise Errors.WafError("Error: env.PROJ_CONFIGURATION must be a dictionary. This is done for you if you do not define one yourself. 
However, did you load the xcode module at the end of your wscript configure()?")
-		configurations=[]
-		for config_name,settings in env.PROJ_CONFIGURATION.items():
-			cf=XCBuildConfiguration(config_name,settings)
+
+		# Retrieve project configuration
+		configurations = []
+		for config_name, settings in env.PROJ_CONFIGURATION.items():
+			cf = XCBuildConfiguration(config_name, settings)
 			configurations.append(cf)
-		self.buildConfigurationList=XCConfigurationList(configurations)
-		self.compatibilityVersion=version[0]
-		self.hasScannedForEncodings=1
-		self.mainGroup=PBXGroup(name)
-		self.projectRoot=""
-		self.projectDirPath=""
-		self.targets=[]
-		self._objectVersion=version[1]
-	def create_target_dependency(self,target,name):
-		proxy=PBXContainerItemProxy(self,target,name)
-		dependecy=PBXTargetDependency(target,proxy)
-		return dependecy
-	def write(self,file):
+
+		self.buildConfigurationList = XCConfigurationList(configurations)
+		self.compatibilityVersion = version[0]
+		self.hasScannedForEncodings = 1
+		self.mainGroup = PBXGroup(name)
+		self.projectRoot = ""
+		self.projectDirPath = ""
+		self.targets = []
+		self._objectVersion = version[1]
+
+	def create_target_dependency(self, target, name):
+		""" :param target: PBXNativeTarget """
+		proxy = PBXContainerItemProxy(self, target, name)
+		dependency = PBXTargetDependency(target, proxy)
+		return dependency
+
+	def write(self, file):
+
+		# Make sure this is written only once
 		if self._been_written:
 			return
-		w=file.write
+
+		w = file.write
 		w("// !$*UTF8*$!\n")
 		w("{\n")
 		w("\tarchiveVersion = 1;\n")
 		w("\tclasses = {\n")
 		w("\t};\n")
-		w("\tobjectVersion = %d;\n"%self._objectVersion)
+		w("\tobjectVersion = %d;\n" % self._objectVersion)
 		w("\tobjects = {\n\n")
-		XCodeNode.write(self,file)
+
+		XCodeNode.write(self, file)
+
 		w("\t};\n")
-		w("\trootObject = %s;\n"%self._id)
+		w("\trootObject = %s;\n" % self._id)
 		w("}\n")
-	def add_target(self,target):
+
+	def add_target(self, target):
 		self.targets.append(target)
-	def get_target(self,name):
+
+	def get_target(self, name):
+		""" Get a reference to PBXNativeTarget if it exists """
 		for t in self.targets:
-			if t.name==name:
+			if t.name == name:
 				return t
 		return None
-@TaskGen.feature('c','cxx')
-@TaskGen.after('propagate_uselib_vars','apply_incpaths')
+
+@TaskGen.feature('c', 'cxx')
+@TaskGen.after('propagate_uselib_vars', 'apply_incpaths')
 def process_xcode(self):
-	bld=self.bld
+	bld = self.bld
 	try:
-		p=bld.project
+		p = bld.project
 	except AttributeError:
 		return
-	if not hasattr(self,'target_type'):
+
+	if not hasattr(self, 'target_type'):
 		return
-	products_group=bld.products_group
-	target_group=PBXGroup(self.name)
+
+	products_group = bld.products_group
+
+	target_group = PBXGroup(self.name)
 	p.mainGroup.children.append(target_group)
-	target_type=getattr(self,'target_type','app')
+
+	# Determine what type to build - framework, app bundle etc.
+	target_type = getattr(self, 'target_type', 'app')
 	if target_type not in TARGET_TYPES:
-		raise Errors.WafError("Target type '%s' does not exists. Available options are '%s'. In target '%s'"%(target_type,"', '".join(TARGET_TYPES.keys()),self.name))
+		raise Errors.WafError("Target type '%s' does not exist. Available options are '%s'. 
In target '%s'" % (target_type, "', '".join(TARGET_TYPES.keys()), self.name)) else: - target_type=TARGET_TYPES[target_type] - file_ext=target_type[2] - target_node=self.path.find_or_declare(self.name+file_ext) - target=PBXNativeTarget(self.name,target_node,target_type,[],[]) + target_type = TARGET_TYPES[target_type] + file_ext = target_type[2] + + # Create the output node + target_node = self.path.find_or_declare(self.name+file_ext) + target = PBXNativeTarget(self.name, target_node, target_type, [], []) + products_group.children.append(target.productReference) - sources=getattr(self,'source',[]) - if hasattr(self,'group_files'): - group_files=getattr(self,'group_files',[]) + + # Pull source files from the 'source' attribute and assign them to a UI group. + # Use a default UI group named 'Source' unless the user + # provides a 'group_files' dictionary to customize the UI grouping. + sources = getattr(self, 'source', []) + if hasattr(self, 'group_files'): + group_files = getattr(self, 'group_files', []) for grpname,files in group_files.items(): - group=bld.create_group(grpname,files) + group = bld.create_group(grpname, files) target_group.children.append(group) else: - group=bld.create_group('Source',sources) + group = bld.create_group('Source', sources) target_group.children.append(group) - for idx,path in enumerate(sources): - fileref=PBXFileReference(path.name,path.abspath()) - existing_fileref=target_group.find_fileref(fileref) + + # Create a PBXFileReference for each source file. + # If the source file already exists as a PBXFileReference in any of the UI groups, then + # reuse that PBXFileReference object (XCode does not like it if we don't reuse) + for idx, path in enumerate(sources): + fileref = PBXFileReference(path.name, path.abspath()) + existing_fileref = target_group.find_fileref(fileref) if existing_fileref: - sources[idx]=existing_fileref + sources[idx] = existing_fileref else: - sources[idx]=fileref - is_valid_file_extension=lambda file:os.path.splitext(file.path)[1]in XCODE_EXTS - sources=list(filter(is_valid_file_extension,sources)) - buildfiles=[bld.unique_buildfile(PBXBuildFile(x))for x in sources] + sources[idx] = fileref + + # If the 'source' attribute contains any file extension that XCode can't work with, + # then remove it. The allowed file extensions are defined in XCODE_EXTS. 
+ is_valid_file_extension = lambda file: os.path.splitext(file.path)[1] in XCODE_EXTS + sources = list(filter(is_valid_file_extension, sources)) + + buildfiles = [bld.unique_buildfile(PBXBuildFile(x)) for x in sources] target.add_build_phase(PBXSourcesBuildPhase(buildfiles)) - libs=getattr(self,'tmp_use_seen',[]) + + # Check if any framework to link against is some other target we've made + libs = getattr(self, 'tmp_use_seen', []) for lib in libs: - use_target=p.get_target(lib) + use_target = p.get_target(lib) if use_target: - dependency=p.create_target_dependency(use_target,use_target.name) + # Create an XCode dependency so that XCode knows to build the other target before this target + dependency = p.create_target_dependency(use_target, use_target.name) target.add_dependency(dependency) - buildphase=PBXFrameworksBuildPhase([PBXBuildFile(use_target.productReference)]) + + buildphase = PBXFrameworksBuildPhase([PBXBuildFile(use_target.productReference)]) target.add_build_phase(buildphase) if lib in self.env.LIB: - self.env.LIB=list(filter(lambda x:x!=lib,self.env.LIB)) - exp_hdrs=getattr(self,'export_headers',[]) - hdrs=bld.as_nodes(Utils.to_list(exp_hdrs)) - files=[p.mainGroup.find_fileref(PBXFileReference(n.name,n.abspath()))for n in hdrs] - files=[PBXBuildFile(f,{'ATTRIBUTES':('Public',)})for f in files] - buildphase=PBXHeadersBuildPhase(files) + self.env.LIB = list(filter(lambda x: x != lib, self.env.LIB)) + + # If 'export_headers' is present, add files to the Headers build phase in xcode. + # These are files that'll get packed into the Framework for instance. + exp_hdrs = getattr(self, 'export_headers', []) + hdrs = bld.as_nodes(Utils.to_list(exp_hdrs)) + files = [p.mainGroup.find_fileref(PBXFileReference(n.name, n.abspath())) for n in hdrs] + files = [PBXBuildFile(f, {'ATTRIBUTES': ('Public',)}) for f in files] + buildphase = PBXHeadersBuildPhase(files) target.add_build_phase(buildphase) - frameworks=Utils.to_list(self.env.FRAMEWORK) - frameworks=' '.join(['-framework %s'%(f.split('.framework')[0])for f in frameworks]) - libs=Utils.to_list(self.env.STLIB)+Utils.to_list(self.env.LIB) - libs=' '.join(bld.env['STLIB_ST']%t for t in libs) - bldsettings={'HEADER_SEARCH_PATHS':['$(inherited)']+self.env['INCPATHS'],'LIBRARY_SEARCH_PATHS':['$(inherited)']+Utils.to_list(self.env.LIBPATH)+Utils.to_list(self.env.STLIBPATH)+Utils.to_list(self.env.LIBDIR),'FRAMEWORK_SEARCH_PATHS':['$(inherited)']+Utils.to_list(self.env.FRAMEWORKPATH),'OTHER_LDFLAGS':libs+' '+frameworks,'OTHER_LIBTOOLFLAGS':bld.env['LINKFLAGS'],'OTHER_CPLUSPLUSFLAGS':Utils.to_list(self.env['CXXFLAGS']),'OTHER_CFLAGS':Utils.to_list(self.env['CFLAGS']),'INSTALL_PATH':[]} - installpaths=Utils.to_list(getattr(self,'install',[])) - prodbuildfile=PBXBuildFile(target.productReference) + + # Merge frameworks and libs into one list, and prefix the frameworks + frameworks = Utils.to_list(self.env.FRAMEWORK) + frameworks = ' '.join(['-framework %s' % (f.split('.framework')[0]) for f in frameworks]) + + libs = Utils.to_list(self.env.STLIB) + Utils.to_list(self.env.LIB) + libs = ' '.join(bld.env['STLIB_ST'] % t for t in libs) + + # Override target specific build settings + bldsettings = { + 'HEADER_SEARCH_PATHS': ['$(inherited)'] + self.env['INCPATHS'], + 'LIBRARY_SEARCH_PATHS': ['$(inherited)'] + Utils.to_list(self.env.LIBPATH) + Utils.to_list(self.env.STLIBPATH) + Utils.to_list(self.env.LIBDIR) , + 'FRAMEWORK_SEARCH_PATHS': ['$(inherited)'] + Utils.to_list(self.env.FRAMEWORKPATH), + 'OTHER_LDFLAGS': libs + ' ' + frameworks, + 
'OTHER_LIBTOOLFLAGS': bld.env['LINKFLAGS'],
+		'OTHER_CPLUSPLUSFLAGS': Utils.to_list(self.env['CXXFLAGS']),
+		'OTHER_CFLAGS': Utils.to_list(self.env['CFLAGS']),
+		'INSTALL_PATH': []
+	}
+
+	# Install path
+	installpaths = Utils.to_list(getattr(self, 'install', []))
+	prodbuildfile = PBXBuildFile(target.productReference)
 	for instpath in installpaths:
 		bldsettings['INSTALL_PATH'].append(instpath)
-		target.add_build_phase(PBXCopyFilesBuildPhase([prodbuildfile],instpath))
+		target.add_build_phase(PBXCopyFilesBuildPhase([prodbuildfile], instpath))
+
 	if not bldsettings['INSTALL_PATH']:
 		del bldsettings['INSTALL_PATH']
-	settings=getattr(self,'settings',{})
-	keys=set(settings.keys()+bld.env.PROJ_CONFIGURATION.keys())
+
+	# Create build settings which can override the project settings. Defaults to an empty
+	# dict if the user did not pass one. This will be filled up with target specific
+	# search paths, libs to link etc.
+	settings = getattr(self, 'settings', {})
+
+	# The keys represent the different build configurations, e.g. Debug, Release, and so on.
+	# Insert our generated build settings into all configuration names.
+	# (list() so the concatenation also works on Python 3, where keys() returns a view)
+	keys = set(list(settings.keys()) + list(bld.env.PROJ_CONFIGURATION.keys()))
 	for k in keys:
 		if k in settings:
 			settings[k].update(bldsettings)
 		else:
-			settings[k]=bldsettings
+			settings[k] = bldsettings
+
 	for k,v in settings.items():
-		target.add_configuration(XCBuildConfiguration(k,v))
+		target.add_configuration(XCBuildConfiguration(k, v))
+
 	p.add_target(target)
+
+
 class xcode(Build.BuildContext):
-	cmd='xcode6'
-	fun='build'
-	def as_nodes(self,files):
-		nodes=[]
+	cmd = 'xcode6'
+	fun = 'build'
+
+	def as_nodes(self, files):
+		""" Returns a list of waflib.Nodes from a list of file path strings """
+		nodes = []
 		for x in files:
-			if not isinstance(x,str):
-				d=x
+			if not isinstance(x, str):
+				d = x
 			else:
-				d=self.srcnode.find_node(x)
+				d = self.srcnode.find_node(x)
 			if not d:
-				raise Errors.WafError('File \'%s\' was not found'%x)
+				raise Errors.WafError('File \'%s\' was not found' % x)
 			nodes.append(d)
 		return nodes
-	def create_group(self,name,files):
-		group=PBXGroup(name)
-		files_=[]
+
+	def create_group(self, name, files):
+		"""
+		Returns a new PBXGroup containing the files (paths) passed in the files arg
+		:type files: string or list of strings
+		"""
+		group = PBXGroup(name)
+		"""
+		Do not use unique file reference here, since XCode seems to allow only one file reference
+		to be referenced by a group.
+		"""
+		files_ = []
 		for d in self.as_nodes(Utils.to_list(files)):
-			fileref=PBXFileReference(d.name,d.abspath())
+			fileref = PBXFileReference(d.name, d.abspath())
 			files_.append(fileref)
 		group.add(files_)
 		return group
-	def unique_buildfile(self,buildfile):
+
+	def unique_buildfile(self, buildfile):
+		"""
+		Returns a unique buildfile, possibly an existing one.
+		Use this after you've constructed a PBXBuildFile to make sure there is
+		only one PBXBuildFile for the same file in the same project.
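+
+		For example, process_xcode() above funnels every source file through it::
+
+			buildfiles = [bld.unique_buildfile(PBXBuildFile(x)) for x in sources]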
+		"""
 		try:
-			build_files=self.build_files
+			build_files = self.build_files
 		except AttributeError:
-			build_files=self.build_files={}
+			build_files = self.build_files = {}
+
 		if buildfile not in build_files:
-			build_files[buildfile]=buildfile
+			build_files[buildfile] = buildfile
 		return build_files[buildfile]
+
 	def execute(self):
+		"""
+		Entry point
+		"""
 		self.restore()
 		if not self.all_envs:
 			self.load_envs()
 		self.recurse([self.run_dir])
-		appname=getattr(Context.g_module,Context.APPNAME,os.path.basename(self.srcnode.abspath()))
-		p=PBXProject(appname,('Xcode 3.2',46),self.env)
-		products_group=PBXGroup('Products')
+
+		appname = getattr(Context.g_module, Context.APPNAME, os.path.basename(self.srcnode.abspath()))
+
+		p = PBXProject(appname, ('Xcode 3.2', 46), self.env)
+
+		# If we don't create a Products group, then
+		# XCode will create one, which entails that
+		# we'll start to see duplicate files in the UI
+		# for some reason.
+		products_group = PBXGroup('Products')
 		p.mainGroup.children.append(products_group)
-		self.project=p
-		self.products_group=products_group
-		if self.targets and self.targets!='*':
-			(self._min_grp,self._exact_tg)=self.get_targets()
-		self.current_group=0
-		while self.current_group

[... the remainder of the xcode6.py hunk and the beginning of the waflib/Logs.py diff are missing from this excerpt ...]

-	if use>=1:
-		os.environ['TERM']='vt100'
-	colors_lst['USE']=use
+	term = os.environ.get('TERM', 'dumb')
+
+	if term in ('dumb', 'emacs'):
+		use = 0
+
+	if use >= 1:
+		os.environ['TERM'] = 'vt100'
+
+	colors_lst['USE'] = use
+
+# If console packages are available, replace the dummy function with a real
+# implementation
 try:
-	get_term_cols=ansiterm.get_term_cols
+	get_term_cols = ansiterm.get_term_cols
 except AttributeError:
 	def get_term_cols():
 		return 80
-get_term_cols.__doc__="""
+
+get_term_cols.__doc__ = """
 Returns the console width in characters.

 :return: the number of characters per line
 :rtype: int
 """
+
 def get_color(cl):
+	"""
+	Returns the ansi sequence corresponding to the given color name.
+	An empty string is returned when coloring is globally disabled.
+
+	:param cl: color name in capital letters
+	:type cl: string
+	"""
 	if colors_lst['USE']:
-		return colors_lst.get(cl,'')
-	return''
+		return colors_lst.get(cl, '')
+	return ''
+
 class color_dict(object):
-	def __getattr__(self,a):
+	"""Attribute-based color access, e.g. colors.PINK"""
+	def __getattr__(self, a):
 		return get_color(a)
-	def __call__(self,a):
+	def __call__(self, a):
 		return get_color(a)
-colors=color_dict()
-re_log=re.compile(r'(\w+): (.*)',re.M)
+
+colors = color_dict()
+
+re_log = re.compile(r'(\w+): (.*)', re.M)

 class log_filter(logging.Filter):
-	def __init__(self,name=''):
-		logging.Filter.__init__(self,name)
-	def filter(self,rec):
-		rec.zone=rec.module
-		if rec.levelno>=logging.INFO:
+	"""
+	Waf logs are of the form 'name: message', and can be filtered by 'waf --zones=name'.
+ For example, the following:: + + from waflib import Logs + Logs.debug('test: here is a message') + + Will be displayed only when executing:: + + $ waf --zones=test + """ + def __init__(self, name=''): + logging.Filter.__init__(self, name) + + def filter(self, rec): + """ + Filters log records by zone and by logging level + + :param rec: log entry + """ + rec.zone = rec.module + if rec.levelno >= logging.INFO: return True - m=re_log.match(rec.msg) + + m = re_log.match(rec.msg) if m: - rec.zone=m.group(1) - rec.msg=m.group(2) + rec.zone = m.group(1) + rec.msg = m.group(2) + if zones: - return getattr(rec,'zone','')in zones or'*'in zones - elif not verbose>2: + return getattr(rec, 'zone', '') in zones or '*' in zones + elif not verbose > 2: return False return True + class log_handler(logging.StreamHandler): - def emit(self,record): + """Dispatches messages to stderr/stdout depending on the severity level""" + def emit(self, record): + """ + Delegates the functionality to :py:meth:`waflib.Log.log_handler.emit_override` + """ + # default implementation try: try: - self.stream=record.stream + self.stream = record.stream except AttributeError: - if record.levelno>=logging.WARNING: - record.stream=self.stream=sys.stderr + if record.levelno >= logging.WARNING: + record.stream = self.stream = sys.stderr else: - record.stream=self.stream=sys.stdout + record.stream = self.stream = sys.stdout self.emit_override(record) self.flush() - except(KeyboardInterrupt,SystemExit): + except (KeyboardInterrupt, SystemExit): raise - except: + except: # from the python library -_- self.handleError(record) - def emit_override(self,record,**kw): - self.terminator=getattr(record,'terminator','\n') - stream=self.stream + + def emit_override(self, record, **kw): + """ + Writes the log record to the desired stream (stderr/stdout) + """ + self.terminator = getattr(record, 'terminator', '\n') + stream = self.stream if unicode: - msg=self.formatter.format(record) - fs='%s'+self.terminator + # python2 + msg = self.formatter.format(record) + fs = '%s' + self.terminator try: - if(isinstance(msg,unicode)and getattr(stream,'encoding',None)): - fs=fs.decode(stream.encoding) + if (isinstance(msg, unicode) and getattr(stream, 'encoding', None)): + fs = fs.decode(stream.encoding) try: - stream.write(fs%msg) + stream.write(fs % msg) except UnicodeEncodeError: - stream.write((fs%msg).encode(stream.encoding)) + stream.write((fs % msg).encode(stream.encoding)) else: - stream.write(fs%msg) + stream.write(fs % msg) except UnicodeError: - stream.write((fs%msg).encode('utf-8')) + stream.write((fs % msg).encode('utf-8')) else: - logging.StreamHandler.emit(self,record) + logging.StreamHandler.emit(self, record) + class formatter(logging.Formatter): + """Simple log formatter which handles colors""" def __init__(self): - logging.Formatter.__init__(self,LOG_FORMAT,HOUR_FORMAT) - def format(self,rec): + logging.Formatter.__init__(self, LOG_FORMAT, HOUR_FORMAT) + + def format(self, rec): + """ + Formats records and adds colors as needed. The records do not get + a leading hour format if the logging level is above *INFO*. 
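+		As a sketch, explicit colors may be attached to a record through the
+		standard logging ``extra`` mechanism, which this formatter reads back
+		as ``c1``/``c2`` (the message below is hypothetical)::
+
+			log.info('ok: build finished', extra={'c1': colors.GREEN, 'c2': colors.NORMAL})
+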
+		"""
 		try:
-			msg=rec.msg.decode('utf-8')
+			msg = rec.msg.decode('utf-8')
 		except Exception:
-			msg=rec.msg
-		use=colors_lst['USE']
-		if(use==1 and rec.stream.isatty())or use==2:
-			c1=getattr(rec,'c1',None)
+			msg = rec.msg
+
+		use = colors_lst['USE']
+		if (use == 1 and rec.stream.isatty()) or use == 2:
+
+			c1 = getattr(rec, 'c1', None)
 			if c1 is None:
-				c1=''
-				if rec.levelno>=logging.ERROR:
-					c1=colors.RED
-				elif rec.levelno>=logging.WARNING:
-					c1=colors.YELLOW
-				elif rec.levelno>=logging.INFO:
-					c1=colors.GREEN
-			c2=getattr(rec,'c2',colors.NORMAL)
-			msg='%s%s%s'%(c1,msg,c2)
+				c1 = ''
+				if rec.levelno >= logging.ERROR:
+					c1 = colors.RED
+				elif rec.levelno >= logging.WARNING:
+					c1 = colors.YELLOW
+				elif rec.levelno >= logging.INFO:
+					c1 = colors.GREEN
+			c2 = getattr(rec, 'c2', colors.NORMAL)
+			msg = '%s%s%s' % (c1, msg, c2)
 		else:
-			msg=re.sub(r'\r(?!\n)|\x1B\[(K|.*?(m|h|l))','',msg)
-		if rec.levelno>=logging.INFO:
+			# remove single \r that make long lines in text files
+			# and other terminal commands
+			msg = re.sub(r'\r(?!\n)|\x1B\[(K|.*?(m|h|l))', '', msg)
+
+		if rec.levelno >= logging.INFO:
+			# the goal of this is to format without the leading "Logs, hour" prefix
 			if rec.args:
-				return msg%rec.args
+				try:
+					return msg % rec.args
+				except UnicodeDecodeError:
+					return msg.encode('utf-8') % rec.args
 			return msg
-		rec.msg=msg
-		rec.c1=colors.PINK
-		rec.c2=colors.NORMAL
-		return logging.Formatter.format(self,rec)
-log=None
-def debug(*k,**kw):
+
+		rec.msg = msg
+		rec.c1 = colors.PINK
+		rec.c2 = colors.NORMAL
+		return logging.Formatter.format(self, rec)
+
+log = None
+"""global logger for Logs.debug, Logs.error, etc"""
+
+def debug(*k, **kw):
+	"""
+	Wraps logging.debug and discards messages if the verbosity level :py:attr:`waflib.Logs.verbose` ≤ 0
+	"""
 	if verbose:
-		k=list(k)
-		k[0]=k[0].replace('\n',' ')
-		log.debug(*k,**kw)
-def error(*k,**kw):
-	log.error(*k,**kw)
-	if verbose>2:
-		st=traceback.extract_stack()
+		k = list(k)
+		k[0] = k[0].replace('\n', ' ')
+		log.debug(*k, **kw)
+
+def error(*k, **kw):
+	"""
+	Wraps logging.error and adds the stack trace when the verbosity level :py:attr:`waflib.Logs.verbose` is greater than 2
+	"""
+	log.error(*k, **kw)
+	if verbose > 2:
+		st = traceback.extract_stack()
 		if st:
-			st=st[:-1]
-			buf=[]
-			for filename,lineno,name,line in st:
-				buf.append(' File %r, line %d, in %s'%(filename,lineno,name))
+			st = st[:-1]
+			buf = []
+			for filename, lineno, name, line in st:
+				buf.append(' File %r, line %d, in %s' % (filename, lineno, name))
 				if line:
-					buf.append(' %s'%line.strip())
+					buf.append(' %s' % line.strip())
 			if buf:
 				log.error('\n'.join(buf))
-def warn(*k,**kw):
-	log.warn(*k,**kw)
-def info(*k,**kw):
-	log.info(*k,**kw)
+
+def warn(*k, **kw):
+	"""
+	Wraps logging.warning
+	"""
+	log.warning(*k, **kw)
+
+def info(*k, **kw):
+	"""
+	Wraps logging.info
+	"""
+	log.info(*k, **kw)
+
 def init_log():
+	"""
+	Initializes the logger :py:attr:`waflib.Logs.log`
+	"""
 	global log
-	log=logging.getLogger('waflib')
-	log.handlers=[]
-	log.filters=[]
-	hdlr=log_handler()
+	log = logging.getLogger('waflib')
+	log.handlers = []
+	log.filters = []
+	hdlr = log_handler()
 	hdlr.setFormatter(formatter())
 	log.addHandler(hdlr)
 	log.addFilter(log_filter())
 	log.setLevel(logging.DEBUG)
-def make_logger(path,name):
-	logger=logging.getLogger(name)
-	if sys.hexversion>0x3000000:
-		encoding=sys.stdout.encoding
+
+def make_logger(path, name):
+	"""
+	Creates a simple logger, which is often used to redirect the context command output::
+
+		from waflib import Logs
+		bld.logger = Logs.make_logger('test.log',
'build') + bld.check(header_name='sadlib.h', features='cxx cprogram', mandatory=False) + + # have the file closed immediately + Logs.free_logger(bld.logger) + + # stop logging + bld.logger = None + + The method finalize() of the command will try to free the logger, if any + + :param path: file name to write the log output to + :type path: string + :param name: logger name (loggers are reused) + :type name: string + """ + logger = logging.getLogger(name) + if sys.hexversion > 0x3000000: + encoding = sys.stdout.encoding else: - encoding=None - hdlr=logging.FileHandler(path,'w',encoding=encoding) - formatter=logging.Formatter('%(message)s') + encoding = None + hdlr = logging.FileHandler(path, 'w', encoding=encoding) + formatter = logging.Formatter('%(message)s') hdlr.setFormatter(formatter) logger.addHandler(hdlr) logger.setLevel(logging.DEBUG) return logger -def make_mem_logger(name,to_log,size=8192): + +def make_mem_logger(name, to_log, size=8192): + """ + Creates a memory logger to avoid writing concurrently to the main logger + """ from logging.handlers import MemoryHandler - logger=logging.getLogger(name) - hdlr=MemoryHandler(size,target=to_log) - formatter=logging.Formatter('%(message)s') + logger = logging.getLogger(name) + hdlr = MemoryHandler(size, target=to_log) + formatter = logging.Formatter('%(message)s') hdlr.setFormatter(formatter) logger.addHandler(hdlr) - logger.memhandler=hdlr + logger.memhandler = hdlr logger.setLevel(logging.DEBUG) return logger + def free_logger(logger): + """ + Frees the resources held by the loggers created through make_logger or make_mem_logger. + This is used for file cleanup and for handler removal (logger objects are re-used). + """ try: for x in logger.handlers: x.close() logger.removeHandler(x) except Exception: pass -def pprint(col,msg,label='',sep='\n'): - info('%s%s%s %s',colors(col),msg,colors.NORMAL,label,extra={'terminator':sep}) + +def pprint(col, msg, label='', sep='\n'): + """ + Prints messages in color immediately on stderr:: + + from waflib import Logs + Logs.pprint('RED', 'Something bad just happened') + + :param col: color name to use in :py:const:`Logs.colors_lst` + :type col: string + :param msg: message to display + :type msg: string or a value that can be printed by %s + :param label: a message to add after the colored output + :type label: string + :param sep: a string to append at the end (line separator) + :type sep: string + """ + info('%s%s%s %s', colors(col), msg, colors.NORMAL, label, extra={'terminator':sep}) + diff -Nru lilv-0.24.4~dfsg0/waflib/Node.py lilv-0.24.6/waflib/Node.py --- lilv-0.24.4~dfsg0/waflib/Node.py 2018-06-27 05:22:05.000000000 +0000 +++ lilv-0.24.6/waflib/Node.py 2019-10-19 17:59:11.000000000 +0000 @@ -1,10 +1,30 @@ -#! /usr/bin/env python +#!/usr/bin/env python # encoding: utf-8 -# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file +# Thomas Nagy, 2005-2018 (ita) -import os,re,sys,shutil -from waflib import Utils,Errors -exclude_regs=''' +""" +Node: filesystem structure + +#. Each file/folder is represented by exactly one node. + +#. Some potential class properties are stored on :py:class:`waflib.Build.BuildContext` : nodes to depend on, etc. + Unused class members can increase the `.wafpickle` file size sensibly. + +#. Node objects should never be created directly, use + the methods :py:func:`Node.make_node` or :py:func:`Node.find_node` for the low-level operations + +#. 
The methods :py:func:`Node.find_resource`, :py:func:`Node.find_dir` :py:func:`Node.find_or_declare` must be + used when a build context is present + +#. Each instance of :py:class:`waflib.Context.Context` has a unique :py:class:`Node` subclass required for serialization. + (:py:class:`waflib.Node.Nod3`, see the :py:class:`waflib.Context.Context` initializer). A reference to the context + owning a node is held as *self.ctx* +""" + +import os, re, sys, shutil +from waflib import Utils, Errors + +exclude_regs = ''' **/*~ **/#*# **/.#* @@ -36,36 +56,42 @@ **/_darcs/** **/.intlcache **/.DS_Store''' -def ant_matcher(s,ignorecase): - reflags=re.I if ignorecase else 0 - ret=[] +""" +Ant patterns for files and folders to exclude while doing the +recursive traversal in :py:meth:`waflib.Node.Node.ant_glob` +""" + +def ant_matcher(s, ignorecase): + reflags = re.I if ignorecase else 0 + ret = [] for x in Utils.to_list(s): - x=x.replace('\\','/').replace('//','/') + x = x.replace('\\', '/').replace('//', '/') if x.endswith('/'): - x+='**' - accu=[] + x += '**' + accu = [] for k in x.split('/'): - if k=='**': + if k == '**': accu.append(k) else: - k=k.replace('.','[.]').replace('*','.*').replace('?','.').replace('+','\\+') - k='^%s$'%k + k = k.replace('.', '[.]').replace('*', '.*').replace('?', '.').replace('+', '\\+') + k = '^%s$' % k try: - exp=re.compile(k,flags=reflags) + exp = re.compile(k, flags=reflags) except Exception as e: - raise Errors.WafError('Invalid pattern: %s'%k,e) + raise Errors.WafError('Invalid pattern: %s' % k, e) else: accu.append(exp) ret.append(accu) return ret -def ant_sub_filter(name,nn): - ret=[] + +def ant_sub_filter(name, nn): + ret = [] for lst in nn: if not lst: pass - elif lst[0]=='**': + elif lst[0] == '**': ret.append(lst) - if len(lst)>1: + if len(lst) > 1: if lst[1].match(name): ret.append(lst[2:]) else: @@ -73,77 +99,200 @@ elif lst[0].match(name): ret.append(lst[1:]) return ret -def ant_sub_matcher(name,pats): - nacc=ant_sub_filter(name,pats[0]) - nrej=ant_sub_filter(name,pats[1]) - if[]in nrej: - nacc=[] - return[nacc,nrej] + +def ant_sub_matcher(name, pats): + nacc = ant_sub_filter(name, pats[0]) + nrej = ant_sub_filter(name, pats[1]) + if [] in nrej: + nacc = [] + return [nacc, nrej] + class Node(object): - dict_class=dict - __slots__=('name','parent','children','cache_abspath','cache_isdir') - def __init__(self,name,parent): - self.name=name - self.parent=parent + """ + This class is organized in two parts: + + * The basic methods meant for filesystem access (compute paths, create folders, etc) + * The methods bound to a :py:class:`waflib.Build.BuildContext` (require ``bld.srcnode`` and ``bld.bldnode``) + """ + + dict_class = dict + """ + Subclasses can provide a dict class to enable case insensitivity for example. + """ + + __slots__ = ('name', 'parent', 'children', 'cache_abspath', 'cache_isdir') + def __init__(self, name, parent): + """ + .. 
note:: Use :py:func:`Node.make_node` or :py:func:`Node.find_node` instead of calling this constructor + """ + self.name = name + self.parent = parent if parent: if name in parent.children: - raise Errors.WafError('node %s exists in the parent files %r already'%(name,parent)) - parent.children[name]=self - def __setstate__(self,data): - self.name=data[0] - self.parent=data[1] - if data[2]is not None: - self.children=self.dict_class(data[2]) + raise Errors.WafError('node %s exists in the parent files %r already' % (name, parent)) + parent.children[name] = self + + def __setstate__(self, data): + "Deserializes node information, used for persistence" + self.name = data[0] + self.parent = data[1] + if data[2] is not None: + # Issue 1480 + self.children = self.dict_class(data[2]) + def __getstate__(self): - return(self.name,self.parent,getattr(self,'children',None)) + "Serializes node information, used for persistence" + return (self.name, self.parent, getattr(self, 'children', None)) + def __str__(self): + """ + String representation (abspath), for debugging purposes + + :rtype: string + """ return self.abspath() + def __repr__(self): + """ + String representation (abspath), for debugging purposes + + :rtype: string + """ return self.abspath() + def __copy__(self): + """ + Provided to prevent nodes from being copied + + :raises: :py:class:`waflib.Errors.WafError` + """ raise Errors.WafError('nodes are not supposed to be copied') - def read(self,flags='r',encoding='latin-1'): - return Utils.readf(self.abspath(),flags,encoding) - def write(self,data,flags='w',encoding='latin-1'): - Utils.writef(self.abspath(),data,flags,encoding) - def read_json(self,convert=True,encoding='utf-8'): - import json - object_pairs_hook=None - if convert and sys.hexversion<0x3000000: + + def read(self, flags='r', encoding='latin-1'): + """ + Reads and returns the contents of the file represented by this node, see :py:func:`waflib.Utils.readf`:: + + def build(bld): + bld.path.find_node('wscript').read() + + :param flags: Open mode + :type flags: string + :param encoding: encoding value for Python3 + :type encoding: string + :rtype: string or bytes + :return: File contents + """ + return Utils.readf(self.abspath(), flags, encoding) + + def write(self, data, flags='w', encoding='latin-1'): + """ + Writes data to the file represented by this node, see :py:func:`waflib.Utils.writef`:: + + def build(bld): + bld.path.make_node('foo.txt').write('Hello, world!') + + :param data: data to write + :type data: string + :param flags: Write mode + :type flags: string + :param encoding: encoding value for Python3 + :type encoding: string + """ + Utils.writef(self.abspath(), data, flags, encoding) + + def read_json(self, convert=True, encoding='utf-8'): + """ + Reads and parses the contents of this node as JSON (Python ≥ 2.6):: + + def build(bld): + bld.path.find_node('abc.json').read_json() + + Note that this by default automatically decodes unicode strings on Python2, unlike what the Python JSON module does. + + :type convert: boolean + :param convert: Prevents decoding of unicode strings on Python2 + :type encoding: string + :param encoding: The encoding of the file to read. 
This defaults to UTF-8, as per the JSON standard
+		:rtype: object
+		:return: Parsed file contents
+		"""
+		import json # Python 2.6 and up
+		object_pairs_hook = None
+		if convert and sys.hexversion < 0x3000000:
 			try:
-				_type=unicode
+				_type = unicode
 			except NameError:
-				_type=str
+				_type = str
+
 			def convert(value):
-				if isinstance(value,list):
-					return[convert(element)for element in value]
-				elif isinstance(value,_type):
+				if isinstance(value, list):
+					return [convert(element) for element in value]
+				elif isinstance(value, _type):
 					return str(value)
 				else:
 					return value
+
 			def object_pairs(pairs):
-				return dict((str(pair[0]),convert(pair[1]))for pair in pairs)
-			object_pairs_hook=object_pairs
-		return json.loads(self.read(encoding=encoding),object_pairs_hook=object_pairs_hook)
-	def write_json(self,data,pretty=True):
-		import json
-		indent=2
-		separators=(',',': ')
-		sort_keys=pretty
-		newline=os.linesep
+				return dict((str(pair[0]), convert(pair[1])) for pair in pairs)
+
+			object_pairs_hook = object_pairs
+
+		return json.loads(self.read(encoding=encoding), object_pairs_hook=object_pairs_hook)
+
+	def write_json(self, data, pretty=True):
+		"""
+		Writes a Python object as JSON to disk (Python ≥ 2.6) as UTF-8 data (JSON standard)::
+
+			def build(bld):
+				bld.path.find_node('xyz.json').write_json(199)
+
+		:type data: object
+		:param data: The data to write to disk
+		:type pretty: boolean
+		:param pretty: Determines if the JSON will be nicely space separated
+		"""
+		import json # Python 2.6 and up
+		indent = 2
+		separators = (',', ': ')
+		sort_keys = pretty
+		newline = os.linesep
 		if not pretty:
-			indent=None
-			separators=(',',':')
-			newline=''
-		output=json.dumps(data,indent=indent,separators=separators,sort_keys=sort_keys)+newline
-		self.write(output,encoding='utf-8')
+			indent = None
+			separators = (',', ':')
+			newline = ''
+		output = json.dumps(data, indent=indent, separators=separators, sort_keys=sort_keys) + newline
+		self.write(output, encoding='utf-8')
+
 	def exists(self):
+		"""
+		Returns whether the Node is present on the filesystem
+
+		:rtype: bool
+		"""
 		return os.path.exists(self.abspath())
+
 	def isdir(self):
+		"""
+		Returns whether the Node represents a folder
+
+		:rtype: bool
+		"""
 		return os.path.isdir(self.abspath())
-	def chmod(self,val):
-		os.chmod(self.abspath(),val)
-	def delete(self,evict=True):
+
+	def chmod(self, val):
+		"""
+		Changes the file/dir permissions::
+
+			def build(bld):
+				bld.path.chmod(493) # 0755
+		"""
+		os.chmod(self.abspath(), val)
+
+	def delete(self, evict=True):
+		"""
+		Removes the file/folder from the filesystem (equivalent to `rm -rf`), and removes this object from the Node tree.
+		Do not use this object after calling this method.
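+
+		For example (the file name is hypothetical)::
+
+			def build(bld):
+				bld.path.find_node('junk.txt').delete()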
+ """ try: try: if os.path.isdir(self.abspath()): @@ -156,140 +305,240 @@ finally: if evict: self.evict() + def evict(self): + """ + Removes this node from the Node tree + """ del self.parent.children[self.name] + def suffix(self): - k=max(0,self.name.rfind('.')) + """ + Returns the file rightmost extension, for example `a.b.c.d → .d` + + :rtype: string + """ + k = max(0, self.name.rfind('.')) return self.name[k:] + def height(self): - d=self - val=-1 + """ + Returns the depth in the folder hierarchy from the filesystem root or from all the file drives + + :returns: filesystem depth + :rtype: integer + """ + d = self + val = -1 while d: - d=d.parent - val+=1 + d = d.parent + val += 1 return val + def listdir(self): - lst=Utils.listdir(self.abspath()) + """ + Lists the folder contents + + :returns: list of file/folder names ordered alphabetically + :rtype: list of string + """ + lst = Utils.listdir(self.abspath()) lst.sort() return lst + def mkdir(self): + """ + Creates a folder represented by this node. Intermediate folders are created as needed. + + :raises: :py:class:`waflib.Errors.WafError` when the folder is missing + """ if self.isdir(): return + try: self.parent.mkdir() except OSError: pass + if self.name: try: os.makedirs(self.abspath()) except OSError: pass + if not self.isdir(): - raise Errors.WafError('Could not create the directory %r'%self) + raise Errors.WafError('Could not create the directory %r' % self) + try: self.children except AttributeError: - self.children=self.dict_class() - def find_node(self,lst): - if isinstance(lst,str): - lst=[x for x in Utils.split_path(lst)if x and x!='.'] - if lst and lst[0].startswith('\\\\')and not self.parent: - node=self.ctx.root.make_node(lst[0]) - node.cache_isdir=True + self.children = self.dict_class() + + def find_node(self, lst): + """ + Finds a node on the file system (files or folders), and creates the corresponding Node objects if it exists + + :param lst: relative path + :type lst: string or list of string + :returns: The corresponding Node object or None if no entry was found on the filesystem + :rtype: :py:class:´waflib.Node.Node´ + """ + + if isinstance(lst, str): + lst = [x for x in Utils.split_path(lst) if x and x != '.'] + + if lst and lst[0].startswith('\\\\') and not self.parent: + node = self.ctx.root.make_node(lst[0]) + node.cache_isdir = True return node.find_node(lst[1:]) - cur=self + + cur = self for x in lst: - if x=='..': - cur=cur.parent or cur + if x == '..': + cur = cur.parent or cur continue + try: - ch=cur.children + ch = cur.children except AttributeError: - cur.children=self.dict_class() + cur.children = self.dict_class() else: try: - cur=ch[x] + cur = ch[x] continue except KeyError: pass - cur=self.__class__(x,cur) + + # optimistic: create the node first then look if it was correct to do so + cur = self.__class__(x, cur) if not cur.exists(): cur.evict() return None + if not cur.exists(): cur.evict() return None + return cur - def make_node(self,lst): - if isinstance(lst,str): - lst=[x for x in Utils.split_path(lst)if x and x!='.'] - cur=self + + def make_node(self, lst): + """ + Returns or creates a Node object corresponding to the input path without considering the filesystem. 
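+
+		For example (hypothetical path), a node can be declared first and then
+		created on disk with the filesystem methods above::
+
+			def build(bld):
+				n = bld.path.make_node('out/generated.txt')
+				n.parent.mkdir()
+				n.write('hello')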
+ + :param lst: relative path + :type lst: string or list of string + :rtype: :py:class:´waflib.Node.Node´ + """ + if isinstance(lst, str): + lst = [x for x in Utils.split_path(lst) if x and x != '.'] + + cur = self for x in lst: - if x=='..': - cur=cur.parent or cur + if x == '..': + cur = cur.parent or cur continue + try: - cur=cur.children[x] + cur = cur.children[x] except AttributeError: - cur.children=self.dict_class() + cur.children = self.dict_class() except KeyError: pass else: continue - cur=self.__class__(x,cur) + cur = self.__class__(x, cur) return cur - def search_node(self,lst): - if isinstance(lst,str): - lst=[x for x in Utils.split_path(lst)if x and x!='.'] - cur=self + + def search_node(self, lst): + """ + Returns a Node previously defined in the data structure. The filesystem is not considered. + + :param lst: relative path + :type lst: string or list of string + :rtype: :py:class:´waflib.Node.Node´ or None if there is no entry in the Node datastructure + """ + if isinstance(lst, str): + lst = [x for x in Utils.split_path(lst) if x and x != '.'] + + cur = self for x in lst: - if x=='..': - cur=cur.parent or cur + if x == '..': + cur = cur.parent or cur else: try: - cur=cur.children[x] - except(AttributeError,KeyError): + cur = cur.children[x] + except (AttributeError, KeyError): return None return cur - def path_from(self,node): - c1=self - c2=node - c1h=c1.height() - c2h=c2.height() - lst=[] - up=0 - while c1h>c2h: + + def path_from(self, node): + """ + Path of this node seen from the other:: + + def build(bld): + n1 = bld.path.find_node('foo/bar/xyz.txt') + n2 = bld.path.find_node('foo/stuff/') + n1.path_from(n2) # '../bar/xyz.txt' + + :param node: path to use as a reference + :type node: :py:class:`waflib.Node.Node` + :returns: a relative path or an absolute one if that is better + :rtype: string + """ + c1 = self + c2 = node + + c1h = c1.height() + c2h = c2.height() + + lst = [] + up = 0 + + while c1h > c2h: lst.append(c1.name) - c1=c1.parent - c1h-=1 - while c2h>c1h: - up+=1 - c2=c2.parent - c2h-=1 + c1 = c1.parent + c1h -= 1 + + while c2h > c1h: + up += 1 + c2 = c2.parent + c2h -= 1 + while not c1 is c2: lst.append(c1.name) - up+=1 - c1=c1.parent - c2=c2.parent + up += 1 + + c1 = c1.parent + c2 = c2.parent + if c1.parent: - lst.extend(['..']*up) + lst.extend(['..'] * up) lst.reverse() - return os.sep.join(lst)or'.' + return os.sep.join(lst) or '.' else: return self.abspath() + def abspath(self): + """ + Returns the absolute path. 
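+		For example::
+
+			def build(bld):
+				print(bld.path.abspath())
+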
A cache is kept in the context as ``cache_node_abspath`` + + :rtype: string + """ try: return self.cache_abspath except AttributeError: pass + # think twice before touching this (performance + complexity + correctness) + if not self.parent: - val=os.sep + val = os.sep elif not self.parent.name: - val=os.sep+self.name + val = os.sep + self.name else: - val=self.parent.abspath()+os.sep+self.name - self.cache_abspath=val + val = self.parent.abspath() + os.sep + self.name + self.cache_abspath = val return val + if Utils.is_win32: def abspath(self): try: @@ -297,88 +546,230 @@ except AttributeError: pass if not self.parent: - val='' + val = '' elif not self.parent.name: - val=self.name+os.sep + val = self.name + os.sep else: - val=self.parent.abspath().rstrip(os.sep)+os.sep+self.name - self.cache_abspath=val + val = self.parent.abspath().rstrip(os.sep) + os.sep + self.name + self.cache_abspath = val return val - def is_child_of(self,node): - p=self - diff=self.height()-node.height() - while diff>0: - diff-=1 - p=p.parent + + def is_child_of(self, node): + """ + Returns whether the object belongs to a subtree of the input node:: + + def build(bld): + node = bld.path.find_node('wscript') + node.is_child_of(bld.path) # True + + :param node: path to use as a reference + :type node: :py:class:`waflib.Node.Node` + :rtype: bool + """ + p = self + diff = self.height() - node.height() + while diff > 0: + diff -= 1 + p = p.parent return p is node - def ant_iter(self,accept=None,maxdepth=25,pats=[],dir=False,src=True,remove=True,quiet=False): - dircont=self.listdir() - dircont.sort() + + def ant_iter(self, accept=None, maxdepth=25, pats=[], dir=False, src=True, remove=True, quiet=False): + """ + Recursive method used by :py:meth:`waflib.Node.ant_glob`. + + :param accept: function used for accepting/rejecting a node, returns the patterns that can be still accepted in recursion + :type accept: function + :param maxdepth: maximum depth in the filesystem (25) + :type maxdepth: int + :param pats: list of patterns to accept and list of patterns to exclude + :type pats: tuple + :param dir: return folders too (False by default) + :type dir: bool + :param src: return files (True by default) + :type src: bool + :param remove: remove files/folders that do not exist (True by default) + :type remove: bool + :param quiet: disable build directory traversal warnings (verbose mode) + :type quiet: bool + :returns: A generator object to iterate from + :rtype: iterator + """ + dircont = self.listdir() + try: - lst=set(self.children.keys()) + lst = set(self.children.keys()) except AttributeError: - self.children=self.dict_class() + self.children = self.dict_class() else: if remove: - for x in lst-set(dircont): + for x in lst - set(dircont): self.children[x].evict() + for name in dircont: - npats=accept(name,pats) + npats = accept(name, pats) if npats and npats[0]: - accepted=[]in npats[0] - node=self.make_node([name]) - isdir=node.isdir() + accepted = [] in npats[0] + + node = self.make_node([name]) + + isdir = node.isdir() if accepted: if isdir: if dir: yield node elif src: yield node + if isdir: - node.cache_isdir=True + node.cache_isdir = True if maxdepth: - for k in node.ant_iter(accept=accept,maxdepth=maxdepth-1,pats=npats,dir=dir,src=src,remove=remove,quiet=quiet): + for k in node.ant_iter(accept=accept, maxdepth=maxdepth - 1, pats=npats, dir=dir, src=src, remove=remove, quiet=quiet): yield k - def ant_glob(self,*k,**kw): - src=kw.get('src',True) - dir=kw.get('dir') - excl=kw.get('excl',exclude_regs) - incl=k and k[0]or 
kw.get('incl','**')
-        remove=kw.get('remove',True)
-        maxdepth=kw.get('maxdepth',25)
-        ignorecase=kw.get('ignorecase',False)
-        quiet=kw.get('quiet',False)
-        pats=(ant_matcher(incl,ignorecase),ant_matcher(excl,ignorecase))
+
+    def ant_glob(self, *k, **kw):
+        """
+        Finds files across folders and returns Node objects:
+
+        * ``**/*`` find all files recursively
+        * ``**/*.class`` find all files ending by .class
+        * ``..`` find files having two dot characters
+
+        For example::
+
+            def configure(cfg):
+                # find all .cpp files
+                cfg.path.ant_glob('**/*.cpp')
+                # find particular files from the root filesystem (can be slow)
+                cfg.root.ant_glob('etc/*.txt')
+                # simple exclusion rule example
+                cfg.path.ant_glob('*.c*', excl=['*.c'], src=True, dir=False)
+
+        For more information about the patterns, consult http://ant.apache.org/manual/dirtasks.html
+        Please remember that the '..' sequence does not represent the parent directory::
+
+            def configure(cfg):
+                cfg.path.ant_glob('../*.h') # incorrect
+                cfg.path.parent.ant_glob('*.h') # correct
+
+        The Node structure is itself a filesystem cache, so certain precautions must
+        be taken while matching files in the build or installation phases.
+        Node objects that do not have a corresponding file or folder are garbage-collected by default.
+        This garbage collection is usually required to prevent returning files that do not
+        exist anymore. Yet, this may also remove Node objects of files that are yet-to-be built.
+
+        This typically happens when trying to match files in the build directory,
+        but there are also cases when files are created in the source directory.
+        Run ``waf -v`` to display any warnings, and consider passing ``remove=False``
+        when matching files in the build directory.
+
+        Since ant_glob can traverse both source and build folders, it is a best practice
+        to call this method only from the most specific build node::
+
+            def build(bld):
+                # traverses the build directory, may need ``remove=False``:
+                bld.path.ant_glob('project/dir/**/*.h')
+                # better, no accidental build directory traversal:
+                bld.path.find_node('project/dir').ant_glob('**/*.h') # best
+
+        In addition, files and folders are listed immediately. When matching files in the
+        build folders, consider passing ``generator=True`` so that the generator object
+        returned can defer computation to a later stage.
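+
+        A lazy-matching sketch in the same spirit (the paths are hypothetical)::
+
+            def build(bld):
+                gen = bld.path.ant_glob('src/**/*.c', generator=True)
+                bld(rule='echo ${SRC}', source=gen) # the glob only runs once the task generator is processed
+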
For example:: + + def build(bld): + bld(rule='tar xvf ${SRC}', source='arch.tar') + bld.add_group() + gen = bld.bldnode.ant_glob("*.h", generator=True, remove=True) + # files will be listed only after the arch.tar is unpacked + bld(rule='ls ${SRC}', source=gen, name='XYZ') + + + :param incl: ant patterns or list of patterns to include + :type incl: string or list of strings + :param excl: ant patterns or list of patterns to exclude + :type excl: string or list of strings + :param dir: return folders too (False by default) + :type dir: bool + :param src: return files (True by default) + :type src: bool + :param maxdepth: maximum depth of recursion + :type maxdepth: int + :param ignorecase: ignore case while matching (False by default) + :type ignorecase: bool + :param generator: Whether to evaluate the Nodes lazily + :type generator: bool + :param remove: remove files/folders that do not exist (True by default) + :type remove: bool + :param quiet: disable build directory traversal warnings (verbose mode) + :type quiet: bool + :returns: The corresponding Node objects as a list or as a generator object (generator=True) + :rtype: by default, list of :py:class:`waflib.Node.Node` instances + """ + src = kw.get('src', True) + dir = kw.get('dir') + excl = kw.get('excl', exclude_regs) + incl = k and k[0] or kw.get('incl', '**') + remove = kw.get('remove', True) + maxdepth = kw.get('maxdepth', 25) + ignorecase = kw.get('ignorecase', False) + quiet = kw.get('quiet', False) + pats = (ant_matcher(incl, ignorecase), ant_matcher(excl, ignorecase)) + if kw.get('generator'): - return Utils.lazy_generator(self.ant_iter,(ant_sub_matcher,maxdepth,pats,dir,src,remove,quiet)) - it=self.ant_iter(ant_sub_matcher,maxdepth,pats,dir,src,remove,quiet) + return Utils.lazy_generator(self.ant_iter, (ant_sub_matcher, maxdepth, pats, dir, src, remove, quiet)) + + it = self.ant_iter(ant_sub_matcher, maxdepth, pats, dir, src, remove, quiet) if kw.get('flat'): - return' '.join(x.path_from(self)for x in it) + # returns relative paths as a space-delimited string + # prefer Node objects whenever possible + return ' '.join(x.path_from(self) for x in it) return list(it) + + # ---------------------------------------------------------------------------- + # the methods below require the source/build folders (bld.srcnode/bld.bldnode) + def is_src(self): - cur=self - x=self.ctx.srcnode - y=self.ctx.bldnode + """ + Returns True if the node is below the source directory. Note that ``!is_src() ≠ is_bld()`` + + :rtype: bool + """ + cur = self + x = self.ctx.srcnode + y = self.ctx.bldnode while cur.parent: if cur is y: return False if cur is x: return True - cur=cur.parent + cur = cur.parent return False + def is_bld(self): - cur=self - y=self.ctx.bldnode + """ + Returns True if the node is below the build directory. Note that ``!is_bld() ≠ is_src()`` + + :rtype: bool + """ + cur = self + y = self.ctx.bldnode while cur.parent: if cur is y: return True - cur=cur.parent + cur = cur.parent return False + def get_src(self): - cur=self - x=self.ctx.srcnode - y=self.ctx.bldnode - lst=[] + """ + Returns the corresponding Node object in the source directory (or self if already + under the source directory). 
Use this method only if the purpose is to create + a Node object (this is common with folders but not with files, see ticket 1937) + + :rtype: :py:class:`waflib.Node.Node` + """ + cur = self + x = self.ctx.srcnode + y = self.ctx.bldnode + lst = [] while cur.parent: if cur is y: lst.reverse() @@ -386,13 +777,21 @@ if cur is x: return self lst.append(cur.name) - cur=cur.parent + cur = cur.parent return self + def get_bld(self): - cur=self - x=self.ctx.srcnode - y=self.ctx.bldnode - lst=[] + """ + Return the corresponding Node object in the build directory (or self if already + under the build directory). Use this method only if the purpose is to create + a Node object (this is common with folders but not with files, see ticket 1937) + + :rtype: :py:class:`waflib.Node.Node` + """ + cur = self + x = self.ctx.srcnode + y = self.ctx.bldnode + lst = [] while cur.parent: if cur is y: return self @@ -400,79 +799,171 @@ lst.reverse() return self.ctx.bldnode.make_node(lst) lst.append(cur.name) - cur=cur.parent + cur = cur.parent + # the file is external to the current project, make a fake root in the current build directory lst.reverse() - if lst and Utils.is_win32 and len(lst[0])==2 and lst[0].endswith(':'): - lst[0]=lst[0][0] - return self.ctx.bldnode.make_node(['__root__']+lst) - def find_resource(self,lst): - if isinstance(lst,str): - lst=[x for x in Utils.split_path(lst)if x and x!='.'] - node=self.get_bld().search_node(lst) + if lst and Utils.is_win32 and len(lst[0]) == 2 and lst[0].endswith(':'): + lst[0] = lst[0][0] + return self.ctx.bldnode.make_node(['__root__'] + lst) + + def find_resource(self, lst): + """ + Use this method in the build phase to find source files corresponding to the relative path given. + + First it looks up the Node data structure to find any declared Node object in the build directory. + If None is found, it then considers the filesystem in the source directory. + + :param lst: relative path + :type lst: string or list of string + :returns: the corresponding Node object or None + :rtype: :py:class:`waflib.Node.Node` + """ + if isinstance(lst, str): + lst = [x for x in Utils.split_path(lst) if x and x != '.'] + + node = self.get_bld().search_node(lst) if not node: - node=self.get_src().find_node(lst) + node = self.get_src().find_node(lst) if node and node.isdir(): return None return node - def find_or_declare(self,lst): - if isinstance(lst,str)and os.path.isabs(lst): - node=self.ctx.root.make_node(lst) + + def find_or_declare(self, lst): + """ + Use this method in the build phase to declare output files which + are meant to be written in the build directory. + + This method creates the Node object and its parent folder + as needed. 
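+
+        A minimal sketch (the output file name is hypothetical)::
+
+            def build(bld):
+                out = bld.path.find_or_declare('generated.h')
+                # 'out' is a Node in the build directory; its parent folder now exists on disk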
+
+        :param lst: relative path
+        :type lst: string or list of string
+        """
+        if isinstance(lst, str) and os.path.isabs(lst):
+            node = self.ctx.root.make_node(lst)
         else:
-            node=self.get_bld().make_node(lst)
+            node = self.get_bld().make_node(lst)
         node.parent.mkdir()
         return node
-    def find_dir(self,lst):
-        if isinstance(lst,str):
-            lst=[x for x in Utils.split_path(lst)if x and x!='.']
-        node=self.find_node(lst)
+
+    def find_dir(self, lst):
+        """
+        Searches for a folder on the filesystem (see :py:meth:`waflib.Node.Node.find_node`)
+
+        :param lst: relative path
+        :type lst: string or list of string
+        :returns: The corresponding Node object or None if there is no such folder
+        :rtype: :py:class:`waflib.Node.Node`
+        """
+        if isinstance(lst, str):
+            lst = [x for x in Utils.split_path(lst) if x and x != '.']
+
+        node = self.find_node(lst)
         if node and not node.isdir():
             return None
         return node
-    def change_ext(self,ext,ext_in=None):
-        name=self.name
+
+    # helpers for building things
+    def change_ext(self, ext, ext_in=None):
+        """
+        Declares a build node with a distinct extension; this uses :py:meth:`waflib.Node.Node.find_or_declare`
+
+        :return: A build node of the same path, but with a different extension
+        :rtype: :py:class:`waflib.Node.Node`
+        """
+        name = self.name
         if ext_in is None:
-            k=name.rfind('.')
-            if k>=0:
-                name=name[:k]+ext
+            k = name.rfind('.')
+            if k >= 0:
+                name = name[:k] + ext
             else:
-                name=name+ext
+                name = name + ext
         else:
-            name=name[:-len(ext_in)]+ext
+            name = name[:- len(ext_in)] + ext
+
         return self.parent.find_or_declare([name])
+
     def bldpath(self):
+        """
+        Returns the relative path seen from the build directory ``src/foo.cpp``
+
+        :rtype: string
+        """
         return self.path_from(self.ctx.bldnode)
+
     def srcpath(self):
+        """
+        Returns the relative path seen from the source directory ``../src/foo.cpp``
+
+        :rtype: string
+        """
         return self.path_from(self.ctx.srcnode)
+
     def relpath(self):
-        cur=self
-        x=self.ctx.bldnode
+        """
+        If the file is in the build directory, returns :py:meth:`waflib.Node.Node.bldpath`,
+        else returns :py:meth:`waflib.Node.Node.srcpath`
+
+        :rtype: string
+        """
+        cur = self
+        x = self.ctx.bldnode
         while cur.parent:
             if cur is x:
                 return self.bldpath()
-            cur=cur.parent
+            cur = cur.parent
         return self.srcpath()
+
     def bld_dir(self):
+        """
+        Equivalent to self.parent.bldpath()
+
+        :rtype: string
+        """
         return self.parent.bldpath()
+
     def h_file(self):
+        """
+        See :py:func:`waflib.Utils.h_file`
+
+        :return: a hash representing the file contents
+        :rtype: string or bytes
+        """
         return Utils.h_file(self.abspath())
+
     def get_bld_sig(self):
+        """
+        Returns a signature (see :py:meth:`waflib.Node.Node.h_file`) for the purpose
+        of build dependency calculation. This method uses a per-context cache.
+
+        :return: a hash representing the object contents
+        :rtype: string or bytes
+        """
+        # previous behaviour can be set by returning self.ctx.node_sigs[self] when a build node
         try:
-            cache=self.ctx.cache_sig
+            cache = self.ctx.cache_sig
         except AttributeError:
-            cache=self.ctx.cache_sig={}
+            cache = self.ctx.cache_sig = {}
+
         try:
-            ret=cache[self]
+            ret = cache[self]
         except KeyError:
-            p=self.abspath()
+            p = self.abspath()
             try:
-                ret=cache[self]=self.h_file()
+                ret = cache[self] = self.h_file()
             except EnvironmentError:
                 if self.isdir():
-                    st=os.stat(p)
-                    ret=cache[self]=Utils.h_list([p,st.st_ino,st.st_mode])
+                    # allow folders as build nodes, do not use the creation time
+                    st = os.stat(p)
+                    ret = cache[self] = Utils.h_list([p, st.st_ino, st.st_mode])
                     return ret
                 raise
         return ret
-pickle_lock=Utils.threading.Lock()
+
+pickle_lock = Utils.threading.Lock()
+"""Lock mandatory for thread-safe node serialization"""
+
 class Nod3(Node):
-    pass
+    """Mandatory subclass for thread-safe node serialization"""
+    pass # do not remove
+
+
diff -Nru lilv-0.24.4~dfsg0/waflib/Options.py lilv-0.24.6/waflib/Options.py
--- lilv-0.24.4~dfsg0/waflib/Options.py	2018-06-27 05:22:05.000000000 +0000
+++ lilv-0.24.6/waflib/Options.py	2019-06-06 20:19:08.000000000 +0000
@@ -1,200 +1,342 @@
-#! /usr/bin/env python
+#!/usr/bin/env python
 # encoding: utf-8
-# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+# Scott Newton, 2005 (scottn)
+# Thomas Nagy, 2006-2018 (ita)
+
+"""
+Support for waf command-line options
+
+Provides default and command-line options, as well as the command
+that reads the ``options`` wscript function.
+"""
+
+import os, tempfile, optparse, sys, re
+from waflib import Logs, Utils, Context, Errors
+
+options = optparse.Values()
+"""
+A global dictionary representing user-provided command-line options::
+
+    $ waf --foo=bar
+"""
+
+commands = []
+"""
+List of commands to execute extracted from the command-line. This list
+is consumed during the execution by :py:func:`waflib.Scripting.run_commands`.
+"""
+
+envvars = []
+"""
+List of environment variable declarations placed after the Waf executable name.
+These are detected by searching for "=" in the remaining arguments.
+You probably do not want to use this.
+"""
+
+lockfile = os.environ.get('WAFLOCK', '.lock-waf_%s_build' % sys.platform)
+"""
+Name of the lock file that marks a project as configured
+"""
-import os,tempfile,optparse,sys,re
-from waflib import Logs,Utils,Context,Errors
-options=optparse.Values()
-commands=[]
-envvars=[]
-lockfile=os.environ.get('WAFLOCK','.lock-waf_%s_build'%sys.platform)
 class opt_parser(optparse.OptionParser):
-    def __init__(self,ctx,allow_unknown=False):
-        optparse.OptionParser.__init__(self,conflict_handler='resolve',add_help_option=False,version='waf %s (%s)'%(Context.WAFVERSION,Context.WAFREVISION))
-        self.formatter.width=Logs.get_term_cols()
-        self.ctx=ctx
-        self.allow_unknown=allow_unknown
-    def _process_args(self,largs,rargs,values):
+    """
+    Command-line options parser.
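+
+    A rough usage sketch (``ctx`` stands for an options context; illustrative only)::
+
+        parser = opt_parser(ctx, allow_unknown=True)
+        opts, leftover = parser.parse_args(['--unknown-flag', 'build'])
+        # '--unknown-flag' ends up in 'leftover' instead of raising an error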
+ """ + def __init__(self, ctx, allow_unknown=False): + optparse.OptionParser.__init__(self, conflict_handler='resolve', add_help_option=False, + version='waf %s (%s)' % (Context.WAFVERSION, Context.WAFREVISION)) + self.formatter.width = Logs.get_term_cols() + self.ctx = ctx + self.allow_unknown = allow_unknown + + def _process_args(self, largs, rargs, values): + """ + Custom _process_args to allow unknown options according to the allow_unknown status + """ while rargs: try: optparse.OptionParser._process_args(self,largs,rargs,values) - except(optparse.BadOptionError,optparse.AmbiguousOptionError)as e: + except (optparse.BadOptionError, optparse.AmbiguousOptionError) as e: if self.allow_unknown: largs.append(e.opt_str) else: self.error(str(e)) - def print_usage(self,file=None): + + def print_usage(self, file=None): return self.print_help(file) + def get_usage(self): - cmds_str={} + """ + Builds the message to print on ``waf --help`` + + :rtype: string + """ + cmds_str = {} for cls in Context.classes: - if not cls.cmd or cls.cmd=='options'or cls.cmd.startswith('_'): + if not cls.cmd or cls.cmd == 'options' or cls.cmd.startswith( '_' ): continue - s=cls.__doc__ or'' - cmds_str[cls.cmd]=s + + s = cls.__doc__ or '' + cmds_str[cls.cmd] = s + if Context.g_module: - for(k,v)in Context.g_module.__dict__.items(): - if k in('options','init','shutdown'): + for (k, v) in Context.g_module.__dict__.items(): + if k in ('options', 'init', 'shutdown'): continue - if type(v)is type(Context.create_context): + + if type(v) is type(Context.create_context): if v.__doc__ and not k.startswith('_'): - cmds_str[k]=v.__doc__ - just=0 + cmds_str[k] = v.__doc__ + + just = 0 for k in cmds_str: - just=max(just,len(k)) - lst=[' %s: %s'%(k.ljust(just),v)for(k,v)in cmds_str.items()] + just = max(just, len(k)) + + lst = [' %s: %s' % (k.ljust(just), v) for (k, v) in cmds_str.items()] lst.sort() - ret='\n'.join(lst) - return'''waf [commands] [options] + ret = '\n'.join(lst) + + return '''waf [commands] [options] Main commands (example: ./waf build -j4) %s -'''%ret +''' % ret + + class OptionsContext(Context.Context): - cmd='options' - fun='options' - def __init__(self,**kw): - super(OptionsContext,self).__init__(**kw) - self.parser=opt_parser(self) - self.option_groups={} - jobs=self.jobs() - p=self.add_option - color=os.environ.get('NOCOLOR','')and'no'or'auto' - if os.environ.get('CLICOLOR','')=='0': - color='no' - elif os.environ.get('CLICOLOR_FORCE','')=='1': - color='yes' - p('-c','--color',dest='colors',default=color,action='store',help='whether to use colors (yes/no/auto) [default: auto]',choices=('yes','no','auto')) - p('-j','--jobs',dest='jobs',default=jobs,type='int',help='amount of parallel jobs (%r)'%jobs) - p('-k','--keep',dest='keep',default=0,action='count',help='continue despite errors (-kk to try harder)') - p('-v','--verbose',dest='verbose',default=0,action='count',help='verbosity level -v -vv or -vvv [default: 0]') - p('--zones',dest='zones',default='',action='store',help='debugging zones (task_gen, deps, tasks, etc)') - p('--profile',dest='profile',default=0,action='store_true',help=optparse.SUPPRESS_HELP) - p('--pdb',dest='pdb',default=0,action='store_true',help=optparse.SUPPRESS_HELP) - p('-h','--help',dest='whelp',default=0,action='store_true',help="show this help message and exit") - gr=self.add_option_group('Configuration options') - self.option_groups['configure options']=gr - gr.add_option('-o','--out',action='store',default='',help='build dir for the project',dest='out') - 
gr.add_option('-t','--top',action='store',default='',help='src dir for the project',dest='top') - gr.add_option('--no-lock-in-run',action='store_true',default='',help=optparse.SUPPRESS_HELP,dest='no_lock_in_run') - gr.add_option('--no-lock-in-out',action='store_true',default='',help=optparse.SUPPRESS_HELP,dest='no_lock_in_out') - gr.add_option('--no-lock-in-top',action='store_true',default='',help=optparse.SUPPRESS_HELP,dest='no_lock_in_top') - default_prefix=getattr(Context.g_module,'default_prefix',os.environ.get('PREFIX')) + """ + Collects custom options from wscript files and parses the command line. + Sets the global :py:const:`waflib.Options.commands` and :py:const:`waflib.Options.options` values. + """ + cmd = 'options' + fun = 'options' + + def __init__(self, **kw): + super(OptionsContext, self).__init__(**kw) + + self.parser = opt_parser(self) + """Instance of :py:class:`waflib.Options.opt_parser`""" + + self.option_groups = {} + + jobs = self.jobs() + p = self.add_option + color = os.environ.get('NOCOLOR', '') and 'no' or 'auto' + if os.environ.get('CLICOLOR', '') == '0': + color = 'no' + elif os.environ.get('CLICOLOR_FORCE', '') == '1': + color = 'yes' + p('-c', '--color', dest='colors', default=color, action='store', help='whether to use colors (yes/no/auto) [default: auto]', choices=('yes', 'no', 'auto')) + p('-j', '--jobs', dest='jobs', default=jobs, type='int', help='amount of parallel jobs (%r)' % jobs) + p('-k', '--keep', dest='keep', default=0, action='count', help='continue despite errors (-kk to try harder)') + p('-v', '--verbose', dest='verbose', default=0, action='count', help='verbosity level -v -vv or -vvv [default: 0]') + p('--zones', dest='zones', default='', action='store', help='debugging zones (task_gen, deps, tasks, etc)') + p('--profile', dest='profile', default=0, action='store_true', help=optparse.SUPPRESS_HELP) + p('--pdb', dest='pdb', default=0, action='store_true', help=optparse.SUPPRESS_HELP) + p('-h', '--help', dest='whelp', default=0, action='store_true', help="show this help message and exit") + + gr = self.add_option_group('Configuration options') + self.option_groups['configure options'] = gr + + gr.add_option('-o', '--out', action='store', default='', help='build dir for the project', dest='out') + gr.add_option('-t', '--top', action='store', default='', help='src dir for the project', dest='top') + + gr.add_option('--no-lock-in-run', action='store_true', default='', help=optparse.SUPPRESS_HELP, dest='no_lock_in_run') + gr.add_option('--no-lock-in-out', action='store_true', default='', help=optparse.SUPPRESS_HELP, dest='no_lock_in_out') + gr.add_option('--no-lock-in-top', action='store_true', default='', help=optparse.SUPPRESS_HELP, dest='no_lock_in_top') + + default_prefix = getattr(Context.g_module, 'default_prefix', os.environ.get('PREFIX')) if not default_prefix: - if Utils.unversioned_sys_platform()=='win32': - d=tempfile.gettempdir() - default_prefix=d[0].upper()+d[1:] + if Utils.unversioned_sys_platform() == 'win32': + d = tempfile.gettempdir() + default_prefix = d[0].upper() + d[1:] + # win32 preserves the case, but gettempdir does not else: - default_prefix='/usr/local/' - gr.add_option('--prefix',dest='prefix',default=default_prefix,help='installation prefix [default: %r]'%default_prefix) - gr.add_option('--bindir',dest='bindir',help='bindir') - gr.add_option('--libdir',dest='libdir',help='libdir') - gr=self.add_option_group('Build and installation options') - self.option_groups['build and install options']=gr - 
gr.add_option('-p','--progress',dest='progress_bar',default=0,action='count',help='-p: progress bar; -pp: ide output') - gr.add_option('--targets',dest='targets',default='',action='store',help='task generators, e.g. "target1,target2"') - gr=self.add_option_group('Step options') - self.option_groups['step options']=gr - gr.add_option('--files',dest='files',default='',action='store',help='files to process, by regexp, e.g. "*/main.c,*/test/main.o"') - default_destdir=os.environ.get('DESTDIR','') - gr=self.add_option_group('Installation and uninstallation options') - self.option_groups['install/uninstall options']=gr - gr.add_option('--destdir',help='installation root [default: %r]'%default_destdir,default=default_destdir,dest='destdir') - gr.add_option('-f','--force',dest='force',default=False,action='store_true',help='force file installation') - gr.add_option('--distcheck-args',metavar='ARGS',help='arguments to pass to distcheck',default=None,action='store') + default_prefix = '/usr/local/' + gr.add_option('--prefix', dest='prefix', default=default_prefix, help='installation prefix [default: %r]' % default_prefix) + gr.add_option('--bindir', dest='bindir', help='bindir') + gr.add_option('--libdir', dest='libdir', help='libdir') + + gr = self.add_option_group('Build and installation options') + self.option_groups['build and install options'] = gr + gr.add_option('-p', '--progress', dest='progress_bar', default=0, action='count', help= '-p: progress bar; -pp: ide output') + gr.add_option('--targets', dest='targets', default='', action='store', help='task generators, e.g. "target1,target2"') + + gr = self.add_option_group('Step options') + self.option_groups['step options'] = gr + gr.add_option('--files', dest='files', default='', action='store', help='files to process, by regexp, e.g. "*/main.c,*/test/main.o"') + + default_destdir = os.environ.get('DESTDIR', '') + + gr = self.add_option_group('Installation and uninstallation options') + self.option_groups['install/uninstall options'] = gr + gr.add_option('--destdir', help='installation root [default: %r]' % default_destdir, default=default_destdir, dest='destdir') + gr.add_option('-f', '--force', dest='force', default=False, action='store_true', help='force file installation') + gr.add_option('--distcheck-args', metavar='ARGS', help='arguments to pass to distcheck', default=None, action='store') + def jobs(self): - count=int(os.environ.get('JOBS',0)) - if count<1: - if'NUMBER_OF_PROCESSORS'in os.environ: - count=int(os.environ.get('NUMBER_OF_PROCESSORS',1)) + """ + Finds the optimal amount of cpu cores to use for parallel jobs. 
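+
+        The lookup order implemented below is the ``JOBS`` environment variable first,
+        then OS-provided processor counts, with the result clamped to the range 1..1024.
+        For instance (illustrative)::
+
+            import os
+            os.environ['JOBS'] = '8'
+            # a subsequent call to jobs() now returns 8 on any platform
+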
+ At runtime the options can be obtained from :py:const:`waflib.Options.options` :: + + from waflib.Options import options + njobs = options.jobs + + :return: the amount of cpu cores + :rtype: int + """ + count = int(os.environ.get('JOBS', 0)) + if count < 1: + if 'NUMBER_OF_PROCESSORS' in os.environ: + # on Windows, use the NUMBER_OF_PROCESSORS environment variable + count = int(os.environ.get('NUMBER_OF_PROCESSORS', 1)) else: - if hasattr(os,'sysconf_names'): - if'SC_NPROCESSORS_ONLN'in os.sysconf_names: - count=int(os.sysconf('SC_NPROCESSORS_ONLN')) - elif'SC_NPROCESSORS_CONF'in os.sysconf_names: - count=int(os.sysconf('SC_NPROCESSORS_CONF')) - if not count and os.name not in('nt','java'): + # on everything else, first try the POSIX sysconf values + if hasattr(os, 'sysconf_names'): + if 'SC_NPROCESSORS_ONLN' in os.sysconf_names: + count = int(os.sysconf('SC_NPROCESSORS_ONLN')) + elif 'SC_NPROCESSORS_CONF' in os.sysconf_names: + count = int(os.sysconf('SC_NPROCESSORS_CONF')) + if not count and os.name not in ('nt', 'java'): try: - tmp=self.cmd_and_log(['sysctl','-n','hw.ncpu'],quiet=0) + tmp = self.cmd_and_log(['sysctl', '-n', 'hw.ncpu'], quiet=0) except Errors.WafError: pass else: - if re.match('^[0-9]+$',tmp): - count=int(tmp) - if count<1: - count=1 - elif count>1024: - count=1024 + if re.match('^[0-9]+$', tmp): + count = int(tmp) + if count < 1: + count = 1 + elif count > 1024: + count = 1024 return count - def add_option(self,*k,**kw): - return self.parser.add_option(*k,**kw) - def add_option_group(self,*k,**kw): + + def add_option(self, *k, **kw): + """ + Wraps ``optparse.add_option``:: + + def options(ctx): + ctx.add_option('-u', '--use', dest='use', default=False, + action='store_true', help='a boolean option') + + :rtype: optparse option object + """ + return self.parser.add_option(*k, **kw) + + def add_option_group(self, *k, **kw): + """ + Wraps ``optparse.add_option_group``:: + + def options(ctx): + gr = ctx.add_option_group('some options') + gr.add_option('-u', '--use', dest='use', default=False, action='store_true') + + :rtype: optparse option group object + """ try: - gr=self.option_groups[k[0]] + gr = self.option_groups[k[0]] except KeyError: - gr=self.parser.add_option_group(*k,**kw) - self.option_groups[k[0]]=gr + gr = self.parser.add_option_group(*k, **kw) + self.option_groups[k[0]] = gr return gr - def get_option_group(self,opt_str): + + def get_option_group(self, opt_str): + """ + Wraps ``optparse.get_option_group``:: + + def options(ctx): + gr = ctx.get_option_group('configure options') + gr.add_option('-o', '--out', action='store', default='', + help='build dir for the project', dest='out') + + :rtype: optparse option group object + """ try: return self.option_groups[opt_str] except KeyError: for group in self.parser.option_groups: - if group.title==opt_str: + if group.title == opt_str: return group return None - def sanitize_path(self,path,cwd=None): + + def sanitize_path(self, path, cwd=None): if not cwd: - cwd=Context.launch_dir - p=os.path.expanduser(path) - p=os.path.join(cwd,p) - p=os.path.normpath(p) - p=os.path.abspath(p) + cwd = Context.launch_dir + p = os.path.expanduser(path) + p = os.path.join(cwd, p) + p = os.path.normpath(p) + p = os.path.abspath(p) return p - def parse_cmd_args(self,_args=None,cwd=None,allow_unknown=False): - self.parser.allow_unknown=allow_unknown - (options,leftover_args)=self.parser.parse_args(args=_args) - envvars=[] - commands=[] + + def parse_cmd_args(self, _args=None, cwd=None, allow_unknown=False): + """ + Just parse the 
arguments
+        """
+        self.parser.allow_unknown = allow_unknown
+        (options, leftover_args) = self.parser.parse_args(args=_args)
+        envvars = []
+        commands = []
         for arg in leftover_args:
-            if'='in arg:
+            if '=' in arg:
                 envvars.append(arg)
-            elif arg!='options':
+            elif arg != 'options':
                 commands.append(arg)
-        for name in'top out destdir prefix bindir libdir'.split():
-            if getattr(options,name,None):
-                path=self.sanitize_path(getattr(options,name),cwd)
-                setattr(options,name,path)
-        return options,commands,envvars
-    def init_module_vars(self,arg_options,arg_commands,arg_envvars):
+
+        for name in 'top out destdir prefix bindir libdir'.split():
+            # those paths are usually expanded from Context.launch_dir
+            if getattr(options, name, None):
+                path = self.sanitize_path(getattr(options, name), cwd)
+                setattr(options, name, path)
+        return options, commands, envvars
+
+    def init_module_vars(self, arg_options, arg_commands, arg_envvars):
         options.__dict__.clear()
         del commands[:]
         del envvars[:]
+
         options.__dict__.update(arg_options.__dict__)
         commands.extend(arg_commands)
         envvars.extend(arg_envvars)
+
         for var in envvars:
-            (name,value)=var.split('=',1)
-            os.environ[name.strip()]=value
-    def init_logs(self,options,commands,envvars):
-        Logs.verbose=options.verbose
-        if options.verbose>=1:
+            (name, value) = var.split('=', 1)
+            os.environ[name.strip()] = value
+
+    def init_logs(self, options, commands, envvars):
+        Logs.verbose = options.verbose
+        if options.verbose >= 1:
             self.load('errcheck')
-        colors={'yes':2,'auto':1,'no':0}[options.colors]
+
+        colors = {'yes' : 2, 'auto' : 1, 'no' : 0}[options.colors]
         Logs.enable_colors(colors)
+
         if options.zones:
-            Logs.zones=options.zones.split(',')
+            Logs.zones = options.zones.split(',')
             if not Logs.verbose:
-                Logs.verbose=1
-        elif Logs.verbose>0:
-            Logs.zones=['runner']
-        if Logs.verbose>2:
-            Logs.zones=['*']
-    def parse_args(self,_args=None):
-        options,commands,envvars=self.parse_cmd_args()
-        self.init_logs(options,commands,envvars)
-        self.init_module_vars(options,commands,envvars)
+                Logs.verbose = 1
+        elif Logs.verbose > 0:
+            Logs.zones = ['runner']
+        if Logs.verbose > 2:
+            Logs.zones = ['*']
+
+    def parse_args(self, _args=None):
+        """
+        Parses arguments from a list which is not necessarily the command-line.
+        Initializes the module variables options, commands and envvars.
+        If help is requested, prints it and exits the application.
+
+        :param _args: arguments
+        :type _args: list of strings
+        """
+        options, commands, envvars = self.parse_cmd_args()
+        self.init_logs(options, commands, envvars)
+        self.init_module_vars(options, commands, envvars)
+
     def execute(self):
-        super(OptionsContext,self).execute()
+        """
+        See :py:func:`waflib.Context.Context.execute`
+        """
+        super(OptionsContext, self).execute()
        self.parse_args()
         Utils.alloc_process_pool(options.jobs)
+
diff -Nru lilv-0.24.4~dfsg0/waflib/processor.py lilv-0.24.6/waflib/processor.py
--- lilv-0.24.4~dfsg0/waflib/processor.py	2018-06-27 05:22:05.000000000 +0000
+++ lilv-0.24.6/waflib/processor.py	2019-10-19 17:59:11.000000000 +0000
@@ -1,55 +1,68 @@
 #! /usr/bin/env python
 # encoding: utf-8
-# WARNING! Do not edit!
https://waf.io/book/index.html#_obtaining_the_waf_file +# Thomas Nagy, 2016-2018 (ita) -import os,sys,traceback,base64,signal +import os, sys, traceback, base64, signal try: import cPickle except ImportError: import pickle as cPickle + try: import subprocess32 as subprocess except ImportError: import subprocess + try: - TimeoutExpired=subprocess.TimeoutExpired + TimeoutExpired = subprocess.TimeoutExpired except AttributeError: class TimeoutExpired(Exception): pass + def run(): - txt=sys.stdin.readline().strip() + txt = sys.stdin.readline().strip() if not txt: + # parent process probably ended sys.exit(1) - [cmd,kwargs,cargs]=cPickle.loads(base64.b64decode(txt)) - cargs=cargs or{} - ret=1 - out,err,ex,trace=(None,None,None,None) + [cmd, kwargs, cargs] = cPickle.loads(base64.b64decode(txt)) + cargs = cargs or {} + + if not 'close_fds' in kwargs: + # workers have no fds + kwargs['close_fds'] = False + + ret = 1 + out, err, ex, trace = (None, None, None, None) try: - proc=subprocess.Popen(cmd,**kwargs) + proc = subprocess.Popen(cmd, **kwargs) try: - out,err=proc.communicate(**cargs) + out, err = proc.communicate(**cargs) except TimeoutExpired: - if kwargs.get('start_new_session')and hasattr(os,'killpg'): - os.killpg(proc.pid,signal.SIGKILL) + if kwargs.get('start_new_session') and hasattr(os, 'killpg'): + os.killpg(proc.pid, signal.SIGKILL) else: proc.kill() - out,err=proc.communicate() - exc=TimeoutExpired(proc.args,timeout=cargs['timeout'],output=out) - exc.stderr=err + out, err = proc.communicate() + exc = TimeoutExpired(proc.args, timeout=cargs['timeout'], output=out) + exc.stderr = err raise exc - ret=proc.returncode + ret = proc.returncode except Exception as e: - exc_type,exc_value,tb=sys.exc_info() - exc_lines=traceback.format_exception(exc_type,exc_value,tb) - trace=str(cmd)+'\n'+''.join(exc_lines) - ex=e.__class__.__name__ - tmp=[ret,out,err,ex,trace] - obj=base64.b64encode(cPickle.dumps(tmp)) + exc_type, exc_value, tb = sys.exc_info() + exc_lines = traceback.format_exception(exc_type, exc_value, tb) + trace = str(cmd) + '\n' + ''.join(exc_lines) + ex = e.__class__.__name__ + + # it is just text so maybe we do not need to pickle() + tmp = [ret, out, err, ex, trace] + obj = base64.b64encode(cPickle.dumps(tmp)) sys.stdout.write(obj.decode()) sys.stdout.write('\n') sys.stdout.flush() + while 1: try: run() except KeyboardInterrupt: break + diff -Nru lilv-0.24.4~dfsg0/waflib/README.md lilv-0.24.6/waflib/README.md --- lilv-0.24.4~dfsg0/waflib/README.md 1970-01-01 00:00:00.000000000 +0000 +++ lilv-0.24.6/waflib/README.md 2019-06-06 20:19:08.000000000 +0000 @@ -0,0 +1,24 @@ +Autowaf +======= + +This is autowaf, a bundle of waf and a few extensions intended to be easy to +use directly as source code in a project. Using this as a submodule or subtree +named `waflib` in a project allows waf to be used without including binary +encoded data in the waf script. This gets along with revision control and +distributions better, among other advantages, without losing +self-containedness. + +To use this in a project, add this repository as a directory named `waflib` in +the top level of the project, and link or copy `waf` to the top level. + +Two waf extras are also included: `autowaf.py` and `lv2.py`. + +The `autowaf.py` module is a kitchen sink of Python utilities for building +consistent packages, and can be imported in a wcript as +`waflib.extras.autowaf`. + +The `lv2.py` extra defines options for LV2 plugin installation paths. 
It can +be used by calling `opt.load('lv2')` and `conf.load('lv2')` in the appropriate +locations in a wscript. + + -- David Robillard diff -Nru lilv-0.24.4~dfsg0/waflib/Runner.py lilv-0.24.6/waflib/Runner.py --- lilv-0.24.4~dfsg0/waflib/Runner.py 2018-06-27 05:22:05.000000000 +0000 +++ lilv-0.24.6/waflib/Runner.py 2019-10-19 17:59:11.000000000 +0000 @@ -1,112 +1,223 @@ -#! /usr/bin/env python +#!/usr/bin/env python # encoding: utf-8 -# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file +# Thomas Nagy, 2005-2018 (ita) -import heapq,traceback +""" +Runner.py: Task scheduling and execution +""" + +import heapq, traceback try: - from queue import Queue,PriorityQueue + from queue import Queue, PriorityQueue except ImportError: from Queue import Queue try: from Queue import PriorityQueue except ImportError: class PriorityQueue(Queue): - def _init(self,maxsize): - self.maxsize=maxsize - self.queue=[] - def _put(self,item): - heapq.heappush(self.queue,item) + def _init(self, maxsize): + self.maxsize = maxsize + self.queue = [] + def _put(self, item): + heapq.heappush(self.queue, item) def _get(self): return heapq.heappop(self.queue) -from waflib import Utils,Task,Errors,Logs -GAP=5 + +from waflib import Utils, Task, Errors, Logs + +GAP = 5 +""" +Wait for at least ``GAP * njobs`` before trying to enqueue more tasks to run +""" + class PriorityTasks(object): def __init__(self): - self.lst=[] + self.lst = [] def __len__(self): return len(self.lst) def __iter__(self): return iter(self.lst) + def __str__(self): + return 'PriorityTasks: [%s]' % '\n '.join(str(x) for x in self.lst) def clear(self): - self.lst=[] - def append(self,task): - heapq.heappush(self.lst,task) - def appendleft(self,task): - heapq.heappush(self.lst,task) + self.lst = [] + def append(self, task): + heapq.heappush(self.lst, task) + def appendleft(self, task): + "Deprecated, do not use" + heapq.heappush(self.lst, task) def pop(self): return heapq.heappop(self.lst) - def extend(self,lst): + def extend(self, lst): if self.lst: for x in lst: self.append(x) else: - if isinstance(lst,list): - self.lst=lst + if isinstance(lst, list): + self.lst = lst heapq.heapify(lst) else: - self.lst=lst.lst + self.lst = lst.lst + class Consumer(Utils.threading.Thread): - def __init__(self,spawner,task): + """ + Daemon thread object that executes a task. It shares a semaphore with + the coordinator :py:class:`waflib.Runner.Spawner`. There is one + instance per task to consume. + """ + def __init__(self, spawner, task): Utils.threading.Thread.__init__(self) - self.task=task - self.spawner=spawner + self.task = task + """Task to execute""" + self.spawner = spawner + """Coordinator object""" self.setDaemon(1) self.start() def run(self): + """ + Processes a single task + """ try: if not self.spawner.master.stop: self.spawner.master.process_task(self.task) finally: self.spawner.sem.release() self.spawner.master.out.put(self.task) - self.task=None - self.spawner=None + self.task = None + self.spawner = None + class Spawner(Utils.threading.Thread): - def __init__(self,master): + """ + Daemon thread that consumes tasks from :py:class:`waflib.Runner.Parallel` producer and + spawns a consuming thread :py:class:`waflib.Runner.Consumer` for each + :py:class:`waflib.Task.Task` instance. 
+ """ + def __init__(self, master): Utils.threading.Thread.__init__(self) - self.master=master - self.sem=Utils.threading.Semaphore(master.numjobs) + self.master = master + """:py:class:`waflib.Runner.Parallel` producer instance""" + self.sem = Utils.threading.Semaphore(master.numjobs) + """Bounded semaphore that prevents spawning more than *n* concurrent consumers""" self.setDaemon(1) self.start() def run(self): + """ + Spawns new consumers to execute tasks by delegating to :py:meth:`waflib.Runner.Spawner.loop` + """ try: self.loop() except Exception: + # Python 2 prints unnecessary messages when shutting down + # we also want to stop the thread properly pass def loop(self): - master=self.master + """ + Consumes task objects from the producer; ends when the producer has no more + task to provide. + """ + master = self.master while 1: - task=master.ready.get() + task = master.ready.get() self.sem.acquire() if not master.stop: task.log_display(task.generator.bld) - Consumer(self,task) + Consumer(self, task) + class Parallel(object): - def __init__(self,bld,j=2): - self.numjobs=j - self.bld=bld - self.outstanding=PriorityTasks() - self.postponed=PriorityTasks() - self.incomplete=set() - self.ready=PriorityQueue(0) - self.out=Queue(0) - self.count=0 - self.processed=0 - self.stop=False - self.error=[] - self.biter=None - self.dirty=False - self.revdeps=Utils.defaultdict(set) - self.spawner=Spawner(self) + """ + Schedule the tasks obtained from the build context for execution. + """ + def __init__(self, bld, j=2): + """ + The initialization requires a build context reference + for computing the total number of jobs. + """ + + self.numjobs = j + """ + Amount of parallel consumers to use + """ + + self.bld = bld + """ + Instance of :py:class:`waflib.Build.BuildContext` + """ + + self.outstanding = PriorityTasks() + """Heap of :py:class:`waflib.Task.Task` that may be ready to be executed""" + + self.postponed = PriorityTasks() + """Heap of :py:class:`waflib.Task.Task` which are not ready to run for non-DAG reasons""" + + self.incomplete = set() + """List of :py:class:`waflib.Task.Task` waiting for dependent tasks to complete (DAG)""" + + self.ready = PriorityQueue(0) + """List of :py:class:`waflib.Task.Task` ready to be executed by consumers""" + + self.out = Queue(0) + """List of :py:class:`waflib.Task.Task` returned by the task consumers""" + + self.count = 0 + """Amount of tasks that may be processed by :py:class:`waflib.Runner.TaskConsumer`""" + + self.processed = 0 + """Amount of tasks processed""" + + self.stop = False + """Error flag to stop the build""" + + self.error = [] + """Tasks that could not be executed""" + + self.biter = None + """Task iterator which must give groups of parallelizable tasks when calling ``next()``""" + + self.dirty = False + """ + Flag that indicates that the build cache must be saved when a task was executed + (calls :py:meth:`waflib.Build.BuildContext.store`)""" + + self.revdeps = Utils.defaultdict(set) + """ + The reverse dependency graph of dependencies obtained from Task.run_after + """ + + self.spawner = None + """ + Coordinating daemon thread that spawns thread consumers + """ + if self.numjobs > 1: + self.spawner = Spawner(self) + def get_next_task(self): + """ + Obtains the next Task instance to run + + :rtype: :py:class:`waflib.Task.Task` + """ if not self.outstanding: return None return self.outstanding.pop() - def postpone(self,tsk): + + def postpone(self, tsk): + """ + Adds the task to the list :py:attr:`waflib.Runner.Parallel.postponed`. 
+ The order is scrambled so as to consume as many tasks in parallel as possible. + + :param tsk: task instance + :type tsk: :py:class:`waflib.Task.Task` + """ self.postponed.append(tsk) + def refill_task_list(self): - while self.count>self.numjobs*GAP: + """ + Pulls a next group of tasks to execute in :py:attr:`waflib.Runner.Parallel.outstanding`. + Ensures that all tasks in the current build group are complete before processing the next one. + """ + while self.count > self.numjobs * GAP: self.get_out() + while not self.outstanding: if self.count: self.get_out() @@ -114,19 +225,24 @@ break elif self.postponed: try: - cond=self.deadlock==self.processed + cond = self.deadlock == self.processed except AttributeError: pass else: if cond: - lst=[] + # The most common reason is conflicting build order declaration + # for example: "X run_after Y" and "Y run_after X" + # Another can be changing "run_after" dependencies while the build is running + # for example: updating "tsk.run_after" in the "runnable_status" method + lst = [] for tsk in self.postponed: - deps=[id(x)for x in tsk.run_after if not x.hasrun] - lst.append('%s\t-> %r'%(repr(tsk),deps)) + deps = [id(x) for x in tsk.run_after if not x.hasrun] + lst.append('%s\t-> %r' % (repr(tsk), deps)) if not deps: - lst.append('\n task %r dependencies are done, check its *runnable_status*?'%id(tsk)) - raise Errors.WafError('Deadlock detected: check the task build order%s'%''.join(lst)) - self.deadlock=self.processed + lst.append('\n task %r dependencies are done, check its *runnable_status*?' % id(tsk)) + raise Errors.WafError('Deadlock detected: check the task build order%s' % ''.join(lst)) + self.deadlock = self.processed + if self.postponed: self.outstanding.extend(self.postponed) self.postponed.clear() @@ -137,186 +253,338 @@ if not k.hasrun: break else: + # dependency added after the build started without updating revdeps self.incomplete.remove(x) self.outstanding.append(x) break else: - raise Errors.WafError('Broken revdeps detected on %r'%self.incomplete) + if self.stop or self.error: + break + raise Errors.WafError('Broken revdeps detected on %r' % self.incomplete) else: - tasks=next(self.biter) - ready,waiting=self.prio_and_split(tasks) + tasks = next(self.biter) + ready, waiting = self.prio_and_split(tasks) self.outstanding.extend(ready) self.incomplete.update(waiting) - self.total=self.bld.total() + self.total = self.bld.total() break - def add_more_tasks(self,tsk): - if getattr(tsk,'more_tasks',None): - more=set(tsk.more_tasks) - groups_done=set() - def iteri(a,b): + + def add_more_tasks(self, tsk): + """ + If a task provides :py:attr:`waflib.Task.Task.more_tasks`, then the tasks contained + in that list are added to the current build and will be processed before the next build group. 
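+
+        A sketch of a task that schedules follow-up work (all names are hypothetical)::
+
+            from waflib import Task
+
+            class unpack(Task.Task):
+                def run(self):
+                    tg = self.generator
+                    node = tg.path.find_or_declare('extracted.c')
+                    node.write('int main(void) { return 0; }')
+                    # compile the new file before the next build group starts
+                    self.more_tasks = [tg.create_task('c', node, node.change_ext('.o'))]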
+ + The priorities for dependent tasks are not re-calculated globally + + :param tsk: task instance + :type tsk: :py:attr:`waflib.Task.Task` + """ + if getattr(tsk, 'more_tasks', None): + more = set(tsk.more_tasks) + groups_done = set() + def iteri(a, b): for x in a: yield x for x in b: yield x - for x in iteri(self.outstanding,self.incomplete): + + # Update the dependency tree + # this assumes that task.run_after values were updated + for x in iteri(self.outstanding, self.incomplete): for k in x.run_after: - if isinstance(k,Task.TaskGroup): + if isinstance(k, Task.TaskGroup): if k not in groups_done: groups_done.add(k) - for j in k.prev&more: + for j in k.prev & more: self.revdeps[j].add(k) elif k in more: self.revdeps[k].add(x) - ready,waiting=self.prio_and_split(tsk.more_tasks) + + ready, waiting = self.prio_and_split(tsk.more_tasks) self.outstanding.extend(ready) self.incomplete.update(waiting) - self.total+=len(tsk.more_tasks) - def mark_finished(self,tsk): + self.total += len(tsk.more_tasks) + + def mark_finished(self, tsk): def try_unfreeze(x): + # DAG ancestors are likely to be in the incomplete set + # This assumes that the run_after contents have not changed + # after the build starts, else a deadlock may occur if x in self.incomplete: + # TODO remove dependencies to free some memory? + # x.run_after.remove(tsk) for k in x.run_after: if not k.hasrun: break else: self.incomplete.remove(x) self.outstanding.append(x) + if tsk in self.revdeps: for x in self.revdeps[tsk]: - if isinstance(x,Task.TaskGroup): + if isinstance(x, Task.TaskGroup): x.prev.remove(tsk) if not x.prev: for k in x.next: + # TODO necessary optimization? k.run_after.remove(x) try_unfreeze(k) - x.next=[] + # TODO necessary optimization? + x.next = [] else: try_unfreeze(x) del self.revdeps[tsk] + + if hasattr(tsk, 'semaphore'): + sem = tsk.semaphore + try: + sem.release(tsk) + except KeyError: + # TODO + pass + else: + while sem.waiting and not sem.is_locked(): + # take a frozen task, make it ready to run + x = sem.waiting.pop() + self._add_task(x) + def get_out(self): - tsk=self.out.get() + """ + Waits for a Task that task consumers add to :py:attr:`waflib.Runner.Parallel.out` after execution. + Adds more Tasks if necessary through :py:attr:`waflib.Runner.Parallel.add_more_tasks`. + + :rtype: :py:attr:`waflib.Task.Task` + """ + tsk = self.out.get() if not self.stop: self.add_more_tasks(tsk) self.mark_finished(tsk) - self.count-=1 - self.dirty=True + + self.count -= 1 + self.dirty = True return tsk - def add_task(self,tsk): + + def add_task(self, tsk): + """ + Enqueue a Task to :py:attr:`waflib.Runner.Parallel.ready` so that consumers can run them. 
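+
+        Tasks that carry a ``semaphore`` attribute are throttled by ``_add_task`` below;
+        a sketch, assuming :py:class:`waflib.Task.TaskSemaphore` (waf 2.0.x)::
+
+            from waflib import Task
+
+            sem = Task.TaskSemaphore(2)
+            for tsk in tasks: # 'tasks' is a hypothetical list of Task objects
+                tsk.semaphore = sem # at most two of these run concurrently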
+ + :param tsk: task instance + :type tsk: :py:attr:`waflib.Task.Task` + """ + # TODO change in waf 2.1 self.ready.put(tsk) - def process_task(self,tsk): + + def _add_task(self, tsk): + if hasattr(tsk, 'semaphore'): + sem = tsk.semaphore + try: + sem.acquire(tsk) + except IndexError: + sem.waiting.add(tsk) + return + + self.count += 1 + self.processed += 1 + if self.numjobs == 1: + tsk.log_display(tsk.generator.bld) + try: + self.process_task(tsk) + finally: + self.out.put(tsk) + else: + self.add_task(tsk) + + def process_task(self, tsk): + """ + Processes a task and attempts to stop the build in case of errors + """ tsk.process() - if tsk.hasrun!=Task.SUCCESS: + if tsk.hasrun != Task.SUCCESS: self.error_handler(tsk) - def skip(self,tsk): - tsk.hasrun=Task.SKIPPED + + def skip(self, tsk): + """ + Mark a task as skipped/up-to-date + """ + tsk.hasrun = Task.SKIPPED self.mark_finished(tsk) - def cancel(self,tsk): - tsk.hasrun=Task.CANCELED + + def cancel(self, tsk): + """ + Mark a task as failed because of unsatisfiable dependencies + """ + tsk.hasrun = Task.CANCELED self.mark_finished(tsk) - def error_handler(self,tsk): + + def error_handler(self, tsk): + """ + Called when a task cannot be executed. The flag :py:attr:`waflib.Runner.Parallel.stop` is set, + unless the build is executed with:: + + $ waf build -k + + :param tsk: task instance + :type tsk: :py:attr:`waflib.Task.Task` + """ if not self.bld.keep: - self.stop=True + self.stop = True self.error.append(tsk) - def task_status(self,tsk): + + def task_status(self, tsk): + """ + Obtains the task status to decide whether to run it immediately or not. + + :return: the exit status, for example :py:attr:`waflib.Task.ASK_LATER` + :rtype: integer + """ try: return tsk.runnable_status() except Exception: - self.processed+=1 - tsk.err_msg=traceback.format_exc() + self.processed += 1 + tsk.err_msg = traceback.format_exc() if not self.stop and self.bld.keep: self.skip(tsk) - if self.bld.keep==1: - if Logs.verbose>1 or not self.error: + if self.bld.keep == 1: + # if -k stop on the first exception, if -kk try to go as far as possible + if Logs.verbose > 1 or not self.error: self.error.append(tsk) - self.stop=True + self.stop = True else: - if Logs.verbose>1: + if Logs.verbose > 1: self.error.append(tsk) return Task.EXCEPTION - tsk.hasrun=Task.EXCEPTION + + tsk.hasrun = Task.EXCEPTION self.error_handler(tsk) + return Task.EXCEPTION + def start(self): - self.total=self.bld.total() + """ + Obtains Task instances from the BuildContext instance and adds the ones that need to be executed to + :py:class:`waflib.Runner.Parallel.ready` so that the :py:class:`waflib.Runner.Spawner` consumer thread + has them executed. Obtains the executed Tasks back from :py:class:`waflib.Runner.Parallel.out` + and marks the build as failed by setting the ``stop`` flag. + If only one job is used, then executes the tasks one by one, without consumers. 
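+
+        A rough sketch of how a build context drives this class (illustrative;
+        ``bld`` is assumed to be a :py:class:`waflib.Build.BuildContext`)::
+
+            p = Parallel(bld, j=4)
+            p.biter = bld.get_build_iterator()
+            p.start() # returns once all tasks are processed or an error occurs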
+ """ + self.total = self.bld.total() + while not self.stop: + self.refill_task_list() - tsk=self.get_next_task() + + # consider the next task + tsk = self.get_next_task() if not tsk: if self.count: + # tasks may add new ones after they are run continue else: + # no tasks to run, no tasks running, time to exit break + if tsk.hasrun: - self.processed+=1 + # if the task is marked as "run", just skip it + self.processed += 1 continue - if self.stop: + + if self.stop: # stop immediately after a failure is detected break - st=self.task_status(tsk) - if st==Task.RUN_ME: - self.count+=1 - self.processed+=1 - if self.numjobs==1: - tsk.log_display(tsk.generator.bld) - try: - self.process_task(tsk) - finally: - self.out.put(tsk) - else: - self.add_task(tsk) - elif st==Task.ASK_LATER: + + st = self.task_status(tsk) + if st == Task.RUN_ME: + self._add_task(tsk) + elif st == Task.ASK_LATER: self.postpone(tsk) - elif st==Task.SKIP_ME: - self.processed+=1 + elif st == Task.SKIP_ME: + self.processed += 1 self.skip(tsk) self.add_more_tasks(tsk) - elif st==Task.CANCEL_ME: - if Logs.verbose>1: + elif st == Task.CANCEL_ME: + # A dependency problem has occurred, and the + # build is most likely run with `waf -k` + if Logs.verbose > 1: self.error.append(tsk) - self.processed+=1 + self.processed += 1 self.cancel(tsk) + + # self.count represents the tasks that have been made available to the consumer threads + # collect all the tasks after an error else the message may be incomplete while self.error and self.count: self.get_out() + self.ready.put(None) if not self.stop: assert not self.count assert not self.postponed assert not self.incomplete - def prio_and_split(self,tasks): + + def prio_and_split(self, tasks): + """ + Label input tasks with priority values, and return a pair containing + the tasks that are ready to run and the tasks that are necessarily + waiting for other tasks to complete. + + The priority system is really meant as an optional layer for optimization: + dependency cycles are found quickly, and builds should be more efficient. + A high priority number means that a task is processed first. 
+ + This method can be overridden to disable the priority system:: + + def prio_and_split(self, tasks): + return tasks, [] + + :return: A pair of task lists + :rtype: tuple + """ + # to disable: + #return tasks, [] for x in tasks: - x.visited=0 - reverse=self.revdeps - groups_done=set() + x.visited = 0 + + reverse = self.revdeps + + groups_done = set() for x in tasks: for k in x.run_after: - if isinstance(k,Task.TaskGroup): + if isinstance(k, Task.TaskGroup): if k not in groups_done: groups_done.add(k) for j in k.prev: reverse[j].add(k) else: reverse[k].add(x) + + # the priority number is not the tree depth def visit(n): - if isinstance(n,Task.TaskGroup): - return sum(visit(k)for k in n.next) - if n.visited==0: - n.visited=1 + if isinstance(n, Task.TaskGroup): + return sum(visit(k) for k in n.next) + + if n.visited == 0: + n.visited = 1 + if n in reverse: - rev=reverse[n] - n.prio_order=n.tree_weight+len(rev)+sum(visit(k)for k in rev) + rev = reverse[n] + n.prio_order = n.tree_weight + len(rev) + sum(visit(k) for k in rev) else: - n.prio_order=n.tree_weight - n.visited=2 - elif n.visited==1: + n.prio_order = n.tree_weight + + n.visited = 2 + elif n.visited == 1: raise Errors.WafError('Dependency cycle found!') return n.prio_order + for x in tasks: - if x.visited!=0: + if x.visited != 0: + # must visit all to detect cycles continue try: visit(x) except Errors.WafError: - self.debug_cycles(tasks,reverse) - ready=[] - waiting=[] + self.debug_cycles(tasks, reverse) + + ready = [] + waiting = [] for x in tasks: for k in x.run_after: if not k.hasrun: @@ -324,27 +592,31 @@ break else: ready.append(x) - return(ready,waiting) - def debug_cycles(self,tasks,reverse): - tmp={} + return (ready, waiting) + + def debug_cycles(self, tasks, reverse): + tmp = {} for x in tasks: - tmp[x]=0 - def visit(n,acc): - if isinstance(n,Task.TaskGroup): + tmp[x] = 0 + + def visit(n, acc): + if isinstance(n, Task.TaskGroup): for k in n.next: - visit(k,acc) + visit(k, acc) return - if tmp[n]==0: - tmp[n]=1 - for k in reverse.get(n,[]): - visit(k,[n]+acc) - tmp[n]=2 - elif tmp[n]==1: - lst=[] + if tmp[n] == 0: + tmp[n] = 1 + for k in reverse.get(n, []): + visit(k, [n] + acc) + tmp[n] = 2 + elif tmp[n] == 1: + lst = [] for tsk in acc: lst.append(repr(tsk)) if tsk is n: + # exclude prior nodes, we want the minimum cycle break - raise Errors.WafError('Task dependency cycle in "run_after" constraints: %s'%''.join(lst)) + raise Errors.WafError('Task dependency cycle in "run_after" constraints: %s' % ''.join(lst)) for x in tasks: - visit(x,[]) + visit(x, []) + diff -Nru lilv-0.24.4~dfsg0/waflib/Scripting.py lilv-0.24.6/waflib/Scripting.py --- lilv-0.24.4~dfsg0/waflib/Scripting.py 2018-06-27 05:22:05.000000000 +0000 +++ lilv-0.24.6/waflib/Scripting.py 2019-10-19 17:59:11.000000000 +0000 @@ -1,117 +1,158 @@ -#! /usr/bin/env python +#!/usr/bin/env python # encoding: utf-8 -# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file +# Thomas Nagy, 2005-2018 (ita) + +"Module called for configuring, compiling and installing targets" from __future__ import with_statement -import os,shlex,shutil,traceback,errno,sys,stat -from waflib import Utils,Configure,Logs,Options,ConfigSet,Context,Errors,Build,Node -build_dir_override=None -no_climb_commands=['configure'] -default_cmd="build" -def waf_entry_point(current_directory,version,wafdir): + +import os, shlex, shutil, traceback, errno, sys, stat +from waflib import Utils, Configure, Logs, Options, ConfigSet, Context, Errors, Build, Node + +build_dir_override = None + +no_climb_commands = ['configure'] + +default_cmd = "build" + +def waf_entry_point(current_directory, version, wafdir): + """ + This is the main entry point, all Waf execution starts here. + + :param current_directory: absolute path representing the current directory + :type current_directory: string + :param version: version number + :type version: string + :param wafdir: absolute path representing the directory of the waf library + :type wafdir: string + """ Logs.init_log() - if Context.WAFVERSION!=version: - Logs.error('Waf script %r and library %r do not match (directory %r)',version,Context.WAFVERSION,wafdir) + + if Context.WAFVERSION != version: + Logs.error('Waf script %r and library %r do not match (directory %r)', version, Context.WAFVERSION, wafdir) sys.exit(1) - Context.waf_dir=wafdir - Context.run_dir=Context.launch_dir=current_directory - start_dir=current_directory - no_climb=os.environ.get('NOCLIMB') - if len(sys.argv)>1: - potential_wscript=os.path.join(current_directory,sys.argv[1]) - if os.path.basename(potential_wscript)==Context.WSCRIPT_FILE and os.path.isfile(potential_wscript): - path=os.path.normpath(os.path.dirname(potential_wscript)) - start_dir=os.path.abspath(path) - no_climb=True + + # Store current directory before any chdir + Context.waf_dir = wafdir + Context.run_dir = Context.launch_dir = current_directory + start_dir = current_directory + no_climb = os.environ.get('NOCLIMB') + + if len(sys.argv) > 1: + # os.path.join handles absolute paths + # if sys.argv[1] is not an absolute path, then it is relative to the current working directory + potential_wscript = os.path.join(current_directory, sys.argv[1]) + if os.path.basename(potential_wscript) == Context.WSCRIPT_FILE and os.path.isfile(potential_wscript): + # need to explicitly normalize the path, as it may contain extra '/.' 
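+            # e.g. 'waf demo/wscript build' resolves here to the absolute 'demo' folder
+            # (an illustrative invocation; any argument naming an existing wscript file behaves the same)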
+ path = os.path.normpath(os.path.dirname(potential_wscript)) + start_dir = os.path.abspath(path) + no_climb = True sys.argv.pop(1) - ctx=Context.create_context('options') - (options,commands,env)=ctx.parse_cmd_args(allow_unknown=True) + + ctx = Context.create_context('options') + (options, commands, env) = ctx.parse_cmd_args(allow_unknown=True) if options.top: - start_dir=Context.run_dir=Context.top_dir=options.top - no_climb=True + start_dir = Context.run_dir = Context.top_dir = options.top + no_climb = True if options.out: - Context.out_dir=options.out + Context.out_dir = options.out + + # if 'configure' is in the commands, do not search any further if not no_climb: for k in no_climb_commands: for y in commands: if y.startswith(k): - no_climb=True + no_climb = True break - cur=start_dir + + # try to find a lock file (if the project was configured) + # at the same time, store the first wscript file seen + cur = start_dir while cur: try: - lst=os.listdir(cur) + lst = os.listdir(cur) except OSError: - lst=[] - Logs.error('Directory %r is unreadable!',cur) + lst = [] + Logs.error('Directory %r is unreadable!', cur) if Options.lockfile in lst: - env=ConfigSet.ConfigSet() + env = ConfigSet.ConfigSet() try: - env.load(os.path.join(cur,Options.lockfile)) - ino=os.stat(cur)[stat.ST_INO] + env.load(os.path.join(cur, Options.lockfile)) + ino = os.stat(cur)[stat.ST_INO] except EnvironmentError: pass else: - for x in(env.run_dir,env.top_dir,env.out_dir): + # check if the folder was not moved + for x in (env.run_dir, env.top_dir, env.out_dir): if not x: continue if Utils.is_win32: - if cur==x: - load=True + if cur == x: + load = True break else: + # if the filesystem features symlinks, compare the inode numbers try: - ino2=os.stat(x)[stat.ST_INO] + ino2 = os.stat(x)[stat.ST_INO] except OSError: pass else: - if ino==ino2: - load=True + if ino == ino2: + load = True break else: - Logs.warn('invalid lock file in %s',cur) - load=False + Logs.warn('invalid lock file in %s', cur) + load = False + if load: - Context.run_dir=env.run_dir - Context.top_dir=env.top_dir - Context.out_dir=env.out_dir + Context.run_dir = env.run_dir + Context.top_dir = env.top_dir + Context.out_dir = env.out_dir break + if not Context.run_dir: if Context.WSCRIPT_FILE in lst: - Context.run_dir=cur - next=os.path.dirname(cur) - if next==cur: + Context.run_dir = cur + + next = os.path.dirname(cur) + if next == cur: break - cur=next + cur = next + if no_climb: break - if not Context.run_dir: + + wscript = os.path.normpath(os.path.join(Context.run_dir, Context.WSCRIPT_FILE)) + if not os.path.exists(wscript): if options.whelp: Logs.warn('These are the generic options (no wscript/project found)') ctx.parser.print_help() sys.exit(0) - Logs.error('Waf: Run from a folder containing a %r file (or try -h for the generic options)',Context.WSCRIPT_FILE) + Logs.error('Waf: Run from a folder containing a %r file (or try -h for the generic options)', Context.WSCRIPT_FILE) sys.exit(1) + try: os.chdir(Context.run_dir) except OSError: - Logs.error('Waf: The folder %r is unreadable',Context.run_dir) + Logs.error('Waf: The folder %r is unreadable', Context.run_dir) sys.exit(1) + try: - set_main_module(os.path.normpath(os.path.join(Context.run_dir,Context.WSCRIPT_FILE))) + set_main_module(wscript) except Errors.WafError as e: - Logs.pprint('RED',e.verbose_msg) + Logs.pprint('RED', e.verbose_msg) Logs.error(str(e)) sys.exit(1) except Exception as e: - Logs.error('Waf: The wscript in %r is unreadable',Context.run_dir) + Logs.error('Waf: The wscript in %r 
is unreadable', Context.run_dir) traceback.print_exc(file=sys.stdout) sys.exit(2) + if options.profile: - import cProfile,pstats - cProfile.runctx('from waflib import Scripting; Scripting.run_commands()',{},{},'profi.txt') - p=pstats.Stats('profi.txt') - p.sort_stats('time').print_stats(75) + import cProfile, pstats + cProfile.runctx('from waflib import Scripting; Scripting.run_commands()', {}, {}, 'profi.txt') + p = pstats.Stats('profi.txt') + p.sort_stats('time').print_stats(75) # or 'cumulative' else: try: try: @@ -119,14 +160,14 @@ except: if options.pdb: import pdb - type,value,tb=sys.exc_info() + type, value, tb = sys.exc_info() traceback.print_exc() pdb.post_mortem(tb) else: raise except Errors.WafError as e: - if Logs.verbose>1: - Logs.pprint('RED',e.verbose_msg) + if Logs.verbose > 1: + Logs.pprint('RED', e.verbose_msg) Logs.error(e.msg) sys.exit(1) except SystemExit: @@ -135,269 +176,450 @@ traceback.print_exc(file=sys.stdout) sys.exit(2) except KeyboardInterrupt: - Logs.pprint('RED','Interrupted') + Logs.pprint('RED', 'Interrupted') sys.exit(68) + def set_main_module(file_path): - Context.g_module=Context.load_module(file_path) - Context.g_module.root_path=file_path + """ + Read the main wscript file into :py:const:`waflib.Context.Context.g_module` and + bind default functions such as ``init``, ``dist``, ``distclean`` if not defined. + Called by :py:func:`waflib.Scripting.waf_entry_point` during the initialization. + + :param file_path: absolute path representing the top-level wscript file + :type file_path: string + """ + Context.g_module = Context.load_module(file_path) + Context.g_module.root_path = file_path + + # note: to register the module globally, use the following: + # sys.modules['wscript_main'] = g_module + def set_def(obj): - name=obj.__name__ + name = obj.__name__ if not name in Context.g_module.__dict__: - setattr(Context.g_module,name,obj) - for k in(dist,distclean,distcheck): + setattr(Context.g_module, name, obj) + for k in (dist, distclean, distcheck): set_def(k) - if not'init'in Context.g_module.__dict__: - Context.g_module.init=Utils.nada - if not'shutdown'in Context.g_module.__dict__: - Context.g_module.shutdown=Utils.nada - if not'options'in Context.g_module.__dict__: - Context.g_module.options=Utils.nada + # add dummy init and shutdown functions if they're not defined + if not 'init' in Context.g_module.__dict__: + Context.g_module.init = Utils.nada + if not 'shutdown' in Context.g_module.__dict__: + Context.g_module.shutdown = Utils.nada + if not 'options' in Context.g_module.__dict__: + Context.g_module.options = Utils.nada + def parse_options(): - ctx=Context.create_context('options') + """ + Parses the command-line options and initialize the logging system. + Called by :py:func:`waflib.Scripting.waf_entry_point` during the initialization. + """ + ctx = Context.create_context('options') ctx.execute() if not Options.commands: - Options.commands.append(default_cmd) + if isinstance(default_cmd, list): + Options.commands.extend(default_cmd) + else: + Options.commands.append(default_cmd) if Options.options.whelp: ctx.parser.print_help() sys.exit(0) + def run_command(cmd_name): - ctx=Context.create_context(cmd_name) - ctx.log_timer=Utils.Timer() - ctx.options=Options.options - ctx.cmd=cmd_name + """ + Executes a single Waf command. Called by :py:func:`waflib.Scripting.run_commands`. 
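Any function defined at the top level of a wscript becomes a command that run_command() can dispatch through a generic context; a minimal sketch for orientation (the command name 'hello' is illustrative, not part of this patch):

	# hypothetical wscript fragment: 'waf hello' resolves to this function
	def hello(ctx):
		print('hello from %s' % ctx.path.abspath())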
+ + :param cmd_name: command to execute, like ``build`` + :type cmd_name: string + """ + ctx = Context.create_context(cmd_name) + ctx.log_timer = Utils.Timer() + ctx.options = Options.options # provided for convenience + ctx.cmd = cmd_name try: ctx.execute() finally: + # Issue 1374 ctx.finalize() return ctx + def run_commands(): + """ + Execute the Waf commands that were given on the command-line, and the other options + Called by :py:func:`waflib.Scripting.waf_entry_point` during the initialization, and executed + after :py:func:`waflib.Scripting.parse_options`. + """ parse_options() run_command('init') while Options.commands: - cmd_name=Options.commands.pop(0) - ctx=run_command(cmd_name) - Logs.info('%r finished successfully (%s)',cmd_name,ctx.log_timer) + cmd_name = Options.commands.pop(0) + ctx = run_command(cmd_name) + Logs.info('%r finished successfully (%s)', cmd_name, ctx.log_timer) run_command('shutdown') + +########################################################################################### + def distclean_dir(dirname): - for(root,dirs,files)in os.walk(dirname): + """ + Distclean function called in the particular case when:: + + top == out + + :param dirname: absolute path of the folder to clean + :type dirname: string + """ + for (root, dirs, files) in os.walk(dirname): for f in files: - if f.endswith(('.o','.moc','.exe')): - fname=os.path.join(root,f) + if f.endswith(('.o', '.moc', '.exe')): + fname = os.path.join(root, f) try: os.remove(fname) except OSError: - Logs.warn('Could not remove %r',fname) - for x in(Context.DBFILE,'config.log'): + Logs.warn('Could not remove %r', fname) + + for x in (Context.DBFILE, 'config.log'): try: os.remove(x) except OSError: pass + try: - shutil.rmtree('c4che') + shutil.rmtree(Build.CACHE_DIR) except OSError: pass + def distclean(ctx): '''removes build folders and data''' - def remove_and_log(k,fun): + + def remove_and_log(k, fun): try: fun(k) except EnvironmentError as e: - if e.errno!=errno.ENOENT: - Logs.warn('Could not remove %r',k) + if e.errno != errno.ENOENT: + Logs.warn('Could not remove %r', k) + + # remove waf cache folders on the top-level if not Options.commands: for k in os.listdir('.'): - for x in'.waf-2 waf-2 .waf3-2 waf3-2'.split(): + for x in '.waf-2 waf-2 .waf3-2 waf3-2'.split(): if k.startswith(x): - remove_and_log(k,shutil.rmtree) - cur='.' + remove_and_log(k, shutil.rmtree) + + # remove a build folder, if any + cur = '.' 
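The lock file consulted by distclean below is an ordinary ConfigSet; a minimal sketch of reading it outside of waf, assuming waflib is importable and the platform-dependent file named by Options.lockfile exists in the current directory:

	from waflib import ConfigSet, Options

	env = ConfigSet.ConfigSet()
	env.load(Options.lockfile)   # e.g. '.lock-waf_linux_build'
	print(env.top_dir, env.out_dir, env.run_dir)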
if ctx.options.no_lock_in_top: - cur=ctx.options.out + cur = ctx.options.out + try: - lst=os.listdir(cur) + lst = os.listdir(cur) except OSError: - Logs.warn('Could not read %r',cur) + Logs.warn('Could not read %r', cur) return + if Options.lockfile in lst: - f=os.path.join(cur,Options.lockfile) + f = os.path.join(cur, Options.lockfile) try: - env=ConfigSet.ConfigSet(f) + env = ConfigSet.ConfigSet(f) except EnvironmentError: - Logs.warn('Could not read %r',f) + Logs.warn('Could not read %r', f) return + if not env.out_dir or not env.top_dir: - Logs.warn('Invalid lock file %r',f) + Logs.warn('Invalid lock file %r', f) return - if env.out_dir==env.top_dir: + + if env.out_dir == env.top_dir: distclean_dir(env.out_dir) else: - remove_and_log(env.out_dir,shutil.rmtree) - for k in(env.out_dir,env.top_dir,env.run_dir): - p=os.path.join(k,Options.lockfile) - remove_and_log(p,os.remove) + remove_and_log(env.out_dir, shutil.rmtree) + + env_dirs = [env.out_dir] + if not ctx.options.no_lock_in_top: + env_dirs.append(env.top_dir) + if not ctx.options.no_lock_in_run: + env_dirs.append(env.run_dir) + for k in env_dirs: + p = os.path.join(k, Options.lockfile) + remove_and_log(p, os.remove) + class Dist(Context.Context): '''creates an archive containing the project source code''' - cmd='dist' - fun='dist' - algo='tar.bz2' - ext_algo={} + cmd = 'dist' + fun = 'dist' + algo = 'tar.bz2' + ext_algo = {} + def execute(self): + """ + See :py:func:`waflib.Context.Context.execute` + """ self.recurse([os.path.dirname(Context.g_module.root_path)]) self.archive() + def archive(self): + """ + Creates the source archive. + """ import tarfile - arch_name=self.get_arch_name() + + arch_name = self.get_arch_name() + try: self.base_path except AttributeError: - self.base_path=self.path - node=self.base_path.make_node(arch_name) + self.base_path = self.path + + node = self.base_path.make_node(arch_name) try: node.delete() except OSError: pass - files=self.get_files() + + files = self.get_files() + if self.algo.startswith('tar.'): - tar=tarfile.open(node.abspath(),'w:'+self.algo.replace('tar.','')) + tar = tarfile.open(node.abspath(), 'w:' + self.algo.replace('tar.', '')) + for x in files: - self.add_tar_file(x,tar) + self.add_tar_file(x, tar) tar.close() - elif self.algo=='zip': + elif self.algo == 'zip': import zipfile - zip=zipfile.ZipFile(node.abspath(),'w',compression=zipfile.ZIP_DEFLATED) + zip = zipfile.ZipFile(node.abspath(), 'w', compression=zipfile.ZIP_DEFLATED) + for x in files: - archive_name=self.get_base_name()+'/'+x.path_from(self.base_path) - zip.write(x.abspath(),archive_name,zipfile.ZIP_DEFLATED) + archive_name = self.get_base_name() + '/' + x.path_from(self.base_path) + zip.write(x.abspath(), archive_name, zipfile.ZIP_DEFLATED) zip.close() else: self.fatal('Valid algo types are tar.bz2, tar.gz, tar.xz or zip') + try: from hashlib import sha256 except ImportError: - digest='' + digest = '' else: - digest=' (sha256=%r)'%sha256(node.read(flags='rb')).hexdigest() - Logs.info('New archive created: %s%s',self.arch_name,digest) - def get_tar_path(self,node): + digest = ' (sha256=%r)' % sha256(node.read(flags='rb')).hexdigest() + + Logs.info('New archive created: %s%s', self.arch_name, digest) + + def get_tar_path(self, node): + """ + Return the path to use for a node in the tar archive, the purpose of this + is to let subclases resolve symbolic links or to change file names + + :return: absolute path + :rtype: string + """ return node.abspath() - def add_tar_file(self,x,tar): - p=self.get_tar_path(x) - 
tinfo=tar.gettarinfo(name=p,arcname=self.get_tar_prefix()+'/'+x.path_from(self.base_path)) - tinfo.uid=0 - tinfo.gid=0 - tinfo.uname='root' - tinfo.gname='root' + + def add_tar_file(self, x, tar): + """ + Adds a file to the tar archive. Symlinks are not verified. + + :param x: file path + :param tar: tar file object + """ + p = self.get_tar_path(x) + tinfo = tar.gettarinfo(name=p, arcname=self.get_tar_prefix() + '/' + x.path_from(self.base_path)) + tinfo.uid = 0 + tinfo.gid = 0 + tinfo.uname = 'root' + tinfo.gname = 'root' + if os.path.isfile(p): - with open(p,'rb')as f: - tar.addfile(tinfo,fileobj=f) + with open(p, 'rb') as f: + tar.addfile(tinfo, fileobj=f) else: tar.addfile(tinfo) + def get_tar_prefix(self): + """ + Returns the base path for files added into the archive tar file + + :rtype: string + """ try: return self.tar_prefix except AttributeError: return self.get_base_name() + def get_arch_name(self): + """ + Returns the archive file name. + Set the attribute *arch_name* to change the default value:: + + def dist(ctx): + ctx.arch_name = 'ctx.tar.bz2' + + :rtype: string + """ try: self.arch_name except AttributeError: - self.arch_name=self.get_base_name()+'.'+self.ext_algo.get(self.algo,self.algo) + self.arch_name = self.get_base_name() + '.' + self.ext_algo.get(self.algo, self.algo) return self.arch_name + def get_base_name(self): + """ + Returns the default name of the main directory in the archive, which is set to *appname-version*. + Set the attribute *base_name* to change the default value:: + + def dist(ctx): + ctx.base_name = 'files' + + :rtype: string + """ try: self.base_name except AttributeError: - appname=getattr(Context.g_module,Context.APPNAME,'noname') - version=getattr(Context.g_module,Context.VERSION,'1.0') - self.base_name=appname+'-'+version + appname = getattr(Context.g_module, Context.APPNAME, 'noname') + version = getattr(Context.g_module, Context.VERSION, '1.0') + self.base_name = appname + '-' + version return self.base_name + def get_excl(self): + """ + Returns the patterns to exclude for finding the files in the top-level directory. + Set the attribute *excl* to change the default value:: + + def dist(ctx): + ctx.excl = 'build **/*.o **/*.class' + + :rtype: string + """ try: return self.excl except AttributeError: - self.excl=Node.exclude_regs+' **/waf-2.* **/.waf-2.* **/waf3-2.* **/.waf3-2.* **/*~ **/*.rej **/*.orig **/*.pyc **/*.pyo **/*.bak **/*.swp **/.lock-w*' + self.excl = Node.exclude_regs + ' **/waf-2.* **/.waf-2.* **/waf3-2.* **/.waf3-2.* **/*~ **/*.rej **/*.orig **/*.pyc **/*.pyo **/*.bak **/*.swp **/.lock-w*' if Context.out_dir: - nd=self.root.find_node(Context.out_dir) + nd = self.root.find_node(Context.out_dir) if nd: - self.excl+=' '+nd.path_from(self.base_path) + self.excl += ' ' + nd.path_from(self.base_path) return self.excl + def get_files(self): + """ + Files to package are searched automatically by :py:func:`waflib.Node.Node.ant_glob`. 
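The attributes documented above (algo, base_name, excl) are the supported customization points for the archive; a hedged wscript sketch, with all values illustrative:

	def dist(ctx):
		ctx.algo = 'zip'                     # default is tar.bz2
		ctx.base_name = 'myproject-1.0'      # top-level directory inside the archive
		ctx.excl = '**/.git **/*.pyc build'  # extra exclusion patterns for ant_glob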
+ Set *files* to prevent this behaviour:: + + def dist(ctx): + ctx.files = ctx.path.find_node('wscript') + + Files are also searched from the directory 'base_path', to change it, set:: + + def dist(ctx): + ctx.base_path = path + + :rtype: list of :py:class:`waflib.Node.Node` + """ try: - files=self.files + files = self.files except AttributeError: - files=self.base_path.ant_glob('**/*',excl=self.get_excl()) + files = self.base_path.ant_glob('**/*', excl=self.get_excl()) return files + def dist(ctx): '''makes a tarball for redistributing the sources''' pass + class DistCheck(Dist): - fun='distcheck' - cmd='distcheck' + """creates an archive with dist, then tries to build it""" + fun = 'distcheck' + cmd = 'distcheck' + def execute(self): + """ + See :py:func:`waflib.Context.Context.execute` + """ self.recurse([os.path.dirname(Context.g_module.root_path)]) self.archive() self.check() - def make_distcheck_cmd(self,tmpdir): - cfg=[] + + def make_distcheck_cmd(self, tmpdir): + cfg = [] if Options.options.distcheck_args: - cfg=shlex.split(Options.options.distcheck_args) + cfg = shlex.split(Options.options.distcheck_args) else: - cfg=[x for x in sys.argv if x.startswith('-')] - cmd=[sys.executable,sys.argv[0],'configure','build','install','uninstall','--destdir='+tmpdir]+cfg + cfg = [x for x in sys.argv if x.startswith('-')] + cmd = [sys.executable, sys.argv[0], 'configure', 'build', 'install', 'uninstall', '--destdir=' + tmpdir] + cfg return cmd + def check(self): - import tempfile,tarfile - with tarfile.open(self.get_arch_name())as t: + """ + Creates the archive, uncompresses it and tries to build the project + """ + import tempfile, tarfile + + with tarfile.open(self.get_arch_name()) as t: for x in t: t.extract(x) - instdir=tempfile.mkdtemp('.inst',self.get_base_name()) - cmd=self.make_distcheck_cmd(instdir) - ret=Utils.subprocess.Popen(cmd,cwd=self.get_base_name()).wait() + + instdir = tempfile.mkdtemp('.inst', self.get_base_name()) + cmd = self.make_distcheck_cmd(instdir) + ret = Utils.subprocess.Popen(cmd, cwd=self.get_base_name()).wait() if ret: - raise Errors.WafError('distcheck failed with code %r'%ret) + raise Errors.WafError('distcheck failed with code %r' % ret) + if os.path.exists(instdir): - raise Errors.WafError('distcheck succeeded, but files were left in %s'%instdir) + raise Errors.WafError('distcheck succeeded, but files were left in %s' % instdir) + shutil.rmtree(self.get_base_name()) + + def distcheck(ctx): '''checks if the project compiles (tarball from 'dist')''' pass + def autoconfigure(execute_method): + """ + Decorator that enables context commands to run *configure* as needed. 
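The decorator below keys off Configure.autoconfig; projects usually opt in from the top level of the wscript. A minimal sketch:

	# hypothetical wscript fragment
	from waflib import Configure
	Configure.autoconfig = True   # or 'clobber' to also restore the saved options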
+ """ def execute(self): + """ + Wraps :py:func:`waflib.Context.Context.execute` on the context class + """ if not Configure.autoconfig: return execute_method(self) - env=ConfigSet.ConfigSet() - do_config=False + + env = ConfigSet.ConfigSet() + do_config = False try: - env.load(os.path.join(Context.top_dir,Options.lockfile)) + env.load(os.path.join(Context.top_dir, Options.lockfile)) except EnvironmentError: Logs.warn('Configuring the project') - do_config=True + do_config = True else: - if env.run_dir!=Context.run_dir: - do_config=True + if env.run_dir != Context.run_dir: + do_config = True else: - h=0 + h = 0 for f in env.files: try: - h=Utils.h_list((h,Utils.readf(f,'rb'))) + h = Utils.h_list((h, Utils.readf(f, 'rb'))) except EnvironmentError: - do_config=True + do_config = True break else: - do_config=h!=env.hash + do_config = h != env.hash + if do_config: - cmd=env.config_cmd or'configure' - if Configure.autoconfig=='clobber': - tmp=Options.options.__dict__ + cmd = env.config_cmd or 'configure' + if Configure.autoconfig == 'clobber': + tmp = Options.options.__dict__ + launch_dir_tmp = Context.launch_dir if env.options: - Options.options.__dict__=env.options + Options.options.__dict__ = env.options + Context.launch_dir = env.launch_dir try: run_command(cmd) finally: - Options.options.__dict__=tmp + Options.options.__dict__ = tmp + Context.launch_dir = launch_dir_tmp else: run_command(cmd) run_command(self.cmd) else: return execute_method(self) return execute -Build.BuildContext.execute=autoconfigure(Build.BuildContext.execute) +Build.BuildContext.execute = autoconfigure(Build.BuildContext.execute) + diff -Nru lilv-0.24.4~dfsg0/waflib/TaskGen.py lilv-0.24.6/waflib/TaskGen.py --- lilv-0.24.4~dfsg0/waflib/TaskGen.py 2018-06-27 05:22:05.000000000 +0000 +++ lilv-0.24.6/waflib/TaskGen.py 2019-10-19 17:59:11.000000000 +0000 @@ -1,101 +1,214 @@ -#! /usr/bin/env python +#!/usr/bin/env python # encoding: utf-8 -# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file +# Thomas Nagy, 2005-2018 (ita) + +""" +Task generators + +The class :py:class:`waflib.TaskGen.task_gen` encapsulates the creation of task objects (low-level code) +The instances can have various parameters, but the creation of task nodes (Task.py) +is deferred. To achieve this, various methods are called from the method "apply" +""" + +import copy, re, os, functools +from waflib import Task, Utils, Logs, Errors, ConfigSet, Node + +feats = Utils.defaultdict(set) +"""remember the methods declaring features""" + +HEADER_EXTS = ['.h', '.hpp', '.hxx', '.hh'] -import copy,re,os,functools -from waflib import Task,Utils,Logs,Errors,ConfigSet,Node -feats=Utils.defaultdict(set) -HEADER_EXTS=['.h','.hpp','.hxx','.hh'] class task_gen(object): - mappings=Utils.ordered_iter_dict() - prec=Utils.defaultdict(set) - def __init__(self,*k,**kw): - self.source=[] - self.target='' - self.meths=[] - self.features=[] - self.tasks=[] - if not'bld'in kw: - self.env=ConfigSet.ConfigSet() - self.idx=0 - self.path=None + """ + Instances of this class create :py:class:`waflib.Task.Task` when + calling the method :py:meth:`waflib.TaskGen.task_gen.post` from the main thread. + A few notes: + + * The methods to call (*self.meths*) can be specified dynamically (removing, adding, ..) 
+	* The 'features' are used to add methods to self.meths and then execute them
+	* The attribute 'path' is a node representing the location of the task generator
+	* The tasks created are added to the attribute *tasks*
+	* The attribute 'idx' is a counter of task generators in the same path
+	"""
+
+	mappings = Utils.ordered_iter_dict()
+	"""Mappings are global file extension mappings that are retrieved in the order of definition"""
+
+	prec = Utils.defaultdict(set)
+	"""Dict that holds the precedence execution rules for task generator methods"""
+
+	def __init__(self, *k, **kw):
+		"""
+		Task generator objects predefine various attributes (source, target) for possible
+		processing by process_rule (make-like rules) or process_source (extensions, misc methods)
+
+		Tasks are stored on the attribute 'tasks'. They are created by calling methods
+		listed in ``self.meths`` or referenced in the attribute ``features``
+		A topological sort is performed to execute the methods in correct order.
+
+		The extra key/value elements passed in ``kw`` are set as attributes
+		"""
+		self.source = []
+		self.target = ''
+
+		self.meths = []
+		"""
+		List of method names to execute (internal)
+		"""
+
+		self.features = []
+		"""
+		List of feature names for bringing new methods in
+		"""
+
+		self.tasks = []
+		"""
+		Tasks created are added to this list
+		"""
+
+		if not 'bld' in kw:
+			# task generators without a build context :-/
+			self.env = ConfigSet.ConfigSet()
+			self.idx = 0
+			self.path = None
 		else:
-			self.bld=kw['bld']
-			self.env=self.bld.env.derive()
-			self.path=self.bld.path
+			self.bld = kw['bld']
+			self.env = self.bld.env.derive()
+			self.path = kw.get('path', self.bld.path) # by default, emulate chdir when reading scripts
+
+			# Provide a unique index per folder
+			# This is part of a measure to prevent output file name collisions
 			path = self.path.abspath()
 			try:
-				self.idx=self.bld.idx[path]=self.bld.idx.get(path,0)+1
+				self.idx = self.bld.idx[path] = self.bld.idx.get(path, 0) + 1
 			except AttributeError:
-				self.bld.idx={}
-				self.idx=self.bld.idx[path]=1
+				self.bld.idx = {}
+				self.idx = self.bld.idx[path] = 1
+
+			# Record the global task generator count
 			try:
-				self.tg_idx_count=self.bld.tg_idx_count=self.bld.tg_idx_count+1
+				self.tg_idx_count = self.bld.tg_idx_count = self.bld.tg_idx_count + 1
 			except AttributeError:
-				self.tg_idx_count=self.bld.tg_idx_count=1
+				self.tg_idx_count = self.bld.tg_idx_count = 1
+
 		for key, val in kw.items():
 			setattr(self, key, val)
+
 	def __str__(self):
-		return"<task_gen %r declared in %s>"%(self.name,self.path.abspath())
+		"""Debugging helper"""
+		return "<task_gen %r declared in %s>" % (self.name, self.path.abspath())
+
 	def __repr__(self):
-		lst=[]
+		"""Debugging helper"""
+		lst = []
 		for x in self.__dict__:
-			if x not in('env','bld','compiled_tasks','tasks'):
-				lst.append("%s=%s"%(x,repr(getattr(self,x))))
-		return"bld(%s) in %s"%(", ".join(lst),self.path.abspath())
+			if x not in ('env', 'bld', 'compiled_tasks', 'tasks'):
+				lst.append("%s=%s" % (x, repr(getattr(self, x))))
+		return "bld(%s) in %s" % (", ".join(lst), self.path.abspath())
+
 	def get_cwd(self):
+		"""
+		Current working directory for the task generator, defaults to the build directory.
+		This is still used in a few places but it should disappear at some point as the classes
+		define their own working directory.
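Task generators are normally created through the build context rather than instantiated directly; per __init__ above, extra keywords simply become attributes. A hedged wscript sketch (file names illustrative):

	def build(bld):
		# attributes are stored now; tasks are only created when post() runs
		tg = bld(features='c cprogram', source='main.c', target='app')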
+ + :rtype: :py:class:`waflib.Node.Node` + """ return self.bld.bldnode + def get_name(self): + """ + If the attribute ``name`` is not set on the instance, + the name is computed from the target name:: + + def build(bld): + x = bld(name='foo') + x.get_name() # foo + y = bld(target='bar') + y.get_name() # bar + + :rtype: string + :return: name of this task generator + """ try: return self._name except AttributeError: - if isinstance(self.target,list): - lst=[str(x)for x in self.target] - name=self._name=','.join(lst) + if isinstance(self.target, list): + lst = [str(x) for x in self.target] + name = self._name = ','.join(lst) else: - name=self._name=str(self.target) + name = self._name = str(self.target) return name - def set_name(self,name): - self._name=name - name=property(get_name,set_name) - def to_list(self,val): - if isinstance(val,str): + def set_name(self, name): + self._name = name + + name = property(get_name, set_name) + + def to_list(self, val): + """ + Ensures that a parameter is a list, see :py:func:`waflib.Utils.to_list` + + :type val: string or list of string + :param val: input to return as a list + :rtype: list + """ + if isinstance(val, str): return val.split() else: return val + def post(self): - if getattr(self,'posted',None): + """ + Creates tasks for this task generators. The following operations are performed: + + #. The body of this method is called only once and sets the attribute ``posted`` + #. The attribute ``features`` is used to add more methods in ``self.meths`` + #. The methods are sorted by the precedence table ``self.prec`` or `:waflib:attr:waflib.TaskGen.task_gen.prec` + #. The methods are then executed in order + #. The tasks created are added to :py:attr:`waflib.TaskGen.task_gen.tasks` + """ + if getattr(self, 'posted', None): return False - self.posted=True - keys=set(self.meths) + self.posted = True + + keys = set(self.meths) keys.update(feats['*']) - self.features=Utils.to_list(self.features) + + # add the methods listed in the features + self.features = Utils.to_list(self.features) for x in self.features: - st=feats[x] + st = feats[x] if st: keys.update(st) elif not x in Task.classes: - Logs.warn('feature %r does not exist - bind at least one method to it?',x) - prec={} - prec_tbl=self.prec + Logs.warn('feature %r does not exist - bind at least one method to it?', x) + + # copy the precedence table + prec = {} + prec_tbl = self.prec for x in prec_tbl: if x in keys: - prec[x]=prec_tbl[x] - tmp=[] + prec[x] = prec_tbl[x] + + # elements disconnected + tmp = [] for a in keys: for x in prec.values(): if a in x: break else: tmp.append(a) + tmp.sort(reverse=True) - out=[] + + # topological sort + out = [] while tmp: - e=tmp.pop() + e = tmp.pop() if e in keys: out.append(e) try: - nlst=prec[e] + nlst = prec[e] except KeyError: pass else: @@ -107,35 +220,62 @@ else: tmp.append(x) tmp.sort(reverse=True) + if prec: - buf=['Cycle detected in the method execution:'] - for k,v in prec.items(): - buf.append('- %s after %s'%(k,[x for x in v if x in prec])) + buf = ['Cycle detected in the method execution:'] + for k, v in prec.items(): + buf.append('- %s after %s' % (k, [x for x in v if x in prec])) raise Errors.WafError('\n'.join(buf)) - self.meths=out - Logs.debug('task_gen: posting %s %d',self,id(self)) + self.meths = out + + # then we run the methods in order + Logs.debug('task_gen: posting %s %d', self, id(self)) for x in out: try: - v=getattr(self,x) + v = getattr(self, x) except AttributeError: - raise Errors.WafError('%r is not a valid task generator 
method'%x) - Logs.debug('task_gen: -> %s (%d)',x,id(self)) + raise Errors.WafError('%r is not a valid task generator method' % x) + Logs.debug('task_gen: -> %s (%d)', x, id(self)) v() - Logs.debug('task_gen: posted %s',self.name) + + Logs.debug('task_gen: posted %s', self.name) return True - def get_hook(self,node): - name=node.name + + def get_hook(self, node): + """ + Returns the ``@extension`` method to call for a Node of a particular extension. + + :param node: Input file to process + :type node: :py:class:`waflib.Tools.Node.Node` + :return: A method able to process the input node by looking at the extension + :rtype: function + """ + name = node.name for k in self.mappings: try: if name.endswith(k): return self.mappings[k] except TypeError: + # regexps objects if k.match(name): return self.mappings[k] - keys=list(self.mappings.keys()) - raise Errors.WafError("File %r has no mapping in %r (load a waf tool?)"%(node,keys)) - def create_task(self,name,src=None,tgt=None,**kw): - task=Task.classes[name](env=self.env.derive(),generator=self) + keys = list(self.mappings.keys()) + raise Errors.WafError("File %r has no mapping in %r (load a waf tool?)" % (node, keys)) + + def create_task(self, name, src=None, tgt=None, **kw): + """ + Creates task instances. + + :param name: task class name + :type name: string + :param src: input nodes + :type src: list of :py:class:`waflib.Tools.Node.Node` + :param tgt: output nodes + :type tgt: list of :py:class:`waflib.Tools.Node.Node` + :return: A task object + :rtype: :py:class:`waflib.Task.Task` + """ + task = Task.classes[name](env=self.env.derive(), generator=self) if src: task.set_inputs(src) if tgt: @@ -143,329 +283,635 @@ task.__dict__.update(kw) self.tasks.append(task) return task - def clone(self,env): - newobj=self.bld() + + def clone(self, env): + """ + Makes a copy of a task generator. Once the copy is made, it is necessary to ensure that the + it does not create the same output files as the original, or the same files may + be compiled several times. + + :param env: A configuration set + :type env: :py:class:`waflib.ConfigSet.ConfigSet` + :return: A copy + :rtype: :py:class:`waflib.TaskGen.task_gen` + """ + newobj = self.bld() for x in self.__dict__: - if x in('env','bld'): + if x in ('env', 'bld'): continue - elif x in('path','features'): - setattr(newobj,x,getattr(self,x)) + elif x in ('path', 'features'): + setattr(newobj, x, getattr(self, x)) else: - setattr(newobj,x,copy.copy(getattr(self,x))) - newobj.posted=False - if isinstance(env,str): - newobj.env=self.bld.all_envs[env].derive() + setattr(newobj, x, copy.copy(getattr(self, x))) + + newobj.posted = False + if isinstance(env, str): + newobj.env = self.bld.all_envs[env].derive() else: - newobj.env=env.derive() + newobj.env = env.derive() + return newobj -def declare_chain(name='',rule=None,reentrant=None,color='BLUE',ext_in=[],ext_out=[],before=[],after=[],decider=None,scan=None,install_path=None,shell=False): - ext_in=Utils.to_list(ext_in) - ext_out=Utils.to_list(ext_out) + +def declare_chain(name='', rule=None, reentrant=None, color='BLUE', + ext_in=[], ext_out=[], before=[], after=[], decider=None, scan=None, install_path=None, shell=False): + """ + Creates a new mapping and a task class for processing files by extension. + See Tools/flex.py for an example. 
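A hedged sketch of a one-step chain built with declare_chain (the rule and extensions are illustrative, not taken from this patch):

	from waflib.TaskGen import declare_chain

	declare_chain(
		name='copy_txt',
		rule='cp ${SRC} ${TGT}',  # scriptlet compiled into a task method
		ext_in='.txt',
		ext_out='.out',
		reentrant=0,              # do not re-inject outputs as new sources
	)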
+ + :param name: name for the task class + :type name: string + :param rule: function to execute or string to be compiled in a function + :type rule: string or function + :param reentrant: re-inject the output file in the process (done automatically, set to 0 to disable) + :type reentrant: int + :param color: color for the task output + :type color: string + :param ext_in: execute the task only after the files of such extensions are created + :type ext_in: list of string + :param ext_out: execute the task only before files of such extensions are processed + :type ext_out: list of string + :param before: execute instances of this task before classes of the given names + :type before: list of string + :param after: execute instances of this task after classes of the given names + :type after: list of string + :param decider: if present, function that returns a list of output file extensions (overrides ext_out for output files, but not for the build order) + :type decider: function + :param scan: scanner function for the task + :type scan: function + :param install_path: installation path for the output nodes + :type install_path: string + """ + ext_in = Utils.to_list(ext_in) + ext_out = Utils.to_list(ext_out) if not name: - name=rule - cls=Task.task_factory(name,rule,color=color,ext_in=ext_in,ext_out=ext_out,before=before,after=after,scan=scan,shell=shell) - def x_file(self,node): + name = rule + cls = Task.task_factory(name, rule, color=color, ext_in=ext_in, ext_out=ext_out, before=before, after=after, scan=scan, shell=shell) + + def x_file(self, node): if ext_in: - _ext_in=ext_in[0] - tsk=self.create_task(name,node) - cnt=0 - ext=decider(self,node)if decider else cls.ext_out + _ext_in = ext_in[0] + + tsk = self.create_task(name, node) + cnt = 0 + + ext = decider(self, node) if decider else cls.ext_out for x in ext: - k=node.change_ext(x,ext_in=_ext_in) + k = node.change_ext(x, ext_in=_ext_in) tsk.outputs.append(k) - if reentrant!=None: - if cntfoo.h with ambiguous dependencies for xt in HEADER_EXTS: if b.name.endswith(xt): - tsk.ext_in=tsk.ext_in+['.h'] + tsk.ext_out = tsk.ext_out + ['.h'] break - inst_to=getattr(self,'install_path',None) + + inst_to = getattr(self, 'install_path', None) if inst_to: - self.install_task=self.add_install_files(install_to=inst_to,install_from=b,chmod=getattr(self,'chmod',Utils.O644)) - self.source=[] + self.install_task = self.add_install_files(install_to=inst_to, + install_from=b, chmod=getattr(self, 'chmod', Utils.O644)) + + self.source = [] + diff -Nru lilv-0.24.4~dfsg0/waflib/Task.py lilv-0.24.6/waflib/Task.py --- lilv-0.24.4~dfsg0/waflib/Task.py 2018-07-04 05:24:14.000000000 +0000 +++ lilv-0.24.6/waflib/Task.py 2019-06-06 20:19:08.000000000 +0000 @@ -1,31 +1,63 @@ -#! /usr/bin/env python +#!/usr/bin/env python # encoding: utf-8 -# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file +# Thomas Nagy, 2005-2018 (ita) -import os,re,sys,tempfile,traceback -from waflib import Utils,Logs,Errors -NOT_RUN=0 -MISSING=1 -CRASHED=2 -EXCEPTION=3 -CANCELED=4 -SKIPPED=8 -SUCCESS=9 -ASK_LATER=-1 -SKIP_ME=-2 -RUN_ME=-3 -CANCEL_ME=-4 -COMPILE_TEMPLATE_SHELL=''' +""" +Tasks represent atomic operations such as processes. 
+""" + +import os, re, sys, tempfile, traceback +from waflib import Utils, Logs, Errors + +# task states +NOT_RUN = 0 +"""The task was not executed yet""" + +MISSING = 1 +"""The task has been executed but the files have not been created""" + +CRASHED = 2 +"""The task execution returned a non-zero exit status""" + +EXCEPTION = 3 +"""An exception occurred in the task execution""" + +CANCELED = 4 +"""A dependency for the task is missing so it was cancelled""" + +SKIPPED = 8 +"""The task did not have to be executed""" + +SUCCESS = 9 +"""The task was successfully executed""" + +ASK_LATER = -1 +"""The task is not ready to be executed""" + +SKIP_ME = -2 +"""The task does not need to be executed""" + +RUN_ME = -3 +"""The task must be executed""" + +CANCEL_ME = -4 +"""The task cannot be executed because of a dependency problem""" + +COMPILE_TEMPLATE_SHELL = ''' def f(tsk): env = tsk.env gen = tsk.generator bld = gen.bld cwdx = tsk.get_cwd() p = env.get_flat + def to_list(xx): + if isinstance(xx, str): return [xx] + return xx tsk.last_cmd = cmd = \'\'\' %s \'\'\' % s return tsk.exec_command(cmd, cwd=cwdx, env=env.env or None) ''' -COMPILE_TEMPLATE_NOSHELL=''' + +COMPILE_TEMPLATE_NOSHELL = ''' def f(tsk): env = tsk.env gen = tsk.generator @@ -45,9 +77,11 @@ tsk.last_cmd = lst return tsk.exec_command(lst, cwd=cwdx, env=env.env or None) ''' -COMPILE_TEMPLATE_SIG_VARS=''' + +COMPILE_TEMPLATE_SIG_VARS = ''' def f(tsk): - super(tsk.__class__, tsk).sig_vars() + sig = tsk.generator.bld.hash_env_vars(tsk.env, tsk.vars) + tsk.m.update(sig) env = tsk.env gen = tsk.generator bld = gen.bld @@ -57,458 +91,876 @@ %s tsk.m.update(repr(buf).encode()) ''' -classes={} + +classes = {} +""" +The metaclass :py:class:`waflib.Task.store_task_type` stores all class tasks +created by user scripts or Waf tools to this dict. It maps class names to class objects. +""" + class store_task_type(type): - def __init__(cls,name,bases,dict): - super(store_task_type,cls).__init__(name,bases,dict) - name=cls.__name__ - if name!='evil'and name!='Task': - if getattr(cls,'run_str',None): - (f,dvars)=compile_fun(cls.run_str,cls.shell) - cls.hcode=Utils.h_cmd(cls.run_str) - cls.orig_run_str=cls.run_str - cls.run_str=None - cls.run=f - cls.vars=list(set(cls.vars+dvars)) + """ + Metaclass: store the task classes into the dict pointed by the + class attribute 'register' which defaults to :py:const:`waflib.Task.classes`, + + The attribute 'run_str' is compiled into a method 'run' bound to the task class. 
+ """ + def __init__(cls, name, bases, dict): + super(store_task_type, cls).__init__(name, bases, dict) + name = cls.__name__ + + if name != 'evil' and name != 'Task': + if getattr(cls, 'run_str', None): + # if a string is provided, convert it to a method + (f, dvars) = compile_fun(cls.run_str, cls.shell) + cls.hcode = Utils.h_cmd(cls.run_str) + cls.orig_run_str = cls.run_str + # change the name of run_str or it is impossible to subclass with a function + cls.run_str = None + cls.run = f + # process variables + cls.vars = list(set(cls.vars + dvars)) cls.vars.sort() if cls.vars: - fun=compile_sig_vars(cls.vars) + fun = compile_sig_vars(cls.vars) if fun: - cls.sig_vars=fun - elif getattr(cls,'run',None)and not'hcode'in cls.__dict__: - cls.hcode=Utils.h_cmd(cls.run) - getattr(cls,'register',classes)[name]=cls -evil=store_task_type('evil',(object,),{}) + cls.sig_vars = fun + elif getattr(cls, 'run', None) and not 'hcode' in cls.__dict__: + # getattr(cls, 'hcode') would look in the upper classes + cls.hcode = Utils.h_cmd(cls.run) + + # be creative + getattr(cls, 'register', classes)[name] = cls + +evil = store_task_type('evil', (object,), {}) +"Base class provided to avoid writing a metaclass, so the code can run in python 2.6 and 3.x unmodified" + class Task(evil): - vars=[] - always_run=False - shell=False - color='GREEN' - ext_in=[] - ext_out=[] - before=[] - after=[] - hcode=Utils.SIG_NIL - keep_last_cmd=False - weight=0 - tree_weight=0 - prio_order=0 - __slots__=('hasrun','generator','env','inputs','outputs','dep_nodes','run_after') - def __init__(self,*k,**kw): - self.hasrun=NOT_RUN + """ + Task objects represents actions to perform such as commands to execute by calling the `run` method. + + Detecting when to execute a task occurs in the method :py:meth:`waflib.Task.Task.runnable_status`. + + Detecting which tasks to execute is performed through a hash value returned by + :py:meth:`waflib.Task.Task.signature`. The task signature is persistent from build to build. + """ + vars = [] + """ConfigSet variables that should trigger a rebuild (class attribute used for :py:meth:`waflib.Task.Task.sig_vars`)""" + + always_run = False + """Specify whether task instances must always be executed or not (class attribute)""" + + shell = False + """Execute the command with the shell (class attribute)""" + + color = 'GREEN' + """Color for the console display, see :py:const:`waflib.Logs.colors_lst`""" + + ext_in = [] + """File extensions that objects of this task class may use""" + + ext_out = [] + """File extensions that objects of this task class may create""" + + before = [] + """The instances of this class are executed before the instances of classes whose names are in this list""" + + after = [] + """The instances of this class are executed after the instances of classes whose names are in this list""" + + hcode = Utils.SIG_NIL + """String representing an additional hash for the class representation""" + + keep_last_cmd = False + """Whether to keep the last command executed on the instance after execution. + This may be useful for certain extensions but it can a lot of memory. + """ + + weight = 0 + """Optional weight to tune the priority for task instances. + The higher, the earlier. The weight only applies to single task objects.""" + + tree_weight = 0 + """Optional weight to tune the priority of task instances and whole subtrees. + The higher, the earlier.""" + + prio_order = 0 + """Priority order set by the scheduler on instances during the build phase. + You most likely do not need to set it. 
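The ordering attributes above feed is_before() further down; a hedged sketch of two cooperating task classes (names illustrative):

	from waflib import Task

	class gen_header(Task.Task):
		ext_out = ['.h']        # produces headers

	class compile_src(Task.Task):
		ext_in = ['.h']         # consumes them, hence runs later
		after = ['gen_header']  # an explicit class-name constraint works too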
+	"""
+
+	__slots__ = ('hasrun', 'generator', 'env', 'inputs', 'outputs', 'dep_nodes', 'run_after')
+
+	def __init__(self, *k, **kw):
+		self.hasrun = NOT_RUN
 		try:
-			self.generator=kw['generator']
+			self.generator = kw['generator']
 		except KeyError:
-			self.generator=self
-		self.env=kw['env']
-		self.inputs=[]
-		self.outputs=[]
-		self.dep_nodes=[]
-		self.run_after=set()
-	def __lt__(self,other):
-		return self.priority()>other.priority()
-	def __le__(self,other):
-		return self.priority()>=other.priority()
-	def __gt__(self,other):
-		return self.priority()<other.priority()
-	def __ge__(self,other):
-		return self.priority()<=other.priority()
+			self.generator = self
+
+		self.env = kw['env']
+		self.inputs = []
+		self.outputs = []
+		self.dep_nodes = []
+		self.run_after = set()
+
+	def __lt__(self, other):
+		return self.priority() > other.priority()
+	def __le__(self, other):
+		return self.priority() >= other.priority()
+	def __gt__(self, other):
+		return self.priority() < other.priority()
+	def __ge__(self, other):
+		return self.priority() <= other.priority()
+
 	def get_cwd(self):
-		bld=self.generator.bld
-		ret=getattr(self,'cwd',None)or getattr(bld,'cwd',bld.bldnode)
-		if isinstance(ret,str):
+		"""
+		:return: current working directory
+		:rtype: :py:class:`waflib.Node.Node`
+		"""
+		bld = self.generator.bld
+		ret = getattr(self, 'cwd', None) or getattr(bld, 'cwd', bld.bldnode)
+		if isinstance(ret, str):
 			if os.path.isabs(ret):
-				ret=bld.root.make_node(ret)
+				ret = bld.root.make_node(ret)
 			else:
-				ret=self.generator.path.make_node(ret)
+				ret = self.generator.path.make_node(ret)
 		return ret
-	def quote_flag(self,x):
-		old=x
-		if'\\'in x:
-			x=x.replace('\\','\\\\')
-		if'"'in x:
-			x=x.replace('"','\\"')
-		if old!=x or' 'in x or'\t'in x or"'"in x:
-			x='"%s"'%x
+
+	def quote_flag(self, x):
+		"""
+		Surround a process argument by quotes so that a list of arguments can be written to a file
+
+		:param x: flag
+		:type x: string
+		:return: quoted flag
+		:rtype: string
+		"""
+		old = x
+		if '\\' in x:
+			x = x.replace('\\', '\\\\')
+		if '"' in x:
+			x = x.replace('"', '\\"')
+		if old != x or ' ' in x or '\t' in x or "'" in x:
+			x = '"%s"' % x
 		return x
+
 	def priority(self):
-		return(self.weight+self.prio_order,-getattr(self.generator,'tg_idx_count',0))
-	def split_argfile(self,cmd):
-		return([cmd[0]],[self.quote_flag(x)for x in cmd[1:]])
-	def exec_command(self,cmd,**kw):
-		if not'cwd'in kw:
-			kw['cwd']=self.get_cwd()
-		if hasattr(self,'timeout'):
-			kw['timeout']=self.timeout
+		"""
+		Priority of execution; the higher, the earlier
+
+		:return: the priority value
+		:rtype: a tuple of numeric values
+		"""
+		return (self.weight + self.prio_order, - getattr(self.generator, 'tg_idx_count', 0))
+
+	def split_argfile(self, cmd):
+		"""
+		Splits a list of process commands into the executable part and its list of arguments
+
+		:return: a tuple containing the executable first and then the rest of arguments
+		:rtype: tuple
+		"""
+		return ([cmd[0]], [self.quote_flag(x) for x in cmd[1:]])
+
+	def exec_command(self, cmd, **kw):
+		"""
+		Wrapper for :py:meth:`waflib.Context.Context.exec_command`.
+		This version set the current working directory (``build.variant_dir``),
+		applies PATH settings (if self.env.PATH is provided), and can run long
+		commands through a temporary ``@argfile``.
+
+		:param cmd: process command to execute
+		:type cmd: list of string (best) or string (process will use a shell)
+		:return: the return code
+		:rtype: int
+
+		Optional parameters:
+
+		#. cwd: current working directory (Node or string)
+		#. stdout: set to None to prevent waf from capturing the process standard output
+		#. stderr: set to None to prevent waf from capturing the process standard error
+		#. 
timeout: timeout value (Python 3) + """ + if not 'cwd' in kw: + kw['cwd'] = self.get_cwd() + + if hasattr(self, 'timeout'): + kw['timeout'] = self.timeout + if self.env.PATH: - env=kw['env']=dict(kw.get('env')or self.env.env or os.environ) - env['PATH']=self.env.PATH if isinstance(self.env.PATH,str)else os.pathsep.join(self.env.PATH) - if hasattr(self,'stdout'): - kw['stdout']=self.stdout - if hasattr(self,'stderr'): - kw['stderr']=self.stderr - if not isinstance(cmd,str)and(len(repr(cmd))>=8192 if Utils.is_win32 else len(cmd)>200000): - cmd,args=self.split_argfile(cmd) - try: - (fd,tmp)=tempfile.mkstemp() - os.write(fd,'\r\n'.join(args).encode()) - os.close(fd) - if Logs.verbose: - Logs.debug('argfile: @%r -> %r',tmp,args) - return self.generator.bld.exec_command(cmd+['@'+tmp],**kw) - finally: + env = kw['env'] = dict(kw.get('env') or self.env.env or os.environ) + env['PATH'] = self.env.PATH if isinstance(self.env.PATH, str) else os.pathsep.join(self.env.PATH) + + if hasattr(self, 'stdout'): + kw['stdout'] = self.stdout + if hasattr(self, 'stderr'): + kw['stderr'] = self.stderr + + if not isinstance(cmd, str): + if Utils.is_win32: + # win32 compares the resulting length http://support.microsoft.com/kb/830473 + too_long = sum([len(arg) for arg in cmd]) + len(cmd) > 8192 + else: + # non-win32 counts the amount of arguments (200k) + too_long = len(cmd) > 200000 + + if too_long and getattr(self, 'allow_argsfile', True): + # Shunt arguments to a temporary file if the command is too long. + cmd, args = self.split_argfile(cmd) try: - os.remove(tmp) - except OSError: - pass - else: - return self.generator.bld.exec_command(cmd,**kw) + (fd, tmp) = tempfile.mkstemp() + os.write(fd, '\r\n'.join(args).encode()) + os.close(fd) + if Logs.verbose: + Logs.debug('argfile: @%r -> %r', tmp, args) + return self.generator.bld.exec_command(cmd + ['@' + tmp], **kw) + finally: + try: + os.remove(tmp) + except OSError: + # anti-virus and indexers can keep files open -_- + pass + return self.generator.bld.exec_command(cmd, **kw) + def process(self): + """ + Runs the task and handles errors + + :return: 0 or None if everything is fine + :rtype: integer + """ + # remove the task signature immediately before it is executed + # so that the task will be executed again in case of failure try: del self.generator.bld.task_sigs[self.uid()] except KeyError: pass + try: - ret=self.run() + ret = self.run() except Exception: - self.err_msg=traceback.format_exc() - self.hasrun=EXCEPTION + self.err_msg = traceback.format_exc() + self.hasrun = EXCEPTION else: if ret: - self.err_code=ret - self.hasrun=CRASHED + self.err_code = ret + self.hasrun = CRASHED else: try: self.post_run() except Errors.WafError: pass except Exception: - self.err_msg=traceback.format_exc() - self.hasrun=EXCEPTION + self.err_msg = traceback.format_exc() + self.hasrun = EXCEPTION else: - self.hasrun=SUCCESS - if self.hasrun!=SUCCESS and self.scan: + self.hasrun = SUCCESS + + if self.hasrun != SUCCESS and self.scan: + # rescan dependencies on next run try: del self.generator.bld.imp_sigs[self.uid()] except KeyError: pass - def log_display(self,bld): - if self.generator.bld.progress_bar==3: + + def log_display(self, bld): + "Writes the execution status on the context logger" + if self.generator.bld.progress_bar == 3: return - s=self.display() + + s = self.display() if s: if bld.logger: - logger=bld.logger + logger = bld.logger else: - logger=Logs - if self.generator.bld.progress_bar==1: - c1=Logs.colors.cursor_off - c2=Logs.colors.cursor_on - 
logger.info(s,extra={'stream':sys.stderr,'terminator':'','c1':c1,'c2':c2}) + logger = Logs + + if self.generator.bld.progress_bar == 1: + c1 = Logs.colors.cursor_off + c2 = Logs.colors.cursor_on + logger.info(s, extra={'stream': sys.stderr, 'terminator':'', 'c1': c1, 'c2' : c2}) else: - logger.info(s,extra={'terminator':'','c1':'','c2':''}) + logger.info(s, extra={'terminator':'', 'c1': '', 'c2' : ''}) + def display(self): - col1=Logs.colors(self.color) - col2=Logs.colors.NORMAL - master=self.generator.bld.producer + """ + Returns an execution status for the console, the progress bar, or the IDE output. + + :rtype: string + """ + col1 = Logs.colors(self.color) + col2 = Logs.colors.NORMAL + master = self.generator.bld.producer + def cur(): - return master.processed-master.ready.qsize() - if self.generator.bld.progress_bar==1: - return self.generator.bld.progress_line(cur(),master.total,col1,col2) - if self.generator.bld.progress_bar==2: - ela=str(self.generator.bld.timer) + # the current task position, computed as late as possible + return master.processed - master.ready.qsize() + + if self.generator.bld.progress_bar == 1: + return self.generator.bld.progress_line(cur(), master.total, col1, col2) + + if self.generator.bld.progress_bar == 2: + ela = str(self.generator.bld.timer) try: - ins=','.join([n.name for n in self.inputs]) + ins = ','.join([n.name for n in self.inputs]) except AttributeError: - ins='' + ins = '' try: - outs=','.join([n.name for n in self.outputs]) + outs = ','.join([n.name for n in self.outputs]) except AttributeError: - outs='' - return'|Total %s|Current %s|Inputs %s|Outputs %s|Time %s|\n'%(master.total,cur(),ins,outs,ela) - s=str(self) + outs = '' + return '|Total %s|Current %s|Inputs %s|Outputs %s|Time %s|\n' % (master.total, cur(), ins, outs, ela) + + s = str(self) if not s: return None - total=master.total - n=len(str(total)) - fs='[%%%dd/%%%dd] %%s%%s%%s%%s\n'%(n,n) - kw=self.keyword() + + total = master.total + n = len(str(total)) + fs = '[%%%dd/%%%dd] %%s%%s%%s%%s\n' % (n, n) + kw = self.keyword() if kw: - kw+=' ' - return fs%(cur(),total,kw,col1,s,col2) + kw += ' ' + return fs % (cur(), total, kw, col1, s, col2) + def hash_constraints(self): - return(tuple(self.before),tuple(self.after),tuple(self.ext_in),tuple(self.ext_out),self.__class__.__name__,self.hcode) + """ + Identifies a task type for all the constraints relevant for the scheduler: precedence, file production + + :return: a hash value + :rtype: string + """ + return (tuple(self.before), tuple(self.after), tuple(self.ext_in), tuple(self.ext_out), self.__class__.__name__, self.hcode) + def format_error(self): + """ + Returns an error message to display the build failure reasons + + :rtype: string + """ if Logs.verbose: - msg=': %r\n%r'%(self,getattr(self,'last_cmd','')) + msg = ': %r\n%r' % (self, getattr(self, 'last_cmd', '')) else: - msg=' (run with -v to display more information)' - name=getattr(self.generator,'name','') - if getattr(self,"err_msg",None): + msg = ' (run with -v to display more information)' + name = getattr(self.generator, 'name', '') + if getattr(self, "err_msg", None): return self.err_msg elif not self.hasrun: - return'task in %r was not executed for some reason: %r'%(name,self) - elif self.hasrun==CRASHED: + return 'task in %r was not executed for some reason: %r' % (name, self) + elif self.hasrun == CRASHED: try: - return' -> task in %r failed with exit status %r%s'%(name,self.err_code,msg) + return ' -> task in %r failed with exit status %r%s' % (name, self.err_code, msg) 
except AttributeError: - return' -> task in %r failed%s'%(name,msg) - elif self.hasrun==MISSING: - return' -> missing files in %r%s'%(name,msg) - elif self.hasrun==CANCELED: - return' -> %r canceled because of missing dependencies'%name - else: - return'invalid status for task in %r: %r'%(name,self.hasrun) - def colon(self,var1,var2): - tmp=self.env[var1] + return ' -> task in %r failed%s' % (name, msg) + elif self.hasrun == MISSING: + return ' -> missing files in %r%s' % (name, msg) + elif self.hasrun == CANCELED: + return ' -> %r canceled because of missing dependencies' % name + else: + return 'invalid status for task in %r: %r' % (name, self.hasrun) + + def colon(self, var1, var2): + """ + Enable scriptlet expressions of the form ${FOO_ST:FOO} + If the first variable (FOO_ST) is empty, then an empty list is returned + + The results will be slightly different if FOO_ST is a list, for example:: + + env.FOO = ['p1', 'p2'] + env.FOO_ST = '-I%s' + # ${FOO_ST:FOO} returns + ['-Ip1', '-Ip2'] + + env.FOO_ST = ['-a', '-b'] + # ${FOO_ST:FOO} returns + ['-a', '-b', 'p1', '-a', '-b', 'p2'] + """ + tmp = self.env[var1] if not tmp: - return[] - if isinstance(var2,str): - it=self.env[var2] - else: - it=var2 - if isinstance(tmp,str): - return[tmp%x for x in it] + return [] + + if isinstance(var2, str): + it = self.env[var2] + else: + it = var2 + if isinstance(tmp, str): + return [tmp % x for x in it] else: - lst=[] + lst = [] for y in it: lst.extend(tmp) lst.append(y) return lst + def __str__(self): - name=self.__class__.__name__ + "string to display to the user" + name = self.__class__.__name__ if self.outputs: - if name.endswith(('lib','program'))or not self.inputs: - node=self.outputs[0] + if name.endswith(('lib', 'program')) or not self.inputs: + node = self.outputs[0] return node.path_from(node.ctx.launch_node()) - if not(self.inputs or self.outputs): + if not (self.inputs or self.outputs): return self.__class__.__name__ - if len(self.inputs)==1: - node=self.inputs[0] + if len(self.inputs) == 1: + node = self.inputs[0] return node.path_from(node.ctx.launch_node()) - src_str=' '.join([a.path_from(a.ctx.launch_node())for a in self.inputs]) - tgt_str=' '.join([a.path_from(a.ctx.launch_node())for a in self.outputs]) + + src_str = ' '.join([a.path_from(a.ctx.launch_node()) for a in self.inputs]) + tgt_str = ' '.join([a.path_from(a.ctx.launch_node()) for a in self.outputs]) if self.outputs: - sep=' -> ' + sep = ' -> ' else: - sep='' - return'%s: %s%s%s'%(self.__class__.__name__,src_str,sep,tgt_str) + sep = '' + return '%s: %s%s%s' % (self.__class__.__name__, src_str, sep, tgt_str) + def keyword(self): - name=self.__class__.__name__ - if name.endswith(('lib','program')): - return'Linking' - if len(self.inputs)==1 and len(self.outputs)==1: - return'Compiling' + "Display keyword used to prettify the console outputs" + name = self.__class__.__name__ + if name.endswith(('lib', 'program')): + return 'Linking' + if len(self.inputs) == 1 and len(self.outputs) == 1: + return 'Compiling' if not self.inputs: if self.outputs: - return'Creating' + return 'Creating' else: - return'Running' - return'Processing' + return 'Running' + return 'Processing' + def __repr__(self): + "for debugging purposes" try: - ins=",".join([x.name for x in self.inputs]) - outs=",".join([x.name for x in self.outputs]) + ins = ",".join([x.name for x in self.inputs]) + outs = ",".join([x.name for x in self.outputs]) except AttributeError: - ins=",".join([str(x)for x in self.inputs]) - outs=",".join([str(x)for x in self.outputs]) - 
return"".join(['\n\t{task %r: '%id(self),self.__class__.__name__," ",ins," -> ",outs,'}']) + ins = ",".join([str(x) for x in self.inputs]) + outs = ",".join([str(x) for x in self.outputs]) + return "".join(['\n\t{task %r: ' % id(self), self.__class__.__name__, " ", ins, " -> ", outs, '}']) + def uid(self): + """ + Returns an identifier used to determine if tasks are up-to-date. Since the + identifier will be stored between executions, it must be: + + - unique for a task: no two tasks return the same value (for a given build context) + - the same for a given task instance + + By default, the node paths, the class name, and the function are used + as inputs to compute a hash. + + The pointer to the object (python built-in 'id') will change between build executions, + and must be avoided in such hashes. + + :return: hash value + :rtype: string + """ try: return self.uid_ except AttributeError: - m=Utils.md5(self.__class__.__name__) - up=m.update - for x in self.inputs+self.outputs: + m = Utils.md5(self.__class__.__name__) + up = m.update + for x in self.inputs + self.outputs: up(x.abspath()) - self.uid_=m.digest() + self.uid_ = m.digest() return self.uid_ - def set_inputs(self,inp): - if isinstance(inp,list): - self.inputs+=inp + + def set_inputs(self, inp): + """ + Appends the nodes to the *inputs* list + + :param inp: input nodes + :type inp: node or list of nodes + """ + if isinstance(inp, list): + self.inputs += inp else: self.inputs.append(inp) - def set_outputs(self,out): - if isinstance(out,list): - self.outputs+=out + + def set_outputs(self, out): + """ + Appends the nodes to the *outputs* list + + :param out: output nodes + :type out: node or list of nodes + """ + if isinstance(out, list): + self.outputs += out else: self.outputs.append(out) - def set_run_after(self,task): - assert isinstance(task,Task) + + def set_run_after(self, task): + """ + Run this task only after the given *task*. + + Calling this method from :py:meth:`waflib.Task.Task.runnable_status` may cause + build deadlocks; see :py:meth:`waflib.Tools.fc.fc.runnable_status` for details. + + :param task: task + :type task: :py:class:`waflib.Task.Task` + """ + assert isinstance(task, Task) self.run_after.add(task) + def signature(self): + """ + Task signatures are stored between build executions, they are use to track the changes + made to the input nodes (not to the outputs!). 
The signature hashes data from various sources: + + * explicit dependencies: files listed in the inputs (list of node objects) :py:meth:`waflib.Task.Task.sig_explicit_deps` + * implicit dependencies: list of nodes returned by scanner methods (when present) :py:meth:`waflib.Task.Task.sig_implicit_deps` + * hashed data: variables/values read from task.vars/task.env :py:meth:`waflib.Task.Task.sig_vars` + + If the signature is expected to give a different result, clear the cache kept in ``self.cache_sig``:: + + from waflib import Task + class cls(Task.Task): + def signature(self): + sig = super(Task.Task, self).signature() + delattr(self, 'cache_sig') + return super(Task.Task, self).signature() + + :return: the signature value + :rtype: string or bytes + """ try: return self.cache_sig except AttributeError: pass - self.m=Utils.md5(self.hcode) + + self.m = Utils.md5(self.hcode) + + # explicit deps self.sig_explicit_deps() + + # env vars self.sig_vars() + + # implicit deps / scanner results if self.scan: try: self.sig_implicit_deps() except Errors.TaskRescan: return self.signature() - ret=self.cache_sig=self.m.digest() + + ret = self.cache_sig = self.m.digest() return ret + def runnable_status(self): - bld=self.generator.bld - if bld.is_install<0: + """ + Returns the Task status + + :return: a task state in :py:const:`waflib.Task.RUN_ME`, + :py:const:`waflib.Task.SKIP_ME`, :py:const:`waflib.Task.CANCEL_ME` or :py:const:`waflib.Task.ASK_LATER`. + :rtype: int + """ + bld = self.generator.bld + if bld.is_install < 0: return SKIP_ME + for t in self.run_after: if not t.hasrun: return ASK_LATER - elif t.hasrun1.66s) thanks to caching (28s->1.86s) + """ + bld = self.generator.bld try: - cache=bld.dct_implicit_nodes + cache = bld.dct_implicit_nodes except AttributeError: - bld.dct_implicit_nodes=cache={} + bld.dct_implicit_nodes = cache = {} + + # one cache per build group try: - dct=cache[bld.current_group] + dct = cache[bld.current_group] except KeyError: - dct=cache[bld.current_group]={} + dct = cache[bld.current_group] = {} for tsk in bld.cur_tasks: for x in tsk.outputs: - dct[x]=tsk - modified=False - for x in bld.node_deps.get(self.uid(),[]): + dct[x] = tsk + + modified = False + for x in bld.node_deps.get(self.uid(), []): if x in dct: self.run_after.add(dct[x]) - modified=True + modified = True + if modified: for tsk in self.run_after: if not tsk.hasrun: + #print "task is not ready..." 
raise Errors.TaskNotReady('not ready') -if sys.hexversion>0x3000000: +if sys.hexversion > 0x3000000: def uid(self): try: return self.uid_ except AttributeError: - m=Utils.md5(self.__class__.__name__.encode('latin-1','xmlcharrefreplace')) - up=m.update - for x in self.inputs+self.outputs: - up(x.abspath().encode('latin-1','xmlcharrefreplace')) - self.uid_=m.digest() + m = Utils.md5(self.__class__.__name__.encode('latin-1', 'xmlcharrefreplace')) + up = m.update + for x in self.inputs + self.outputs: + up(x.abspath().encode('latin-1', 'xmlcharrefreplace')) + self.uid_ = m.digest() return self.uid_ - uid.__doc__=Task.uid.__doc__ - Task.uid=uid -def is_before(t1,t2): - to_list=Utils.to_list + uid.__doc__ = Task.uid.__doc__ + Task.uid = uid + +def is_before(t1, t2): + """ + Returns a non-zero value if task t1 is to be executed before task t2:: + + t1.ext_out = '.h' + t2.ext_in = '.h' + t2.after = ['t1'] + t1.before = ['t2'] + waflib.Task.is_before(t1, t2) # True + + :param t1: Task object + :type t1: :py:class:`waflib.Task.Task` + :param t2: Task object + :type t2: :py:class:`waflib.Task.Task` + """ + to_list = Utils.to_list for k in to_list(t2.ext_in): if k in to_list(t1.ext_out): return 1 + if t1.__class__.__name__ in to_list(t2.after): return 1 + if t2.__class__.__name__ in to_list(t1.before): return 1 + return 0 + def set_file_constraints(tasks): - ins=Utils.defaultdict(set) - outs=Utils.defaultdict(set) + """ + Updates the ``run_after`` attribute of all tasks based on the task inputs and outputs + + :param tasks: tasks + :type tasks: list of :py:class:`waflib.Task.Task` + """ + ins = Utils.defaultdict(set) + outs = Utils.defaultdict(set) for x in tasks: for a in x.inputs: ins[a].add(x) @@ -516,256 +968,439 @@ ins[a].add(x) for a in x.outputs: outs[a].add(x) - links=set(ins.keys()).intersection(outs.keys()) + + links = set(ins.keys()).intersection(outs.keys()) for k in links: for a in ins[k]: a.run_after.update(outs[k]) + + class TaskGroup(object): - def __init__(self,prev,next): - self.prev=prev - self.next=next - self.done=False + """ + Wrap nxm task order constraints into a single object + to prevent the creation of large list/set objects + + This is an optimization + """ + def __init__(self, prev, next): + self.prev = prev + self.next = next + self.done = False + def get_hasrun(self): for k in self.prev: if not k.hasrun: return NOT_RUN return SUCCESS - hasrun=property(get_hasrun,None) + + hasrun = property(get_hasrun, None) + def set_precedence_constraints(tasks): - cstr_groups=Utils.defaultdict(list) + """ + Updates the ``run_after`` attribute of all tasks based on the after/before/ext_out/ext_in attributes + + :param tasks: tasks + :type tasks: list of :py:class:`waflib.Task.Task` + """ + cstr_groups = Utils.defaultdict(list) for x in tasks: - h=x.hash_constraints() + h = x.hash_constraints() cstr_groups[h].append(x) - keys=list(cstr_groups.keys()) - maxi=len(keys) + + keys = list(cstr_groups.keys()) + maxi = len(keys) + + # this list should be short for i in range(maxi): - t1=cstr_groups[keys[i]][0] - for j in range(i+1,maxi): - t2=cstr_groups[keys[j]][0] - if is_before(t1,t2): - a=i - b=j - elif is_before(t2,t1): - a=j - b=i + t1 = cstr_groups[keys[i]][0] + for j in range(i + 1, maxi): + t2 = cstr_groups[keys[j]][0] + + # add the constraints based on the comparisons + if is_before(t1, t2): + a = i + b = j + elif is_before(t2, t1): + a = j + b = i else: continue - a=cstr_groups[keys[a]] - b=cstr_groups[keys[b]] - if len(a)<2 or len(b)<2: + + a = cstr_groups[keys[a]] + b = 
cstr_groups[keys[b]]
+
+			if len(a) < 2 or len(b) < 2:
 				for x in b:
 					x.run_after.update(a)
 			else:
-				group=TaskGroup(set(a),set(b))
+				group = TaskGroup(set(a), set(b))
 				for x in b:
 					x.run_after.add(group)
+
 def funex(c):
-	dc={}
-	exec(c,dc)
+	"""
+	Compiles a scriptlet expression into a Python function
+
+	:param c: function to compile
+	:type c: string
+	:return: the function 'f' declared in the input string
+	:rtype: function
+	"""
+	dc = {}
+	exec(c, dc)
 	return dc['f']
-re_cond=re.compile('(?P<var>\w+)|(?P<or>\|)|(?P<and>&)')
-re_novar=re.compile(r'^(SRC|TGT)\W+.*?$')
-reg_act=re.compile(r'(?P<backslash>\\)|(?P<dollar>\$\$)|(?P<subst>\$\{(?P<var>\w+)(?P<code>.*?)\})',re.M)
+
+re_cond = re.compile(r'(?P<var>\w+)|(?P<or>\|)|(?P<and>&)')
+re_novar = re.compile(r'^(SRC|TGT)\W+.*?$')
+reg_act = re.compile(r'(?P<backslash>\\)|(?P<dollar>\$\$)|(?P<subst>\$\{(?P<var>\w+)(?P<code>.*?)\})', re.M)
 def compile_fun_shell(line):
-	extr=[]
+	"""
+	Creates a compiled function to execute a process through a sub-shell
+	"""
+	extr = []
 	def repl(match):
-		g=match.group
+		g = match.group
 		if g('dollar'):
-			return"$"
+			return "$"
 		elif g('backslash'):
-			return'\\\\'
+			return '\\\\'
 		elif g('subst'):
-			extr.append((g('var'),g('code')))
-			return"%s"
+			extr.append((g('var'), g('code')))
+			return "%s"
 		return None
-	line=reg_act.sub(repl,line)or line
-	dvars=[]
+	line = reg_act.sub(repl, line) or line
+	dvars = []
 	def add_dvar(x):
 		if x not in dvars:
 			dvars.append(x)
+
 	def replc(m):
+		# performs substitutions and populates dvars
 		if m.group('and'):
-			return' and '
+			return ' and '
 		elif m.group('or'):
-			return' or '
+			return ' or '
 		else:
-			x=m.group('var')
+			x = m.group('var')
 			add_dvar(x)
-			return'env[%r]'%x
-	parm=[]
-	app=parm.append
-	for(var,meth)in extr:
-		if var=='SRC':
+			return 'env[%r]' % x
+
+	parm = []
+	app = parm.append
+	for (var, meth) in extr:
+		if var == 'SRC':
 			if meth:
-				app('tsk.inputs%s'%meth)
+				app('tsk.inputs%s' % meth)
 			else:
 				app('" ".join([a.path_from(cwdx) for a in tsk.inputs])')
-		elif var=='TGT':
+		elif var == 'TGT':
 			if meth:
-				app('tsk.outputs%s'%meth)
+				app('tsk.outputs%s' % meth)
 			else:
 				app('" ".join([a.path_from(cwdx) for a in tsk.outputs])')
 		elif meth:
 			if meth.startswith(':'):
 				add_dvar(var)
-				m=meth[1:]
-				if m=='SRC':
-					m='[a.path_from(cwdx) for a in tsk.inputs]'
-				elif m=='TGT':
-					m='[a.path_from(cwdx) for a in tsk.outputs]'
+				m = meth[1:]
+				if m == 'SRC':
+					m = '[a.path_from(cwdx) for a in tsk.inputs]'
+				elif m == 'TGT':
+					m = '[a.path_from(cwdx) for a in tsk.outputs]'
 				elif re_novar.match(m):
-					m='[tsk.inputs%s]'%m[3:]
+					m = '[tsk.inputs%s]' % m[3:]
 				elif re_novar.match(m):
-					m='[tsk.outputs%s]'%m[3:]
+					m = '[tsk.outputs%s]' % m[3:]
 				else:
 					add_dvar(m)
-					if m[:3]not in('tsk','gen','bld'):
-						m='%r'%m
-				app('" ".join(tsk.colon(%r, %s))'%(var,m))
+					if m[:3] not in ('tsk', 'gen', 'bld'):
+						m = '%r' % m
+				app('" ".join(tsk.colon(%r, %s))' % (var, m))
 			elif meth.startswith('?'):
-				expr=re_cond.sub(replc,meth[1:])
-				app('p(%r) if (%s) else ""'%(var,expr))
+				# In A?B|C output env.A if one of env.B or env.C is non-empty
+				expr = re_cond.sub(replc, meth[1:])
+				app('p(%r) if (%s) else ""' % (var, expr))
 			else:
-				call='%s%s'%(var,meth)
+				call = '%s%s' % (var, meth)
 				add_dvar(call)
 				app(call)
 		else:
 			add_dvar(var)
-			app("p('%s')"%var)
+			app("p('%s')" % var)
 	if parm:
-		parm="%% (%s) "%(',\n\t\t'.join(parm))
+		parm = "%% (%s) " % (',\n\t\t'.join(parm))
 	else:
-		parm=''
-	c=COMPILE_TEMPLATE_SHELL%(line,parm)
-	Logs.debug('action: %s',c.strip().splitlines())
-	return(funex(c),dvars)
-reg_act_noshell=re.compile(r"(?P<space>\s+)|(?P<subst>\$\{(?P<var>\w+)(?P<code>.*?)\})|(?P<text>([^$ \t\n\r\f\v]|\$\$)+)",re.M)
+		parm = ''
+
+	c = COMPILE_TEMPLATE_SHELL % (line, parm)
+	Logs.debug('action: %s', c.strip().splitlines())
+	return (funex(c), dvars)
+
+reg_act_noshell = re.compile(r"(?P<space>\s+)|(?P<subst>\$\{(?P<var>\w+)(?P<code>.*?)\})|(?P<text>([^$ \t\n\r\f\v]|\$\$)+)", re.M)
 def compile_fun_noshell(line):
-	buf=[]
-	dvars=[]
-	merge=False
-	app=buf.append
+	"""
+	Creates a compiled function to execute a process without a sub-shell
+	"""
+	buf = []
+	dvars = []
+	merge = False
+	app = buf.append
+
 	def add_dvar(x):
 		if x not in dvars:
 			dvars.append(x)
+
 	def replc(m):
+		# performs substitutions and populates dvars
 		if m.group('and'):
-			return' and '
+			return ' and '
 		elif m.group('or'):
-			return' or '
+			return ' or '
 		else:
-			x=m.group('var')
+			x = m.group('var')
 			add_dvar(x)
-			return'env[%r]'%x
+			return 'env[%r]' % x
+
 	for m in reg_act_noshell.finditer(line):
 		if m.group('space'):
-			merge=False
+			merge = False
 			continue
 		elif m.group('text'):
-			app('[%r]'%m.group('text').replace('$$','$'))
+			app('[%r]' % m.group('text').replace('$$', '$'))
 		elif m.group('subst'):
-			var=m.group('var')
-			code=m.group('code')
-			if var=='SRC':
+			var = m.group('var')
+			code = m.group('code')
+			if var == 'SRC':
 				if code:
-					app('[tsk.inputs%s]'%code)
+					app('[tsk.inputs%s]' % code)
 				else:
 					app('[a.path_from(cwdx) for a in tsk.inputs]')
-			elif var=='TGT':
+			elif var == 'TGT':
 				if code:
-					app('[tsk.outputs%s]'%code)
+					app('[tsk.outputs%s]' % code)
 				else:
 					app('[a.path_from(cwdx) for a in tsk.outputs]')
 			elif code:
 				if code.startswith(':'):
+					# a composed variable ${FOO:OUT}
 					add_dvar(var)
-					m=code[1:]
-					if m=='SRC':
-						m='[a.path_from(cwdx) for a in tsk.inputs]'
-					elif m=='TGT':
-						m='[a.path_from(cwdx) for a in tsk.outputs]'
+					m = code[1:]
+					if m == 'SRC':
+						m = '[a.path_from(cwdx) for a in tsk.inputs]'
+					elif m == 'TGT':
+						m = '[a.path_from(cwdx) for a in tsk.outputs]'
 					elif re_novar.match(m):
-						m='[tsk.inputs%s]'%m[3:]
+						m = '[tsk.inputs%s]' % m[3:]
 					elif re_novar.match(m):
-						m='[tsk.outputs%s]'%m[3:]
+						m = '[tsk.outputs%s]' % m[3:]
 					else:
 						add_dvar(m)
-						if m[:3]not in('tsk','gen','bld'):
-							m='%r'%m
-					app('tsk.colon(%r, %s)'%(var,m))
+						if m[:3] not in ('tsk', 'gen', 'bld'):
+							m = '%r' % m
+					app('tsk.colon(%r, %s)' % (var, m))
 				elif code.startswith('?'):
-					expr=re_cond.sub(replc,code[1:])
-					app('to_list(env[%r] if (%s) else [])'%(var,expr))
+					# In A?B|C output env.A if one of env.B or env.C is non-empty
+					expr = re_cond.sub(replc, code[1:])
+					app('to_list(env[%r] if (%s) else [])' % (var, expr))
 				else:
-					call='%s%s'%(var,code)
+					# plain code such as ${tsk.inputs[0].abspath()}
+					call = '%s%s' % (var, code)
 					add_dvar(call)
-					app('gen.to_list(%s)'%call)
+					app('to_list(%s)' % call)
 			else:
-				app('to_list(env[%r])'%var)
+				# a plain variable such as # a plain variable like ${AR}
+				app('to_list(env[%r])' % var)
 				add_dvar(var)
 		if merge:
-			tmp='merge(%s, %s)'%(buf[-2],buf[-1])
+			tmp = 'merge(%s, %s)' % (buf[-2], buf[-1])
 			del buf[-1]
-			buf[-1]=tmp
-		merge=True
-	buf=['lst.extend(%s)'%x for x in buf]
-	fun=COMPILE_TEMPLATE_NOSHELL%"\n\t".join(buf)
-	Logs.debug('action: %s',fun.strip().splitlines())
-	return(funex(fun),dvars)
-def compile_fun(line,shell=False):
-	if isinstance(line,str):
-		if line.find('<')>0 or line.find('>')>0 or line.find('&&')>0:
-			shell=True
+			buf[-1] = tmp
+		merge = True # next turn
+
+	buf = ['lst.extend(%s)' % x for x in buf]
+	fun = COMPILE_TEMPLATE_NOSHELL % "\n\t".join(buf)
+	Logs.debug('action: %s', fun.strip().splitlines())
+	return (funex(fun), dvars)
+
+def compile_fun(line, shell=False):
+	"""
+	Parses a string expression such as '${CC} ${SRC} -o ${TGT}' and returns a pair containing:
+
+	* The function
created (compiled) for use as :py:meth:`waflib.Task.Task.run` + * The list of variables that must cause rebuilds when *env* data is modified + + for example:: + + from waflib.Task import compile_fun + compile_fun('cxx', '${CXX} -o ${TGT[0]} ${SRC} -I ${SRC[0].parent.bldpath()}') + + def build(bld): + bld(source='wscript', rule='echo "foo\\${SRC[0].name}\\bar"') + + The env variables (CXX, ..) on the task must not hold dicts so as to preserve a consistent order. + The reserved keywords ``TGT`` and ``SRC`` represent the task input and output nodes + + """ + if isinstance(line, str): + if line.find('<') > 0 or line.find('>') > 0 or line.find('&&') > 0: + shell = True else: - dvars_lst=[] - funs_lst=[] + dvars_lst = [] + funs_lst = [] for x in line: - if isinstance(x,str): - fun,dvars=compile_fun(x,shell) - dvars_lst+=dvars + if isinstance(x, str): + fun, dvars = compile_fun(x, shell) + dvars_lst += dvars funs_lst.append(fun) else: + # assume a function to let through funs_lst.append(x) def composed_fun(task): for x in funs_lst: - ret=x(task) + ret = x(task) if ret: return ret return None - return composed_fun,dvars_lst + return composed_fun, dvars_lst if shell: return compile_fun_shell(line) else: return compile_fun_noshell(line) + def compile_sig_vars(vars): - buf=[] + """ + This method produces a sig_vars method suitable for subclasses that provide + scriptlet code in their run_str code. + If no such method can be created, this method returns None. + + The purpose of the sig_vars method returned is to ensures + that rebuilds occur whenever the contents of the expression changes. + This is the case B below:: + + import time + # case A: regular variables + tg = bld(rule='echo ${FOO}') + tg.env.FOO = '%s' % time.time() + # case B + bld(rule='echo ${gen.foo}', foo='%s' % time.time()) + + :param vars: env variables such as CXXFLAGS or gen.foo + :type vars: list of string + :return: A sig_vars method relevant for dependencies if adequate, else None + :rtype: A function, or None in most cases + """ + buf = [] for x in sorted(vars): - if x[:3]in('tsk','gen','bld'): - buf.append('buf.append(%s)'%x) + if x[:3] in ('tsk', 'gen', 'bld'): + buf.append('buf.append(%s)' % x) if buf: - return funex(COMPILE_TEMPLATE_SIG_VARS%'\n\t'.join(buf)) + return funex(COMPILE_TEMPLATE_SIG_VARS % '\n\t'.join(buf)) return None -def task_factory(name,func=None,vars=None,color='GREEN',ext_in=[],ext_out=[],before=[],after=[],shell=False,scan=None): - params={'vars':vars or[],'color':color,'name':name,'shell':shell,'scan':scan,} - if isinstance(func,str)or isinstance(func,tuple): - params['run_str']=func + +def task_factory(name, func=None, vars=None, color='GREEN', ext_in=[], ext_out=[], before=[], after=[], shell=False, scan=None): + """ + Returns a new task subclass with the function ``run`` compiled from the line given. 
+ + :param func: method run + :type func: string or function + :param vars: list of variables to hash + :type vars: list of string + :param color: color to use + :type color: string + :param shell: when *func* is a string, enable/disable the use of the shell + :type shell: bool + :param scan: method scan + :type scan: function + :rtype: :py:class:`waflib.Task.Task` + """ + + params = { + 'vars': vars or [], # function arguments are static, and this one may be modified by the class + 'color': color, + 'name': name, + 'shell': shell, + 'scan': scan, + } + + if isinstance(func, str) or isinstance(func, tuple): + params['run_str'] = func else: - params['run']=func - cls=type(Task)(name,(Task,),params) - classes[name]=cls + params['run'] = func + + cls = type(Task)(name, (Task,), params) + classes[name] = cls + if ext_in: - cls.ext_in=Utils.to_list(ext_in) + cls.ext_in = Utils.to_list(ext_in) if ext_out: - cls.ext_out=Utils.to_list(ext_out) + cls.ext_out = Utils.to_list(ext_out) if before: - cls.before=Utils.to_list(before) + cls.before = Utils.to_list(before) if after: - cls.after=Utils.to_list(after) + cls.after = Utils.to_list(after) + return cls + def deep_inputs(cls): + """ + Task class decorator to enable rebuilds on input files task signatures + """ def sig_explicit_deps(self): Task.sig_explicit_deps(self) Task.sig_deep_inputs(self) - cls.sig_explicit_deps=sig_explicit_deps + cls.sig_explicit_deps = sig_explicit_deps return cls -TaskBase=Task + +TaskBase = Task +"Provided for compatibility reasons, TaskBase should not be used" + +class TaskSemaphore(object): + """ + Task semaphores provide a simple and efficient way of throttling the amount of + a particular task to run concurrently. The throttling value is capped + by the amount of maximum jobs, so for example, a `TaskSemaphore(10)` + has no effect in a `-j2` build. + + Task semaphores are typically specified on the task class level:: + + class compile(waflib.Task.Task): + semaphore = waflib.Task.TaskSemaphore(2) + run_str = 'touch ${TGT}' + + Task semaphores are meant to be used by the build scheduler in the main + thread, so there are no guarantees of thread safety. + """ + def __init__(self, num): + """ + :param num: maximum value of concurrent tasks + :type num: int + """ + self.num = num + self.locking = set() + self.waiting = set() + + def is_locked(self): + """Returns True if this semaphore cannot be acquired by more tasks""" + return len(self.locking) >= self.num + + def acquire(self, tsk): + """ + Mark the semaphore as used by the given task (not re-entrant). + + :param tsk: task object + :type tsk: :py:class:`waflib.Task.Task` + :raises: :py:class:`IndexError` in case the resource is already acquired + """ + if self.is_locked(): + raise IndexError('Cannot lock more %r' % self.locking) + self.locking.add(tsk) + + def release(self, tsk): + """ + Mark the semaphore as unused by the given task. + + :param tsk: task object + :type tsk: :py:class:`waflib.Task.Task` + :raises: :py:class:`KeyError` in case the resource is not acquired by the task + """ + self.locking.remove(tsk) + diff -Nru lilv-0.24.4~dfsg0/waflib/Tools/ar.py lilv-0.24.6/waflib/Tools/ar.py --- lilv-0.24.4~dfsg0/waflib/Tools/ar.py 2018-06-27 05:22:05.000000000 +0000 +++ lilv-0.24.6/waflib/Tools/ar.py 2019-06-06 20:19:08.000000000 +0000 @@ -1,13 +1,24 @@ -#! /usr/bin/env python +#!/usr/bin/env python # encoding: utf-8 -# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file +# Thomas Nagy, 2006-2018 (ita) +# Ralf Habacker, 2006 (rh) + +""" +The **ar** program creates static libraries. This tool is almost always loaded +from others (C, C++, D, etc) for static library support. +""" from waflib.Configure import conf + @conf def find_ar(conf): + """Configuration helper used by C/C++ tools to enable the support for static libraries""" conf.load('ar') + def configure(conf): - conf.find_program('ar',var='AR') + """Finds the ar program and sets the default flags in ``conf.env.ARFLAGS``""" + conf.find_program('ar', var='AR') conf.add_os_flags('ARFLAGS') if not conf.env.ARFLAGS: - conf.env.ARFLAGS=['rcs'] + conf.env.ARFLAGS = ['rcs'] + diff -Nru lilv-0.24.4~dfsg0/waflib/Tools/asm.py lilv-0.24.6/waflib/Tools/asm.py --- lilv-0.24.4~dfsg0/waflib/Tools/asm.py 2018-06-27 05:22:05.000000000 +0000 +++ lilv-0.24.6/waflib/Tools/asm.py 2019-10-19 17:59:11.000000000 +0000 @@ -1,23 +1,108 @@ -#! /usr/bin/env python +#!/usr/bin/env python # encoding: utf-8 -# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file +# Thomas Nagy, 2008-2018 (ita) -from waflib import Task -from waflib.Tools.ccroot import link_task,stlink_task +""" +Assembly support, used by tools such as gas and nasm + +To declare targets using assembly:: + + def configure(conf): + conf.load('gcc gas') + + def build(bld): + bld( + features='c cstlib asm', + source = 'test.S', + target = 'asmtest') + + bld( + features='asm asmprogram', + source = 'test.S', + target = 'asmtest') + +Support for pure asm programs and libraries should also work:: + + def configure(conf): + conf.load('nasm') + conf.find_program('ld', 'ASLINK') + + def build(bld): + bld( + features='asm asmprogram', + source = 'test.S', + target = 'asmtest') +""" + +import re +from waflib import Errors, Logs, Task +from waflib.Tools.ccroot import link_task, stlink_task from waflib.TaskGen import extension +from waflib.Tools import c_preproc + +re_lines = re.compile( + '^[ \t]*(?:%)[ \t]*(ifdef|ifndef|if|else|elif|endif|include|import|define|undef)[ \t]*(.*)\r*$', + re.IGNORECASE | re.MULTILINE) + +class asm_parser(c_preproc.c_parser): + def filter_comments(self, node): + code = node.read() + code = c_preproc.re_nl.sub('', code) + code = c_preproc.re_cpp.sub(c_preproc.repl, code) + return re_lines.findall(code) + class asm(Task.Task): - color='BLUE' - run_str='${AS} ${ASFLAGS} ${ASMPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${AS_SRC_F}${SRC} ${AS_TGT_F}${TGT}' -@extension('.s','.S','.asm','.ASM','.spp','.SPP') -def asm_hook(self,node): - return self.create_compiled_task('asm',node) + """ + Compiles asm files by gas/nasm/yasm/... 
+ """ + color = 'BLUE' + run_str = '${AS} ${ASFLAGS} ${ASMPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${AS_SRC_F}${SRC} ${AS_TGT_F}${TGT}' + + def scan(self): + if self.env.ASM_NAME == 'gas': + return c_preproc.scan(self) + Logs.warn('There is no dependency scanner for Nasm!') + return [[], []] + elif self.env.ASM_NAME == 'nasm': + Logs.warn('The Nasm dependency scanner is incomplete!') + + try: + incn = self.generator.includes_nodes + except AttributeError: + raise Errors.WafError('%r is missing the "asm" feature' % self.generator) + + if c_preproc.go_absolute: + nodepaths = incn + else: + nodepaths = [x for x in incn if x.is_child_of(x.ctx.srcnode) or x.is_child_of(x.ctx.bldnode)] + + tmp = asm_parser(nodepaths) + tmp.start(self.inputs[0], self.env) + return (tmp.nodes, tmp.names) + +@extension('.s', '.S', '.asm', '.ASM', '.spp', '.SPP') +def asm_hook(self, node): + """ + Binds the asm extension to the asm task + + :param node: input file + :type node: :py:class:`waflib.Node.Node` + """ + return self.create_compiled_task('asm', node) + class asmprogram(link_task): - run_str='${ASLINK} ${ASLINKFLAGS} ${ASLNK_TGT_F}${TGT} ${ASLNK_SRC_F}${SRC}' - ext_out=['.bin'] - inst_to='${BINDIR}' + "Links object files into a c program" + run_str = '${ASLINK} ${ASLINKFLAGS} ${ASLNK_TGT_F}${TGT} ${ASLNK_SRC_F}${SRC}' + ext_out = ['.bin'] + inst_to = '${BINDIR}' + class asmshlib(asmprogram): - inst_to='${LIBDIR}' + "Links object files into a c shared library" + inst_to = '${LIBDIR}' + class asmstlib(stlink_task): - pass + "Links object files into a c static library" + pass # do not remove + def configure(conf): - conf.env.ASMPATH_ST='-I%s' + conf.env.ASMPATH_ST = '-I%s' diff -Nru lilv-0.24.4~dfsg0/waflib/Tools/bison.py lilv-0.24.6/waflib/Tools/bison.py --- lilv-0.24.4~dfsg0/waflib/Tools/bison.py 2018-06-27 05:22:05.000000000 +0000 +++ lilv-0.24.6/waflib/Tools/bison.py 2019-06-06 20:19:08.000000000 +0000 @@ -1,17 +1,30 @@ -#! /usr/bin/env python +#!/usr/bin/env python # encoding: utf-8 -# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file +# John O'Meara, 2006 +# Thomas Nagy 2009-2018 (ita) + +""" +The **bison** program is a code generator which creates C or C++ files. +The generated files are compiled into object files. +""" from waflib import Task from waflib.TaskGen import extension + class bison(Task.Task): - color='BLUE' - run_str='${BISON} ${BISONFLAGS} ${SRC[0].abspath()} -o ${TGT[0].name}' - ext_out=['.h'] -@extension('.y','.yc','.yy') -def big_bison(self,node): - has_h='-d'in self.env.BISONFLAGS - outs=[] + """Compiles bison files""" + color = 'BLUE' + run_str = '${BISON} ${BISONFLAGS} ${SRC[0].abspath()} -o ${TGT[0].name}' + ext_out = ['.h'] # just to make sure + +@extension('.y', '.yc', '.yy') +def big_bison(self, node): + """ + Creates a bison task, which must be executed from the directory of the output file. 
+ """ + has_h = '-d' in self.env.BISONFLAGS + + outs = [] if node.name.endswith('.yc'): outs.append(node.change_ext('.tab.cc')) if has_h: @@ -20,9 +33,17 @@ outs.append(node.change_ext('.tab.c')) if has_h: outs.append(node.change_ext('.tab.h')) - tsk=self.create_task('bison',node,outs) - tsk.cwd=node.parent.get_bld() + + tsk = self.create_task('bison', node, outs) + tsk.cwd = node.parent.get_bld() + + # and the c/cxx file must be compiled too self.source.append(outs[0]) + def configure(conf): - conf.find_program('bison',var='BISON') - conf.env.BISONFLAGS=['-d'] + """ + Detects the *bison* program + """ + conf.find_program('bison', var='BISON') + conf.env.BISONFLAGS = ['-d'] + diff -Nru lilv-0.24.4~dfsg0/waflib/Tools/c_aliases.py lilv-0.24.6/waflib/Tools/c_aliases.py --- lilv-0.24.4~dfsg0/waflib/Tools/c_aliases.py 2018-06-22 09:25:51.000000000 +0000 +++ lilv-0.24.6/waflib/Tools/c_aliases.py 2019-10-19 17:59:11.000000000 +0000 @@ -1,60 +1,146 @@ -#! /usr/bin/env python +#!/usr/bin/env python # encoding: utf-8 -# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file +# Thomas Nagy, 2005-2015 (ita) -from waflib import Utils,Errors +"base for all c/c++ programs and libraries" + +from waflib import Utils, Errors from waflib.Configure import conf + def get_extensions(lst): - ret=[] + """ + Returns the file extensions for the list of files given as input + + :param lst: files to process + :list lst: list of string or :py:class:`waflib.Node.Node` + :return: list of file extensions + :rtype: list of string + """ + ret = [] for x in Utils.to_list(lst): - if not isinstance(x,str): - x=x.name - ret.append(x[x.rfind('.')+1:]) + if not isinstance(x, str): + x = x.name + ret.append(x[x.rfind('.') + 1:]) return ret + def sniff_features(**kw): - exts=get_extensions(kw['source']) - typ=kw['typ'] - feats=[] - for x in'cxx cpp c++ cc C'.split(): + """ + Computes and returns the features required for a task generator by + looking at the file extensions. 
This aimed for C/C++ mainly:: + + snif_features(source=['foo.c', 'foo.cxx'], type='shlib') + # returns ['cxx', 'c', 'cxxshlib', 'cshlib'] + + :param source: source files to process + :type source: list of string or :py:class:`waflib.Node.Node` + :param type: object type in *program*, *shlib* or *stlib* + :type type: string + :return: the list of features for a task generator processing the source files + :rtype: list of string + """ + exts = get_extensions(kw['source']) + typ = kw['typ'] + feats = [] + + # watch the order, cxx will have the precedence + for x in 'cxx cpp c++ cc C'.split(): if x in exts: feats.append('cxx') break - if'c'in exts or'vala'in exts or'gs'in exts: + if 'c' in exts or 'vala' in exts or 'gs' in exts: feats.append('c') - for x in'f f90 F F90 for FOR'.split(): + + if 's' in exts or 'S' in exts: + feats.append('asm') + + for x in 'f f90 F F90 for FOR'.split(): if x in exts: feats.append('fc') break - if'd'in exts: + + if 'd' in exts: feats.append('d') - if'java'in exts: + + if 'java' in exts: feats.append('java') - return'java' - if typ in('program','shlib','stlib'): - will_link=False + return 'java' + + if typ in ('program', 'shlib', 'stlib'): + will_link = False for x in feats: - if x in('cxx','d','fc','c'): - feats.append(x+typ) - will_link=True - if not will_link and not kw.get('features',[]): - raise Errors.WafError('Cannot link from %r, try passing eg: features="c cprogram"?'%kw) + if x in ('cxx', 'd', 'fc', 'c', 'asm'): + feats.append(x + typ) + will_link = True + if not will_link and not kw.get('features', []): + raise Errors.WafError('Cannot link from %r, try passing eg: features="c cprogram"?' % kw) return feats -def set_features(kw,typ): - kw['typ']=typ - kw['features']=Utils.to_list(kw.get('features',[]))+Utils.to_list(sniff_features(**kw)) + +def set_features(kw, typ): + """ + Inserts data in the input dict *kw* based on existing data and on the type of target + required (typ). 
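To make the sniffing concrete, a minimal sketch (assumes a waf checkout on the Python path; the file names are arbitrary)::

	from waflib.Tools.c_aliases import sniff_features

	# 'cxx' wins the precedence scan, 'c' is added next, and the
	# link features are then derived from typ
	feats = sniff_features(source=['main.c', 'util.cxx'], typ='program')
	# -> ['cxx', 'c', 'cxxprogram', 'cprogram']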
+ + :param kw: task generator parameters + :type kw: dict + :param typ: type of target + :type typ: string + """ + kw['typ'] = typ + kw['features'] = Utils.to_list(kw.get('features', [])) + Utils.to_list(sniff_features(**kw)) + @conf -def program(bld,*k,**kw): - set_features(kw,'program') - return bld(*k,**kw) +def program(bld, *k, **kw): + """ + Alias for creating programs by looking at the file extensions:: + + def build(bld): + bld.program(source='foo.c', target='app') + # equivalent to: + # bld(features='c cprogram', source='foo.c', target='app') + + """ + set_features(kw, 'program') + return bld(*k, **kw) + @conf -def shlib(bld,*k,**kw): - set_features(kw,'shlib') - return bld(*k,**kw) +def shlib(bld, *k, **kw): + """ + Alias for creating shared libraries by looking at the file extensions:: + + def build(bld): + bld.shlib(source='foo.c', target='app') + # equivalent to: + # bld(features='c cshlib', source='foo.c', target='app') + + """ + set_features(kw, 'shlib') + return bld(*k, **kw) + @conf -def stlib(bld,*k,**kw): - set_features(kw,'stlib') - return bld(*k,**kw) +def stlib(bld, *k, **kw): + """ + Alias for creating static libraries by looking at the file extensions:: + + def build(bld): + bld.stlib(source='foo.cpp', target='app') + # equivalent to: + # bld(features='cxx cxxstlib', source='foo.cpp', target='app') + + """ + set_features(kw, 'stlib') + return bld(*k, **kw) + @conf -def objects(bld,*k,**kw): - set_features(kw,'objects') - return bld(*k,**kw) +def objects(bld, *k, **kw): + """ + Alias for creating object files by looking at the file extensions:: + + def build(bld): + bld.objects(source='foo.c', target='app') + # equivalent to: + # bld(features='c', source='foo.c', target='app') + + """ + set_features(kw, 'objects') + return bld(*k, **kw) + diff -Nru lilv-0.24.4~dfsg0/waflib/Tools/c_config.py lilv-0.24.6/waflib/Tools/c_config.py --- lilv-0.24.4~dfsg0/waflib/Tools/c_config.py 2018-06-27 05:22:05.000000000 +0000 +++ lilv-0.24.6/waflib/Tools/c_config.py 2019-10-19 17:59:11.000000000 +0000 @@ -1,805 +1,1352 @@ -#! /usr/bin/env python +#!/usr/bin/env python # encoding: utf-8 -# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file +# Thomas Nagy, 2005-2018 (ita) + +""" +C/C++/D configuration helpers +""" from __future__ import with_statement -import os,re,shlex -from waflib import Build,Utils,Task,Options,Logs,Errors,Runner -from waflib.TaskGen import after_method,feature + +import os, re, shlex +from waflib import Build, Utils, Task, Options, Logs, Errors, Runner +from waflib.TaskGen import after_method, feature from waflib.Configure import conf -WAF_CONFIG_H='config.h' -DEFKEYS='define_key' -INCKEYS='include_key' -SNIP_EMPTY_PROGRAM=''' + +WAF_CONFIG_H = 'config.h' +"""default name for the config.h file""" + +DEFKEYS = 'define_key' +INCKEYS = 'include_key' + +SNIP_EMPTY_PROGRAM = ''' int main(int argc, char **argv) { (void)argc; (void)argv; return 0; } ''' -MACRO_TO_DESTOS={'__linux__':'linux','__GNU__':'gnu','__FreeBSD__':'freebsd','__NetBSD__':'netbsd','__OpenBSD__':'openbsd','__sun':'sunos','__hpux':'hpux','__sgi':'irix','_AIX':'aix','__CYGWIN__':'cygwin','__MSYS__':'cygwin','_UWIN':'uwin','_WIN64':'win32','_WIN32':'win32','__ENVIRONMENT_MAC_OS_X_VERSION_MIN_REQUIRED__':'darwin','__ENVIRONMENT_IPHONE_OS_VERSION_MIN_REQUIRED__':'darwin','__QNX__':'qnx','__native_client__':'nacl'} -MACRO_TO_DEST_CPU={'__x86_64__':'x86_64','__amd64__':'x86_64','__i386__':'x86','__ia64__':'ia','__mips__':'mips','__sparc__':'sparc','__alpha__':'alpha','__aarch64__':'aarch64','__thumb__':'thumb','__arm__':'arm','__hppa__':'hppa','__powerpc__':'powerpc','__ppc__':'powerpc','__convex__':'convex','__m68k__':'m68k','__s390x__':'s390x','__s390__':'s390','__sh__':'sh','__xtensa__':'xtensa',} + +MACRO_TO_DESTOS = { +'__linux__' : 'linux', +'__GNU__' : 'gnu', # hurd +'__FreeBSD__' : 'freebsd', +'__NetBSD__' : 'netbsd', +'__OpenBSD__' : 'openbsd', +'__sun' : 'sunos', +'__hpux' : 'hpux', +'__sgi' : 'irix', +'_AIX' : 'aix', +'__CYGWIN__' : 'cygwin', +'__MSYS__' : 'cygwin', +'_UWIN' : 'uwin', +'_WIN64' : 'win32', +'_WIN32' : 'win32', +# Note about darwin: this is also tested with 'defined __APPLE__ && defined __MACH__' somewhere below in this file. 
+'__ENVIRONMENT_MAC_OS_X_VERSION_MIN_REQUIRED__' : 'darwin', +'__ENVIRONMENT_IPHONE_OS_VERSION_MIN_REQUIRED__' : 'darwin', # iphone +'__QNX__' : 'qnx', +'__native_client__' : 'nacl' # google native client platform +} + +MACRO_TO_DEST_CPU = { +'__x86_64__' : 'x86_64', +'__amd64__' : 'x86_64', +'__i386__' : 'x86', +'__ia64__' : 'ia', +'__mips__' : 'mips', +'__sparc__' : 'sparc', +'__alpha__' : 'alpha', +'__aarch64__' : 'aarch64', +'__thumb__' : 'thumb', +'__arm__' : 'arm', +'__hppa__' : 'hppa', +'__powerpc__' : 'powerpc', +'__ppc__' : 'powerpc', +'__convex__' : 'convex', +'__m68k__' : 'm68k', +'__s390x__' : 's390x', +'__s390__' : 's390', +'__sh__' : 'sh', +'__xtensa__' : 'xtensa', +} + @conf -def parse_flags(self,line,uselib_store,env=None,force_static=False,posix=None): - assert(isinstance(line,str)) - env=env or self.env +def parse_flags(self, line, uselib_store, env=None, force_static=False, posix=None): + """ + Parses flags from the input lines, and adds them to the relevant use variables:: + + def configure(conf): + conf.parse_flags('-O3', 'FOO') + # conf.env.CXXFLAGS_FOO = ['-O3'] + # conf.env.CFLAGS_FOO = ['-O3'] + + :param line: flags + :type line: string + :param uselib_store: where to add the flags + :type uselib_store: string + :param env: config set or conf.env by default + :type env: :py:class:`waflib.ConfigSet.ConfigSet` + """ + + assert(isinstance(line, str)) + + env = env or self.env + + # Issue 811 and 1371 if posix is None: - posix=True - if'\\'in line: - posix=('\\ 'in line)or('\\\\'in line) - lex=shlex.shlex(line,posix=posix) - lex.whitespace_split=True - lex.commenters='' - lst=list(lex) - uselib=uselib_store - def app(var,val): - env.append_value('%s_%s'%(var,uselib),val) - def appu(var,val): - env.append_unique('%s_%s'%(var,uselib),val) - static=False + posix = True + if '\\' in line: + posix = ('\\ ' in line) or ('\\\\' in line) + + lex = shlex.shlex(line, posix=posix) + lex.whitespace_split = True + lex.commenters = '' + lst = list(lex) + + # append_unique is not always possible + # for example, apple flags may require both -arch i386 and -arch ppc + uselib = uselib_store + def app(var, val): + env.append_value('%s_%s' % (var, uselib), val) + def appu(var, val): + env.append_unique('%s_%s' % (var, uselib), val) + static = False while lst: - x=lst.pop(0) - st=x[:2] - ot=x[2:] - if st=='-I'or st=='/I': + x = lst.pop(0) + st = x[:2] + ot = x[2:] + + if st == '-I' or st == '/I': if not ot: - ot=lst.pop(0) - appu('INCLUDES',ot) - elif st=='-i': - tmp=[x,lst.pop(0)] - app('CFLAGS',tmp) - app('CXXFLAGS',tmp) - elif st=='-D'or(env.CXX_NAME=='msvc'and st=='/D'): + ot = lst.pop(0) + appu('INCLUDES', ot) + elif st == '-i': + tmp = [x, lst.pop(0)] + app('CFLAGS', tmp) + app('CXXFLAGS', tmp) + elif st == '-D' or (env.CXX_NAME == 'msvc' and st == '/D'): # not perfect but.. 
if not ot: - ot=lst.pop(0) - app('DEFINES',ot) - elif st=='-l': + ot = lst.pop(0) + app('DEFINES', ot) + elif st == '-l': if not ot: - ot=lst.pop(0) - prefix='STLIB'if(force_static or static)else'LIB' - app(prefix,ot) - elif st=='-L': + ot = lst.pop(0) + prefix = 'STLIB' if (force_static or static) else 'LIB' + app(prefix, ot) + elif st == '-L': if not ot: - ot=lst.pop(0) - prefix='STLIBPATH'if(force_static or static)else'LIBPATH' - appu(prefix,ot) + ot = lst.pop(0) + prefix = 'STLIBPATH' if (force_static or static) else 'LIBPATH' + appu(prefix, ot) elif x.startswith('/LIBPATH:'): - prefix='STLIBPATH'if(force_static or static)else'LIBPATH' - appu(prefix,x.replace('/LIBPATH:','')) + prefix = 'STLIBPATH' if (force_static or static) else 'LIBPATH' + appu(prefix, x.replace('/LIBPATH:', '')) elif x.startswith('-std='): - prefix='CXXFLAGS'if'++'in x else'CFLAGS' - app(prefix,x) - elif x.startswith('+')or x in('-pthread','-fPIC','-fpic','-fPIE','-fpie'): - app('CFLAGS',x) - app('CXXFLAGS',x) - app('LINKFLAGS',x) - elif x=='-framework': - appu('FRAMEWORK',lst.pop(0)) + prefix = 'CXXFLAGS' if '++' in x else 'CFLAGS' + app(prefix, x) + elif x.startswith('+') or x in ('-pthread', '-fPIC', '-fpic', '-fPIE', '-fpie'): + app('CFLAGS', x) + app('CXXFLAGS', x) + app('LINKFLAGS', x) + elif x == '-framework': + appu('FRAMEWORK', lst.pop(0)) elif x.startswith('-F'): - appu('FRAMEWORKPATH',x[2:]) - elif x=='-Wl,-rpath'or x=='-Wl,-R': - app('RPATH',lst.pop(0).lstrip('-Wl,')) + appu('FRAMEWORKPATH', x[2:]) + elif x == '-Wl,-rpath' or x == '-Wl,-R': + app('RPATH', lst.pop(0).lstrip('-Wl,')) elif x.startswith('-Wl,-R,'): - app('RPATH',x[7:]) + app('RPATH', x[7:]) elif x.startswith('-Wl,-R'): - app('RPATH',x[6:]) + app('RPATH', x[6:]) elif x.startswith('-Wl,-rpath,'): - app('RPATH',x[11:]) - elif x=='-Wl,-Bstatic'or x=='-Bstatic': - static=True - elif x=='-Wl,-Bdynamic'or x=='-Bdynamic': - static=False - elif x.startswith('-Wl')or x in('-rdynamic','-pie'): - app('LINKFLAGS',x) - elif x.startswith(('-m','-f','-dynamic','-O','-g')): - app('CFLAGS',x) - app('CXXFLAGS',x) + app('RPATH', x[11:]) + elif x == '-Wl,-Bstatic' or x == '-Bstatic': + static = True + elif x == '-Wl,-Bdynamic' or x == '-Bdynamic': + static = False + elif x.startswith('-Wl') or x in ('-rdynamic', '-pie'): + app('LINKFLAGS', x) + elif x.startswith(('-m', '-f', '-dynamic', '-O', '-g')): + # Adding the -W option breaks python builds on Openindiana + app('CFLAGS', x) + app('CXXFLAGS', x) elif x.startswith('-bundle'): - app('LINKFLAGS',x) - elif x.startswith(('-undefined','-Xlinker')): - arg=lst.pop(0) - app('LINKFLAGS',[x,arg]) - elif x.startswith(('-arch','-isysroot')): - tmp=[x,lst.pop(0)] - app('CFLAGS',tmp) - app('CXXFLAGS',tmp) - app('LINKFLAGS',tmp) - elif x.endswith(('.a','.so','.dylib','.lib')): - appu('LINKFLAGS',x) + app('LINKFLAGS', x) + elif x.startswith(('-undefined', '-Xlinker')): + arg = lst.pop(0) + app('LINKFLAGS', [x, arg]) + elif x.startswith(('-arch', '-isysroot')): + tmp = [x, lst.pop(0)] + app('CFLAGS', tmp) + app('CXXFLAGS', tmp) + app('LINKFLAGS', tmp) + elif x.endswith(('.a', '.so', '.dylib', '.lib')): + appu('LINKFLAGS', x) # not cool, #762 else: - self.to_log('Unhandled flag %r'%x) + self.to_log('Unhandled flag %r' % x) + @conf -def validate_cfg(self,kw): - if not'path'in kw: +def validate_cfg(self, kw): + """ + Searches for the program *pkg-config* if missing, and validates the + parameters to pass to :py:func:`waflib.Tools.c_config.exec_cfg`. 
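Tying the flag dispatch above together, a hedged configure-time sketch (the flag string and the FOO store are invented)::

	def configure(conf):
		conf.parse_flags('-I/opt/foo/include -DUSE_FOO -L/opt/foo/lib -lfoo -O2', 'FOO')
		# -> conf.env.INCLUDES_FOO == ['/opt/foo/include']
		#    conf.env.DEFINES_FOO  == ['USE_FOO']
		#    conf.env.LIBPATH_FOO  == ['/opt/foo/lib']
		#    conf.env.LIB_FOO      == ['foo']
		#    '-O2' lands in both CFLAGS_FOO and CXXFLAGS_FOO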
+ + :param path: the **-config program to use** (default is *pkg-config*) + :type path: list of string + :param msg: message to display to describe the test executed + :type msg: string + :param okmsg: message to display when the test is successful + :type okmsg: string + :param errmsg: message to display in case of error + :type errmsg: string + """ + if not 'path' in kw: if not self.env.PKGCONFIG: - self.find_program('pkg-config',var='PKGCONFIG') - kw['path']=self.env.PKGCONFIG - s=('atleast_pkgconfig_version'in kw)+('modversion'in kw)+('package'in kw) - if s!=1: + self.find_program('pkg-config', var='PKGCONFIG') + kw['path'] = self.env.PKGCONFIG + + # verify that exactly one action is requested + s = ('atleast_pkgconfig_version' in kw) + ('modversion' in kw) + ('package' in kw) + if s != 1: raise ValueError('exactly one of atleast_pkgconfig_version, modversion and package must be set') - if not'msg'in kw: - if'atleast_pkgconfig_version'in kw: - kw['msg']='Checking for pkg-config version >= %r'%kw['atleast_pkgconfig_version'] - elif'modversion'in kw: - kw['msg']='Checking for %r version'%kw['modversion'] + if not 'msg' in kw: + if 'atleast_pkgconfig_version' in kw: + kw['msg'] = 'Checking for pkg-config version >= %r' % kw['atleast_pkgconfig_version'] + elif 'modversion' in kw: + kw['msg'] = 'Checking for %r version' % kw['modversion'] else: - kw['msg']='Checking for %r'%(kw['package']) - if not'okmsg'in kw and not'modversion'in kw: - kw['okmsg']='yes' - if not'errmsg'in kw: - kw['errmsg']='not found' - if'atleast_pkgconfig_version'in kw: + kw['msg'] = 'Checking for %r' %(kw['package']) + + # let the modversion check set the okmsg to the detected version + if not 'okmsg' in kw and not 'modversion' in kw: + kw['okmsg'] = 'yes' + if not 'errmsg' in kw: + kw['errmsg'] = 'not found' + + # pkg-config version + if 'atleast_pkgconfig_version' in kw: pass - elif'modversion'in kw: - if not'uselib_store'in kw: - kw['uselib_store']=kw['modversion'] - if not'define_name'in kw: - kw['define_name']='%s_VERSION'%Utils.quote_define_name(kw['uselib_store']) - else: - if not'uselib_store'in kw: - kw['uselib_store']=Utils.to_list(kw['package'])[0].upper() - if not'define_name'in kw: - kw['define_name']=self.have_define(kw['uselib_store']) -@conf -def exec_cfg(self,kw): - path=Utils.to_list(kw['path']) - env=self.env.env or None + elif 'modversion' in kw: + if not 'uselib_store' in kw: + kw['uselib_store'] = kw['modversion'] + if not 'define_name' in kw: + kw['define_name'] = '%s_VERSION' % Utils.quote_define_name(kw['uselib_store']) + else: + if not 'uselib_store' in kw: + kw['uselib_store'] = Utils.to_list(kw['package'])[0].upper() + if not 'define_name' in kw: + kw['define_name'] = self.have_define(kw['uselib_store']) + +@conf +def exec_cfg(self, kw): + """ + Executes ``pkg-config`` or other ``-config`` applications to collect configuration flags: + + * if atleast_pkgconfig_version is given, check that pkg-config has the version n and return + * if modversion is given, then return the module version + * else, execute the *-config* program with the *args* and *variables* given, and set the flags on the *conf.env.FLAGS_name* variable + + :param atleast_pkgconfig_version: minimum pkg-config version to use (disable other tests) + :type atleast_pkgconfig_version: string + :param package: package name, for example *gtk+-2.0* + :type package: string + :param uselib_store: if the test is successful, define HAVE\\_*name*. It is also used to define *conf.env.FLAGS_name* variables. 
+ :type uselib_store: string + :param modversion: if provided, return the version of the given module and define *name*\\_VERSION + :type modversion: string + :param args: arguments to give to *package* when retrieving flags + :type args: list of string + :param variables: return the values of particular variables + :type variables: list of string + :param define_variable: additional variables to define (also in conf.env.PKG_CONFIG_DEFINES) + :type define_variable: dict(string: string) + """ + + path = Utils.to_list(kw['path']) + env = self.env.env or None if kw.get('pkg_config_path'): if not env: - env=dict(self.environ) - env['PKG_CONFIG_PATH']=kw['pkg_config_path'] + env = dict(self.environ) + env['PKG_CONFIG_PATH'] = kw['pkg_config_path'] + def define_it(): - define_name=kw['define_name'] - if kw.get('global_define',1): - self.define(define_name,1,False) + define_name = kw['define_name'] + # by default, add HAVE_X to the config.h, else provide DEFINES_X for use=X + if kw.get('global_define', 1): + self.define(define_name, 1, False) else: - self.env.append_unique('DEFINES_%s'%kw['uselib_store'],"%s=1"%define_name) - if kw.get('add_have_to_env',1): - self.env[define_name]=1 - if'atleast_pkgconfig_version'in kw: - cmd=path+['--atleast-pkgconfig-version=%s'%kw['atleast_pkgconfig_version']] - self.cmd_and_log(cmd,env=env) + self.env.append_unique('DEFINES_%s' % kw['uselib_store'], "%s=1" % define_name) + + if kw.get('add_have_to_env', 1): + self.env[define_name] = 1 + + # pkg-config version + if 'atleast_pkgconfig_version' in kw: + cmd = path + ['--atleast-pkgconfig-version=%s' % kw['atleast_pkgconfig_version']] + self.cmd_and_log(cmd, env=env) return - if'modversion'in kw: - version=self.cmd_and_log(path+['--modversion',kw['modversion']],env=env).strip() - if not'okmsg'in kw: - kw['okmsg']=version - self.define(kw['define_name'],version) + + # single version for a module + if 'modversion' in kw: + version = self.cmd_and_log(path + ['--modversion', kw['modversion']], env=env).strip() + if not 'okmsg' in kw: + kw['okmsg'] = version + self.define(kw['define_name'], version) return version - lst=[]+path - defi=kw.get('define_variable') + + lst = [] + path + + defi = kw.get('define_variable') if not defi: - defi=self.env.PKG_CONFIG_DEFINES or{} - for key,val in defi.items(): - lst.append('--define-variable=%s=%s'%(key,val)) - static=kw.get('force_static',False) - if'args'in kw: - args=Utils.to_list(kw['args']) - if'--static'in args or'--static-libs'in args: - static=True - lst+=args + defi = self.env.PKG_CONFIG_DEFINES or {} + for key, val in defi.items(): + lst.append('--define-variable=%s=%s' % (key, val)) + + static = kw.get('force_static', False) + if 'args' in kw: + args = Utils.to_list(kw['args']) + if '--static' in args or '--static-libs' in args: + static = True + lst += args + + # tools like pkgconf expect the package argument after the -- ones -_- lst.extend(Utils.to_list(kw['package'])) - if'variables'in kw: - v_env=kw.get('env',self.env) - vars=Utils.to_list(kw['variables']) + + # retrieving variables of a module + if 'variables' in kw: + v_env = kw.get('env', self.env) + vars = Utils.to_list(kw['variables']) for v in vars: - val=self.cmd_and_log(lst+['--variable='+v],env=env).strip() - var='%s_%s'%(kw['uselib_store'],v) - v_env[var]=val + val = self.cmd_and_log(lst + ['--variable=' + v], env=env).strip() + var = '%s_%s' % (kw['uselib_store'], v) + v_env[var] = val return - ret=self.cmd_and_log(lst,env=env) + + # so we assume the command-line will output flags to be parsed 
afterwards + ret = self.cmd_and_log(lst, env=env) + define_it() - self.parse_flags(ret,kw['uselib_store'],kw.get('env',self.env),force_static=static,posix=kw.get('posix')) + self.parse_flags(ret, kw['uselib_store'], kw.get('env', self.env), force_static=static, posix=kw.get('posix')) return ret + @conf -def check_cfg(self,*k,**kw): +def check_cfg(self, *k, **kw): + """ + Checks for configuration flags using a **-config**-like program (pkg-config, sdl-config, etc). + This wraps internal calls to :py:func:`waflib.Tools.c_config.validate_cfg` and :py:func:`waflib.Tools.c_config.exec_cfg` + + A few examples:: + + def configure(conf): + conf.load('compiler_c') + conf.check_cfg(package='glib-2.0', args='--libs --cflags') + conf.check_cfg(package='pango') + conf.check_cfg(package='pango', uselib_store='MYPANGO', args=['--cflags', '--libs']) + conf.check_cfg(package='pango', + args=['pango >= 0.1.0', 'pango < 9.9.9', '--cflags', '--libs'], + msg="Checking for 'pango 0.1.0'") + conf.check_cfg(path='sdl-config', args='--cflags --libs', package='', uselib_store='SDL') + conf.check_cfg(path='mpicc', args='--showme:compile --showme:link', + package='', uselib_store='OPEN_MPI', mandatory=False) + # variables + conf.check_cfg(package='gtk+-2.0', variables=['includedir', 'prefix'], uselib_store='FOO') + print(conf.env.FOO_includedir) + """ self.validate_cfg(kw) - if'msg'in kw: - self.start_msg(kw['msg'],**kw) - ret=None + if 'msg' in kw: + self.start_msg(kw['msg'], **kw) + ret = None try: - ret=self.exec_cfg(kw) - except self.errors.WafError: - if'errmsg'in kw: - self.end_msg(kw['errmsg'],'YELLOW',**kw) - if Logs.verbose>1: - raise - else: - self.fatal('The configuration failed') + ret = self.exec_cfg(kw) + except self.errors.WafError as e: + if 'errmsg' in kw: + self.end_msg(kw['errmsg'], 'YELLOW', **kw) + if Logs.verbose > 1: + self.to_log('Command failure: %s' % e) + self.fatal('The configuration failed') else: if not ret: - ret=True - kw['success']=ret - if'okmsg'in kw: - self.end_msg(self.ret_msg(kw['okmsg'],kw),**kw) + ret = True + kw['success'] = ret + if 'okmsg' in kw: + self.end_msg(self.ret_msg(kw['okmsg'], kw), **kw) + return ret + def build_fun(bld): + """ + Build function that is used for running configuration tests with ``conf.check()`` + """ if bld.kw['compile_filename']: - node=bld.srcnode.make_node(bld.kw['compile_filename']) + node = bld.srcnode.make_node(bld.kw['compile_filename']) node.write(bld.kw['code']) - o=bld(features=bld.kw['features'],source=bld.kw['compile_filename'],target='testprog') - for k,v in bld.kw.items(): - setattr(o,k,v) + + o = bld(features=bld.kw['features'], source=bld.kw['compile_filename'], target='testprog') + + for k, v in bld.kw.items(): + setattr(o, k, v) + if not bld.kw.get('quiet'): - bld.conf.to_log("==>\n%s\n<=="%bld.kw['code']) + bld.conf.to_log("==>\n%s\n<==" % bld.kw['code']) + @conf -def validate_c(self,kw): - for x in('type_name','field_name','function_name'): +def validate_c(self, kw): + """ + Pre-checks the parameters that will be given to :py:func:`waflib.Configure.run_build` + + :param compiler: c or cxx (tries to guess what is best) + :type compiler: string + :param type: cprogram, cshlib, cstlib - not required if *features are given directly* + :type type: binary to create + :param feature: desired features for the task generator that will execute the test, for example ``cxx cxxstlib`` + :type feature: list of string + :param fragment: provide a piece of code for the test (default is to let the system create one) + :type fragment: string + 
:param uselib_store: define variables after the test is executed (IMPORTANT!) + :type uselib_store: string + :param use: parameters to use for building (just like the normal *use* keyword) + :type use: list of string + :param define_name: define to set when the check is over + :type define_name: string + :param execute: execute the resulting binary + :type execute: bool + :param define_ret: if execute is set to True, use the execution output in both the define and the return value + :type define_ret: bool + :param header_name: check for a particular header + :type header_name: string + :param auto_add_header_name: if header_name was set, add the headers in env.INCKEYS so the next tests will include these headers + :type auto_add_header_name: bool + """ + for x in ('type_name', 'field_name', 'function_name'): if x in kw: - Logs.warn('Invalid argument %r in test'%x) - if not'build_fun'in kw: - kw['build_fun']=build_fun - if not'env'in kw: - kw['env']=self.env.derive() - env=kw['env'] - if not'compiler'in kw and not'features'in kw: - kw['compiler']='c' + Logs.warn('Invalid argument %r in test' % x) + + if not 'build_fun' in kw: + kw['build_fun'] = build_fun + + if not 'env' in kw: + kw['env'] = self.env.derive() + env = kw['env'] + + if not 'compiler' in kw and not 'features' in kw: + kw['compiler'] = 'c' if env.CXX_NAME and Task.classes.get('cxx'): - kw['compiler']='cxx' + kw['compiler'] = 'cxx' if not self.env.CXX: self.fatal('a c++ compiler is required') else: if not self.env.CC: self.fatal('a c compiler is required') - if not'compile_mode'in kw: - kw['compile_mode']='c' - if'cxx'in Utils.to_list(kw.get('features',[]))or kw.get('compiler')=='cxx': - kw['compile_mode']='cxx' - if not'type'in kw: - kw['type']='cprogram' - if not'features'in kw: - if not'header_name'in kw or kw.get('link_header_test',True): - kw['features']=[kw['compile_mode'],kw['type']] + + if not 'compile_mode' in kw: + kw['compile_mode'] = 'c' + if 'cxx' in Utils.to_list(kw.get('features', [])) or kw.get('compiler') == 'cxx': + kw['compile_mode'] = 'cxx' + + if not 'type' in kw: + kw['type'] = 'cprogram' + + if not 'features' in kw: + if not 'header_name' in kw or kw.get('link_header_test', True): + kw['features'] = [kw['compile_mode'], kw['type']] # "c ccprogram" else: - kw['features']=[kw['compile_mode']] + kw['features'] = [kw['compile_mode']] else: - kw['features']=Utils.to_list(kw['features']) - if not'compile_filename'in kw: - kw['compile_filename']='test.c'+((kw['compile_mode']=='cxx')and'pp'or'') + kw['features'] = Utils.to_list(kw['features']) + + if not 'compile_filename' in kw: + kw['compile_filename'] = 'test.c' + ((kw['compile_mode'] == 'cxx') and 'pp' or '') + def to_header(dct): - if'header_name'in dct: - dct=Utils.to_list(dct['header_name']) - return''.join(['#include <%s>\n'%x for x in dct]) - return'' - if'framework_name'in kw: - fwkname=kw['framework_name'] - if not'uselib_store'in kw: - kw['uselib_store']=fwkname.upper() + if 'header_name' in dct: + dct = Utils.to_list(dct['header_name']) + return ''.join(['#include <%s>\n' % x for x in dct]) + return '' + + if 'framework_name' in kw: + # OSX, not sure this is used anywhere + fwkname = kw['framework_name'] + if not 'uselib_store' in kw: + kw['uselib_store'] = fwkname.upper() if not kw.get('no_header'): - fwk='%s/%s.h'%(fwkname,fwkname) + fwk = '%s/%s.h' % (fwkname, fwkname) if kw.get('remove_dot_h'): - fwk=fwk[:-2] - val=kw.get('header_name',[]) - kw['header_name']=Utils.to_list(val)+[fwk] - kw['msg']='Checking for framework %s'%fwkname - 
kw['framework']=fwkname - elif'header_name'in kw: - if not'msg'in kw: - kw['msg']='Checking for header %s'%kw['header_name'] - l=Utils.to_list(kw['header_name']) - assert len(l),'list of headers in header_name is empty' - kw['code']=to_header(kw)+SNIP_EMPTY_PROGRAM - if not'uselib_store'in kw: - kw['uselib_store']=l[0].upper() - if not'define_name'in kw: - kw['define_name']=self.have_define(l[0]) - if'lib'in kw: - if not'msg'in kw: - kw['msg']='Checking for library %s'%kw['lib'] - if not'uselib_store'in kw: - kw['uselib_store']=kw['lib'].upper() - if'stlib'in kw: - if not'msg'in kw: - kw['msg']='Checking for static library %s'%kw['stlib'] - if not'uselib_store'in kw: - kw['uselib_store']=kw['stlib'].upper() - if'fragment'in kw: - kw['code']=kw['fragment'] - if not'msg'in kw: - kw['msg']='Checking for code snippet' - if not'errmsg'in kw: - kw['errmsg']='no' - for(flagsname,flagstype)in(('cxxflags','compiler'),('cflags','compiler'),('linkflags','linker')): + fwk = fwk[:-2] + val = kw.get('header_name', []) + kw['header_name'] = Utils.to_list(val) + [fwk] + kw['msg'] = 'Checking for framework %s' % fwkname + kw['framework'] = fwkname + + elif 'header_name' in kw: + if not 'msg' in kw: + kw['msg'] = 'Checking for header %s' % kw['header_name'] + + l = Utils.to_list(kw['header_name']) + assert len(l), 'list of headers in header_name is empty' + + kw['code'] = to_header(kw) + SNIP_EMPTY_PROGRAM + if not 'uselib_store' in kw: + kw['uselib_store'] = l[0].upper() + if not 'define_name' in kw: + kw['define_name'] = self.have_define(l[0]) + + if 'lib' in kw: + if not 'msg' in kw: + kw['msg'] = 'Checking for library %s' % kw['lib'] + if not 'uselib_store' in kw: + kw['uselib_store'] = kw['lib'].upper() + + if 'stlib' in kw: + if not 'msg' in kw: + kw['msg'] = 'Checking for static library %s' % kw['stlib'] + if not 'uselib_store' in kw: + kw['uselib_store'] = kw['stlib'].upper() + + if 'fragment' in kw: + # an additional code fragment may be provided to replace the predefined code + # in custom headers + kw['code'] = kw['fragment'] + if not 'msg' in kw: + kw['msg'] = 'Checking for code snippet' + if not 'errmsg' in kw: + kw['errmsg'] = 'no' + + for (flagsname,flagstype) in (('cxxflags','compiler'), ('cflags','compiler'), ('linkflags','linker')): if flagsname in kw: - if not'msg'in kw: - kw['msg']='Checking for %s flags %s'%(flagstype,kw[flagsname]) - if not'errmsg'in kw: - kw['errmsg']='no' - if not'execute'in kw: - kw['execute']=False + if not 'msg' in kw: + kw['msg'] = 'Checking for %s flags %s' % (flagstype, kw[flagsname]) + if not 'errmsg' in kw: + kw['errmsg'] = 'no' + + if not 'execute' in kw: + kw['execute'] = False if kw['execute']: kw['features'].append('test_exec') - kw['chmod']=Utils.O755 - if not'errmsg'in kw: - kw['errmsg']='not found' - if not'okmsg'in kw: - kw['okmsg']='yes' - if not'code'in kw: - kw['code']=SNIP_EMPTY_PROGRAM + kw['chmod'] = Utils.O755 + + if not 'errmsg' in kw: + kw['errmsg'] = 'not found' + + if not 'okmsg' in kw: + kw['okmsg'] = 'yes' + + if not 'code' in kw: + kw['code'] = SNIP_EMPTY_PROGRAM + + # if there are headers to append automatically to the next tests if self.env[INCKEYS]: - kw['code']='\n'.join(['#include <%s>'%x for x in self.env[INCKEYS]])+'\n'+kw['code'] - if kw.get('merge_config_header')or env.merge_config_header: - kw['code']='%s\n\n%s'%(self.get_config_header(),kw['code']) - env.DEFINES=[] + kw['code'] = '\n'.join(['#include <%s>' % x for x in self.env[INCKEYS]]) + '\n' + kw['code'] + + # in case defines lead to very long command-lines + if 
kw.get('merge_config_header') or env.merge_config_header: + kw['code'] = '%s\n\n%s' % (self.get_config_header(), kw['code']) + env.DEFINES = [] # modify the copy + if not kw.get('success'): - kw['success']=None - if'define_name'in kw: + kw['success'] = None + + if 'define_name' in kw: self.undefine(kw['define_name']) - if not'msg'in kw: + if not 'msg' in kw: self.fatal('missing "msg" in conf.check(...)') + @conf -def post_check(self,*k,**kw): - is_success=0 +def post_check(self, *k, **kw): + """ + Sets the variables after a test executed in + :py:func:`waflib.Tools.c_config.check` was run successfully + """ + is_success = 0 if kw['execute']: - if kw['success']is not None: + if kw['success'] is not None: if kw.get('define_ret'): - is_success=kw['success'] + is_success = kw['success'] else: - is_success=(kw['success']==0) + is_success = (kw['success'] == 0) else: - is_success=(kw['success']==0) + is_success = (kw['success'] == 0) + if kw.get('define_name'): - comment=kw.get('comment','') - define_name=kw['define_name'] - if kw['execute']and kw.get('define_ret')and isinstance(is_success,str): - if kw.get('global_define',1): - self.define(define_name,is_success,quote=kw.get('quote',1),comment=comment) + comment = kw.get('comment', '') + define_name = kw['define_name'] + if kw['execute'] and kw.get('define_ret') and isinstance(is_success, str): + if kw.get('global_define', 1): + self.define(define_name, is_success, quote=kw.get('quote', 1), comment=comment) else: - if kw.get('quote',1): - succ='"%s"'%is_success + if kw.get('quote', 1): + succ = '"%s"' % is_success else: - succ=int(is_success) - val='%s=%s'%(define_name,succ) - var='DEFINES_%s'%kw['uselib_store'] - self.env.append_value(var,val) + succ = int(is_success) + val = '%s=%s' % (define_name, succ) + var = 'DEFINES_%s' % kw['uselib_store'] + self.env.append_value(var, val) else: - if kw.get('global_define',1): - self.define_cond(define_name,is_success,comment=comment) + if kw.get('global_define', 1): + self.define_cond(define_name, is_success, comment=comment) else: - var='DEFINES_%s'%kw['uselib_store'] - self.env.append_value(var,'%s=%s'%(define_name,int(is_success))) - if kw.get('add_have_to_env',1): + var = 'DEFINES_%s' % kw['uselib_store'] + self.env.append_value(var, '%s=%s' % (define_name, int(is_success))) + + # define conf.env.HAVE_X to 1 + if kw.get('add_have_to_env', 1): if kw.get('uselib_store'): - self.env[self.have_define(kw['uselib_store'])]=1 - elif kw['execute']and kw.get('define_ret'): - self.env[define_name]=is_success + self.env[self.have_define(kw['uselib_store'])] = 1 + elif kw['execute'] and kw.get('define_ret'): + self.env[define_name] = is_success else: - self.env[define_name]=int(is_success) - if'header_name'in kw: + self.env[define_name] = int(is_success) + + if 'header_name' in kw: if kw.get('auto_add_header_name'): - self.env.append_value(INCKEYS,Utils.to_list(kw['header_name'])) - if is_success and'uselib_store'in kw: + self.env.append_value(INCKEYS, Utils.to_list(kw['header_name'])) + + if is_success and 'uselib_store' in kw: from waflib.Tools import ccroot - _vars=set() + # See get_uselib_vars in ccroot.py + _vars = set() for x in kw['features']: if x in ccroot.USELIB_VARS: - _vars|=ccroot.USELIB_VARS[x] + _vars |= ccroot.USELIB_VARS[x] + for k in _vars: - x=k.lower() + x = k.lower() if x in kw: - self.env.append_value(k+'_'+kw['uselib_store'],kw[x]) + self.env.append_value(k + '_' + kw['uselib_store'], kw[x]) return is_success + @conf -def check(self,*k,**kw): +def check(self, *k, **kw): + """ + Performs 
a configuration test by calling :py:func:`waflib.Configure.run_build`.
+	For the complete list of parameters, see :py:func:`waflib.Tools.c_config.validate_c`.
+	To force a specific compiler, pass ``compiler='c'`` or ``compiler='cxx'`` to the list of arguments
+
+	Besides build targets, complete builds can be given through a build function. All files will
+	be written to a temporary directory::
+
+		def build(bld):
+			lib_node = bld.srcnode.make_node('libdir/liblc1.c')
+			lib_node.parent.mkdir()
+			lib_node.write('#include <stdio.h>\\nint lib_func(void) { FILE *f = fopen("foo", "r");}\\n', 'w')
+			bld(features='c cshlib', source=[lib_node], linkflags=conf.env.EXTRA_LDFLAGS, target='liblc')
+		conf.check(build_fun=build, msg=msg)
+	"""
 	self.validate_c(kw)
-	self.start_msg(kw['msg'],**kw)
-	ret=None
+	self.start_msg(kw['msg'], **kw)
+	ret = None
 	try:
-		ret=self.run_build(*k,**kw)
+		ret = self.run_build(*k, **kw)
 	except self.errors.ConfigurationError:
-		self.end_msg(kw['errmsg'],'YELLOW',**kw)
-		if Logs.verbose>1:
+		self.end_msg(kw['errmsg'], 'YELLOW', **kw)
+		if Logs.verbose > 1:
 			raise
 		else:
 			self.fatal('The configuration failed')
 	else:
-		kw['success']=ret
-		ret=self.post_check(*k,**kw)
+		kw['success'] = ret
+
+		ret = self.post_check(*k, **kw)
 		if not ret:
-			self.end_msg(kw['errmsg'],'YELLOW',**kw)
-			self.fatal('The configuration failed %r'%ret)
+			self.end_msg(kw['errmsg'], 'YELLOW', **kw)
+			self.fatal('The configuration failed %r' % ret)
 		else:
-			self.end_msg(self.ret_msg(kw['okmsg'],kw),**kw)
+			self.end_msg(self.ret_msg(kw['okmsg'], kw), **kw)
 	return ret
+
 class test_exec(Task.Task):
-	color='PINK'
+	"""
+	A task that runs programs after they are built. See :py:func:`waflib.Tools.c_config.test_exec_fun`.
+	"""
+	color = 'PINK'
 	def run(self):
-		if getattr(self.generator,'rpath',None):
-			if getattr(self.generator,'define_ret',False):
-				self.generator.bld.retval=self.generator.bld.cmd_and_log([self.inputs[0].abspath()])
+		cmd = [self.inputs[0].abspath()] + getattr(self.generator, 'test_args', [])
+		if getattr(self.generator, 'rpath', None):
+			if getattr(self.generator, 'define_ret', False):
+				self.generator.bld.retval = self.generator.bld.cmd_and_log(cmd)
 			else:
-				self.generator.bld.retval=self.generator.bld.exec_command([self.inputs[0].abspath()])
+				self.generator.bld.retval = self.generator.bld.exec_command(cmd)
 		else:
-			env=self.env.env or{}
+			env = self.env.env or {}
 			env.update(dict(os.environ))
-			for var in('LD_LIBRARY_PATH','DYLD_LIBRARY_PATH','PATH'):
-				env[var]=self.inputs[0].parent.abspath()+os.path.pathsep+env.get(var,'')
-			if getattr(self.generator,'define_ret',False):
-				self.generator.bld.retval=self.generator.bld.cmd_and_log([self.inputs[0].abspath()],env=env)
+			for var in ('LD_LIBRARY_PATH', 'DYLD_LIBRARY_PATH', 'PATH'):
+				env[var] = self.inputs[0].parent.abspath() + os.path.pathsep + env.get(var, '')
+			if getattr(self.generator, 'define_ret', False):
+				self.generator.bld.retval = self.generator.bld.cmd_and_log(cmd, env=env)
 			else:
-				self.generator.bld.retval=self.generator.bld.exec_command([self.inputs[0].abspath()],env=env)
+				self.generator.bld.retval = self.generator.bld.exec_command(cmd, env=env)
+
 @feature('test_exec')
 @after_method('apply_link')
 def test_exec_fun(self):
-	self.create_task('test_exec',self.link_task.outputs[0])
+	"""
+	The feature **test_exec** is used to create a task that will to execute the binary
+	created (link task output) during the build. The exit status will be set
+	on the build context, so only one program may have the feature *test_exec*.
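A hedged sketch of this machinery from the caller's side (the define name and the printed version string are invented)::

	def configure(conf):
		# execute=True appends the test_exec feature; define_ret=True
		# stores the captured stdout instead of a 0/1 status
		ver = conf.check_cc(
			fragment='#include <stdio.h>\nint main(void) { printf("1.2.3"); return 0; }',
			execute=True, define_ret=True,
			define_name='LIBFOO_VERSION', msg='Checking libfoo version')

Either way the binary is run through the *test_exec* feature shown here.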
+ This is used by configuration tests:: + + def configure(conf): + conf.check(execute=True) + """ + self.create_task('test_exec', self.link_task.outputs[0]) + +@conf +def check_cxx(self, *k, **kw): + """ + Runs a test with a task generator of the form:: + + conf.check(features='cxx cxxprogram', ...) + """ + kw['compiler'] = 'cxx' + return self.check(*k, **kw) + @conf -def check_cxx(self,*k,**kw): - kw['compiler']='cxx' - return self.check(*k,**kw) -@conf -def check_cc(self,*k,**kw): - kw['compiler']='c' - return self.check(*k,**kw) +def check_cc(self, *k, **kw): + """ + Runs a test with a task generator of the form:: + + conf.check(features='c cprogram', ...) + """ + kw['compiler'] = 'c' + return self.check(*k, **kw) + @conf -def set_define_comment(self,key,comment): - coms=self.env.DEFINE_COMMENTS +def set_define_comment(self, key, comment): + """ + Sets a comment that will appear in the configuration header + + :type key: string + :type comment: string + """ + coms = self.env.DEFINE_COMMENTS if not coms: - coms=self.env.DEFINE_COMMENTS={} - coms[key]=comment or'' + coms = self.env.DEFINE_COMMENTS = {} + coms[key] = comment or '' + @conf -def get_define_comment(self,key): - coms=self.env.DEFINE_COMMENTS or{} - return coms.get(key,'') +def get_define_comment(self, key): + """ + Returns the comment associated to a define + + :type key: string + """ + coms = self.env.DEFINE_COMMENTS or {} + return coms.get(key, '') + @conf -def define(self,key,val,quote=True,comment=''): - assert isinstance(key,str) +def define(self, key, val, quote=True, comment=''): + """ + Stores a single define and its state into ``conf.env.DEFINES``. The value is cast to an integer (0/1). + + :param key: define name + :type key: string + :param val: value + :type val: int or string + :param quote: enclose strings in quotes (yes by default) + :type quote: bool + """ + assert isinstance(key, str) if not key: return if val is True: - val=1 - elif val in(False,None): - val=0 - if isinstance(val,int)or isinstance(val,float): - s='%s=%s' - else: - s=quote and'%s="%s"'or'%s=%s' - app=s%(key,str(val)) - ban=key+'=' - lst=self.env.DEFINES + val = 1 + elif val in (False, None): + val = 0 + + if isinstance(val, int) or isinstance(val, float): + s = '%s=%s' + else: + s = quote and '%s="%s"' or '%s=%s' + app = s % (key, str(val)) + + ban = key + '=' + lst = self.env.DEFINES for x in lst: if x.startswith(ban): - lst[lst.index(x)]=app + lst[lst.index(x)] = app break else: - self.env.append_value('DEFINES',app) - self.env.append_unique(DEFKEYS,key) - self.set_define_comment(key,comment) + self.env.append_value('DEFINES', app) + + self.env.append_unique(DEFKEYS, key) + self.set_define_comment(key, comment) + @conf -def undefine(self,key,comment=''): - assert isinstance(key,str) +def undefine(self, key, comment=''): + """ + Removes a global define from ``conf.env.DEFINES`` + + :param key: define name + :type key: string + """ + assert isinstance(key, str) if not key: return - ban=key+'=' - lst=[x for x in self.env.DEFINES if not x.startswith(ban)] - self.env.DEFINES=lst - self.env.append_unique(DEFKEYS,key) - self.set_define_comment(key,comment) + ban = key + '=' + lst = [x for x in self.env.DEFINES if not x.startswith(ban)] + self.env.DEFINES = lst + self.env.append_unique(DEFKEYS, key) + self.set_define_comment(key, comment) + @conf -def define_cond(self,key,val,comment=''): - assert isinstance(key,str) +def define_cond(self, key, val, comment=''): + """ + Conditionally defines a name:: + + def configure(conf): + conf.define_cond('A', 
True) + # equivalent to: + # if val: conf.define('A', 1) + # else: conf.undefine('A') + + :param key: define name + :type key: string + :param val: value + :type val: int or string + """ + assert isinstance(key, str) if not key: return if val: - self.define(key,1,comment=comment) + self.define(key, 1, comment=comment) else: - self.undefine(key,comment=comment) + self.undefine(key, comment=comment) + @conf -def is_defined(self,key): - assert key and isinstance(key,str) - ban=key+'=' +def is_defined(self, key): + """ + Indicates whether a particular define is globally set in ``conf.env.DEFINES``. + + :param key: define name + :type key: string + :return: True if the define is set + :rtype: bool + """ + assert key and isinstance(key, str) + + ban = key + '=' for x in self.env.DEFINES: if x.startswith(ban): return True return False + @conf -def get_define(self,key): - assert key and isinstance(key,str) - ban=key+'=' +def get_define(self, key): + """ + Returns the value of an existing define, or None if not found + + :param key: define name + :type key: string + :rtype: string + """ + assert key and isinstance(key, str) + + ban = key + '=' for x in self.env.DEFINES: if x.startswith(ban): return x[len(ban):] return None + @conf -def have_define(self,key): - return(self.env.HAVE_PAT or'HAVE_%s')%Utils.quote_define_name(key) +def have_define(self, key): + """ + Returns a variable suitable for command-line or header use by removing invalid characters + and prefixing it with ``HAVE_`` + + :param key: define name + :type key: string + :return: the input key prefixed by *HAVE_* and substitute any invalid characters. + :rtype: string + """ + return (self.env.HAVE_PAT or 'HAVE_%s') % Utils.quote_define_name(key) + @conf -def write_config_header(self,configfile='',guard='',top=False,defines=True,headers=False,remove=True,define_prefix=''): +def write_config_header(self, configfile='', guard='', top=False, defines=True, headers=False, remove=True, define_prefix=''): + """ + Writes a configuration header containing defines and includes:: + + def configure(cnf): + cnf.define('A', 1) + cnf.write_config_header('config.h') + + This function only adds include guards (if necessary), consult + :py:func:`waflib.Tools.c_config.get_config_header` for details on the body. + + :param configfile: path to the file to create (relative or absolute) + :type configfile: string + :param guard: include guard name to add, by default it is computed from the file name + :type guard: string + :param top: write the configuration header from the build directory (default is from the current path) + :type top: bool + :param defines: add the defines (yes by default) + :type defines: bool + :param headers: add #include in the file + :type headers: bool + :param remove: remove the defines after they are added (yes by default, works like in autoconf) + :type remove: bool + :type define_prefix: string + :param define_prefix: prefix all the defines in the file with a particular prefix + """ if not configfile: - configfile=WAF_CONFIG_H - waf_guard=guard or'W_%s_WAF'%Utils.quote_define_name(configfile) - node=top and self.bldnode or self.path.get_bld() - node=node.make_node(configfile) + configfile = WAF_CONFIG_H + waf_guard = guard or 'W_%s_WAF' % Utils.quote_define_name(configfile) + + node = top and self.bldnode or self.path.get_bld() + node = node.make_node(configfile) node.parent.mkdir() - lst=['/* WARNING! All changes made to this file will be lost! 
*/\n']
- lst.append('#ifndef %s\n#define %s\n'%(waf_guard,waf_guard))
- lst.append(self.get_config_header(defines,headers,define_prefix=define_prefix))
- lst.append('\n#endif /* %s */\n'%waf_guard)
+
+ lst = ['/* WARNING! All changes made to this file will be lost! */\n']
+ lst.append('#ifndef %s\n#define %s\n' % (waf_guard, waf_guard))
+ lst.append(self.get_config_header(defines, headers, define_prefix=define_prefix))
+ lst.append('\n#endif /* %s */\n' % waf_guard)
+
 node.write('\n'.join(lst))
- self.env.append_unique(Build.CFG_FILES,[node.abspath()])
+
+ # config files must not be removed on "waf clean"
+ self.env.append_unique(Build.CFG_FILES, [node.abspath()])
+
 if remove:
 for key in self.env[DEFKEYS]:
 self.undefine(key)
- self.env[DEFKEYS]=[]
+ self.env[DEFKEYS] = []
+
 @conf
-def get_config_header(self,defines=True,headers=False,define_prefix=''):
- lst=[]
+def get_config_header(self, defines=True, headers=False, define_prefix=''):
+ """
+ Creates the contents of a ``config.h`` file from the defines and includes
+ set in conf.env.define_key / conf.env.include_key. No include guards are added.
+
+ A prelude will be added from the variable env.WAF_CONFIG_H_PRELUDE if provided. This
+ can be used to insert complex macros or include guards::
+
+ def configure(conf):
+ conf.env.WAF_CONFIG_H_PRELUDE = '#include <unistd.h>\\n'
+ conf.write_config_header('config.h')
+
+ :param defines: write the defines values
+ :type defines: bool
+ :param headers: write include entries for each element in self.env.INCKEYS
+ :type headers: bool
+ :type define_prefix: string
+ :param define_prefix: prefix all the defines with a particular prefix
+ :return: the contents of a ``config.h`` file
+ :rtype: string
+ """
+ lst = []
+
 if self.env.WAF_CONFIG_H_PRELUDE:
 lst.append(self.env.WAF_CONFIG_H_PRELUDE)
+
 if headers:
 for x in self.env[INCKEYS]:
- lst.append('#include <%s>'%x)
+ lst.append('#include <%s>' % x)
+
 if defines:
- tbl={}
+ tbl = {}
 for k in self.env.DEFINES:
- a,_,b=k.partition('=')
- tbl[a]=b
+ a, _, b = k.partition('=')
+ tbl[a] = b
+
 for k in self.env[DEFKEYS]:
- caption=self.get_define_comment(k)
+ caption = self.get_define_comment(k)
 if caption:
- caption=' /* %s */'%caption
+ caption = ' /* %s */' % caption
 try:
- txt='#define %s%s %s%s'%(define_prefix,k,tbl[k],caption)
+ txt = '#define %s%s %s%s' % (define_prefix, k, tbl[k], caption)
 except KeyError:
- txt='/* #undef %s%s */%s'%(define_prefix,k,caption)
+ txt = '/* #undef %s%s */%s' % (define_prefix, k, caption)
 lst.append(txt)
- return"\n".join(lst)
+ return "\n".join(lst)
+
 @conf
 def cc_add_flags(conf):
- conf.add_os_flags('CPPFLAGS',dup=False)
- conf.add_os_flags('CFLAGS',dup=False)
+ """
+ Adds CFLAGS / CPPFLAGS from os.environ to conf.env
+ """
+ conf.add_os_flags('CPPFLAGS', dup=False)
+ conf.add_os_flags('CFLAGS', dup=False)
+
 @conf
 def cxx_add_flags(conf):
- conf.add_os_flags('CPPFLAGS',dup=False)
- conf.add_os_flags('CXXFLAGS',dup=False)
+ """
+ Adds CXXFLAGS / CPPFLAGS from os.environ to conf.env
+ """
+ conf.add_os_flags('CPPFLAGS', dup=False)
+ conf.add_os_flags('CXXFLAGS', dup=False)
+
 @conf
 def link_add_flags(conf):
- conf.add_os_flags('LINKFLAGS',dup=False)
- conf.add_os_flags('LDFLAGS',dup=False)
+ """
+ Adds LINKFLAGS / LDFLAGS from os.environ to conf.env
+ """
+ conf.add_os_flags('LINKFLAGS', dup=False)
+ conf.add_os_flags('LDFLAGS', dup=False)
+
 @conf
 def cc_load_tools(conf):
+ """
+ Loads the Waf c extensions
+ """
 if not conf.env.DEST_OS:
- conf.env.DEST_OS=Utils.unversioned_sys_platform()
+ conf.env.DEST_OS =
Utils.unversioned_sys_platform() conf.load('c') + @conf def cxx_load_tools(conf): + """ + Loads the Waf c++ extensions + """ if not conf.env.DEST_OS: - conf.env.DEST_OS=Utils.unversioned_sys_platform() + conf.env.DEST_OS = Utils.unversioned_sys_platform() conf.load('cxx') + @conf -def get_cc_version(conf,cc,gcc=False,icc=False,clang=False): - cmd=cc+['-dM','-E','-'] - env=conf.env.env or None +def get_cc_version(conf, cc, gcc=False, icc=False, clang=False): + """ + Runs the preprocessor to determine the gcc/icc/clang version + + The variables CC_VERSION, DEST_OS, DEST_BINFMT and DEST_CPU will be set in *conf.env* + + :raise: :py:class:`waflib.Errors.ConfigurationError` + """ + cmd = cc + ['-dM', '-E', '-'] + env = conf.env.env or None try: - out,err=conf.cmd_and_log(cmd,output=0,input='\n'.encode(),env=env) + out, err = conf.cmd_and_log(cmd, output=0, input='\n'.encode(), env=env) except Errors.WafError: - conf.fatal('Could not determine the compiler version %r'%cmd) + conf.fatal('Could not determine the compiler version %r' % cmd) + if gcc: - if out.find('__INTEL_COMPILER')>=0: + if out.find('__INTEL_COMPILER') >= 0: conf.fatal('The intel compiler pretends to be gcc') - if out.find('__GNUC__')<0 and out.find('__clang__')<0: + if out.find('__GNUC__') < 0 and out.find('__clang__') < 0: conf.fatal('Could not determine the compiler type') - if icc and out.find('__INTEL_COMPILER')<0: + + if icc and out.find('__INTEL_COMPILER') < 0: conf.fatal('Not icc/icpc') - if clang and out.find('__clang__')<0: + + if clang and out.find('__clang__') < 0: conf.fatal('Not clang/clang++') - if not clang and out.find('__clang__')>=0: + if not clang and out.find('__clang__') >= 0: conf.fatal('Could not find gcc/g++ (only Clang), if renamed try eg: CC=gcc48 CXX=g++48 waf configure') - k={} + + k = {} if icc or gcc or clang: - out=out.splitlines() + out = out.splitlines() for line in out: - lst=shlex.split(line) + lst = shlex.split(line) if len(lst)>2: - key=lst[1] - val=lst[2] - k[key]=val + key = lst[1] + val = lst[2] + k[key] = val + def isD(var): return var in k + + # Some documentation is available at http://predef.sourceforge.net + # The names given to DEST_OS must match what Utils.unversioned_sys_platform() returns. if not conf.env.DEST_OS: - conf.env.DEST_OS='' + conf.env.DEST_OS = '' for i in MACRO_TO_DESTOS: if isD(i): - conf.env.DEST_OS=MACRO_TO_DESTOS[i] + conf.env.DEST_OS = MACRO_TO_DESTOS[i] break else: - if isD('__APPLE__')and isD('__MACH__'): - conf.env.DEST_OS='darwin' - elif isD('__unix__'): - conf.env.DEST_OS='generic' + if isD('__APPLE__') and isD('__MACH__'): + conf.env.DEST_OS = 'darwin' + elif isD('__unix__'): # unix must be tested last as it's a generic fallback + conf.env.DEST_OS = 'generic' + if isD('__ELF__'): - conf.env.DEST_BINFMT='elf' - elif isD('__WINNT__')or isD('__CYGWIN__')or isD('_WIN32'): - conf.env.DEST_BINFMT='pe' + conf.env.DEST_BINFMT = 'elf' + elif isD('__WINNT__') or isD('__CYGWIN__') or isD('_WIN32'): + conf.env.DEST_BINFMT = 'pe' if not conf.env.IMPLIBDIR: - conf.env.IMPLIBDIR=conf.env.LIBDIR - conf.env.LIBDIR=conf.env.BINDIR + conf.env.IMPLIBDIR = conf.env.LIBDIR # for .lib or .dll.a files + conf.env.LIBDIR = conf.env.BINDIR elif isD('__APPLE__'): - conf.env.DEST_BINFMT='mac-o' + conf.env.DEST_BINFMT = 'mac-o' + if not conf.env.DEST_BINFMT: - conf.env.DEST_BINFMT=Utils.destos_to_binfmt(conf.env.DEST_OS) + # Infer the binary format from the os name. 
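# (Editor's sketch, not part of the patch: the fallback used on the next line,
# Utils.destos_to_binfmt, maps the unversioned platform name to a binary format
# roughly as follows -- shown for illustration only:
#
#   def destos_to_binfmt(key):
#       if key == 'darwin':
#           return 'mac-o'
#       elif key in ('win32', 'cygwin', 'uwin', 'msys'):
#           return 'pe'
#       return 'elf'  # default for unix-like systems
# )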
+ conf.env.DEST_BINFMT = Utils.destos_to_binfmt(conf.env.DEST_OS)
+
 for i in MACRO_TO_DEST_CPU:
 if isD(i):
- conf.env.DEST_CPU=MACRO_TO_DEST_CPU[i]
+ conf.env.DEST_CPU = MACRO_TO_DEST_CPU[i]
 break
- Logs.debug('ccroot: dest platform: '+' '.join([conf.env[x]or'?'for x in('DEST_OS','DEST_BINFMT','DEST_CPU')]))
+
+ Logs.debug('ccroot: dest platform: ' + ' '.join([conf.env[x] or '?' for x in ('DEST_OS', 'DEST_BINFMT', 'DEST_CPU')]))
 if icc:
- ver=k['__INTEL_COMPILER']
- conf.env.CC_VERSION=(ver[:-2],ver[-2],ver[-1])
+ ver = k['__INTEL_COMPILER']
+ conf.env.CC_VERSION = (ver[:-2], ver[-2], ver[-1])
 else:
- if isD('__clang__')and isD('__clang_major__'):
- conf.env.CC_VERSION=(k['__clang_major__'],k['__clang_minor__'],k['__clang_patchlevel__'])
+ if isD('__clang__') and isD('__clang_major__'):
+ conf.env.CC_VERSION = (k['__clang_major__'], k['__clang_minor__'], k['__clang_patchlevel__'])
 else:
- conf.env.CC_VERSION=(k['__GNUC__'],k['__GNUC_MINOR__'],k.get('__GNUC_PATCHLEVEL__','0'))
+ # older clang versions and gcc
+ conf.env.CC_VERSION = (k['__GNUC__'], k['__GNUC_MINOR__'], k.get('__GNUC_PATCHLEVEL__', '0'))
 return k
+
 @conf
-def get_xlc_version(conf,cc):
- cmd=cc+['-qversion']
+def get_xlc_version(conf, cc):
+ """
+ Returns the Aix compiler version
+
+ :raise: :py:class:`waflib.Errors.ConfigurationError`
+ """
+ cmd = cc + ['-qversion']
 try:
- out,err=conf.cmd_and_log(cmd,output=0)
+ out, err = conf.cmd_and_log(cmd, output=0)
 except Errors.WafError:
- conf.fatal('Could not find xlc %r'%cmd)
- for v in(r"IBM XL C/C\+\+.* V(?P<major>\d*)\.(?P<minor>\d*)",):
- version_re=re.compile(v,re.I).search
- match=version_re(out or err)
+ conf.fatal('Could not find xlc %r' % cmd)
+
+ # the intention is to catch the 8.0 in "IBM XL C/C++ Enterprise Edition V8.0 for AIX..."
+ for v in (r"IBM XL C/C\+\+.* V(?P<major>\d*)\.(?P<minor>\d*)",):
+ version_re = re.compile(v, re.I).search
+ match = version_re(out or err)
 if match:
- k=match.groupdict()
- conf.env.CC_VERSION=(k['major'],k['minor'])
+ k = match.groupdict()
+ conf.env.CC_VERSION = (k['major'], k['minor'])
 break
 else:
 conf.fatal('Could not determine the XLC version.')
+
 @conf
-def get_suncc_version(conf,cc):
- cmd=cc+['-V']
+def get_suncc_version(conf, cc):
+ """
+ Returns the Sun compiler version
+
+ :raise: :py:class:`waflib.Errors.ConfigurationError`
+ """
+ cmd = cc + ['-V']
 try:
- out,err=conf.cmd_and_log(cmd,output=0)
+ out, err = conf.cmd_and_log(cmd, output=0)
 except Errors.WafError as e:
- if not(hasattr(e,'returncode')and hasattr(e,'stdout')and hasattr(e,'stderr')):
- conf.fatal('Could not find suncc %r'%cmd)
- out=e.stdout
- err=e.stderr
- version=(out or err)
- version=version.splitlines()[0]
- version_re=re.compile(r'cc: (studio.*?|\s+)?(sun\s+(c\+\+|c)|(WorkShop\s+Compilers))?\s+(?P<major>\d*)\.(?P<minor>\d*)',re.I).search
- match=version_re(version)
+ # Older versions of the compiler exit with non-zero status when reporting their version
+ if not (hasattr(e, 'returncode') and hasattr(e, 'stdout') and hasattr(e, 'stderr')):
+ conf.fatal('Could not find suncc %r' % cmd)
+ out = e.stdout
+ err = e.stderr
+
+ version = (out or err)
+ version = version.splitlines()[0]
+
+ # cc: Sun C 5.10 SunOS_i386 2009/06/03
+ # cc: Studio 12.5 Sun C++ 5.14 SunOS_sparc Beta 2015/11/17
+ # cc: WorkShop Compilers 5.0 98/12/15 C 5.0
+ version_re = re.compile(r'cc: (studio.*?|\s+)?(sun\s+(c\+\+|c)|(WorkShop\s+Compilers))?\s+(?P<major>\d*)\.(?P<minor>\d*)', re.I).search
+ match = version_re(version)
 if match:
- k=match.groupdict()
- conf.env.CC_VERSION=(k['major'],k['minor'])
+ k = match.groupdict()
+ conf.env.CC_VERSION = (k['major'], k['minor'])
 else:
 conf.fatal('Could not determine the suncc version.')
+
+# ============ the --as-needed flag should be added during the configuration, not at runtime =========
+
 @conf
 def add_as_needed(self):
- if self.env.DEST_BINFMT=='elf'and'gcc'in(self.env.CXX_NAME,self.env.CC_NAME):
- self.env.append_unique('LINKFLAGS','-Wl,--as-needed')
+ """
+ Adds ``--as-needed`` to the *LINKFLAGS*
+ On some platforms, it is a default flag. In some cases (e.g., in NS-3) it is necessary to explicitly disable this feature with the `-Wl,--no-as-needed` flag.
+ """
+ if self.env.DEST_BINFMT == 'elf' and 'gcc' in (self.env.CXX_NAME, self.env.CC_NAME):
+ self.env.append_unique('LINKFLAGS', '-Wl,--as-needed')
+
+# ============ parallel configuration
+
 class cfgtask(Task.Task):
- def __init__(self,*k,**kw):
- Task.Task.__init__(self,*k,**kw)
- self.run_after=set()
+ """
+ A task that executes build configuration tests (calls conf.check)
+
+ Make sure to use locks if concurrent access to the same conf.env data is necessary.
+ """
+ def __init__(self, *k, **kw):
+ Task.Task.__init__(self, *k, **kw)
+ self.run_after = set()
+
 def display(self):
- return''
+ return ''
+
 def runnable_status(self):
 for x in self.run_after:
 if not x.hasrun:
 return Task.ASK_LATER
 return Task.RUN_ME
+
 def uid(self):
 return Utils.SIG_NIL
+
 def signature(self):
 return Utils.SIG_NIL
+
 def run(self):
- conf=self.conf
- bld=Build.BuildContext(top_dir=conf.srcnode.abspath(),out_dir=conf.bldnode.abspath())
- bld.env=conf.env
+ conf = self.conf
+ bld = Build.BuildContext(top_dir=conf.srcnode.abspath(), out_dir=conf.bldnode.abspath())
+ bld.env = conf.env
 bld.init_dirs()
- bld.in_msg=1
- bld.logger=self.logger
- bld.multicheck_task=self
- args=self.args
+ bld.in_msg = 1 # suppress top-level start_msg
+ bld.logger = self.logger
+ bld.multicheck_task = self
+ args = self.args
 try:
- if'func'in args:
- bld.test(build_fun=args['func'],msg=args.get('msg',''),okmsg=args.get('okmsg',''),errmsg=args.get('errmsg',''),)
+ if 'func' in args:
+ bld.test(build_fun=args['func'],
+ msg=args.get('msg', ''),
+ okmsg=args.get('okmsg', ''),
+ errmsg=args.get('errmsg', ''),
+ )
 else:
- args['multicheck_mandatory']=args.get('mandatory',True)
- args['mandatory']=True
+ args['multicheck_mandatory'] = args.get('mandatory', True)
+ args['mandatory'] = True
 try:
 bld.check(**args)
 finally:
- args['mandatory']=args['multicheck_mandatory']
+ args['mandatory'] = args['multicheck_mandatory']
 except Exception:
 return 1
+
 def process(self):
 Task.Task.process(self)
- if'msg'in self.args:
+ if 'msg' in self.args:
 with self.generator.bld.multicheck_lock:
 self.conf.start_msg(self.args['msg'])
- if self.hasrun==Task.NOT_RUN:
- self.conf.end_msg('test cancelled','YELLOW')
- elif self.hasrun!=Task.SUCCESS:
- self.conf.end_msg(self.args.get('errmsg','no'),'YELLOW')
+ if self.hasrun == Task.NOT_RUN:
+ self.conf.end_msg('test cancelled', 'YELLOW')
+ elif self.hasrun != Task.SUCCESS:
+ self.conf.end_msg(self.args.get('errmsg', 'no'), 'YELLOW')
 else:
- self.conf.end_msg(self.args.get('okmsg','yes'),'GREEN')
+ self.conf.end_msg(self.args.get('okmsg', 'yes'), 'GREEN')
+
 @conf
-def multicheck(self,*k,**kw):
- self.start_msg(kw.get('msg','Executing %d configuration tests'%len(k)),**kw)
- for var in('DEFINES',DEFKEYS):
- self.env.append_value(var,[])
- self.env.DEFINE_COMMENTS=self.env.DEFINE_COMMENTS or{}
+def multicheck(self, *k, **kw):
+ """
+ Runs configuration tests in parallel; results are printed sequentially at the end of the build
+ but each test must
provide its own msg value to display a line:: + + def test_build(ctx): + ctx.in_msg = True # suppress console outputs + ctx.check_large_file(mandatory=False) + + conf.multicheck( + {'header_name':'stdio.h', 'msg':'... stdio', 'uselib_store':'STDIO', 'global_define':False}, + {'header_name':'xyztabcd.h', 'msg':'... optional xyztabcd.h', 'mandatory': False}, + {'header_name':'stdlib.h', 'msg':'... stdlib', 'okmsg': 'aye', 'errmsg': 'nope'}, + {'func': test_build, 'msg':'... testing an arbitrary build function', 'okmsg':'ok'}, + msg = 'Checking for headers in parallel', + mandatory = True, # mandatory tests raise an error at the end + run_all_tests = True, # try running all tests + ) + + The configuration tests may modify the values in conf.env in any order, and the define + values can affect configuration tests being executed. It is hence recommended + to provide `uselib_store` values with `global_define=False` to prevent such issues. + """ + self.start_msg(kw.get('msg', 'Executing %d configuration tests' % len(k)), **kw) + + # Force a copy so that threads append to the same list at least + # no order is guaranteed, but the values should not disappear at least + for var in ('DEFINES', DEFKEYS): + self.env.append_value(var, []) + self.env.DEFINE_COMMENTS = self.env.DEFINE_COMMENTS or {} + + # define a task object that will execute our tests class par(object): def __init__(self): - self.keep=False - self.task_sigs={} - self.progress_bar=0 + self.keep = False + self.task_sigs = {} + self.progress_bar = 0 def total(self): return len(tasks) - def to_log(self,*k,**kw): + def to_log(self, *k, **kw): return - bld=par() - bld.keep=kw.get('run_all_tests',True) - bld.imp_sigs={} - tasks=[] - id_to_task={} + + bld = par() + bld.keep = kw.get('run_all_tests', True) + bld.imp_sigs = {} + tasks = [] + + id_to_task = {} for dct in k: - x=Task.classes['cfgtask'](bld=bld,env=None) + x = Task.classes['cfgtask'](bld=bld, env=None) tasks.append(x) - x.args=dct - x.bld=bld - x.conf=self - x.args=dct - x.logger=Logs.make_mem_logger(str(id(x)),self.logger) - if'id'in dct: - id_to_task[dct['id']]=x + x.args = dct + x.bld = bld + x.conf = self + x.args = dct + + # bind a logger that will keep the info in memory + x.logger = Logs.make_mem_logger(str(id(x)), self.logger) + + if 'id' in dct: + id_to_task[dct['id']] = x + + # second pass to set dependencies with after_test/before_test for x in tasks: - for key in Utils.to_list(x.args.get('before_tests',[])): - tsk=id_to_task[key] + for key in Utils.to_list(x.args.get('before_tests', [])): + tsk = id_to_task[key] if not tsk: - raise ValueError('No test named %r'%key) + raise ValueError('No test named %r' % key) tsk.run_after.add(x) - for key in Utils.to_list(x.args.get('after_tests',[])): - tsk=id_to_task[key] + for key in Utils.to_list(x.args.get('after_tests', [])): + tsk = id_to_task[key] if not tsk: - raise ValueError('No test named %r'%key) + raise ValueError('No test named %r' % key) x.run_after.add(tsk) + def it(): yield tasks while 1: - yield[] - bld.producer=p=Runner.Parallel(bld,Options.options.jobs) - bld.multicheck_lock=Utils.threading.Lock() - p.biter=it() + yield [] + bld.producer = p = Runner.Parallel(bld, Options.options.jobs) + bld.multicheck_lock = Utils.threading.Lock() + p.biter = it() + self.end_msg('started') p.start() + + # flush the logs in order into the config.log for x in tasks: x.logger.memhandler.flush() + self.start_msg('-> processing test results') if p.error: for x in p.error: - if getattr(x,'err_msg',None): + if getattr(x, 'err_msg', 
None):
 self.to_log(x.err_msg)
- self.end_msg('fail',color='RED')
+ self.end_msg('fail', color='RED')
 raise Errors.WafError('There is an error in the library, read config.log for more information')
- failure_count=0
+
+ failure_count = 0
 for x in tasks:
- if x.hasrun not in(Task.SUCCESS,Task.NOT_RUN):
- failure_count+=1
+ if x.hasrun not in (Task.SUCCESS, Task.NOT_RUN):
+ failure_count += 1
+
 if failure_count:
- self.end_msg(kw.get('errmsg','%s test failed'%failure_count),color='YELLOW',**kw)
+ self.end_msg(kw.get('errmsg', '%s test failed' % failure_count), color='YELLOW', **kw)
 else:
- self.end_msg('all ok',**kw)
+ self.end_msg('all ok', **kw)
+
 for x in tasks:
- if x.hasrun!=Task.SUCCESS:
- if x.args.get('mandatory',True):
- self.fatal(kw.get('fatalmsg')or'One of the tests has failed, read config.log for more information')
+ if x.hasrun != Task.SUCCESS:
+ if x.args.get('mandatory', True):
+ self.fatal(kw.get('fatalmsg') or 'One of the tests has failed, read config.log for more information')
+
 @conf
-def check_gcc_o_space(self,mode='c'):
- if int(self.env.CC_VERSION[0])>4:
+def check_gcc_o_space(self, mode='c'):
+ if int(self.env.CC_VERSION[0]) > 4:
+ # this is for old compilers
 return
 self.env.stash()
- if mode=='c':
- self.env.CCLNK_TGT_F=['-o','']
- elif mode=='cxx':
- self.env.CXXLNK_TGT_F=['-o','']
- features='%s %sshlib'%(mode,mode)
+ if mode == 'c':
+ self.env.CCLNK_TGT_F = ['-o', '']
+ elif mode == 'cxx':
+ self.env.CXXLNK_TGT_F = ['-o', '']
+ features = '%s %sshlib' % (mode, mode)
 try:
- self.check(msg='Checking if the -o link must be split from arguments',fragment=SNIP_EMPTY_PROGRAM,features=features)
+ self.check(msg='Checking if the -o link must be split from arguments', fragment=SNIP_EMPTY_PROGRAM, features=features)
 except self.errors.ConfigurationError:
 self.env.revert()
 else:
 self.env.commit()
+
diff -Nru lilv-0.24.4~dfsg0/waflib/Tools/ccroot.py lilv-0.24.6/waflib/Tools/ccroot.py
--- lilv-0.24.4~dfsg0/waflib/Tools/ccroot.py 2018-06-27 05:22:05.000000000 +0000
+++ lilv-0.24.6/waflib/Tools/ccroot.py 2019-06-06 20:19:08.000000000 +0000
@@ -1,220 +1,383 @@
-#! /usr/bin/env python
+#!/usr/bin/env python
 # encoding: utf-8
-# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+# Thomas Nagy, 2005-2018 (ita)
-import os,re
-from waflib import Task,Utils,Node,Errors,Logs
-from waflib.TaskGen import after_method,before_method,feature,taskgen_method,extension
-from waflib.Tools import c_aliases,c_preproc,c_config,c_osx,c_tests
+"""
+Classes and methods shared by tools providing support for C-like languages such
+as C/C++/D/Assembly/Go (this support module is almost never used alone).
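(Editor's note, not lines from the patch: for orientation, a minimal wscript exercising this module could look like the sketch below; it relies on the standard ``bld.stlib``/``bld.program`` aliases provided via c_aliases, and the file names are hypothetical)::

	def configure(conf):
		conf.load('compiler_c')

	def build(bld):
		bld.stlib(source='util.c', target='util')
		bld.program(source='main.c', target='app', use='util')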
+""" + +import os, re +from waflib import Task, Utils, Node, Errors, Logs +from waflib.TaskGen import after_method, before_method, feature, taskgen_method, extension +from waflib.Tools import c_aliases, c_preproc, c_config, c_osx, c_tests from waflib.Configure import conf -SYSTEM_LIB_PATHS=['/usr/lib64','/usr/lib','/usr/local/lib64','/usr/local/lib'] -USELIB_VARS=Utils.defaultdict(set) -USELIB_VARS['c']=set(['INCLUDES','FRAMEWORKPATH','DEFINES','CPPFLAGS','CCDEPS','CFLAGS','ARCH']) -USELIB_VARS['cxx']=set(['INCLUDES','FRAMEWORKPATH','DEFINES','CPPFLAGS','CXXDEPS','CXXFLAGS','ARCH']) -USELIB_VARS['d']=set(['INCLUDES','DFLAGS']) -USELIB_VARS['includes']=set(['INCLUDES','FRAMEWORKPATH','ARCH']) -USELIB_VARS['cprogram']=USELIB_VARS['cxxprogram']=set(['LIB','STLIB','LIBPATH','STLIBPATH','LINKFLAGS','RPATH','LINKDEPS','FRAMEWORK','FRAMEWORKPATH','ARCH','LDFLAGS']) -USELIB_VARS['cshlib']=USELIB_VARS['cxxshlib']=set(['LIB','STLIB','LIBPATH','STLIBPATH','LINKFLAGS','RPATH','LINKDEPS','FRAMEWORK','FRAMEWORKPATH','ARCH','LDFLAGS']) -USELIB_VARS['cstlib']=USELIB_VARS['cxxstlib']=set(['ARFLAGS','LINKDEPS']) -USELIB_VARS['dprogram']=set(['LIB','STLIB','LIBPATH','STLIBPATH','LINKFLAGS','RPATH','LINKDEPS']) -USELIB_VARS['dshlib']=set(['LIB','STLIB','LIBPATH','STLIBPATH','LINKFLAGS','RPATH','LINKDEPS']) -USELIB_VARS['dstlib']=set(['ARFLAGS','LINKDEPS']) -USELIB_VARS['asm']=set(['ASFLAGS']) + +SYSTEM_LIB_PATHS = ['/usr/lib64', '/usr/lib', '/usr/local/lib64', '/usr/local/lib'] + +USELIB_VARS = Utils.defaultdict(set) +""" +Mapping for features to :py:class:`waflib.ConfigSet.ConfigSet` variables. See :py:func:`waflib.Tools.ccroot.propagate_uselib_vars`. +""" + +USELIB_VARS['c'] = set(['INCLUDES', 'FRAMEWORKPATH', 'DEFINES', 'CPPFLAGS', 'CCDEPS', 'CFLAGS', 'ARCH']) +USELIB_VARS['cxx'] = set(['INCLUDES', 'FRAMEWORKPATH', 'DEFINES', 'CPPFLAGS', 'CXXDEPS', 'CXXFLAGS', 'ARCH']) +USELIB_VARS['d'] = set(['INCLUDES', 'DFLAGS']) +USELIB_VARS['includes'] = set(['INCLUDES', 'FRAMEWORKPATH', 'ARCH']) + +USELIB_VARS['cprogram'] = USELIB_VARS['cxxprogram'] = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS', 'FRAMEWORK', 'FRAMEWORKPATH', 'ARCH', 'LDFLAGS']) +USELIB_VARS['cshlib'] = USELIB_VARS['cxxshlib'] = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS', 'FRAMEWORK', 'FRAMEWORKPATH', 'ARCH', 'LDFLAGS']) +USELIB_VARS['cstlib'] = USELIB_VARS['cxxstlib'] = set(['ARFLAGS', 'LINKDEPS']) + +USELIB_VARS['dprogram'] = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS']) +USELIB_VARS['dshlib'] = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS']) +USELIB_VARS['dstlib'] = set(['ARFLAGS', 'LINKDEPS']) + +USELIB_VARS['asm'] = set(['ASFLAGS']) + +# ================================================================================================= + @taskgen_method -def create_compiled_task(self,name,node): - out='%s.%d.o'%(node.name,self.idx) - task=self.create_task(name,node,node.parent.find_or_declare(out)) +def create_compiled_task(self, name, node): + """ + Create the compilation task: c, cxx, asm, etc. The output node is created automatically (object file with a typical **.o** extension). 
+ The task is appended to the list *compiled_tasks* which is then used by :py:func:`waflib.Tools.ccroot.apply_link` + + :param name: name of the task class + :type name: string + :param node: the file to compile + :type node: :py:class:`waflib.Node.Node` + :return: The task created + :rtype: :py:class:`waflib.Task.Task` + """ + out = '%s.%d.o' % (node.name, self.idx) + task = self.create_task(name, node, node.parent.find_or_declare(out)) try: self.compiled_tasks.append(task) except AttributeError: - self.compiled_tasks=[task] + self.compiled_tasks = [task] return task + @taskgen_method -def to_incnodes(self,inlst): - lst=[] - seen=set() +def to_incnodes(self, inlst): + """ + Task generator method provided to convert a list of string/nodes into a list of includes folders. + + The paths are assumed to be relative to the task generator path, except if they begin by **#** + in which case they are searched from the top-level directory (``bld.srcnode``). + The folders are simply assumed to be existing. + + The node objects in the list are returned in the output list. The strings are converted + into node objects if possible. The node is searched from the source directory, and if a match is found, + the equivalent build directory is created and added to the returned list too. When a folder cannot be found, it is ignored. + + :param inlst: list of folders + :type inlst: space-delimited string or a list of string/nodes + :rtype: list of :py:class:`waflib.Node.Node` + :return: list of include folders as nodes + """ + lst = [] + seen = set() for x in self.to_list(inlst): if x in seen or not x: continue seen.add(x) - if isinstance(x,Node.Node): + + # with a real lot of targets, it is sometimes interesting to cache the results below + if isinstance(x, Node.Node): lst.append(x) else: if os.path.isabs(x): - lst.append(self.bld.root.make_node(x)or x) + lst.append(self.bld.root.make_node(x) or x) else: - if x[0]=='#': - p=self.bld.bldnode.make_node(x[1:]) - v=self.bld.srcnode.make_node(x[1:]) + if x[0] == '#': + p = self.bld.bldnode.make_node(x[1:]) + v = self.bld.srcnode.make_node(x[1:]) else: - p=self.path.get_bld().make_node(x) - v=self.path.make_node(x) + p = self.path.get_bld().make_node(x) + v = self.path.make_node(x) if p.is_child_of(self.bld.bldnode): p.mkdir() lst.append(p) lst.append(v) return lst -@feature('c','cxx','d','asm','fc','includes') -@after_method('propagate_uselib_vars','process_source') + +@feature('c', 'cxx', 'd', 'asm', 'fc', 'includes') +@after_method('propagate_uselib_vars', 'process_source') def apply_incpaths(self): - lst=self.to_incnodes(self.to_list(getattr(self,'includes',[]))+self.env.INCLUDES) - self.includes_nodes=lst - cwd=self.get_cwd() - self.env.INCPATHS=[x.path_from(cwd)for x in lst] + """ + Task generator method that processes the attribute *includes*:: + + tg = bld(features='includes', includes='.') + + The folders only need to be relative to the current directory, the equivalent build directory is + added automatically (for headers created in the build directory). This enables using a build directory + or not (``top == out``). + + This method will add a list of nodes read by :py:func:`waflib.Tools.ccroot.to_incnodes` in ``tg.env.INCPATHS``, + and the list of include paths in ``tg.env.INCLUDES``. 
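+ (Editor's illustration, not lines from the patch: a sketch of the ``#`` convention described in ``to_incnodes`` above, with hypothetical folder names -- ``.`` resolves against the task generator path, while ``#generated`` is searched from the project top level)::
+
+ def build(bld):
+ bld(features='c cprogram', source='main.c', target='app',
+ includes='. #generated')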
+ """ + + lst = self.to_incnodes(self.to_list(getattr(self, 'includes', [])) + self.env.INCLUDES) + self.includes_nodes = lst + cwd = self.get_cwd() + self.env.INCPATHS = [x.path_from(cwd) for x in lst] + class link_task(Task.Task): - color='YELLOW' - weight=3 - inst_to=None - chmod=Utils.O755 - def add_target(self,target): - if isinstance(target,str): - base=self.generator.path + """ + Base class for all link tasks. A task generator is supposed to have at most one link task bound in the attribute *link_task*. See :py:func:`waflib.Tools.ccroot.apply_link`. + + .. inheritance-diagram:: waflib.Tools.ccroot.stlink_task waflib.Tools.c.cprogram waflib.Tools.c.cshlib waflib.Tools.cxx.cxxstlib waflib.Tools.cxx.cxxprogram waflib.Tools.cxx.cxxshlib waflib.Tools.d.dprogram waflib.Tools.d.dshlib waflib.Tools.d.dstlib waflib.Tools.ccroot.fake_shlib waflib.Tools.ccroot.fake_stlib waflib.Tools.asm.asmprogram waflib.Tools.asm.asmshlib waflib.Tools.asm.asmstlib + """ + color = 'YELLOW' + + weight = 3 + """Try to process link tasks as early as possible""" + + inst_to = None + """Default installation path for the link task outputs, or None to disable""" + + chmod = Utils.O755 + """Default installation mode for the link task outputs""" + + def add_target(self, target): + """ + Process the *target* attribute to add the platform-specific prefix/suffix such as *.so* or *.exe*. + The settings are retrieved from ``env.clsname_PATTERN`` + """ + if isinstance(target, str): + base = self.generator.path if target.startswith('#'): - target=target[1:] - base=self.generator.bld.bldnode - pattern=self.env[self.__class__.__name__+'_PATTERN'] + # for those who like flat structures + target = target[1:] + base = self.generator.bld.bldnode + + pattern = self.env[self.__class__.__name__ + '_PATTERN'] if not pattern: - pattern='%s' - folder,name=os.path.split(target) - if self.__class__.__name__.find('shlib')>0 and getattr(self.generator,'vnum',None): - nums=self.generator.vnum.split('.') - if self.env.DEST_BINFMT=='pe': - name=name+'-'+nums[0] - elif self.env.DEST_OS=='openbsd': - pattern='%s.%s'%(pattern,nums[0]) - if len(nums)>=2: - pattern+='.%s'%nums[1] + pattern = '%s' + folder, name = os.path.split(target) + + if self.__class__.__name__.find('shlib') > 0 and getattr(self.generator, 'vnum', None): + nums = self.generator.vnum.split('.') + if self.env.DEST_BINFMT == 'pe': + # include the version in the dll file name, + # the import lib file name stays unversioned. + name = name + '-' + nums[0] + elif self.env.DEST_OS == 'openbsd': + pattern = '%s.%s' % (pattern, nums[0]) + if len(nums) >= 2: + pattern += '.%s' % nums[1] + if folder: - tmp=folder+os.sep+pattern%name + tmp = folder + os.sep + pattern % name else: - tmp=pattern%name - target=base.find_or_declare(tmp) + tmp = pattern % name + target = base.find_or_declare(tmp) self.set_outputs(target) - def exec_command(self,*k,**kw): - ret=super(link_task,self).exec_command(*k,**kw) + + def exec_command(self, *k, **kw): + ret = super(link_task, self).exec_command(*k, **kw) if not ret and self.env.DO_MANIFEST: - ret=self.exec_mf() + ret = self.exec_mf() return ret + def exec_mf(self): + """ + Create manifest files for VS-like compilers (msvc, ifort, ...) + """ if not self.env.MT: return 0 - manifest=None + + manifest = None for out_node in self.outputs: if out_node.name.endswith('.manifest'): - manifest=out_node.abspath() + manifest = out_node.abspath() break else: + # Should never get here. 
If we do, it means the manifest file was
+ # never added to the outputs list, thus we don't have a manifest file
+ # to embed, so we just return.
 return 0
- mode=''
+
+ # embedding mode. Different for EXE's and DLL's.
+ # see: http://msdn2.microsoft.com/en-us/library/ms235591(VS.80).aspx
+ mode = ''
 for x in Utils.to_list(self.generator.features):
- if x in('cprogram','cxxprogram','fcprogram','fcprogram_test'):
- mode=1
- elif x in('cshlib','cxxshlib','fcshlib'):
- mode=2
- Logs.debug('msvc: embedding manifest in mode %r',mode)
- lst=[]+self.env.MT
+ if x in ('cprogram', 'cxxprogram', 'fcprogram', 'fcprogram_test'):
+ mode = 1
+ elif x in ('cshlib', 'cxxshlib', 'fcshlib'):
+ mode = 2
+
+ Logs.debug('msvc: embedding manifest in mode %r', mode)
+
+ lst = [] + self.env.MT
 lst.extend(Utils.to_list(self.env.MTFLAGS))
- lst.extend(['-manifest',manifest])
- lst.append('-outputresource:%s;%s'%(self.outputs[0].abspath(),mode))
- return super(link_task,self).exec_command(lst)
+ lst.extend(['-manifest', manifest])
+ lst.append('-outputresource:%s;%s' % (self.outputs[0].abspath(), mode))
+
+ return super(link_task, self).exec_command(lst)
+
 class stlink_task(link_task):
- run_str='${AR} ${ARFLAGS} ${AR_TGT_F}${TGT} ${AR_SRC_F}${SRC}'
- chmod=Utils.O644
+ """
+ Base for static link tasks, which use *ar* most of the time.
+ The target is always removed before being written.
+ """
+ run_str = '${AR} ${ARFLAGS} ${AR_TGT_F}${TGT} ${AR_SRC_F}${SRC}'
+
+ chmod = Utils.O644
+ """Default installation mode for the static libraries"""
+
 def rm_tgt(cls):
- old=cls.run
+ old = cls.run
 def wrap(self):
 try:
 os.remove(self.outputs[0].abspath())
 except OSError:
 pass
 return old(self)
- setattr(cls,'run',wrap)
+ setattr(cls, 'run', wrap)
 rm_tgt(stlink_task)
-@feature('c','cxx','d','fc','asm')
+
+@feature('skip_stlib_link_deps')
+@before_method('process_use')
+def apply_skip_stlib_link_deps(self):
+ """
+ This enables an optimization in the :py:func:`waflib.Tools.ccroot.process_use` method that skips dependency and
+ link flag optimizations for targets that generate static libraries (via the :py:class:`waflib.Tools.ccroot.stlink_task` task).
+ The actual behavior is implemented in the :py:func:`waflib.Tools.ccroot.process_use` method, so this feature only tells waf
+ to enable the new behavior.
+ """
+ self.env.SKIP_STLIB_LINK_DEPS = True
+
+@feature('c', 'cxx', 'd', 'fc', 'asm')
 @after_method('process_source')
 def apply_link(self):
+ """
+ Collect the tasks stored in ``compiled_tasks`` (created by :py:func:`waflib.Tools.ccroot.create_compiled_task`), and
+ use the outputs for a new instance of :py:class:`waflib.Tools.ccroot.link_task`.
The class to use is the first link task + matching a name from the attribute *features*, for example:: + + def build(bld): + tg = bld(features='cxx cxxprogram cprogram', source='main.c', target='app') + + will create the task ``tg.link_task`` as a new instance of :py:class:`waflib.Tools.cxx.cxxprogram` + """ + for x in self.features: - if x=='cprogram'and'cxx'in self.features: - x='cxxprogram' - elif x=='cshlib'and'cxx'in self.features: - x='cxxshlib' + if x == 'cprogram' and 'cxx' in self.features: # limited compat + x = 'cxxprogram' + elif x == 'cshlib' and 'cxx' in self.features: + x = 'cxxshlib' + if x in Task.classes: - if issubclass(Task.classes[x],link_task): - link=x + if issubclass(Task.classes[x], link_task): + link = x break else: return - objs=[t.outputs[0]for t in getattr(self,'compiled_tasks',[])] - self.link_task=self.create_task(link,objs) + + objs = [t.outputs[0] for t in getattr(self, 'compiled_tasks', [])] + self.link_task = self.create_task(link, objs) self.link_task.add_target(self.target) + + # remember that the install paths are given by the task generators try: - inst_to=self.install_path + inst_to = self.install_path except AttributeError: - inst_to=self.link_task.inst_to + inst_to = self.link_task.inst_to if inst_to: - self.install_task=self.add_install_files(install_to=inst_to,install_from=self.link_task.outputs[:],chmod=self.link_task.chmod,task=self.link_task) + # install a copy of the node list we have at this moment (implib not added) + self.install_task = self.add_install_files( + install_to=inst_to, install_from=self.link_task.outputs[:], + chmod=self.link_task.chmod, task=self.link_task) + @taskgen_method -def use_rec(self,name,**kw): +def use_rec(self, name, **kw): + """ + Processes the ``use`` keyword recursively. 
This method is kind of private and only meant to be used from ``process_use`` + """ + if name in self.tmp_use_not or name in self.tmp_use_seen: return + try: - y=self.bld.get_tgen_by_name(name) + y = self.bld.get_tgen_by_name(name) except Errors.WafError: self.uselib.append(name) self.tmp_use_not.add(name) return + self.tmp_use_seen.append(name) y.post() - y.tmp_use_objects=objects=kw.get('objects',True) - y.tmp_use_stlib=stlib=kw.get('stlib',True) + + # bind temporary attributes on the task generator + y.tmp_use_objects = objects = kw.get('objects', True) + y.tmp_use_stlib = stlib = kw.get('stlib', True) try: - link_task=y.link_task + link_task = y.link_task except AttributeError: - y.tmp_use_var='' + y.tmp_use_var = '' else: - objects=False - if not isinstance(link_task,stlink_task): - stlib=False - y.tmp_use_var='LIB' + objects = False + if not isinstance(link_task, stlink_task): + stlib = False + y.tmp_use_var = 'LIB' else: - y.tmp_use_var='STLIB' - p=self.tmp_use_prec - for x in self.to_list(getattr(y,'use',[])): - if self.env["STLIB_"+x]: + y.tmp_use_var = 'STLIB' + + p = self.tmp_use_prec + for x in self.to_list(getattr(y, 'use', [])): + if self.env["STLIB_" + x]: continue try: p[x].append(name) except KeyError: - p[x]=[name] - self.use_rec(x,objects=objects,stlib=stlib) -@feature('c','cxx','d','use','fc') -@before_method('apply_incpaths','propagate_uselib_vars') -@after_method('apply_link','process_source') + p[x] = [name] + self.use_rec(x, objects=objects, stlib=stlib) + +@feature('c', 'cxx', 'd', 'use', 'fc') +@before_method('apply_incpaths', 'propagate_uselib_vars') +@after_method('apply_link', 'process_source') def process_use(self): - use_not=self.tmp_use_not=set() - self.tmp_use_seen=[] - use_prec=self.tmp_use_prec={} - self.uselib=self.to_list(getattr(self,'uselib',[])) - self.includes=self.to_list(getattr(self,'includes',[])) - names=self.to_list(getattr(self,'use',[])) + """ + Process the ``use`` attribute which contains a list of task generator names:: + + def build(bld): + bld.shlib(source='a.c', target='lib1') + bld.program(source='main.c', target='app', use='lib1') + + See :py:func:`waflib.Tools.ccroot.use_rec`. 
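+ (Editor's sketch extending the example above, not lines from the patch: ``use`` names that do not resolve to a task generator fall through to ``uselib``, as implemented in ``use_rec``, so system libraries registered at configuration time can be mixed in; ``M`` is whatever ``uselib_store`` value was chosen)::
+
+ def configure(conf):
+ conf.check_cc(lib='m', uselib_store='M')
+
+ def build(bld):
+ bld.shlib(source='a.c', target='lib1')
+ bld.program(source='main.c', target='app', use='lib1 M')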
+ """ + + use_not = self.tmp_use_not = set() + self.tmp_use_seen = [] # we would like an ordered set + use_prec = self.tmp_use_prec = {} + self.uselib = self.to_list(getattr(self, 'uselib', [])) + self.includes = self.to_list(getattr(self, 'includes', [])) + names = self.to_list(getattr(self, 'use', [])) + for x in names: self.use_rec(x) + for x in use_not: if x in use_prec: del use_prec[x] - out=self.tmp_use_sorted=[] - tmp=[] + + # topological sort + out = self.tmp_use_sorted = [] + tmp = [] for x in self.tmp_use_seen: for k in use_prec.values(): if x in k: break else: tmp.append(x) + while tmp: - e=tmp.pop() + e = tmp.pop() out.append(e) try: - nlst=use_prec[e] + nlst = use_prec[e] except KeyError: pass else: @@ -226,254 +389,403 @@ else: tmp.append(x) if use_prec: - raise Errors.WafError('Cycle detected in the use processing %r'%use_prec) + raise Errors.WafError('Cycle detected in the use processing %r' % use_prec) out.reverse() - link_task=getattr(self,'link_task',None) + + link_task = getattr(self, 'link_task', None) for x in out: - y=self.bld.get_tgen_by_name(x) - var=y.tmp_use_var + y = self.bld.get_tgen_by_name(x) + var = y.tmp_use_var if var and link_task: - if var=='LIB'or y.tmp_use_stlib or x in names: - self.env.append_value(var,[y.target[y.target.rfind(os.sep)+1:]]) + if self.env.SKIP_STLIB_LINK_DEPS and isinstance(link_task, stlink_task): + # If the skip_stlib_link_deps feature is enabled then we should + # avoid adding lib deps to the stlink_task instance. + pass + elif var == 'LIB' or y.tmp_use_stlib or x in names: + self.env.append_value(var, [y.target[y.target.rfind(os.sep) + 1:]]) self.link_task.dep_nodes.extend(y.link_task.outputs) - tmp_path=y.link_task.outputs[0].parent.path_from(self.get_cwd()) - self.env.append_unique(var+'PATH',[tmp_path]) + tmp_path = y.link_task.outputs[0].parent.path_from(self.get_cwd()) + self.env.append_unique(var + 'PATH', [tmp_path]) else: if y.tmp_use_objects: self.add_objects_from_tgen(y) - if getattr(y,'export_includes',None): - self.includes=self.includes+y.to_incnodes(y.export_includes) - if getattr(y,'export_defines',None): - self.env.append_value('DEFINES',self.to_list(y.export_defines)) + + if getattr(y, 'export_includes', None): + # self.includes may come from a global variable #2035 + self.includes = self.includes + y.to_incnodes(y.export_includes) + + if getattr(y, 'export_defines', None): + self.env.append_value('DEFINES', self.to_list(y.export_defines)) + + + # and finally, add the use variables (no recursion needed) for x in names: try: - y=self.bld.get_tgen_by_name(x) + y = self.bld.get_tgen_by_name(x) except Errors.WafError: - if not self.env['STLIB_'+x]and not x in self.uselib: + if not self.env['STLIB_' + x] and not x in self.uselib: self.uselib.append(x) else: - for k in self.to_list(getattr(y,'use',[])): - if not self.env['STLIB_'+k]and not k in self.uselib: + for k in self.to_list(getattr(y, 'use', [])): + if not self.env['STLIB_' + k] and not k in self.uselib: self.uselib.append(k) + @taskgen_method -def accept_node_to_link(self,node): +def accept_node_to_link(self, node): + """ + PRIVATE INTERNAL USE ONLY + """ return not node.name.endswith('.pdb') + @taskgen_method -def add_objects_from_tgen(self,tg): +def add_objects_from_tgen(self, tg): + """ + Add the objects from the depending compiled tasks as link task inputs. 
+ + Some objects are filtered: for instance, .pdb files are added + to the compiled tasks but not to the link tasks (to avoid errors) + PRIVATE INTERNAL USE ONLY + """ try: - link_task=self.link_task + link_task = self.link_task except AttributeError: pass else: - for tsk in getattr(tg,'compiled_tasks',[]): + for tsk in getattr(tg, 'compiled_tasks', []): for x in tsk.outputs: if self.accept_node_to_link(x): link_task.inputs.append(x) + @taskgen_method def get_uselib_vars(self): - _vars=set() + """ + :return: the *uselib* variables associated to the *features* attribute (see :py:attr:`waflib.Tools.ccroot.USELIB_VARS`) + :rtype: list of string + """ + _vars = set() for x in self.features: if x in USELIB_VARS: - _vars|=USELIB_VARS[x] + _vars |= USELIB_VARS[x] return _vars -@feature('c','cxx','d','fc','javac','cs','uselib','asm') + +@feature('c', 'cxx', 'd', 'fc', 'javac', 'cs', 'uselib', 'asm') @after_method('process_use') def propagate_uselib_vars(self): - _vars=self.get_uselib_vars() - env=self.env - app=env.append_value - feature_uselib=self.features+self.to_list(getattr(self,'uselib',[])) + """ + Process uselib variables for adding flags. For example, the following target:: + + def build(bld): + bld.env.AFLAGS_aaa = ['bar'] + from waflib.Tools.ccroot import USELIB_VARS + USELIB_VARS['aaa'] = ['AFLAGS'] + + tg = bld(features='aaa', aflags='test') + + The *aflags* attribute will be processed and this method will set:: + + tg.env.AFLAGS = ['bar', 'test'] + """ + _vars = self.get_uselib_vars() + env = self.env + app = env.append_value + feature_uselib = self.features + self.to_list(getattr(self, 'uselib', [])) for var in _vars: - y=var.lower() - val=getattr(self,y,[]) + y = var.lower() + val = getattr(self, y, []) if val: - app(var,self.to_list(val)) + app(var, self.to_list(val)) + for x in feature_uselib: - val=env['%s_%s'%(var,x)] + val = env['%s_%s' % (var, x)] if val: - app(var,val) -@feature('cshlib','cxxshlib','fcshlib') + app(var, val) + +# ============ the code above must not know anything about import libs ========== + +@feature('cshlib', 'cxxshlib', 'fcshlib') @after_method('apply_link') def apply_implib(self): - if not self.env.DEST_BINFMT=='pe': + """ + Handle dlls and their import libs on Windows-like systems. + + A ``.dll.a`` file called *import library* is generated. + It must be installed as it is required for linking the library. 
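+ (Editor's illustration, not lines from the patch: the install fallbacks implemented below can be overridden per target; ``install_path_implib`` is the attribute read by this method, and the destination paths are illustrative)::
+
+ def build(bld):
+ bld.shlib(source='a.c', target='foo',
+ install_path='${BINDIR}', install_path_implib='${LIBDIR}')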
+ """ + if not self.env.DEST_BINFMT == 'pe': return - dll=self.link_task.outputs[0] - if isinstance(self.target,Node.Node): - name=self.target.name + + dll = self.link_task.outputs[0] + if isinstance(self.target, Node.Node): + name = self.target.name else: - name=os.path.split(self.target)[1] - implib=self.env.implib_PATTERN%name - implib=dll.parent.find_or_declare(implib) - self.env.append_value('LINKFLAGS',self.env.IMPLIB_ST%implib.bldpath()) + name = os.path.split(self.target)[1] + implib = self.env.implib_PATTERN % name + implib = dll.parent.find_or_declare(implib) + self.env.append_value('LINKFLAGS', self.env.IMPLIB_ST % implib.bldpath()) self.link_task.outputs.append(implib) - if getattr(self,'defs',None)and self.env.DEST_BINFMT=='pe': - node=self.path.find_resource(self.defs) + + if getattr(self, 'defs', None) and self.env.DEST_BINFMT == 'pe': + node = self.path.find_resource(self.defs) if not node: - raise Errors.WafError('invalid def file %r'%self.defs) + raise Errors.WafError('invalid def file %r' % self.defs) if self.env.def_PATTERN: - self.env.append_value('LINKFLAGS',self.env.def_PATTERN%node.path_from(self.get_cwd())) + self.env.append_value('LINKFLAGS', self.env.def_PATTERN % node.path_from(self.get_cwd())) self.link_task.dep_nodes.append(node) else: + # gcc for windows takes *.def file as input without any special flag self.link_task.inputs.append(node) - if getattr(self,'install_task',None): + + # where to put the import library + if getattr(self, 'install_task', None): try: - inst_to=self.install_path_implib + # user has given a specific installation path for the import library + inst_to = self.install_path_implib except AttributeError: try: - inst_to=self.install_path + # user has given an installation path for the main library, put the import library in it + inst_to = self.install_path except AttributeError: - inst_to='${IMPLIBDIR}' - self.install_task.install_to='${BINDIR}' + # else, put the library in BINDIR and the import library in LIBDIR + inst_to = '${IMPLIBDIR}' + self.install_task.install_to = '${BINDIR}' if not self.env.IMPLIBDIR: - self.env.IMPLIBDIR=self.env.LIBDIR - self.implib_install_task=self.add_install_files(install_to=inst_to,install_from=implib,chmod=self.link_task.chmod,task=self.link_task) -re_vnum=re.compile('^([1-9]\\d*|0)([.]([1-9]\\d*|0)){0,2}?$') -@feature('cshlib','cxxshlib','dshlib','fcshlib','vnum') -@after_method('apply_link','propagate_uselib_vars') + self.env.IMPLIBDIR = self.env.LIBDIR + self.implib_install_task = self.add_install_files(install_to=inst_to, install_from=implib, + chmod=self.link_task.chmod, task=self.link_task) + +# ============ the code above must not know anything about vnum processing on unix platforms ========= + +re_vnum = re.compile('^([1-9]\\d*|0)([.]([1-9]\\d*|0)){0,2}?$') +@feature('cshlib', 'cxxshlib', 'dshlib', 'fcshlib', 'vnum') +@after_method('apply_link', 'propagate_uselib_vars') def apply_vnum(self): - if not getattr(self,'vnum','')or os.name!='posix'or self.env.DEST_BINFMT not in('elf','mac-o'): + """ + Enforce version numbering on shared libraries. 
The valid version numbers must have either zero or two dots::
+
+ def build(bld):
+ bld.shlib(source='a.c', target='foo', vnum='14.15.16')
+
+ In this example on the Linux platform, ``libfoo.so`` is installed as ``libfoo.so.14.15.16``, and the following symbolic links are created:
+
+ * ``libfoo.so → libfoo.so.14.15.16``
+ * ``libfoo.so.14 → libfoo.so.14.15.16``
+
+ By default, the library will be assigned SONAME ``libfoo.so.14``, effectively declaring ABI compatibility between all minor and patch releases for the major version of the library. When necessary, the compatibility can be explicitly defined using the `cnum` parameter::
+
+ def build(bld):
+ bld.shlib(source='a.c', target='foo', vnum='14.15.16', cnum='14.15')
+
+ In this case, the assigned SONAME will be ``libfoo.so.14.15``, with ABI compatibility only between patch releases for a specific major and minor version of the library.
+
+ On the OS X platform, the install-name parameter follows the above logic for SONAME, with the exception that it also specifies an absolute path (based on install_path) of the library.
+ """
+ if not getattr(self, 'vnum', '') or os.name != 'posix' or self.env.DEST_BINFMT not in ('elf', 'mac-o'):
 return
- link=self.link_task
+
+ link = self.link_task
 if not re_vnum.match(self.vnum):
- raise Errors.WafError('Invalid vnum %r for target %r'%(self.vnum,getattr(self,'name',self)))
- nums=self.vnum.split('.')
- node=link.outputs[0]
- cnum=getattr(self,'cnum',str(nums[0]))
- cnums=cnum.split('.')
- if len(cnums)>len(nums)or nums[0:len(cnums)]!=cnums:
- raise Errors.WafError('invalid compatibility version %s'%cnum)
- libname=node.name
+ raise Errors.WafError('Invalid vnum %r for target %r' % (self.vnum, getattr(self, 'name', self)))
+ nums = self.vnum.split('.')
+ node = link.outputs[0]
+
+ cnum = getattr(self, 'cnum', str(nums[0]))
+ cnums = cnum.split('.')
+ if len(cnums)>len(nums) or nums[0:len(cnums)] != cnums:
+ raise Errors.WafError('invalid compatibility version %s' % cnum)
+
+ libname = node.name
 if libname.endswith('.dylib'):
- name3=libname.replace('.dylib','.%s.dylib'%self.vnum)
- name2=libname.replace('.dylib','.%s.dylib'%cnum)
+ name3 = libname.replace('.dylib', '.%s.dylib' % self.vnum)
+ name2 = libname.replace('.dylib', '.%s.dylib' % cnum)
 else:
- name3=libname+'.'+self.vnum
- name2=libname+'.'+cnum
+ name3 = libname + '.' + self.vnum
+ name2 = libname + '.'
+ cnum + + # add the so name for the ld linker - to disable, just unset env.SONAME_ST if self.env.SONAME_ST: - v=self.env.SONAME_ST%name2 - self.env.append_value('LINKFLAGS',v.split()) - if self.env.DEST_OS!='openbsd': - outs=[node.parent.make_node(name3)] - if name2!=name3: + v = self.env.SONAME_ST % name2 + self.env.append_value('LINKFLAGS', v.split()) + + # the following task is just to enable execution from the build dir :-/ + if self.env.DEST_OS != 'openbsd': + outs = [node.parent.make_node(name3)] + if name2 != name3: outs.append(node.parent.make_node(name2)) - self.create_task('vnum',node,outs) - if getattr(self,'install_task',None): - self.install_task.hasrun=Task.SKIPPED - path=self.install_task.install_to - if self.env.DEST_OS=='openbsd': - libname=self.link_task.outputs[0].name - t1=self.add_install_as(install_to='%s/%s'%(path,libname),install_from=node,chmod=self.link_task.chmod) - self.vnum_install_task=(t1,) + self.create_task('vnum', node, outs) + + if getattr(self, 'install_task', None): + self.install_task.hasrun = Task.SKIPPED + self.install_task.no_errcheck_out = True + path = self.install_task.install_to + if self.env.DEST_OS == 'openbsd': + libname = self.link_task.outputs[0].name + t1 = self.add_install_as(install_to='%s/%s' % (path, libname), install_from=node, chmod=self.link_task.chmod) + self.vnum_install_task = (t1,) else: - t1=self.add_install_as(install_to=path+os.sep+name3,install_from=node,chmod=self.link_task.chmod) - t3=self.add_symlink_as(install_to=path+os.sep+libname,install_from=name3) - if name2!=name3: - t2=self.add_symlink_as(install_to=path+os.sep+name2,install_from=name3) - self.vnum_install_task=(t1,t2,t3) + t1 = self.add_install_as(install_to=path + os.sep + name3, install_from=node, chmod=self.link_task.chmod) + t3 = self.add_symlink_as(install_to=path + os.sep + libname, install_from=name3) + if name2 != name3: + t2 = self.add_symlink_as(install_to=path + os.sep + name2, install_from=name3) + self.vnum_install_task = (t1, t2, t3) else: - self.vnum_install_task=(t1,t3) - if'-dynamiclib'in self.env.LINKFLAGS: + self.vnum_install_task = (t1, t3) + + if '-dynamiclib' in self.env.LINKFLAGS: + # this requires after(propagate_uselib_vars) try: - inst_to=self.install_path + inst_to = self.install_path except AttributeError: - inst_to=self.link_task.inst_to + inst_to = self.link_task.inst_to if inst_to: - p=Utils.subst_vars(inst_to,self.env) - path=os.path.join(p,name2) - self.env.append_value('LINKFLAGS',['-install_name',path]) - self.env.append_value('LINKFLAGS','-Wl,-compatibility_version,%s'%cnum) - self.env.append_value('LINKFLAGS','-Wl,-current_version,%s'%self.vnum) + p = Utils.subst_vars(inst_to, self.env) + path = os.path.join(p, name2) + self.env.append_value('LINKFLAGS', ['-install_name', path]) + self.env.append_value('LINKFLAGS', '-Wl,-compatibility_version,%s' % cnum) + self.env.append_value('LINKFLAGS', '-Wl,-current_version,%s' % self.vnum) + class vnum(Task.Task): - color='CYAN' - ext_in=['.bin'] + """ + Create the symbolic links for a versioned shared library. 
Instances are created by :py:func:`waflib.Tools.ccroot.apply_vnum` + """ + color = 'CYAN' + ext_in = ['.bin'] def keyword(self): - return'Symlinking' + return 'Symlinking' def run(self): for x in self.outputs: - path=x.abspath() + path = x.abspath() try: os.remove(path) except OSError: pass + try: - os.symlink(self.inputs[0].name,path) + os.symlink(self.inputs[0].name, path) except OSError: return 1 + class fake_shlib(link_task): + """ + Task used for reading a system library and adding the dependency on it + """ def runnable_status(self): for t in self.run_after: if not t.hasrun: return Task.ASK_LATER return Task.SKIP_ME + class fake_stlib(stlink_task): + """ + Task used for reading a system library and adding the dependency on it + """ def runnable_status(self): for t in self.run_after: if not t.hasrun: return Task.ASK_LATER return Task.SKIP_ME + @conf -def read_shlib(self,name,paths=[],export_includes=[],export_defines=[]): - return self(name=name,features='fake_lib',lib_paths=paths,lib_type='shlib',export_includes=export_includes,export_defines=export_defines) +def read_shlib(self, name, paths=[], export_includes=[], export_defines=[]): + """ + Read a system shared library, enabling its use as a local library. Will trigger a rebuild if the file changes:: + + def build(bld): + bld.read_shlib('m') + bld.program(source='main.c', use='m') + """ + return self(name=name, features='fake_lib', lib_paths=paths, lib_type='shlib', export_includes=export_includes, export_defines=export_defines) + @conf -def read_stlib(self,name,paths=[],export_includes=[],export_defines=[]): - return self(name=name,features='fake_lib',lib_paths=paths,lib_type='stlib',export_includes=export_includes,export_defines=export_defines) -lib_patterns={'shlib':['lib%s.so','%s.so','lib%s.dylib','lib%s.dll','%s.dll'],'stlib':['lib%s.a','%s.a','lib%s.dll','%s.dll','lib%s.lib','%s.lib'],} +def read_stlib(self, name, paths=[], export_includes=[], export_defines=[]): + """ + Read a system static library, enabling its use as a local library. Will trigger a rebuild if the file changes. + """ + return self(name=name, features='fake_lib', lib_paths=paths, lib_type='stlib', export_includes=export_includes, export_defines=export_defines) + +lib_patterns = { + 'shlib' : ['lib%s.so', '%s.so', 'lib%s.dylib', 'lib%s.dll', '%s.dll'], + 'stlib' : ['lib%s.a', '%s.a', 'lib%s.dll', '%s.dll', 'lib%s.lib', '%s.lib'], +} + @feature('fake_lib') def process_lib(self): - node=None - names=[x%self.name for x in lib_patterns[self.lib_type]] - for x in self.lib_paths+[self.path]+SYSTEM_LIB_PATHS: - if not isinstance(x,Node.Node): - x=self.bld.root.find_node(x)or self.path.find_node(x) + """ + Find the location of a foreign library. Used by :py:class:`waflib.Tools.ccroot.read_shlib` and :py:class:`waflib.Tools.ccroot.read_stlib`. 
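+ The paths in ``lib_paths``, then the current path, then ``SYSTEM_LIB_PATHS`` are searched, trying the name patterns from :py:attr:`waflib.Tools.ccroot.lib_patterns`; for example, with ``lib_type='shlib'`` and ``name='m'`` the candidate file names are:: + + ['libm.so', 'm.so', 'libm.dylib', 'libm.dll', 'm.dll']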
+ """ + node = None + + names = [x % self.name for x in lib_patterns[self.lib_type]] + for x in self.lib_paths + [self.path] + SYSTEM_LIB_PATHS: + if not isinstance(x, Node.Node): + x = self.bld.root.find_node(x) or self.path.find_node(x) if not x: continue + for y in names: - node=x.find_node(y) + node = x.find_node(y) if node: try: Utils.h_file(node.abspath()) except EnvironmentError: - raise ValueError('Could not read %r'%y) + raise ValueError('Could not read %r' % y) break else: continue break else: - raise Errors.WafError('could not find library %r'%self.name) - self.link_task=self.create_task('fake_%s'%self.lib_type,[],[node]) - self.target=self.name + raise Errors.WafError('could not find library %r' % self.name) + self.link_task = self.create_task('fake_%s' % self.lib_type, [], [node]) + self.target = self.name + + class fake_o(Task.Task): def runnable_status(self): return Task.SKIP_ME -@extension('.o','.obj') -def add_those_o_files(self,node): - tsk=self.create_task('fake_o',[],node) + +@extension('.o', '.obj') +def add_those_o_files(self, node): + tsk = self.create_task('fake_o', [], node) try: self.compiled_tasks.append(tsk) except AttributeError: - self.compiled_tasks=[tsk] + self.compiled_tasks = [tsk] + @feature('fake_obj') @before_method('process_source') def process_objs(self): + """ + Puts object files in the task generator outputs + """ for node in self.to_nodes(self.source): self.add_those_o_files(node) - self.source=[] + self.source = [] + @conf -def read_object(self,obj): - if not isinstance(obj,self.path.__class__): - obj=self.path.find_resource(obj) - return self(features='fake_obj',source=obj,name=obj.name) -@feature('cxxprogram','cprogram') -@after_method('apply_link','process_use') +def read_object(self, obj): + """ + Read an object file, enabling injection in libs/programs. Will trigger a rebuild if the file changes. + + :param obj: object file path, as string or Node + """ + if not isinstance(obj, self.path.__class__): + obj = self.path.find_resource(obj) + return self(features='fake_obj', source=obj, name=obj.name) + +@feature('cxxprogram', 'cprogram') +@after_method('apply_link', 'process_use') def set_full_paths_hpux(self): - if self.env.DEST_OS!='hp-ux': + """ + On hp-ux, extend the libpaths and static library paths to absolute paths + """ + if self.env.DEST_OS != 'hp-ux': return - base=self.bld.bldnode.abspath() - for var in['LIBPATH','STLIBPATH']: - lst=[] + base = self.bld.bldnode.abspath() + for var in ['LIBPATH', 'STLIBPATH']: + lst = [] for x in self.env[var]: if x.startswith('/'): lst.append(x) else: - lst.append(os.path.normpath(os.path.join(base,x))) - self.env[var]=lst + lst.append(os.path.normpath(os.path.join(base, x))) + self.env[var] = lst + diff -Nru lilv-0.24.4~dfsg0/waflib/Tools/clang.py lilv-0.24.6/waflib/Tools/clang.py --- lilv-0.24.4~dfsg0/waflib/Tools/clang.py 2018-06-22 09:25:51.000000000 +0000 +++ lilv-0.24.6/waflib/Tools/clang.py 2019-06-06 20:19:08.000000000 +0000 @@ -1,17 +1,26 @@ -#! /usr/bin/env python +#!/usr/bin/env python # encoding: utf-8 -# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file +# Krzysztof Kosiński 2014 -from waflib.Tools import ccroot,ar,gcc +""" +Detect the Clang C compiler +""" + +from waflib.Tools import ccroot, ar, gcc from waflib.Configure import conf + @conf def find_clang(conf): - cc=conf.find_program('clang',var='CC') - conf.get_cc_version(cc,clang=True) - conf.env.CC_NAME='clang' + """ + Finds the program clang and executes it to ensure it really is clang + """ + cc = conf.find_program('clang', var='CC') + conf.get_cc_version(cc, clang=True) + conf.env.CC_NAME = 'clang' + def configure(conf): conf.find_clang() - conf.find_program(['llvm-ar','ar'],var='AR') + conf.find_program(['llvm-ar', 'ar'], var='AR') conf.find_ar() conf.gcc_common_flags() conf.gcc_modifier_platform() diff -Nru lilv-0.24.4~dfsg0/waflib/Tools/clangxx.py lilv-0.24.6/waflib/Tools/clangxx.py --- lilv-0.24.4~dfsg0/waflib/Tools/clangxx.py 2018-06-27 05:22:05.000000000 +0000 +++ lilv-0.24.6/waflib/Tools/clangxx.py 2019-06-06 20:19:08.000000000 +0000 @@ -1,20 +1,30 @@ -#! /usr/bin/env python +#!/usr/bin/env python # encoding: utf-8 -# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file +# Thomas Nagy 2009-2018 (ita) -from waflib.Tools import ccroot,ar,gxx +""" +Detect the Clang++ C++ compiler +""" + +from waflib.Tools import ccroot, ar, gxx from waflib.Configure import conf + @conf def find_clangxx(conf): - cxx=conf.find_program('clang++',var='CXX') - conf.get_cc_version(cxx,clang=True) - conf.env.CXX_NAME='clang' + """ + Finds the program clang++, and executes it to ensure it really is clang++ + """ + cxx = conf.find_program('clang++', var='CXX') + conf.get_cc_version(cxx, clang=True) + conf.env.CXX_NAME = 'clang' + def configure(conf): conf.find_clangxx() - conf.find_program(['llvm-ar','ar'],var='AR') + conf.find_program(['llvm-ar', 'ar'], var='AR') conf.find_ar() conf.gxx_common_flags() conf.gxx_modifier_platform() conf.cxx_load_tools() conf.cxx_add_flags() conf.link_add_flags() + diff -Nru lilv-0.24.4~dfsg0/waflib/Tools/compiler_c.py lilv-0.24.6/waflib/Tools/compiler_c.py --- lilv-0.24.4~dfsg0/waflib/Tools/compiler_c.py 2018-06-22 09:25:51.000000000 +0000 +++ lilv-0.24.6/waflib/Tools/compiler_c.py 2019-06-06 20:19:08.000000000 +0000 @@ -1,44 +1,110 @@ -#! /usr/bin/env python +#!/usr/bin/env python # encoding: utf-8 -# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file +# Matthias Jahn jahn dôt matthias ât freenet dôt de, 2007 (pmarat) + +""" +Try to detect a C compiler from the list of supported compilers (gcc, msvc, etc):: + + def options(opt): + opt.load('compiler_c') + def configure(cnf): + cnf.load('compiler_c') + def build(bld): + bld.program(source='main.c', target='app') + +The compilers are associated to platforms in :py:attr:`waflib.Tools.compiler_c.c_compiler`. To register +a new C compiler named *cfoo* (assuming the tool ``waflib/extras/cfoo.py`` exists), use:: + + from waflib.Tools.compiler_c import c_compiler + c_compiler['win32'] = ['cfoo', 'msvc', 'gcc'] + + def options(opt): + opt.load('compiler_c') + def configure(cnf): + cnf.load('compiler_c') + def build(bld): + bld.program(source='main.c', target='app') + +Not all compilers need to have a specific tool. 
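+(The candidates are tried in the order listed; the first compiler that configures successfully is used.)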
For example, the clang compilers can be detected by the gcc tools when using:: + + $ CC=clang waf configure +""" import re from waflib.Tools import ccroot from waflib import Utils from waflib.Logs import debug -c_compiler={'win32':['msvc','gcc','clang'],'cygwin':['gcc'],'darwin':['clang','gcc'],'aix':['xlc','gcc','clang'],'linux':['gcc','clang','icc'],'sunos':['suncc','gcc'],'irix':['gcc','irixcc'],'hpux':['gcc'],'osf1V':['gcc'],'gnu':['gcc','clang'],'java':['gcc','msvc','clang','icc'],'default':['clang','gcc'],} + +c_compiler = { +'win32': ['msvc', 'gcc', 'clang'], +'cygwin': ['gcc'], +'darwin': ['clang', 'gcc'], +'aix': ['xlc', 'gcc', 'clang'], +'linux': ['gcc', 'clang', 'icc'], +'sunos': ['suncc', 'gcc'], +'irix': ['gcc', 'irixcc'], +'hpux': ['gcc'], +'osf1V': ['gcc'], +'gnu': ['gcc', 'clang'], +'java': ['gcc', 'msvc', 'clang', 'icc'], +'default':['clang', 'gcc'], +} +""" +Dict mapping platform names to Waf tools finding specific C compilers:: + + from waflib.Tools.compiler_c import c_compiler + c_compiler['linux'] = ['gcc', 'icc', 'suncc'] +""" + def default_compilers(): - build_platform=Utils.unversioned_sys_platform() - possible_compiler_list=c_compiler.get(build_platform,c_compiler['default']) - return' '.join(possible_compiler_list) + build_platform = Utils.unversioned_sys_platform() + possible_compiler_list = c_compiler.get(build_platform, c_compiler['default']) + return ' '.join(possible_compiler_list) + def configure(conf): + """ + Detects a suitable C compiler + + :raises: :py:class:`waflib.Errors.ConfigurationError` when no suitable compiler is found + """ try: - test_for_compiler=conf.options.check_c_compiler or default_compilers() + test_for_compiler = conf.options.check_c_compiler or default_compilers() except AttributeError: conf.fatal("Add options(opt): opt.load('compiler_c')") - for compiler in re.split('[ ,]+',test_for_compiler): + + for compiler in re.split('[ ,]+', test_for_compiler): conf.env.stash() - conf.start_msg('Checking for %r (C compiler)'%compiler) + conf.start_msg('Checking for %r (C compiler)' % compiler) try: conf.load(compiler) except conf.errors.ConfigurationError as e: conf.env.revert() conf.end_msg(False) - debug('compiler_c: %r',e) + debug('compiler_c: %r', e) else: if conf.env.CC: conf.end_msg(conf.env.get_flat('CC')) - conf.env.COMPILER_CC=compiler + conf.env.COMPILER_CC = compiler conf.env.commit() break conf.env.revert() conf.end_msg(False) else: conf.fatal('could not configure a C compiler!') + def options(opt): - test_for_compiler=default_compilers() - opt.load_special_tools('c_*.py',ban=['c_dumbpreproc.py']) - cc_compiler_opts=opt.add_option_group('Configuration options') - cc_compiler_opts.add_option('--check-c-compiler',default=None,help='list of C compilers to try [%s]'%test_for_compiler,dest="check_c_compiler") + """ + This is how to provide compiler preferences on the command-line:: + + $ waf configure --check-c-compiler=gcc + """ + test_for_compiler = default_compilers() + opt.load_special_tools('c_*.py', ban=['c_dumbpreproc.py']) + cc_compiler_opts = opt.add_option_group('Configuration options') + cc_compiler_opts.add_option('--check-c-compiler', default=None, + help='list of C compilers to try [%s]' % test_for_compiler, + dest="check_c_compiler") + for x in test_for_compiler.split(): - opt.load('%s'%x) + opt.load('%s' % x) + diff -Nru lilv-0.24.4~dfsg0/waflib/Tools/compiler_cxx.py lilv-0.24.6/waflib/Tools/compiler_cxx.py --- lilv-0.24.4~dfsg0/waflib/Tools/compiler_cxx.py 2018-06-22 09:25:51.000000000 +0000 +++ 
lilv-0.24.6/waflib/Tools/compiler_cxx.py 2019-06-06 20:19:08.000000000 +0000 @@ -1,44 +1,111 @@ -#! /usr/bin/env python +#!/usr/bin/env python # encoding: utf-8 -# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file +# Matthias Jahn jahn dôt matthias ât freenet dôt de 2007 (pmarat) + +""" +Try to detect a C++ compiler from the list of supported compilers (g++, msvc, etc):: + + def options(opt): + opt.load('compiler_cxx') + def configure(cnf): + cnf.load('compiler_cxx') + def build(bld): + bld.program(source='main.cpp', target='app') + +The compilers are associated to platforms in :py:attr:`waflib.Tools.compiler_cxx.cxx_compiler`. To register +a new C++ compiler named *cfoo* (assuming the tool ``waflib/extras/cfoo.py`` exists), use:: + + from waflib.Tools.compiler_cxx import cxx_compiler + cxx_compiler['win32'] = ['cfoo', 'msvc', 'g++'] + + def options(opt): + opt.load('compiler_cxx') + def configure(cnf): + cnf.load('compiler_cxx') + def build(bld): + bld.program(source='main.cpp', target='app') + +Not all compilers need to have a specific tool. For example, the clang compilers can be detected by the gcc tools when using:: + + $ CXX=clang waf configure +""" + import re from waflib.Tools import ccroot from waflib import Utils from waflib.Logs import debug -cxx_compiler={'win32':['msvc','g++','clang++'],'cygwin':['g++'],'darwin':['clang++','g++'],'aix':['xlc++','g++','clang++'],'linux':['g++','clang++','icpc'],'sunos':['sunc++','g++'],'irix':['g++'],'hpux':['g++'],'osf1V':['g++'],'gnu':['g++','clang++'],'java':['g++','msvc','clang++','icpc'],'default':['clang++','g++']} + +cxx_compiler = { +'win32': ['msvc', 'g++', 'clang++'], +'cygwin': ['g++'], +'darwin': ['clang++', 'g++'], +'aix': ['xlc++', 'g++', 'clang++'], +'linux': ['g++', 'clang++', 'icpc'], +'sunos': ['sunc++', 'g++'], +'irix': ['g++'], +'hpux': ['g++'], +'osf1V': ['g++'], +'gnu': ['g++', 'clang++'], +'java': ['g++', 'msvc', 'clang++', 'icpc'], +'default': ['clang++', 'g++'] +} +""" +Dict mapping the platform names to Waf tools finding specific C++ compilers:: + + from waflib.Tools.compiler_cxx import cxx_compiler + cxx_compiler['linux'] = ['gxx', 'icpc', 'suncxx'] +""" + def default_compilers(): - build_platform=Utils.unversioned_sys_platform() - possible_compiler_list=cxx_compiler.get(build_platform,cxx_compiler['default']) - return' '.join(possible_compiler_list) + build_platform = Utils.unversioned_sys_platform() + possible_compiler_list = cxx_compiler.get(build_platform, cxx_compiler['default']) + return ' '.join(possible_compiler_list) + def configure(conf): + """ + Detects a suitable C++ compiler + + :raises: :py:class:`waflib.Errors.ConfigurationError` when no suitable compiler is found + """ try: - test_for_compiler=conf.options.check_cxx_compiler or default_compilers() + test_for_compiler = conf.options.check_cxx_compiler or default_compilers() except AttributeError: conf.fatal("Add options(opt): opt.load('compiler_cxx')") - for compiler in re.split('[ ,]+',test_for_compiler): + + for compiler in re.split('[ ,]+', test_for_compiler): conf.env.stash() - conf.start_msg('Checking for %r (C++ compiler)'%compiler) + conf.start_msg('Checking for %r (C++ compiler)' % compiler) try: conf.load(compiler) except conf.errors.ConfigurationError as e: conf.env.revert() conf.end_msg(False) - debug('compiler_cxx: %r',e) + debug('compiler_cxx: %r', e) else: if conf.env.CXX: conf.end_msg(conf.env.get_flat('CXX')) - conf.env.COMPILER_CXX=compiler + conf.env.COMPILER_CXX = compiler conf.env.commit() break 
conf.env.revert() conf.end_msg(False) else: conf.fatal('could not configure a C++ compiler!') + def options(opt): - test_for_compiler=default_compilers() + """ + This is how to provide compiler preferences on the command-line:: + + $ waf configure --check-cxx-compiler=gxx + """ + test_for_compiler = default_compilers() opt.load_special_tools('cxx_*.py') - cxx_compiler_opts=opt.add_option_group('Configuration options') - cxx_compiler_opts.add_option('--check-cxx-compiler',default=None,help='list of C++ compilers to try [%s]'%test_for_compiler,dest="check_cxx_compiler") + cxx_compiler_opts = opt.add_option_group('Configuration options') + cxx_compiler_opts.add_option('--check-cxx-compiler', default=None, + help='list of C++ compilers to try [%s]' % test_for_compiler, + dest="check_cxx_compiler") + for x in test_for_compiler.split(): - opt.load('%s'%x) + opt.load('%s' % x) + diff -Nru lilv-0.24.4~dfsg0/waflib/Tools/compiler_d.py lilv-0.24.6/waflib/Tools/compiler_d.py --- lilv-0.24.4~dfsg0/waflib/Tools/compiler_d.py 2018-06-27 05:22:05.000000000 +0000 +++ lilv-0.24.6/waflib/Tools/compiler_d.py 2019-06-06 20:19:08.000000000 +0000 @@ -1,41 +1,85 @@ -#! /usr/bin/env python +#!/usr/bin/env python # encoding: utf-8 -# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file +# Carlos Rafael Giani, 2007 (dv) +# Thomas Nagy, 2016-2018 (ita) + +""" +Try to detect a D compiler from the list of supported compilers:: + + def options(opt): + opt.load('compiler_d') + def configure(cnf): + cnf.load('compiler_d') + def build(bld): + bld.program(source='main.d', target='app') + +Only three D compilers are really present at the moment: + +* gdc +* dmd, the ldc compiler having a very similar command-line interface +* ldc2 +""" import re -from waflib import Utils,Logs -d_compiler={'default':['gdc','dmd','ldc2']} +from waflib import Utils, Logs + +d_compiler = { +'default' : ['gdc', 'dmd', 'ldc2'] +} +""" +Dict mapping the platform names to lists of names of D compilers to try, in order of preference:: + + from waflib.Tools.compiler_d import d_compiler + d_compiler['default'] = ['gdc', 'dmd', 'ldc2'] +""" + def default_compilers(): - build_platform=Utils.unversioned_sys_platform() - possible_compiler_list=d_compiler.get(build_platform,d_compiler['default']) - return' '.join(possible_compiler_list) + build_platform = Utils.unversioned_sys_platform() + possible_compiler_list = d_compiler.get(build_platform, d_compiler['default']) + return ' '.join(possible_compiler_list) + def configure(conf): + """ + Detects a suitable D compiler + + :raises: :py:class:`waflib.Errors.ConfigurationError` when no suitable compiler is found + """ try: - test_for_compiler=conf.options.check_d_compiler or default_compilers() + test_for_compiler = conf.options.check_d_compiler or default_compilers() except AttributeError: conf.fatal("Add options(opt): opt.load('compiler_d')") - for compiler in re.split('[ ,]+',test_for_compiler): + + for compiler in re.split('[ ,]+', test_for_compiler): conf.env.stash() - conf.start_msg('Checking for %r (D compiler)'%compiler) + conf.start_msg('Checking for %r (D compiler)' % compiler) try: conf.load(compiler) except conf.errors.ConfigurationError as e: conf.env.revert() conf.end_msg(False) - Logs.debug('compiler_d: %r',e) + Logs.debug('compiler_d: %r', e) else: if conf.env.D: conf.end_msg(conf.env.get_flat('D')) - conf.env.COMPILER_D=compiler + conf.env.COMPILER_D = compiler conf.env.commit() break conf.env.revert() conf.end_msg(False) else: conf.fatal('could not configure a D 
compiler!') + def options(opt): - test_for_compiler=default_compilers() - d_compiler_opts=opt.add_option_group('Configuration options') - d_compiler_opts.add_option('--check-d-compiler',default=None,help='list of D compilers to try [%s]'%test_for_compiler,dest='check_d_compiler') + """ + This is how to provide compiler preferences on the command-line:: + + $ waf configure --check-d-compiler=dmd + """ + test_for_compiler = default_compilers() + d_compiler_opts = opt.add_option_group('Configuration options') + d_compiler_opts.add_option('--check-d-compiler', default=None, + help='list of D compilers to try [%s]' % test_for_compiler, dest='check_d_compiler') + for x in test_for_compiler.split(): - opt.load('%s'%x) + opt.load('%s' % x) + diff -Nru lilv-0.24.4~dfsg0/waflib/Tools/compiler_fc.py lilv-0.24.6/waflib/Tools/compiler_fc.py --- lilv-0.24.4~dfsg0/waflib/Tools/compiler_fc.py 2018-06-22 09:25:51.000000000 +0000 +++ lilv-0.24.6/waflib/Tools/compiler_fc.py 2019-06-06 20:19:08.000000000 +0000 @@ -1,43 +1,73 @@ -#! /usr/bin/env python +#!/usr/bin/env python # encoding: utf-8 -# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file import re -from waflib import Utils,Logs +from waflib import Utils, Logs from waflib.Tools import fc -fc_compiler={'win32':['gfortran','ifort'],'darwin':['gfortran','g95','ifort'],'linux':['gfortran','g95','ifort'],'java':['gfortran','g95','ifort'],'default':['gfortran'],'aix':['gfortran']} + +fc_compiler = { + 'win32' : ['gfortran','ifort'], + 'darwin' : ['gfortran', 'g95', 'ifort'], + 'linux' : ['gfortran', 'g95', 'ifort'], + 'java' : ['gfortran', 'g95', 'ifort'], + 'default': ['gfortran'], + 'aix' : ['gfortran'] +} +""" +Dict mapping the platform names to lists of names of Fortran compilers to try, in order of preference:: + + from waflib.Tools.compiler_fc import fc_compiler + fc_compiler['linux'] = ['gfortran', 'g95', 'ifort'] +""" + def default_compilers(): - build_platform=Utils.unversioned_sys_platform() - possible_compiler_list=fc_compiler.get(build_platform,fc_compiler['default']) - return' '.join(possible_compiler_list) + build_platform = Utils.unversioned_sys_platform() + possible_compiler_list = fc_compiler.get(build_platform, fc_compiler['default']) + return ' '.join(possible_compiler_list) + def configure(conf): + """ + Detects a suitable Fortran compiler + + :raises: :py:class:`waflib.Errors.ConfigurationError` when no suitable compiler is found + """ try: - test_for_compiler=conf.options.check_fortran_compiler or default_compilers() + test_for_compiler = conf.options.check_fortran_compiler or default_compilers() except AttributeError: conf.fatal("Add options(opt): opt.load('compiler_fc')") - for compiler in re.split('[ ,]+',test_for_compiler): + for compiler in re.split('[ ,]+', test_for_compiler): conf.env.stash() - conf.start_msg('Checking for %r (Fortran compiler)'%compiler) + conf.start_msg('Checking for %r (Fortran compiler)' % compiler) try: conf.load(compiler) except conf.errors.ConfigurationError as e: conf.env.revert() conf.end_msg(False) - Logs.debug('compiler_fortran: %r',e) + Logs.debug('compiler_fortran: %r', e) else: if conf.env.FC: conf.end_msg(conf.env.get_flat('FC')) - conf.env.COMPILER_FORTRAN=compiler + conf.env.COMPILER_FORTRAN = compiler conf.env.commit() break conf.env.revert() conf.end_msg(False) else: conf.fatal('could not configure a Fortran compiler!') + def options(opt): - test_for_compiler=default_compilers() + """ + This is how to provide compiler preferences on the command-line:: + + $ waf configure 
--check-fortran-compiler=ifort + """ + test_for_compiler = default_compilers() opt.load_special_tools('fc_*.py') - fortran_compiler_opts=opt.add_option_group('Configuration options') - fortran_compiler_opts.add_option('--check-fortran-compiler',default=None,help='list of Fortran compiler to try [%s]'%test_for_compiler,dest="check_fortran_compiler") + fortran_compiler_opts = opt.add_option_group('Configuration options') + fortran_compiler_opts.add_option('--check-fortran-compiler', default=None, + help='list of Fortran compilers to try [%s]' % test_for_compiler, + dest="check_fortran_compiler") + for x in test_for_compiler.split(): - opt.load('%s'%x) + opt.load('%s' % x) + diff -Nru lilv-0.24.4~dfsg0/waflib/Tools/c_osx.py lilv-0.24.6/waflib/Tools/c_osx.py --- lilv-0.24.4~dfsg0/waflib/Tools/c_osx.py 2018-06-27 05:22:05.000000000 +0000 +++ lilv-0.24.6/waflib/Tools/c_osx.py 2019-06-06 20:19:08.000000000 +0000 @@ -1,11 +1,16 @@ -#! /usr/bin/env python +#!/usr/bin/env python # encoding: utf-8 -# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file +# Thomas Nagy 2008-2018 (ita) -import os,shutil,platform -from waflib import Task,Utils -from waflib.TaskGen import taskgen_method,feature,after_method,before_method -app_info=''' +""" +MacOSX related tools +""" + +import os, shutil, platform +from waflib import Task, Utils +from waflib.TaskGen import taskgen_method, feature, after_method, before_method + +app_info = ''' @@ -23,99 +28,166 @@ ''' -@feature('c','cxx') +""" +plist template +""" + +@feature('c', 'cxx') def set_macosx_deployment_target(self): + """ + see WAF issue 285 and also http://trac.macports.org/ticket/17059 + """ if self.env.MACOSX_DEPLOYMENT_TARGET: - os.environ['MACOSX_DEPLOYMENT_TARGET']=self.env.MACOSX_DEPLOYMENT_TARGET - elif'MACOSX_DEPLOYMENT_TARGET'not in os.environ: - if Utils.unversioned_sys_platform()=='darwin': - os.environ['MACOSX_DEPLOYMENT_TARGET']='.'.join(platform.mac_ver()[0].split('.')[:2]) + os.environ['MACOSX_DEPLOYMENT_TARGET'] = self.env.MACOSX_DEPLOYMENT_TARGET + elif 'MACOSX_DEPLOYMENT_TARGET' not in os.environ: + if Utils.unversioned_sys_platform() == 'darwin': + os.environ['MACOSX_DEPLOYMENT_TARGET'] = '.'.join(platform.mac_ver()[0].split('.')[:2]) + @taskgen_method -def create_bundle_dirs(self,name,out): - dir=out.parent.find_or_declare(name) +def create_bundle_dirs(self, name, out): + """ + Creates bundle folders, used by :py:func:`create_task_macplist` and :py:func:`create_task_macapp` + """ + dir = out.parent.find_or_declare(name) dir.mkdir() - macos=dir.find_or_declare(['Contents','MacOS']) + macos = dir.find_or_declare(['Contents', 'MacOS']) macos.mkdir() return dir + def bundle_name_for_output(out): - name=out.name - k=name.rfind('.') - if k>=0: - name=name[:k]+'.app' + name = out.name + k = name.rfind('.') + if k >= 0: + name = name[:k] + '.app' else: - name=name+'.app' + name = name + '.app' return name -@feature('cprogram','cxxprogram') + +@feature('cprogram', 'cxxprogram') @after_method('apply_link') def create_task_macapp(self): - if self.env.MACAPP or getattr(self,'mac_app',False): - out=self.link_task.outputs[0] - name=bundle_name_for_output(out) - dir=self.create_bundle_dirs(name,out) - n1=dir.find_or_declare(['Contents','MacOS',out.name]) - self.apptask=self.create_task('macapp',self.link_task.outputs,n1) - inst_to=getattr(self,'install_path','/Applications')+'/%s/Contents/MacOS/'%name - self.add_install_files(install_to=inst_to,install_from=n1,chmod=Utils.O755) - if getattr(self,'mac_files',None): - 
mac_files_root=getattr(self,'mac_files_root',None) - if isinstance(mac_files_root,str): - mac_files_root=self.path.find_node(mac_files_root) + """ + To compile an executable into a Mac application (a .app), set its *mac_app* attribute:: + + def build(bld): + bld.program(source='a.c', target='foo', mac_app=True) + + To force *all* executables to be transformed into Mac applications:: + + def build(bld): + bld.env.MACAPP = True + bld.program(source='a.c', target='foo') + """ + if self.env.MACAPP or getattr(self, 'mac_app', False): + out = self.link_task.outputs[0] + + name = bundle_name_for_output(out) + dir = self.create_bundle_dirs(name, out) + + n1 = dir.find_or_declare(['Contents', 'MacOS', out.name]) + + self.apptask = self.create_task('macapp', self.link_task.outputs, n1) + inst_to = getattr(self, 'install_path', '/Applications') + '/%s/Contents/MacOS/' % name + self.add_install_files(install_to=inst_to, install_from=n1, chmod=Utils.O755) + + if getattr(self, 'mac_files', None): + # this only accepts files; they will be installed as seen from mac_files_root + mac_files_root = getattr(self, 'mac_files_root', None) + if isinstance(mac_files_root, str): + mac_files_root = self.path.find_node(mac_files_root) if not mac_files_root: - self.bld.fatal('Invalid mac_files_root %r'%self.mac_files_root) - res_dir=n1.parent.parent.make_node('Resources') - inst_to=getattr(self,'install_path','/Applications')+'/%s/Resources'%name + self.bld.fatal('Invalid mac_files_root %r' % self.mac_files_root) + res_dir = n1.parent.parent.make_node('Resources') + inst_to = getattr(self, 'install_path', '/Applications') + '/%s/Resources' % name for node in self.to_nodes(self.mac_files): - relpath=node.path_from(mac_files_root or node.parent) - self.create_task('macapp',node,res_dir.make_node(relpath)) - self.add_install_as(install_to=os.path.join(inst_to,relpath),install_from=node) - if getattr(self.bld,'is_install',None): - self.install_task.hasrun=Task.SKIP_ME -@feature('cprogram','cxxprogram') + relpath = node.path_from(mac_files_root or node.parent) + self.create_task('macapp', node, res_dir.make_node(relpath)) + self.add_install_as(install_to=os.path.join(inst_to, relpath), install_from=node) + + if getattr(self.bld, 'is_install', None): + # disable regular binary installation + self.install_task.hasrun = Task.SKIP_ME + +@feature('cprogram', 'cxxprogram') @after_method('apply_link') def create_task_macplist(self): - if self.env.MACAPP or getattr(self,'mac_app',False): - out=self.link_task.outputs[0] - name=bundle_name_for_output(out) - dir=self.create_bundle_dirs(name,out) - n1=dir.find_or_declare(['Contents','Info.plist']) - self.plisttask=plisttask=self.create_task('macplist',[],n1) - plisttask.context={'app_name':self.link_task.outputs[0].name,'env':self.env} - plist_ctx=getattr(self,'plist_context',None) - if(plist_ctx): + """ + Creates a :py:class:`waflib.Tools.c_osx.macplist` instance. 
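+ + A custom property list may be given either as a template file or as a literal string, with optional substitution values for the template (the file name and key below are illustrative):: + + def build(bld): + bld.program(source='main.c', target='app', mac_app=True, + mac_plist='src/Info.plist', plist_context={'app_name': 'app'})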
+ """ + if self.env.MACAPP or getattr(self, 'mac_app', False): + out = self.link_task.outputs[0] + + name = bundle_name_for_output(out) + + dir = self.create_bundle_dirs(name, out) + n1 = dir.find_or_declare(['Contents', 'Info.plist']) + self.plisttask = plisttask = self.create_task('macplist', [], n1) + plisttask.context = { + 'app_name': self.link_task.outputs[0].name, + 'env': self.env + } + + plist_ctx = getattr(self, 'plist_context', None) + if (plist_ctx): plisttask.context.update(plist_ctx) - if getattr(self,'mac_plist',False): - node=self.path.find_resource(self.mac_plist) + + if getattr(self, 'mac_plist', False): + node = self.path.find_resource(self.mac_plist) if node: plisttask.inputs.append(node) else: - plisttask.code=self.mac_plist + plisttask.code = self.mac_plist else: - plisttask.code=app_info - inst_to=getattr(self,'install_path','/Applications')+'/%s/Contents/'%name - self.add_install_files(install_to=inst_to,install_from=n1) -@feature('cshlib','cxxshlib') -@before_method('apply_link','propagate_uselib_vars') + plisttask.code = app_info + + inst_to = getattr(self, 'install_path', '/Applications') + '/%s/Contents/' % name + self.add_install_files(install_to=inst_to, install_from=n1) + +@feature('cshlib', 'cxxshlib') +@before_method('apply_link', 'propagate_uselib_vars') def apply_bundle(self): - if self.env.MACBUNDLE or getattr(self,'mac_bundle',False): - self.env.LINKFLAGS_cshlib=self.env.LINKFLAGS_cxxshlib=[] - self.env.cshlib_PATTERN=self.env.cxxshlib_PATTERN=self.env.macbundle_PATTERN - use=self.use=self.to_list(getattr(self,'use',[])) - if not'MACBUNDLE'in use: + """ + To make a bundled shared library (a ``.bundle``), set the *mac_bundle* attribute:: + + def build(bld): + bld.shlib(source='a.c', target='foo', mac_bundle = True) + + To force *all* shared libraries to be transformed into bundles:: + + def build(bld): + bld.env.MACBUNDLE = True + bld.shlib(source='a.c', target='foo') + """ + if self.env.MACBUNDLE or getattr(self, 'mac_bundle', False): + self.env.LINKFLAGS_cshlib = self.env.LINKFLAGS_cxxshlib = [] # disable the '-dynamiclib' flag + self.env.cshlib_PATTERN = self.env.cxxshlib_PATTERN = self.env.macbundle_PATTERN + use = self.use = self.to_list(getattr(self, 'use', [])) + if not 'MACBUNDLE' in use: use.append('MACBUNDLE') -app_dirs=['Contents','Contents/MacOS','Contents/Resources'] + +app_dirs = ['Contents', 'Contents/MacOS', 'Contents/Resources'] + class macapp(Task.Task): - color='PINK' + """ + Creates mac applications + """ + color = 'PINK' def run(self): self.outputs[0].parent.mkdir() - shutil.copy2(self.inputs[0].srcpath(),self.outputs[0].abspath()) + shutil.copy2(self.inputs[0].srcpath(), self.outputs[0].abspath()) + class macplist(Task.Task): - color='PINK' - ext_in=['.bin'] + """ + Creates plist files + """ + color = 'PINK' + ext_in = ['.bin'] def run(self): - if getattr(self,'code',None): - txt=self.code + if getattr(self, 'code', None): + txt = self.code else: - txt=self.inputs[0].read() - context=getattr(self,'context',{}) - txt=txt.format(**context) + txt = self.inputs[0].read() + context = getattr(self, 'context', {}) + txt = txt.format(**context) self.outputs[0].write(txt) + diff -Nru lilv-0.24.4~dfsg0/waflib/Tools/c_preproc.py lilv-0.24.6/waflib/Tools/c_preproc.py --- lilv-0.24.4~dfsg0/waflib/Tools/c_preproc.py 2018-06-27 05:22:05.000000000 +0000 +++ lilv-0.24.6/waflib/Tools/c_preproc.py 2019-06-06 20:19:08.000000000 +0000 @@ -1,543 +1,903 @@ -#! /usr/bin/env python +#!/usr/bin/env python # encoding: utf-8 -# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file +# Thomas Nagy, 2006-2018 (ita) + +""" +C/C++ preprocessor for finding dependencies + +Reasons for using the Waf preprocessor by default + +#. Some c/c++ extensions (Qt) require a custom preprocessor for obtaining the dependencies (.moc files) +#. Not all compilers provide .d files for obtaining the dependencies (portability) +#. A naive file scanner will not catch constructs such as "#include foo()" +#. A naive file scanner will catch unnecessary dependencies (change an unused header -> recompile everything) + +Regarding the speed concerns: + +* the preprocessing is performed only when files must be compiled +* the macros are evaluated only for #if/#elif/#include +* system headers are not scanned by default + +Now if you do not want the Waf preprocessor, the tool *gccdeps* uses the .d files produced +during the compilation to track the dependencies (useful when used with the boost libraries). +It only works with gcc >= 4.4 though. + +A dumb preprocessor is also available in the tool *c_dumbpreproc* +""" +# TODO: more varargs, pragma once + +import re, string, traceback +from waflib import Logs, Utils, Errors -import re,string,traceback -from waflib import Logs,Utils,Errors class PreprocError(Errors.WafError): pass -FILE_CACHE_SIZE=100000 -LINE_CACHE_SIZE=100000 -POPFILE='-' -recursion_limit=150 -go_absolute=False -standard_includes=['/usr/local/include','/usr/include'] + +FILE_CACHE_SIZE = 100000 +LINE_CACHE_SIZE = 100000 + +POPFILE = '-' +"Constant representing a special token used in :py:meth:`waflib.Tools.c_preproc.c_parser.start` iteration to switch to a header read previously" + +recursion_limit = 150 +"Limit on the amount of files to read in the dependency scanner" + +go_absolute = False +"Set to True to track headers on files in /usr/include, else absolute paths are ignored (but it becomes very slow)" + +standard_includes = ['/usr/local/include', '/usr/include'] if Utils.is_win32: - standard_includes=[] -use_trigraphs=0 -strict_quotes=0 -g_optrans={'not':'!','not_eq':'!','and':'&&','and_eq':'&=','or':'||','or_eq':'|=','xor':'^','xor_eq':'^=','bitand':'&','bitor':'|','compl':'~',} -re_lines=re.compile('^[ \t]*(?:#|%:)[ \t]*(ifdef|ifndef|if|else|elif|endif|include|import|define|undef|pragma)[ \t]*(.*)\r*$',re.IGNORECASE|re.MULTILINE) -re_mac=re.compile("^[a-zA-Z_]\w*") -re_fun=re.compile('^[a-zA-Z_][a-zA-Z0-9_]*[(]') -re_pragma_once=re.compile('^\s*once\s*',re.IGNORECASE) -re_nl=re.compile('\\\\\r*\n',re.MULTILINE) -re_cpp=re.compile(r'//.*?$|/\*.*?\*/|\'(?:\\.|[^\\\'])*\'|"(?:\\.|[^\\"])*"',re.DOTALL|re.MULTILINE) -trig_def=[('??'+a,b)for a,b in zip("=-/!'()<>",r'#~\|^[]{}')] -chr_esc={'0':0,'a':7,'b':8,'t':9,'n':10,'f':11,'v':12,'r':13,'\\':92,"'":39} -NUM='i' -OP='O' -IDENT='T' -STR='s' -CHAR='c' -tok_types=[NUM,STR,IDENT,OP] -exp_types=[r"""0[xX](?P<hex>[a-fA-F0-9]+)(?P<qual1>[uUlL]*)|L*?'(?P<char>(\\.|[^\\'])+)'|(?P<n1>\d+)[Ee](?P<exp0>[+-]*?\d+)(?P<float0>[fFlL]*)|(?P<n2>\d*\.\d+)([Ee](?P<exp1>[+-]*?\d+))?(?P<float1>[fFlL]*)|(?P<n4>\d+\.\d*)([Ee](?P<exp2>[+-]*?\d+))?(?P<float2>[fFlL]*)|(?P<oct>0*)(?P<n0>\d+)(?P<qual2>[uUlL]*)""",r'L?"([^"\\]|\\.)*"',r'[a-zA-Z_]\w*',r'%:%:|<<=|>>=|\.\.\.|<<|<%|<:|<=|>>|>=|\+\+|\+=|--|->|-=|\*=|/=|%:|%=|%>|==|&&|&=|\|\||\|=|\^=|:>|!=|##|[\(\)\{\}\[\]<>\?\|\^\*\+&=:!#;,%/\-\?\~\.]',] -re_clexer=re.compile('|'.join(["(?P<%s>%s)"%(name,part)for name,part in zip(tok_types,exp_types)]),re.M) -accepted='a' -ignored='i' -undefined='u' -skipped='s' + standard_includes = [] + +use_trigraphs = 0 +"""Apply trigraph rules (False by default)""" + +# obsolete, do not use +strict_quotes = 
0 + +g_optrans = { +'not':'!', +'not_eq':'!', +'and':'&&', +'and_eq':'&=', +'or':'||', +'or_eq':'|=', +'xor':'^', +'xor_eq':'^=', +'bitand':'&', +'bitor':'|', +'compl':'~', +} +"""Operators such as and/or/xor for c++. Set an empty dict to disable.""" + +# ignore #warning and #error +re_lines = re.compile( + '^[ \t]*(?:#|%:)[ \t]*(ifdef|ifndef|if|else|elif|endif|include|import|define|undef|pragma)[ \t]*(.*)\r*$', + re.IGNORECASE | re.MULTILINE) +"""Match preprocessor directives""" + +re_mac = re.compile(r"^[a-zA-Z_]\w*") +"""Match macro definitions""" + +re_fun = re.compile('^[a-zA-Z_][a-zA-Z0-9_]*[(]') +"""Match macro functions""" + +re_pragma_once = re.compile(r'^\s*once\s*', re.IGNORECASE) +"""Match #pragma once statements""" + +re_nl = re.compile('\\\\\r*\n', re.MULTILINE) +"""Match newlines""" + +re_cpp = re.compile(r'//.*?$|/\*.*?\*/|\'(?:\\.|[^\\\'])*\'|"(?:\\.|[^\\"])*"', re.DOTALL | re.MULTILINE ) +"""Filter C/C++ comments""" + +trig_def = [('??'+a, b) for a, b in zip("=-/!'()<>", r'#~\|^[]{}')] +"""Trigraph definitions""" + +chr_esc = {'0':0, 'a':7, 'b':8, 't':9, 'n':10, 'f':11, 'v':12, 'r':13, '\\':92, "'":39} +"""Escape characters""" + +NUM = 'i' +"""Number token""" + +OP = 'O' +"""Operator token""" + +IDENT = 'T' +"""Identifier token""" + +STR = 's' +"""String token""" + +CHAR = 'c' +"""Character token""" + +tok_types = [NUM, STR, IDENT, OP] +"""Token types""" + +exp_types = [ + r"""0[xX](?P<hex>[a-fA-F0-9]+)(?P<qual1>[uUlL]*)|L*?'(?P<char>(\\.|[^\\'])+)'|(?P<n1>\d+)[Ee](?P<exp0>[+-]*?\d+)(?P<float0>[fFlL]*)|(?P<n2>\d*\.\d+)([Ee](?P<exp1>[+-]*?\d+))?(?P<float1>[fFlL]*)|(?P<n4>\d+\.\d*)([Ee](?P<exp2>[+-]*?\d+))?(?P<float2>[fFlL]*)|(?P<oct>0*)(?P<n0>\d+)(?P<qual2>[uUlL]*)""", + r'L?"([^"\\]|\\.)*"', + r'[a-zA-Z_]\w*', + r'%:%:|<<=|>>=|\.\.\.|<<|<%|<:|<=|>>|>=|\+\+|\+=|--|->|-=|\*=|/=|%:|%=|%>|==|&&|&=|\|\||\|=|\^=|:>|!=|##|[\(\)\{\}\[\]<>\?\|\^\*\+&=:!#;,%/\-\?\~\.]', +] +"""Expression types""" + +re_clexer = re.compile('|'.join(["(?P<%s>%s)" % (name, part) for name, part in zip(tok_types, exp_types)]), re.M) +"""Match expressions into tokens""" + +accepted = 'a' +"""Parser state is *accepted*""" + +ignored = 'i' +"""Parser state is *ignored*, for example preprocessor lines in an #if 0 block""" + +undefined = 'u' +"""Parser state is *undefined* at the moment""" + +skipped = 's' +"""Parser state is *skipped*, for example preprocessor lines in a #elif 0 block""" + def repl(m): - s=m.group() - if s[0]=='/': - return' ' + """Replace function used with :py:attr:`waflib.Tools.c_preproc.re_cpp`""" + s = m.group() + if s[0] == '/': + return ' ' return s -prec={} -ops=['* / %','+ -','<< >>','< <= >= >','== !=','& | ^','&& ||',','] -for x,syms in enumerate(ops): + +prec = {} +""" +Operator precedence rules required for parsing expressions of the form:: + + #if 1 && 2 != 0 +""" +ops = ['* / %', '+ -', '<< >>', '< <= >= >', '== !=', '& | ^', '&& ||', ','] +for x, syms in enumerate(ops): for u in syms.split(): - prec[u]=x + prec[u] = x + +def reduce_nums(val_1, val_2, val_op): + """ + Apply arithmetic rules to compute a result + + :param val_1: input parameter + :type val_1: int or string + :param val_2: input parameter + :type val_2: int or string + :param val_op: C operator in *+*, */*, *-*, etc + :type val_op: string + :rtype: int + """ + #print val_1, val_2, val_op + + # now perform the operation, make certain a and b are numeric try: - a=0+val_1 + a = 0 + val_1 except TypeError: - a=int(val_1) + a = int(val_1) try: - b=0+val_2 + b = 0 + val_2 except TypeError: - b=int(val_2) + b = int(val_2) + + d = val_op + if d == 
'%': + c = a % b elif d=='+': - c=a+b + c = a + b elif d=='-': - c=a-b + c = a - b elif d=='*': - c=a*b + c = a * b elif d=='/': - c=a/b + c = a / b elif d=='^': - c=a^b + c = a ^ b elif d=='==': - c=int(a==b) - elif d=='|'or d=='bitor': - c=a|b - elif d=='||'or d=='or': - c=int(a or b) - elif d=='&'or d=='bitand': - c=a&b - elif d=='&&'or d=='and': - c=int(a and b) - elif d=='!='or d=='not_eq': - c=int(a!=b) - elif d=='^'or d=='xor': - c=int(a^b) + c = int(a == b) + elif d=='|' or d == 'bitor': + c = a | b + elif d=='||' or d == 'or' : + c = int(a or b) + elif d=='&' or d == 'bitand': + c = a & b + elif d=='&&' or d == 'and': + c = int(a and b) + elif d=='!=' or d == 'not_eq': + c = int(a != b) + elif d=='^' or d == 'xor': + c = int(a^b) elif d=='<=': - c=int(a<=b) + c = int(a <= b) elif d=='<': - c=int(a<b) + c = int(a < b) elif d=='>': - c=int(a>b) + c = int(a > b) elif d=='>=': - c=int(a>=b) + c = int(a >= b) elif d=='<<': - c=a<<b + c = a << b elif d=='>>': - c=a>>b + c = a >> b else: - c=0 + c = 0 return c + def get_num(lst): + """ + Try to obtain a number from a list of tokens. The token types are defined in :py:attr:`waflib.Tools.c_preproc.tok_types`. + + :param lst: list of preprocessor tokens + :type lst: list of tuple (tokentype, value) + :return: a pair containing the number and the rest of the list + :rtype: tuple(value, list) + """ if not lst: raise PreprocError('empty list for get_num') - (p,v)=lst[0] - if p==OP: - if v=='(': - count_par=1 - i=1 - while i 2+1 -> 3 + + :param lst: list of tokens + :type lst: list of tuple(token, value) + :return: the value and the remaining tokens + :rtype: value, list + """ + if not lst: raise PreprocError('empty list for get_term') - num,lst=get_num(lst) + num, lst = get_num(lst) if not lst: - return(num,[]) + return (num, []) - (p,v)=lst[0] - if p==OP: - if v==',': + (p, v) = lst[0] + if p == OP: + if v == ',': + # skip return get_term(lst[1:]) - elif v=='?': - count_par=0 - i=1 - while i=prec[v]: - num2=reduce_nums(num,num2,v) - return get_term([(NUM,num2)]+lst) + # no more tokens to process + num2 = reduce_nums(num, num2, v) + return get_term([(NUM, num2)] + lst) + + # operator precedence + p2, v2 = lst[0] + if p2 != OP: + raise PreprocError('op expected %r' % lst) + + if prec[v2] >= prec[v]: + num2 = reduce_nums(num, num2, v) + return get_term([(NUM, num2)] + lst) else: - num3,lst=get_num(lst[1:]) - num3=reduce_nums(num2,num3,v2) - return get_term([(NUM,num),(p,v),(NUM,num3)]+lst) - raise PreprocError('cannot reduce %r'%lst) + num3, lst = get_num(lst[1:]) + num3 = reduce_nums(num2, num3, v2) + return get_term([(NUM, num), (p, v), (NUM, num3)] + lst) + + + raise PreprocError('cannot reduce %r' % lst) + def reduce_eval(lst): - num,lst=get_term(lst) - return(NUM,num) + """ + Take a list of tokens and output true or false for #if/#elif conditions. 
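+ A sketch of the expected result, with the token list produced by :py:func:`waflib.Tools.c_preproc.tokenize`:: + + reduce_eval(tokenize('1 && 2 != 0')) # -> ('i', 1)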
+ + :param lst: a list of tokens + :type lst: list of tuple(token, value) + :return: a token + :rtype: tuple(NUM, int) + """ + num, lst = get_term(lst) + return (NUM, num) + def stringize(lst): - lst=[str(v2)for(p2,v2)in lst] - return"".join(lst) -def paste_tokens(t1,t2): - p1=None - if t1[0]==OP and t2[0]==OP: - p1=OP - elif t1[0]==IDENT and(t2[0]==IDENT or t2[0]==NUM): - p1=IDENT - elif t1[0]==NUM and t2[0]==NUM: - p1=NUM + """ + Merge a list of tokens into a string + + :param lst: a list of tokens + :type lst: list of tuple(token, value) + :rtype: string + """ + lst = [str(v2) for (p2, v2) in lst] + return "".join(lst) + +def paste_tokens(t1, t2): + """ + Token pasting works between identifiers, particular operators, and identifiers and numbers:: + + a ## b -> ab + > ## = -> >= + a ## 2 -> a2 + + :param t1: token + :type t1: tuple(type, value) + :param t2: token + :type t2: tuple(type, value) + """ + p1 = None + if t1[0] == OP and t2[0] == OP: + p1 = OP + elif t1[0] == IDENT and (t2[0] == IDENT or t2[0] == NUM): + p1 = IDENT + elif t1[0] == NUM and t2[0] == NUM: + p1 = NUM if not p1: - raise PreprocError('tokens do not make a valid paste %r and %r'%(t1,t2)) - return(p1,t1[1]+t2[1]) -def reduce_tokens(lst,defs,ban=[]): - i=0 - while i=len(lst): - raise PreprocError('expected ( after %r (got nothing)'%v) - (p2,v2)=lst[i] - if p2!=OP or v2!='(': - raise PreprocError('expected ( after %r'%v) + + if i >= len(lst): + raise PreprocError('expected ( after %r (got nothing)' % v) + + (p2, v2) = lst[i] + if p2 != OP or v2 != '(': + raise PreprocError('expected ( after %r' % v) + del lst[i] - one_param=[] - count_paren=0 - while i1: - (p3,v3)=accu[-1] - (p4,v4)=accu[-2] - if v3=='##': + (p3, v3) = accu[-1] + (p4, v4) = accu[-2] + if v3 == '##': + # remove the token paste accu.pop() - if v4==','and pt1: - return(v,[[],t[1:]]) + (p, v) = t[0] + if len(t) > 1: + return (v, [[], t[1:]]) else: - return(v,[[],[('T','')]]) -re_include=re.compile('^\s*(<(?:.*)>|"(?:.*)")') -def extract_include(txt,defs): - m=re_include.search(txt) + # empty define, assign an empty token + return (v, [[], [('T','')]]) + +re_include = re.compile(r'^\s*(<(?:.*)>|"(?:.*)")') +def extract_include(txt, defs): + """ + Process a line in the form:: + + #include foo + + :param txt: include line to process + :type txt: string + :param defs: macro definitions + :type defs: dict + :return: the file name + :rtype: string + """ + m = re_include.search(txt) if m: - txt=m.group(1) - return txt[0],txt[1:-1] - toks=tokenize(txt) - reduce_tokens(toks,defs,['waf_include']) + txt = m.group(1) + return txt[0], txt[1:-1] + + # perform preprocessing and look at the result, it must match an include + toks = tokenize(txt) + reduce_tokens(toks, defs, ['waf_include']) + if not toks: - raise PreprocError('could not parse include %r'%txt) - if len(toks)==1: - if toks[0][0]==STR: - return'"',toks[0][1] + raise PreprocError('could not parse include %r' % txt) + + if len(toks) == 1: + if toks[0][0] == STR: + return '"', toks[0][1] else: - if toks[0][1]=='<'and toks[-1][1]=='>': - ret='<',stringize(toks).lstrip('<').rstrip('>') + if toks[0][1] == '<' and toks[-1][1] == '>': + ret = '<', stringize(toks).lstrip('<').rstrip('>') return ret - raise PreprocError('could not parse include %r'%txt) + + raise PreprocError('could not parse include %r' % txt) + def parse_char(txt): + """ + Parse a c character + + :param txt: character to parse + :type txt: string + :return: a character literal + :rtype: string + """ + if not txt: raise PreprocError('attempted to parse 
a null char') - if txt[0]!='\\': + if txt[0] != '\\': return ord(txt) - c=txt[1] - if c=='x': - if len(txt)==4 and txt[3]in string.hexdigits: - return int(txt[2:],16) - return int(txt[2:],16) + c = txt[1] + if c == 'x': + if len(txt) == 4 and txt[3] in string.hexdigits: + return int(txt[2:], 16) + return int(txt[2:], 16) elif c.isdigit(): - if c=='0'and len(txt)==2: + if c == '0' and len(txt)==2: return 0 - for i in 3,2,1: - if len(txt)>i and txt[1:1+i].isdigit(): - return(1+i,int(txt[1:1+i],8)) + for i in 3, 2, 1: + if len(txt) > i and txt[1:1+i].isdigit(): + return (1+i, int(txt[1:1+i], 8)) else: try: return chr_esc[c] except KeyError: - raise PreprocError('could not parse char literal %r'%txt) + raise PreprocError('could not parse char literal %r' % txt) + def tokenize(s): - return tokenize_private(s)[:] + """ + Convert a string into a list of tokens (shlex.split does not apply to c/c++/d) + + :param s: input to tokenize + :type s: string + :return: a list of tokens + :rtype: list of tuple(token, value) + """ + return tokenize_private(s)[:] # force a copy of the results + def tokenize_private(s): - ret=[] + ret = [] for match in re_clexer.finditer(s): - m=match.group + m = match.group for name in tok_types: - v=m(name) + v = m(name) if v: - if name==IDENT: + if name == IDENT: if v in g_optrans: - name=OP - elif v.lower()=="true": - v=1 - name=NUM - elif v.lower()=="false": - v=0 - name=NUM - elif name==NUM: + name = OP + elif v.lower() == "true": + v = 1 + name = NUM + elif v.lower() == "false": + v = 0 + name = NUM + elif name == NUM: if m('oct'): - v=int(v,8) + v = int(v, 8) elif m('hex'): - v=int(m('hex'),16) + v = int(m('hex'), 16) elif m('n0'): - v=m('n0') + v = m('n0') else: - v=m('char') + v = m('char') if v: - v=parse_char(v) + v = parse_char(v) else: - v=m('n2')or m('n4') - elif name==OP: - if v=='%:': - v='#' - elif v=='%:%:': - v='##' - elif name==STR: - v=v[1:-1] - ret.append((name,v)) + v = m('n2') or m('n4') + elif name == OP: + if v == '%:': + v = '#' + elif v == '%:%:': + v = '##' + elif name == STR: + # remove the quotes around the string + v = v[1:-1] + ret.append((name, v)) break return ret + def format_defines(lst): - ret=[] + ret = [] for y in lst: if y: - pos=y.find('=') - if pos==-1: + pos = y.find('=') + if pos == -1: + # "-DFOO" should give "#define FOO 1" ret.append(y) - elif pos>0: - ret.append('%s %s'%(y[:pos],y[pos+1:])) + elif pos > 0: + # all others are assumed to be -DX=Y + ret.append('%s %s' % (y[:pos], y[pos+1:])) else: - raise ValueError('Invalid define expression %r'%y) + raise ValueError('Invalid define expression %r' % y) return ret + class c_parser(object): - def __init__(self,nodepaths=None,defines=None): - self.lines=[] + """ + Used by :py:func:`waflib.Tools.c_preproc.scan` to parse c/h files. Note that by default, + only project headers are parsed. 
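+ + A minimal usage sketch, mirroring :py:func:`waflib.Tools.c_preproc.scan` (the include nodes, source node and environment come from a build context):: + + parser = c_parser(nodepaths=include_nodes) + parser.start(source_node, env) + headers, names = parser.nodes, parser.names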
+ """ + def __init__(self, nodepaths=None, defines=None): + self.lines = [] + """list of lines read""" + if defines is None: - self.defs={} + self.defs = {} else: - self.defs=dict(defines) - self.state=[] - self.count_files=0 - self.currentnode_stack=[] - self.nodepaths=nodepaths or[] - self.nodes=[] - self.names=[] - self.curfile='' - self.ban_includes=set() - self.listed=set() - def cached_find_resource(self,node,filename): + self.defs = dict(defines) # make a copy + self.state = [] + + self.count_files = 0 + self.currentnode_stack = [] + + self.nodepaths = nodepaths or [] + """Include paths""" + + self.nodes = [] + """List of :py:class:`waflib.Node.Node` found so far""" + + self.names = [] + """List of file names that could not be matched by any file""" + + self.curfile = '' + """Current file""" + + self.ban_includes = set() + """Includes that must not be read (#pragma once)""" + + self.listed = set() + """Include nodes/names already listed to avoid duplicates in self.nodes/self.names""" + + def cached_find_resource(self, node, filename): + """ + Find a file from the input directory + + :param node: directory + :type node: :py:class:`waflib.Node.Node` + :param filename: header to find + :type filename: string + :return: the node if found, or None + :rtype: :py:class:`waflib.Node.Node` + """ try: - cache=node.ctx.preproc_cache_node + cache = node.ctx.preproc_cache_node except AttributeError: - cache=node.ctx.preproc_cache_node=Utils.lru_cache(FILE_CACHE_SIZE) - key=(node,filename) + cache = node.ctx.preproc_cache_node = Utils.lru_cache(FILE_CACHE_SIZE) + + key = (node, filename) try: return cache[key] except KeyError: - ret=node.find_resource(filename) + ret = node.find_resource(filename) if ret: - if getattr(ret,'children',None): - ret=None + if getattr(ret, 'children', None): + ret = None elif ret.is_child_of(node.ctx.bldnode): - tmp=node.ctx.srcnode.search_node(ret.path_from(node.ctx.bldnode)) - if tmp and getattr(tmp,'children',None): - ret=None - cache[key]=ret + tmp = node.ctx.srcnode.search_node(ret.path_from(node.ctx.bldnode)) + if tmp and getattr(tmp, 'children', None): + ret = None + cache[key] = ret return ret - def tryfind(self,filename,kind='"',env=None): + + def tryfind(self, filename, kind='"', env=None): + """ + Try to obtain a node from the filename based from the include paths. Will add + the node found to :py:attr:`waflib.Tools.c_preproc.c_parser.nodes` or the file name to + :py:attr:`waflib.Tools.c_preproc.c_parser.names` if no corresponding file is found. Called by + :py:attr:`waflib.Tools.c_preproc.c_parser.start`. + + :param filename: header to find + :type filename: string + :return: the node if found + :rtype: :py:class:`waflib.Node.Node` + """ if filename.endswith('.moc'): + # we could let the qt4 module use a subclass, but then the function "scan" below must be duplicated + # in the qt4 and in the qt5 classes. So we have two lines here and it is sufficient. 
self.names.append(filename) return None - self.curfile=filename - found=None - if kind=='"': + + self.curfile = filename + + found = None + if kind == '"': if env.MSVC_VERSION: for n in reversed(self.currentnode_stack): - found=self.cached_find_resource(n,filename) + found = self.cached_find_resource(n, filename) if found: break else: - found=self.cached_find_resource(self.currentnode_stack[-1],filename) + found = self.cached_find_resource(self.currentnode_stack[-1], filename) + if not found: for n in self.nodepaths: - found=self.cached_find_resource(n,filename) + found = self.cached_find_resource(n, filename) if found: break - listed=self.listed + + listed = self.listed if found and not found in self.ban_includes: if found not in listed: listed.add(found) @@ -548,125 +908,184 @@ listed.add(filename) self.names.append(filename) return found - def filter_comments(self,node): - code=node.read() + + def filter_comments(self, node): + """ + Filter the comments from a c/h file, and return the preprocessor lines. + The regexps :py:attr:`waflib.Tools.c_preproc.re_cpp`, :py:attr:`waflib.Tools.c_preproc.re_nl` and :py:attr:`waflib.Tools.c_preproc.re_lines` are used internally. + + :return: the preprocessor directives as a list of (keyword, line) + :rtype: a list of string pairs + """ + # return a list of tuples : keyword, line + code = node.read() if use_trigraphs: - for(a,b)in trig_def: - code=code.split(a).join(b) - code=re_nl.sub('',code) - code=re_cpp.sub(repl,code) + for (a, b) in trig_def: + code = code.split(a).join(b) + code = re_nl.sub('', code) + code = re_cpp.sub(repl, code) return re_lines.findall(code) - def parse_lines(self,node): + + def parse_lines(self, node): try: - cache=node.ctx.preproc_cache_lines + cache = node.ctx.preproc_cache_lines except AttributeError: - cache=node.ctx.preproc_cache_lines=Utils.lru_cache(LINE_CACHE_SIZE) + cache = node.ctx.preproc_cache_lines = Utils.lru_cache(LINE_CACHE_SIZE) try: return cache[node] except KeyError: - cache[node]=lines=self.filter_comments(node) - lines.append((POPFILE,'')) + cache[node] = lines = self.filter_comments(node) + lines.append((POPFILE, '')) lines.reverse() return lines - def addlines(self,node): + + def addlines(self, node): + """ + Add the lines from a header in the list of preprocessor lines to parse + + :param node: header + :type node: :py:class:`waflib.Node.Node` + """ + self.currentnode_stack.append(node.parent) - self.count_files+=1 - if self.count_files>recursion_limit: + + self.count_files += 1 + if self.count_files > recursion_limit: + # issue #812 raise PreprocError('recursion limit exceeded') + if Logs.verbose: - Logs.debug('preproc: reading file %r',node) + Logs.debug('preproc: reading file %r', node) try: - lines=self.parse_lines(node) + lines = self.parse_lines(node) except EnvironmentError: - raise PreprocError('could not read the file %r'%node) + raise PreprocError('could not read the file %r' % node) except Exception: - if Logs.verbose>0: - Logs.error('parsing %r failed %s',node,traceback.format_exc()) + if Logs.verbose > 0: + Logs.error('parsing %r failed %s', node, traceback.format_exc()) else: self.lines.extend(lines) - def start(self,node,env): - Logs.debug('preproc: scanning %s (in %s)',node.name,node.parent.name) - self.current_file=node + + def start(self, node, env): + """ + Preprocess a source file to obtain the dependencies, which are accumulated to :py:attr:`waflib.Tools.c_preproc.c_parser.nodes` + and :py:attr:`waflib.Tools.c_preproc.c_parser.names`. 
+ + :param node: source file + :type node: :py:class:`waflib.Node.Node` + :param env: config set containing additional defines to take into account + :type env: :py:class:`waflib.ConfigSet.ConfigSet` + """ + Logs.debug('preproc: scanning %s (in %s)', node.name, node.parent.name) + + self.current_file = node self.addlines(node) + + # macros may be defined on the command-line, so they must be parsed as if they were part of the file if env.DEFINES: - lst=format_defines(env.DEFINES) + lst = format_defines(env.DEFINES) lst.reverse() - self.lines.extend([('define',x)for x in lst]) + self.lines.extend([('define', x) for x in lst]) + while self.lines: - (token,line)=self.lines.pop() - if token==POPFILE: - self.count_files-=1 + (token, line) = self.lines.pop() + if token == POPFILE: + self.count_files -= 1 self.currentnode_stack.pop() continue + try: - state=self.state - if token[:2]=='if': + state = self.state + + # make certain we define the state if we are about to enter in an if block + if token[:2] == 'if': state.append(undefined) - elif token=='endif': + elif token == 'endif': state.pop() - if token[0]!='e': + + # skip lines when in a dead 'if' branch, wait for the endif + if token[0] != 'e': if skipped in self.state or ignored in self.state: continue - if token=='if': - ret=eval_macro(tokenize(line),self.defs) + + if token == 'if': + ret = eval_macro(tokenize(line), self.defs) if ret: - state[-1]=accepted + state[-1] = accepted else: - state[-1]=ignored - elif token=='ifdef': - m=re_mac.match(line) - if m and m.group()in self.defs: - state[-1]=accepted + state[-1] = ignored + elif token == 'ifdef': + m = re_mac.match(line) + if m and m.group() in self.defs: + state[-1] = accepted else: - state[-1]=ignored - elif token=='ifndef': - m=re_mac.match(line) - if m and m.group()in self.defs: - state[-1]=ignored + state[-1] = ignored + elif token == 'ifndef': + m = re_mac.match(line) + if m and m.group() in self.defs: + state[-1] = ignored else: - state[-1]=accepted - elif token=='include'or token=='import': - (kind,inc)=extract_include(line,self.defs) - self.current_file=self.tryfind(inc,kind,env) - if token=='import': + state[-1] = accepted + elif token == 'include' or token == 'import': + (kind, inc) = extract_include(line, self.defs) + self.current_file = self.tryfind(inc, kind, env) + if token == 'import': self.ban_includes.add(self.current_file) - elif token=='elif': - if state[-1]==accepted: - state[-1]=skipped - elif state[-1]==ignored: - if eval_macro(tokenize(line),self.defs): - state[-1]=accepted - elif token=='else': - if state[-1]==accepted: - state[-1]=skipped - elif state[-1]==ignored: - state[-1]=accepted - elif token=='define': + elif token == 'elif': + if state[-1] == accepted: + state[-1] = skipped + elif state[-1] == ignored: + if eval_macro(tokenize(line), self.defs): + state[-1] = accepted + elif token == 'else': + if state[-1] == accepted: + state[-1] = skipped + elif state[-1] == ignored: + state[-1] = accepted + elif token == 'define': try: - self.defs[self.define_name(line)]=line + self.defs[self.define_name(line)] = line except AttributeError: - raise PreprocError('Invalid define line %r'%line) - elif token=='undef': - m=re_mac.match(line) - if m and m.group()in self.defs: + raise PreprocError('Invalid define line %r' % line) + elif token == 'undef': + m = re_mac.match(line) + if m and m.group() in self.defs: self.defs.__delitem__(m.group()) - elif token=='pragma': + #print "undef %s" % name + elif token == 'pragma': if re_pragma_once.match(line.lower()): 
self.ban_includes.add(self.current_file) except Exception as e: if Logs.verbose: - Logs.debug('preproc: line parsing failed (%s): %s %s',e,line,traceback.format_exc()) - def define_name(self,line): + Logs.debug('preproc: line parsing failed (%s): %s %s', e, line, traceback.format_exc()) + + def define_name(self, line): + """ + :param line: define line + :type line: string + :rtype: string + :return: the define name + """ return re_mac.match(line).group() + def scan(task): + """ + Get the dependencies using a c/c++ preprocessor, this is required for finding dependencies of the kind:: + + #include some_macro() + + This function is bound as a task method on :py:class:`waflib.Tools.c.c` and :py:class:`waflib.Tools.cxx.cxx` for example + """ try: - incn=task.generator.includes_nodes + incn = task.generator.includes_nodes except AttributeError: - raise Errors.WafError('%r is missing a feature such as "c", "cxx" or "includes": '%task.generator) + raise Errors.WafError('%r is missing a feature such as "c", "cxx" or "includes": ' % task.generator) + if go_absolute: - nodepaths=incn+[task.generator.bld.root.find_dir(x)for x in standard_includes] + nodepaths = incn + [task.generator.bld.root.find_dir(x) for x in standard_includes] else: - nodepaths=[x for x in incn if x.is_child_of(x.ctx.srcnode)or x.is_child_of(x.ctx.bldnode)] - tmp=c_parser(nodepaths) - tmp.start(task.inputs[0],task.env) - return(tmp.nodes,tmp.names) + nodepaths = [x for x in incn if x.is_child_of(x.ctx.srcnode) or x.is_child_of(x.ctx.bldnode)] + + tmp = c_parser(nodepaths) + tmp.start(task.inputs[0], task.env) + return (tmp.nodes, tmp.names) diff -Nru lilv-0.24.4~dfsg0/waflib/Tools/c.py lilv-0.24.6/waflib/Tools/c.py --- lilv-0.24.4~dfsg0/waflib/Tools/c.py 2018-06-27 05:22:05.000000000 +0000 +++ lilv-0.24.6/waflib/Tools/c.py 2019-06-06 20:19:08.000000000 +0000 @@ -1,26 +1,39 @@ -#! /usr/bin/env python +#!/usr/bin/env python # encoding: utf-8 -# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file +# Thomas Nagy, 2006-2018 (ita) -from waflib import TaskGen,Task +"Base for c programs/libraries" + +from waflib import TaskGen, Task from waflib.Tools import c_preproc -from waflib.Tools.ccroot import link_task,stlink_task +from waflib.Tools.ccroot import link_task, stlink_task + @TaskGen.extension('.c') -def c_hook(self,node): +def c_hook(self, node): + "Binds the c file extensions create :py:class:`waflib.Tools.c.c` instances" if not self.env.CC and self.env.CXX: - return self.create_compiled_task('cxx',node) - return self.create_compiled_task('c',node) + return self.create_compiled_task('cxx', node) + return self.create_compiled_task('c', node) + class c(Task.Task): - run_str='${CC} ${ARCH_ST:ARCH} ${CFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${CC_SRC_F}${SRC} ${CC_TGT_F}${TGT[0].abspath()} ${CPPFLAGS}' - vars=['CCDEPS'] - ext_in=['.h'] - scan=c_preproc.scan + "Compiles C files into object files" + run_str = '${CC} ${ARCH_ST:ARCH} ${CFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${CC_SRC_F}${SRC} ${CC_TGT_F}${TGT[0].abspath()} ${CPPFLAGS}' + vars = ['CCDEPS'] # unused variable to depend on, just in case + ext_in = ['.h'] # set the build order easily by using ext_out=['.h'] + scan = c_preproc.scan + class cprogram(link_task): - run_str='${LINK_CC} ${LINKFLAGS} ${CCLNK_SRC_F}${SRC} ${CCLNK_TGT_F}${TGT[0].abspath()} ${RPATH_ST:RPATH} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${FRAMEWORK_ST:FRAMEWORK} ${ARCH_ST:ARCH} ${STLIB_MARKER} ${STLIBPATH_ST:STLIBPATH} ${STLIB_ST:STLIB} ${SHLIB_MARKER} ${LIBPATH_ST:LIBPATH} ${LIB_ST:LIB} ${LDFLAGS}' - ext_out=['.bin'] - vars=['LINKDEPS'] - inst_to='${BINDIR}' + "Links object files into c programs" + run_str = '${LINK_CC} ${LINKFLAGS} ${CCLNK_SRC_F}${SRC} ${CCLNK_TGT_F}${TGT[0].abspath()} ${RPATH_ST:RPATH} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${FRAMEWORK_ST:FRAMEWORK} ${ARCH_ST:ARCH} ${STLIB_MARKER} ${STLIBPATH_ST:STLIBPATH} ${STLIB_ST:STLIB} ${SHLIB_MARKER} ${LIBPATH_ST:LIBPATH} ${LIB_ST:LIB} ${LDFLAGS}' + ext_out = ['.bin'] + vars = ['LINKDEPS'] + inst_to = '${BINDIR}' + class cshlib(cprogram): - inst_to='${LIBDIR}' + "Links object files into c shared libraries" + inst_to = '${LIBDIR}' + class cstlib(stlink_task): - pass + "Links object files into a c static libraries" + pass # do not remove + diff -Nru lilv-0.24.4~dfsg0/waflib/Tools/cs.py lilv-0.24.6/waflib/Tools/cs.py --- lilv-0.24.4~dfsg0/waflib/Tools/cs.py 2018-06-27 05:22:05.000000000 +0000 +++ lilv-0.24.6/waflib/Tools/cs.py 2019-06-06 20:19:08.000000000 +0000 @@ -1,113 +1,211 @@ -#! /usr/bin/env python +#!/usr/bin/env python # encoding: utf-8 -# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file +# Thomas Nagy, 2006-2018 (ita) -from waflib import Utils,Task,Options,Errors -from waflib.TaskGen import before_method,after_method,feature +""" +C# support. 
A simple example:: + + def configure(conf): + conf.load('cs') + def build(bld): + bld(features='cs', source='main.cs', gen='foo') + +Note that the configuration may compile C# snippets:: + + FRAG = ''' + namespace Moo { + public class Test { public static int Main(string[] args) { return 0; } } + }''' + def configure(conf): + conf.check(features='cs', fragment=FRAG, compile_filename='test.cs', gen='test.exe', + bintype='exe', csflags=['-pkg:gtk-sharp-2.0'], msg='Checking for Gtksharp support') +""" + +from waflib import Utils, Task, Options, Errors +from waflib.TaskGen import before_method, after_method, feature from waflib.Tools import ccroot from waflib.Configure import conf -ccroot.USELIB_VARS['cs']=set(['CSFLAGS','ASSEMBLIES','RESOURCES']) -ccroot.lib_patterns['csshlib']=['%s'] + +ccroot.USELIB_VARS['cs'] = set(['CSFLAGS', 'ASSEMBLIES', 'RESOURCES']) +ccroot.lib_patterns['csshlib'] = ['%s'] + @feature('cs') @before_method('process_source') def apply_cs(self): - cs_nodes=[] - no_nodes=[] + """ + Create a C# task bound to the attribute *cs_task*. There can be only one C# task by task generator. + """ + cs_nodes = [] + no_nodes = [] for x in self.to_nodes(self.source): if x.name.endswith('.cs'): cs_nodes.append(x) else: no_nodes.append(x) - self.source=no_nodes - bintype=getattr(self,'bintype',self.gen.endswith('.dll')and'library'or'exe') - self.cs_task=tsk=self.create_task('mcs',cs_nodes,self.path.find_or_declare(self.gen)) - tsk.env.CSTYPE='/target:%s'%bintype - tsk.env.OUT='/out:%s'%tsk.outputs[0].abspath() - self.env.append_value('CSFLAGS','/platform:%s'%getattr(self,'platform','anycpu')) - inst_to=getattr(self,'install_path',bintype=='exe'and'${BINDIR}'or'${LIBDIR}') + self.source = no_nodes + + bintype = getattr(self, 'bintype', self.gen.endswith('.dll') and 'library' or 'exe') + self.cs_task = tsk = self.create_task('mcs', cs_nodes, self.path.find_or_declare(self.gen)) + tsk.env.CSTYPE = '/target:%s' % bintype + tsk.env.OUT = '/out:%s' % tsk.outputs[0].abspath() + self.env.append_value('CSFLAGS', '/platform:%s' % getattr(self, 'platform', 'anycpu')) + + inst_to = getattr(self, 'install_path', bintype=='exe' and '${BINDIR}' or '${LIBDIR}') if inst_to: - mod=getattr(self,'chmod',bintype=='exe'and Utils.O755 or Utils.O644) - self.install_task=self.add_install_files(install_to=inst_to,install_from=self.cs_task.outputs[:],chmod=mod) + # note: we are making a copy, so the files added to cs_task.outputs won't be installed automatically + mod = getattr(self, 'chmod', bintype=='exe' and Utils.O755 or Utils.O644) + self.install_task = self.add_install_files(install_to=inst_to, install_from=self.cs_task.outputs[:], chmod=mod) + @feature('cs') @after_method('apply_cs') def use_cs(self): - names=self.to_list(getattr(self,'use',[])) - get=self.bld.get_tgen_by_name + """ + C# applications honor the **use** keyword:: + + def build(bld): + bld(features='cs', source='My.cs', bintype='library', gen='my.dll', name='mylib') + bld(features='cs', source='Hi.cs', includes='.', bintype='exe', gen='hi.exe', use='mylib', name='hi') + """ + names = self.to_list(getattr(self, 'use', [])) + get = self.bld.get_tgen_by_name for x in names: try: - y=get(x) + y = get(x) except Errors.WafError: - self.env.append_value('CSFLAGS','/reference:%s'%x) + self.env.append_value('CSFLAGS', '/reference:%s' % x) continue y.post() - tsk=getattr(y,'cs_task',None)or getattr(y,'link_task',None) + + tsk = getattr(y, 'cs_task', None) or getattr(y, 'link_task', None) if not tsk: - self.bld.fatal('cs task has no link task for use 
%r'%self) - self.cs_task.dep_nodes.extend(tsk.outputs) - self.cs_task.set_run_after(tsk) - self.env.append_value('CSFLAGS','/reference:%s'%tsk.outputs[0].abspath()) + self.bld.fatal('cs task has no link task for use %r' % self) + self.cs_task.dep_nodes.extend(tsk.outputs) # dependency + self.cs_task.set_run_after(tsk) # order (redundant, the order is inferred from the nodes inputs/outputs) + self.env.append_value('CSFLAGS', '/reference:%s' % tsk.outputs[0].abspath()) + @feature('cs') -@after_method('apply_cs','use_cs') +@after_method('apply_cs', 'use_cs') def debug_cs(self): - csdebug=getattr(self,'csdebug',self.env.CSDEBUG) + """ + The C# targets may create .mdb or .pdb files:: + + def build(bld): + bld(features='cs', source='My.cs', bintype='library', gen='my.dll', csdebug='full') + # csdebug is a value in (True, 'full', 'pdbonly') + """ + csdebug = getattr(self, 'csdebug', self.env.CSDEBUG) if not csdebug: return - node=self.cs_task.outputs[0] - if self.env.CS_NAME=='mono': - out=node.parent.find_or_declare(node.name+'.mdb') + + node = self.cs_task.outputs[0] + if self.env.CS_NAME == 'mono': + out = node.parent.find_or_declare(node.name + '.mdb') else: - out=node.change_ext('.pdb') + out = node.change_ext('.pdb') self.cs_task.outputs.append(out) - if getattr(self,'install_task',None): - self.pdb_install_task=self.add_install_files(install_to=self.install_task.install_to,install_from=out) - if csdebug=='pdbonly': - val=['/debug+','/debug:pdbonly'] - elif csdebug=='full': - val=['/debug+','/debug:full'] + + if getattr(self, 'install_task', None): + self.pdb_install_task = self.add_install_files( + install_to=self.install_task.install_to, install_from=out) + + if csdebug == 'pdbonly': + val = ['/debug+', '/debug:pdbonly'] + elif csdebug == 'full': + val = ['/debug+', '/debug:full'] else: - val=['/debug-'] - self.env.append_value('CSFLAGS',val) + val = ['/debug-'] + self.env.append_value('CSFLAGS', val) + @feature('cs') @after_method('debug_cs') def doc_cs(self): - csdoc=getattr(self,'csdoc',self.env.CSDOC) + """ + The C# targets may create .xml documentation files:: + + def build(bld): + bld(features='cs', source='My.cs', bintype='library', gen='my.dll', csdoc=True) + # csdoc is a boolean value + """ + csdoc = getattr(self, 'csdoc', self.env.CSDOC) if not csdoc: return - node=self.cs_task.outputs[0] - out=node.change_ext('.xml') + + node = self.cs_task.outputs[0] + out = node.change_ext('.xml') self.cs_task.outputs.append(out) - if getattr(self,'install_task',None): - self.doc_install_task=self.add_install_files(install_to=self.install_task.install_to,install_from=out) - self.env.append_value('CSFLAGS','/doc:%s'%out.abspath()) + + if getattr(self, 'install_task', None): + self.doc_install_task = self.add_install_files( + install_to=self.install_task.install_to, install_from=out) + + self.env.append_value('CSFLAGS', '/doc:%s' % out.abspath()) + class mcs(Task.Task): - color='YELLOW' - run_str='${MCS} ${CSTYPE} ${CSFLAGS} ${ASS_ST:ASSEMBLIES} ${RES_ST:RESOURCES} ${OUT} ${SRC}' - def split_argfile(self,cmd): - inline=[cmd[0]] - infile=[] + """ + Compile C# files + """ + color = 'YELLOW' + run_str = '${MCS} ${CSTYPE} ${CSFLAGS} ${ASS_ST:ASSEMBLIES} ${RES_ST:RESOURCES} ${OUT} ${SRC}' + + def split_argfile(self, cmd): + inline = [cmd[0]] + infile = [] for x in cmd[1:]: - if x.lower()=='/noconfig': + # csc doesn't want /noconfig in @file + if x.lower() == '/noconfig': inline.append(x) else: infile.append(self.quote_flag(x)) - return(inline,infile) + return (inline, infile) + def configure(conf): 
- csc=getattr(Options.options,'cscbinary',None) + """ + Find a C# compiler, set the variable MCS for the compiler and CS_NAME (mono or csc) + """ + csc = getattr(Options.options, 'cscbinary', None) if csc: - conf.env.MCS=csc - conf.find_program(['csc','mcs','gmcs'],var='MCS') - conf.env.ASS_ST='/r:%s' - conf.env.RES_ST='/resource:%s' - conf.env.CS_NAME='csc' - if str(conf.env.MCS).lower().find('mcs')>-1: - conf.env.CS_NAME='mono' + conf.env.MCS = csc + conf.find_program(['csc', 'mcs', 'gmcs'], var='MCS') + conf.env.ASS_ST = '/r:%s' + conf.env.RES_ST = '/resource:%s' + + conf.env.CS_NAME = 'csc' + if str(conf.env.MCS).lower().find('mcs') > -1: + conf.env.CS_NAME = 'mono' + def options(opt): - opt.add_option('--with-csc-binary',type='string',dest='cscbinary') + """ + Add a command-line option for the configuration:: + + $ waf configure --with-csc-binary=/foo/bar/mcs + """ + opt.add_option('--with-csc-binary', type='string', dest='cscbinary') + class fake_csshlib(Task.Task): - color='YELLOW' - inst_to=None + """ + Task used for reading a foreign .net assembly and adding the dependency on it + """ + color = 'YELLOW' + inst_to = None + def runnable_status(self): return Task.SKIP_ME + @conf -def read_csshlib(self,name,paths=[]): - return self(name=name,features='fake_lib',lib_paths=paths,lib_type='csshlib') +def read_csshlib(self, name, paths=[]): + """ + Read a foreign .net assembly for the *use* system:: + + def build(bld): + bld.read_csshlib('ManagedLibrary.dll', paths=[bld.env.mylibrarypath]) + bld(features='cs', source='Hi.cs', bintype='exe', gen='hi.exe', use='ManagedLibrary.dll') + + :param name: Name of the library + :type name: string + :param paths: Folders in which the library may be found + :type paths: list of string + :return: A task generator having the feature *fake_lib* which will call :py:func:`waflib.Tools.ccroot.process_lib` + :rtype: :py:class:`waflib.TaskGen.task_gen` + """ + return self(name=name, features='fake_lib', lib_paths=paths, lib_type='csshlib') + diff -Nru lilv-0.24.4~dfsg0/waflib/Tools/c_tests.py lilv-0.24.6/waflib/Tools/c_tests.py --- lilv-0.24.4~dfsg0/waflib/Tools/c_tests.py 2018-06-27 05:22:05.000000000 +0000 +++ lilv-0.24.6/waflib/Tools/c_tests.py 2019-10-19 17:59:11.000000000 +0000 @@ -1,11 +1,16 @@ -#! /usr/bin/env python +#!/usr/bin/env python # encoding: utf-8 -# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file +# Thomas Nagy, 2016-2018 (ita) + +""" +Various configuration tests. +""" from waflib import Task from waflib.Configure import conf -from waflib.TaskGen import feature,before_method,after_method -LIB_CODE=''' +from waflib.TaskGen import feature, before_method, after_method + +LIB_CODE = ''' #ifdef _MSC_VER #define testEXPORT __declspec(dllexport) #else @@ -13,7 +18,8 @@ #endif testEXPORT int lib_func(void) { return 9; } ''' -MAIN_CODE=''' + +MAIN_CODE = ''' #ifdef _MSC_VER #define testEXPORT __declspec(dllimport) #else @@ -25,98 +31,155 @@ return !(lib_func() == 9); } ''' + @feature('link_lib_test') @before_method('process_source') def link_lib_test_fun(self): + """ + The configuration test :py:func:`waflib.Configure.run_build` declares a unique task generator, + so we need to create other task generators from here to check if the linker is able to link libraries. 
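+
+    This check is normally driven by :py:func:`waflib.Tools.c_tests.check_library`,
+    e.g. from a wscript::
+
+        def configure(conf):
+            conf.check_library(mode='c', test_exec=True)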
+ """ def write_test_file(task): task.outputs[0].write(task.generator.code) - rpath=[] - if getattr(self,'add_rpath',False): - rpath=[self.bld.path.get_bld().abspath()] - mode=self.mode - m='%s %s'%(mode,mode) - ex=self.test_exec and'test_exec'or'' - bld=self.bld - bld(rule=write_test_file,target='test.'+mode,code=LIB_CODE) - bld(rule=write_test_file,target='main.'+mode,code=MAIN_CODE) - bld(features='%sshlib'%m,source='test.'+mode,target='test') - bld(features='%sprogram %s'%(m,ex),source='main.'+mode,target='app',use='test',rpath=rpath) + + rpath = [] + if getattr(self, 'add_rpath', False): + rpath = [self.bld.path.get_bld().abspath()] + + mode = self.mode + m = '%s %s' % (mode, mode) + ex = self.test_exec and 'test_exec' or '' + bld = self.bld + bld(rule=write_test_file, target='test.' + mode, code=LIB_CODE) + bld(rule=write_test_file, target='main.' + mode, code=MAIN_CODE) + bld(features='%sshlib' % m, source='test.' + mode, target='test') + bld(features='%sprogram %s' % (m, ex), source='main.' + mode, target='app', use='test', rpath=rpath) + @conf -def check_library(self,mode=None,test_exec=True): +def check_library(self, mode=None, test_exec=True): + """ + Checks if libraries can be linked with the current linker. Uses :py:func:`waflib.Tools.c_tests.link_lib_test_fun`. + + :param mode: c or cxx or d + :type mode: string + """ if not mode: - mode='c' + mode = 'c' if self.env.CXX: - mode='cxx' - self.check(compile_filename=[],features='link_lib_test',msg='Checking for libraries',mode=mode,test_exec=test_exec) -INLINE_CODE=''' + mode = 'cxx' + self.check( + compile_filename = [], + features = 'link_lib_test', + msg = 'Checking for libraries', + mode = mode, + test_exec = test_exec) + +######################################################################################## + +INLINE_CODE = ''' typedef int foo_t; static %s foo_t static_foo () {return 0; } %s foo_t foo () { return 0; } ''' -INLINE_VALUES=['inline','__inline__','__inline'] +INLINE_VALUES = ['inline', '__inline__', '__inline'] + @conf -def check_inline(self,**kw): +def check_inline(self, **kw): + """ + Checks for the right value for inline macro. + Define INLINE_MACRO to 1 if the define is found. 
+ If the inline macro is not 'inline', add a define to the ``config.h`` (#define inline __inline__) + + :param define_name: define INLINE_MACRO by default to 1 if the macro is defined + :type define_name: string + :param features: by default *c* or *cxx* depending on the compiler present + :type features: list of string + """ self.start_msg('Checking for inline') - if not'define_name'in kw: - kw['define_name']='INLINE_MACRO' - if not'features'in kw: + + if not 'define_name' in kw: + kw['define_name'] = 'INLINE_MACRO' + if not 'features' in kw: if self.env.CXX: - kw['features']=['cxx'] + kw['features'] = ['cxx'] else: - kw['features']=['c'] + kw['features'] = ['c'] + for x in INLINE_VALUES: - kw['fragment']=INLINE_CODE%(x,x) + kw['fragment'] = INLINE_CODE % (x, x) + try: self.check(**kw) except self.errors.ConfigurationError: continue else: self.end_msg(x) - if x!='inline': - self.define('inline',x,quote=False) + if x != 'inline': + self.define('inline', x, quote=False) return x self.fatal('could not use inline functions') -LARGE_FRAGMENT='''#include + +######################################################################################## + +LARGE_FRAGMENT = '''#include int main(int argc, char **argv) { (void)argc; (void)argv; return !(sizeof(off_t) >= 8); } ''' + @conf -def check_large_file(self,**kw): - if not'define_name'in kw: - kw['define_name']='HAVE_LARGEFILE' - if not'execute'in kw: - kw['execute']=True - if not'features'in kw: +def check_large_file(self, **kw): + """ + Checks for large file support and define the macro HAVE_LARGEFILE + The test is skipped on win32 systems (DEST_BINFMT == pe). + + :param define_name: define to set, by default *HAVE_LARGEFILE* + :type define_name: string + :param execute: execute the test (yes by default) + :type execute: bool + """ + if not 'define_name' in kw: + kw['define_name'] = 'HAVE_LARGEFILE' + if not 'execute' in kw: + kw['execute'] = True + + if not 'features' in kw: if self.env.CXX: - kw['features']=['cxx','cxxprogram'] + kw['features'] = ['cxx', 'cxxprogram'] else: - kw['features']=['c','cprogram'] - kw['fragment']=LARGE_FRAGMENT - kw['msg']='Checking for large file support' - ret=True + kw['features'] = ['c', 'cprogram'] + + kw['fragment'] = LARGE_FRAGMENT + + kw['msg'] = 'Checking for large file support' + ret = True try: - if self.env.DEST_BINFMT!='pe': - ret=self.check(**kw) + if self.env.DEST_BINFMT != 'pe': + ret = self.check(**kw) except self.errors.ConfigurationError: pass else: if ret: return True - kw['msg']='Checking for -D_FILE_OFFSET_BITS=64' - kw['defines']=['_FILE_OFFSET_BITS=64'] + + kw['msg'] = 'Checking for -D_FILE_OFFSET_BITS=64' + kw['defines'] = ['_FILE_OFFSET_BITS=64'] try: - ret=self.check(**kw) + ret = self.check(**kw) except self.errors.ConfigurationError: pass else: - self.define('_FILE_OFFSET_BITS',64) + self.define('_FILE_OFFSET_BITS', 64) return ret + self.fatal('There is no support for large files') -ENDIAN_FRAGMENT=''' + +######################################################################################## + +ENDIAN_FRAGMENT = ''' short int ascii_mm[] = { 0x4249, 0x4765, 0x6E44, 0x6961, 0x6E53, 0x7953, 0 }; short int ascii_ii[] = { 0x694C, 0x5454, 0x656C, 0x6E45, 0x6944, 0x6E61, 0 }; int use_ascii (int i) { @@ -129,24 +192,39 @@ } extern int foo; ''' + class grep_for_endianness(Task.Task): - color='PINK' + """ + Task that reads a binary and tries to determine the endianness + """ + color = 'PINK' def run(self): - txt=self.inputs[0].read(flags='rb').decode('latin-1') - if txt.find('LiTTleEnDian')>-1: + 
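+        # ENDIAN_FRAGMENT stores two magic words as arrays of 16-bit ints:
+        # only the array matching the target byte order comes out as readable
+        # text in the object file, so a substring search decides the result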
txt = self.inputs[0].read(flags='rb').decode('latin-1') + if txt.find('LiTTleEnDian') > -1: self.generator.tmp.append('little') - elif txt.find('BIGenDianSyS')>-1: + elif txt.find('BIGenDianSyS') > -1: self.generator.tmp.append('big') else: - return-1 + return -1 + @feature('grep_for_endianness') @after_method('process_source') def grep_for_endianness_fun(self): - self.create_task('grep_for_endianness',self.compiled_tasks[0].outputs[0]) + """ + Used by the endianness configuration test + """ + self.create_task('grep_for_endianness', self.compiled_tasks[0].outputs[0]) + @conf def check_endianness(self): - tmp=[] + """ + Executes a configuration test to determine the endianness + """ + tmp = [] def check_msg(self): return tmp[0] - self.check(fragment=ENDIAN_FRAGMENT,features='c grep_for_endianness',msg='Checking for endianness',define='ENDIANNESS',tmp=tmp,okmsg=check_msg) + self.check(fragment=ENDIAN_FRAGMENT, features='c grep_for_endianness', + msg='Checking for endianness', define='ENDIANNESS', tmp=tmp, + okmsg=check_msg, confcache=None) return tmp[0] + diff -Nru lilv-0.24.4~dfsg0/waflib/Tools/cxx.py lilv-0.24.6/waflib/Tools/cxx.py --- lilv-0.24.4~dfsg0/waflib/Tools/cxx.py 2018-06-27 05:22:05.000000000 +0000 +++ lilv-0.24.6/waflib/Tools/cxx.py 2019-06-06 20:19:08.000000000 +0000 @@ -1,26 +1,40 @@ -#! /usr/bin/env python +#!/usr/bin/env python # encoding: utf-8 -# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file +# Thomas Nagy, 2005-2018 (ita) -from waflib import TaskGen,Task +"Base for c++ programs and libraries" + +from waflib import TaskGen, Task from waflib.Tools import c_preproc -from waflib.Tools.ccroot import link_task,stlink_task +from waflib.Tools.ccroot import link_task, stlink_task + @TaskGen.extension('.cpp','.cc','.cxx','.C','.c++') -def cxx_hook(self,node): - return self.create_compiled_task('cxx',node) -if not'.c'in TaskGen.task_gen.mappings: - TaskGen.task_gen.mappings['.c']=TaskGen.task_gen.mappings['.cpp'] +def cxx_hook(self, node): + "Binds c++ file extensions to create :py:class:`waflib.Tools.cxx.cxx` instances" + return self.create_compiled_task('cxx', node) + +if not '.c' in TaskGen.task_gen.mappings: + TaskGen.task_gen.mappings['.c'] = TaskGen.task_gen.mappings['.cpp'] + class cxx(Task.Task): - run_str='${CXX} ${ARCH_ST:ARCH} ${CXXFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${CXX_SRC_F}${SRC} ${CXX_TGT_F}${TGT[0].abspath()} ${CPPFLAGS}' - vars=['CXXDEPS'] - ext_in=['.h'] - scan=c_preproc.scan + "Compiles C++ files into object files" + run_str = '${CXX} ${ARCH_ST:ARCH} ${CXXFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${CXX_SRC_F}${SRC} ${CXX_TGT_F}${TGT[0].abspath()} ${CPPFLAGS}' + vars = ['CXXDEPS'] # unused variable to depend on, just in case + ext_in = ['.h'] # set the build order easily by using ext_out=['.h'] + scan = c_preproc.scan + class cxxprogram(link_task): - run_str='${LINK_CXX} ${LINKFLAGS} ${CXXLNK_SRC_F}${SRC} ${CXXLNK_TGT_F}${TGT[0].abspath()} ${RPATH_ST:RPATH} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${FRAMEWORK_ST:FRAMEWORK} ${ARCH_ST:ARCH} ${STLIB_MARKER} ${STLIBPATH_ST:STLIBPATH} ${STLIB_ST:STLIB} ${SHLIB_MARKER} ${LIBPATH_ST:LIBPATH} ${LIB_ST:LIB} ${LDFLAGS}' - vars=['LINKDEPS'] - ext_out=['.bin'] - inst_to='${BINDIR}' + "Links object files into c++ programs" + run_str = '${LINK_CXX} ${LINKFLAGS} ${CXXLNK_SRC_F}${SRC} ${CXXLNK_TGT_F}${TGT[0].abspath()} ${RPATH_ST:RPATH} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${FRAMEWORK_ST:FRAMEWORK} ${ARCH_ST:ARCH} 
${STLIB_MARKER} ${STLIBPATH_ST:STLIBPATH} ${STLIB_ST:STLIB} ${SHLIB_MARKER} ${LIBPATH_ST:LIBPATH} ${LIB_ST:LIB} ${LDFLAGS}' + vars = ['LINKDEPS'] + ext_out = ['.bin'] + inst_to = '${BINDIR}' + class cxxshlib(cxxprogram): - inst_to='${LIBDIR}' + "Links object files into c++ shared libraries" + inst_to = '${LIBDIR}' + class cxxstlib(stlink_task): - pass + "Links object files into c++ static libraries" + pass # do not remove + diff -Nru lilv-0.24.4~dfsg0/waflib/Tools/dbus.py lilv-0.24.6/waflib/Tools/dbus.py --- lilv-0.24.4~dfsg0/waflib/Tools/dbus.py 2018-06-27 05:22:05.000000000 +0000 +++ lilv-0.24.6/waflib/Tools/dbus.py 2019-06-06 20:19:08.000000000 +0000 @@ -1,29 +1,70 @@ -#! /usr/bin/env python +#!/usr/bin/env python # encoding: utf-8 -# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file +# Ali Sabil, 2007 + +""" +Compiles dbus files with **dbus-binding-tool** + +Typical usage:: + + def options(opt): + opt.load('compiler_c dbus') + def configure(conf): + conf.load('compiler_c dbus') + def build(bld): + tg = bld.program( + includes = '.', + source = bld.path.ant_glob('*.c'), + target = 'gnome-hello') + tg.add_dbus_file('test.xml', 'test_prefix', 'glib-server') +""" + +from waflib import Task, Errors +from waflib.TaskGen import taskgen_method, before_method -from waflib import Task,Errors -from waflib.TaskGen import taskgen_method,before_method @taskgen_method -def add_dbus_file(self,filename,prefix,mode): - if not hasattr(self,'dbus_lst'): - self.dbus_lst=[] - if not'process_dbus'in self.meths: +def add_dbus_file(self, filename, prefix, mode): + """ + Adds a dbus file to the list of dbus files to process. Store them in the attribute *dbus_lst*. + + :param filename: xml file to compile + :type filename: string + :param prefix: dbus binding tool prefix (--prefix=prefix) + :type prefix: string + :param mode: dbus binding tool mode (--mode=mode) + :type mode: string + """ + if not hasattr(self, 'dbus_lst'): + self.dbus_lst = [] + if not 'process_dbus' in self.meths: self.meths.append('process_dbus') - self.dbus_lst.append([filename,prefix,mode]) + self.dbus_lst.append([filename, prefix, mode]) + @before_method('process_source') def process_dbus(self): - for filename,prefix,mode in getattr(self,'dbus_lst',[]): - node=self.path.find_resource(filename) + """ + Processes the dbus files stored in the attribute *dbus_lst* to create :py:class:`waflib.Tools.dbus.dbus_binding_tool` instances. 
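+
+    With the ``('test.xml', 'test_prefix', 'glib-server')`` entry from the example
+    above, the generated task expands to roughly::
+
+        dbus-binding-tool --prefix=test_prefix --mode=glib-server --output=test.h test.xml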
+ """ + for filename, prefix, mode in getattr(self, 'dbus_lst', []): + node = self.path.find_resource(filename) if not node: - raise Errors.WafError('file not found '+filename) - tsk=self.create_task('dbus_binding_tool',node,node.change_ext('.h')) - tsk.env.DBUS_BINDING_TOOL_PREFIX=prefix - tsk.env.DBUS_BINDING_TOOL_MODE=mode + raise Errors.WafError('file not found ' + filename) + tsk = self.create_task('dbus_binding_tool', node, node.change_ext('.h')) + tsk.env.DBUS_BINDING_TOOL_PREFIX = prefix + tsk.env.DBUS_BINDING_TOOL_MODE = mode + class dbus_binding_tool(Task.Task): - color='BLUE' - ext_out=['.h'] - run_str='${DBUS_BINDING_TOOL} --prefix=${DBUS_BINDING_TOOL_PREFIX} --mode=${DBUS_BINDING_TOOL_MODE} --output=${TGT} ${SRC}' - shell=True + """ + Compiles a dbus file + """ + color = 'BLUE' + ext_out = ['.h'] + run_str = '${DBUS_BINDING_TOOL} --prefix=${DBUS_BINDING_TOOL_PREFIX} --mode=${DBUS_BINDING_TOOL_MODE} --output=${TGT} ${SRC}' + shell = True # temporary workaround for #795 + def configure(conf): - conf.find_program('dbus-binding-tool',var='DBUS_BINDING_TOOL') + """ + Detects the program dbus-binding-tool and sets ``conf.env.DBUS_BINDING_TOOL`` + """ + conf.find_program('dbus-binding-tool', var='DBUS_BINDING_TOOL') + diff -Nru lilv-0.24.4~dfsg0/waflib/Tools/d_config.py lilv-0.24.6/waflib/Tools/d_config.py --- lilv-0.24.4~dfsg0/waflib/Tools/d_config.py 2018-06-27 05:22:05.000000000 +0000 +++ lilv-0.24.6/waflib/Tools/d_config.py 2019-06-06 20:19:08.000000000 +0000 @@ -1,28 +1,33 @@ -#! /usr/bin/env python +#!/usr/bin/env python # encoding: utf-8 -# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file +# Thomas Nagy, 2016-2018 (ita) from waflib import Utils from waflib.Configure import conf + @conf def d_platform_flags(self): - v=self.env + """ + Sets the extensions dll/so for d programs and libraries + """ + v = self.env if not v.DEST_OS: - v.DEST_OS=Utils.unversioned_sys_platform() - binfmt=Utils.destos_to_binfmt(self.env.DEST_OS) - if binfmt=='pe': - v.dprogram_PATTERN='%s.exe' - v.dshlib_PATTERN='lib%s.dll' - v.dstlib_PATTERN='lib%s.a' - elif binfmt=='mac-o': - v.dprogram_PATTERN='%s' - v.dshlib_PATTERN='lib%s.dylib' - v.dstlib_PATTERN='lib%s.a' + v.DEST_OS = Utils.unversioned_sys_platform() + binfmt = Utils.destos_to_binfmt(self.env.DEST_OS) + if binfmt == 'pe': + v.dprogram_PATTERN = '%s.exe' + v.dshlib_PATTERN = 'lib%s.dll' + v.dstlib_PATTERN = 'lib%s.a' + elif binfmt == 'mac-o': + v.dprogram_PATTERN = '%s' + v.dshlib_PATTERN = 'lib%s.dylib' + v.dstlib_PATTERN = 'lib%s.a' else: - v.dprogram_PATTERN='%s' - v.dshlib_PATTERN='lib%s.so' - v.dstlib_PATTERN='lib%s.a' -DLIB=''' + v.dprogram_PATTERN = '%s' + v.dshlib_PATTERN = 'lib%s.so' + v.dstlib_PATTERN = 'lib%s.a' + +DLIB = ''' version(D_Version2) { import std.stdio; int main() { @@ -45,8 +50,15 @@ } } ''' +"""Detection string for the D standard library""" + @conf -def check_dlibrary(self,execute=True): - ret=self.check_cc(features='d dprogram',fragment=DLIB,compile_filename='test.d',execute=execute,define_ret=True) +def check_dlibrary(self, execute=True): + """ + Detects the kind of standard library that comes with the compiler, + and sets conf.env.DLIBRARY to tango, phobos1 or phobos2 + """ + ret = self.check_cc(features='d dprogram', fragment=DLIB, compile_filename='test.d', execute=execute, define_ret=True) if execute: - self.env.DLIBRARY=ret.strip() + self.env.DLIBRARY = ret.strip() + diff -Nru lilv-0.24.4~dfsg0/waflib/Tools/dmd.py lilv-0.24.6/waflib/Tools/dmd.py --- lilv-0.24.4~dfsg0/waflib/Tools/dmd.py 
2018-06-27 05:22:05.000000000 +0000 +++ lilv-0.24.6/waflib/Tools/dmd.py 2019-06-06 20:19:08.000000000 +0000 @@ -1,51 +1,80 @@ -#! /usr/bin/env python +#!/usr/bin/env python # encoding: utf-8 -# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file +# Carlos Rafael Giani, 2007 (dv) +# Thomas Nagy, 2008-2018 (ita) import sys -from waflib.Tools import ar,d +from waflib.Tools import ar, d from waflib.Configure import conf + @conf def find_dmd(conf): - conf.find_program(['dmd','dmd2','ldc'],var='D') - out=conf.cmd_and_log(conf.env.D+['--help']) - if out.find("D Compiler v")==-1: - out=conf.cmd_and_log(conf.env.D+['-version']) - if out.find("based on DMD v1.")==-1: + """ + Finds the program *dmd*, *dmd2*, or *ldc* and set the variable *D* + """ + conf.find_program(['dmd', 'dmd2', 'ldc'], var='D') + + # make sure that we're dealing with dmd1, dmd2, or ldc(1) + out = conf.cmd_and_log(conf.env.D + ['--help']) + if out.find("D Compiler v") == -1: + out = conf.cmd_and_log(conf.env.D + ['-version']) + if out.find("based on DMD v1.") == -1: conf.fatal("detected compiler is not dmd/ldc") + @conf def common_flags_ldc(conf): - v=conf.env - v.DFLAGS=['-d-version=Posix'] - v.LINKFLAGS=[] - v.DFLAGS_dshlib=['-relocation-model=pic'] + """ + Sets the D flags required by *ldc* + """ + v = conf.env + v.DFLAGS = ['-d-version=Posix'] + v.LINKFLAGS = [] + v.DFLAGS_dshlib = ['-relocation-model=pic'] + @conf def common_flags_dmd(conf): - v=conf.env - v.D_SRC_F=['-c'] - v.D_TGT_F='-of%s' - v.D_LINKER=v.D - v.DLNK_SRC_F='' - v.DLNK_TGT_F='-of%s' - v.DINC_ST='-I%s' - v.DSHLIB_MARKER=v.DSTLIB_MARKER='' - v.DSTLIB_ST=v.DSHLIB_ST='-L-l%s' - v.DSTLIBPATH_ST=v.DLIBPATH_ST='-L-L%s' - v.LINKFLAGS_dprogram=['-quiet'] - v.DFLAGS_dshlib=['-fPIC'] - v.LINKFLAGS_dshlib=['-L-shared'] - v.DHEADER_ext='.di' - v.DFLAGS_d_with_header=['-H','-Hf'] - v.D_HDR_F='%s' + """ + Set the flags required by *dmd* or *dmd2* + """ + v = conf.env + + v.D_SRC_F = ['-c'] + v.D_TGT_F = '-of%s' + + v.D_LINKER = v.D + v.DLNK_SRC_F = '' + v.DLNK_TGT_F = '-of%s' + v.DINC_ST = '-I%s' + + v.DSHLIB_MARKER = v.DSTLIB_MARKER = '' + v.DSTLIB_ST = v.DSHLIB_ST = '-L-l%s' + v.DSTLIBPATH_ST = v.DLIBPATH_ST = '-L-L%s' + + v.LINKFLAGS_dprogram= ['-quiet'] + + v.DFLAGS_dshlib = ['-fPIC'] + v.LINKFLAGS_dshlib = ['-L-shared'] + + v.DHEADER_ext = '.di' + v.DFLAGS_d_with_header = ['-H', '-Hf'] + v.D_HDR_F = '%s' + def configure(conf): + """ + Configuration for *dmd*, *dmd2*, and *ldc* + """ conf.find_dmd() - if sys.platform=='win32': - out=conf.cmd_and_log(conf.env.D+['--help']) - if out.find('D Compiler v2.')>-1: + + if sys.platform == 'win32': + out = conf.cmd_and_log(conf.env.D + ['--help']) + if out.find('D Compiler v2.') > -1: conf.fatal('dmd2 on Windows is not supported, use gdc or ldc2 instead') + conf.load('ar') conf.load('d') conf.common_flags_dmd() conf.d_platform_flags() - if str(conf.env.D).find('ldc')>-1: + + if str(conf.env.D).find('ldc') > -1: conf.common_flags_ldc() + diff -Nru lilv-0.24.4~dfsg0/waflib/Tools/d.py lilv-0.24.6/waflib/Tools/d.py --- lilv-0.24.4~dfsg0/waflib/Tools/d.py 2018-06-27 05:22:05.000000000 +0000 +++ lilv-0.24.6/waflib/Tools/d.py 2019-06-06 20:19:08.000000000 +0000 @@ -1,54 +1,97 @@ -#! /usr/bin/env python +#!/usr/bin/env python # encoding: utf-8 -# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file +# Carlos Rafael Giani, 2007 (dv) +# Thomas Nagy, 2007-2018 (ita) + +from waflib import Utils, Task, Errors +from waflib.TaskGen import taskgen_method, feature, extension +from waflib.Tools import d_scan, d_config +from waflib.Tools.ccroot import link_task, stlink_task -from waflib import Utils,Task,Errors -from waflib.TaskGen import taskgen_method,feature,extension -from waflib.Tools import d_scan,d_config -from waflib.Tools.ccroot import link_task,stlink_task class d(Task.Task): - color='GREEN' - run_str='${D} ${DFLAGS} ${DINC_ST:INCPATHS} ${D_SRC_F:SRC} ${D_TGT_F:TGT}' - scan=d_scan.scan + "Compile a d file into an object file" + color = 'GREEN' + run_str = '${D} ${DFLAGS} ${DINC_ST:INCPATHS} ${D_SRC_F:SRC} ${D_TGT_F:TGT}' + scan = d_scan.scan + class d_with_header(d): - run_str='${D} ${DFLAGS} ${DINC_ST:INCPATHS} ${D_HDR_F:tgt.outputs[1].bldpath()} ${D_SRC_F:SRC} ${D_TGT_F:tgt.outputs[0].bldpath()}' + "Compile a d file and generate a header" + run_str = '${D} ${DFLAGS} ${DINC_ST:INCPATHS} ${D_HDR_F:tgt.outputs[1].bldpath()} ${D_SRC_F:SRC} ${D_TGT_F:tgt.outputs[0].bldpath()}' + class d_header(Task.Task): - color='BLUE' - run_str='${D} ${D_HEADER} ${SRC}' + "Compile d headers" + color = 'BLUE' + run_str = '${D} ${D_HEADER} ${SRC}' + class dprogram(link_task): - run_str='${D_LINKER} ${LINKFLAGS} ${DLNK_SRC_F}${SRC} ${DLNK_TGT_F:TGT} ${RPATH_ST:RPATH} ${DSTLIB_MARKER} ${DSTLIBPATH_ST:STLIBPATH} ${DSTLIB_ST:STLIB} ${DSHLIB_MARKER} ${DLIBPATH_ST:LIBPATH} ${DSHLIB_ST:LIB}' - inst_to='${BINDIR}' + "Link object files into a d program" + run_str = '${D_LINKER} ${LINKFLAGS} ${DLNK_SRC_F}${SRC} ${DLNK_TGT_F:TGT} ${RPATH_ST:RPATH} ${DSTLIB_MARKER} ${DSTLIBPATH_ST:STLIBPATH} ${DSTLIB_ST:STLIB} ${DSHLIB_MARKER} ${DLIBPATH_ST:LIBPATH} ${DSHLIB_ST:LIB}' + inst_to = '${BINDIR}' + class dshlib(dprogram): - inst_to='${LIBDIR}' + "Link object files into a d shared library" + inst_to = '${LIBDIR}' + class dstlib(stlink_task): - pass -@extension('.d','.di','.D') -def d_hook(self,node): - ext=Utils.destos_to_binfmt(self.env.DEST_OS)=='pe'and'obj'or'o' - out='%s.%d.%s'%(node.name,self.idx,ext) - def create_compiled_task(self,name,node): - task=self.create_task(name,node,node.parent.find_or_declare(out)) + "Link object files into a d static library" + pass # do not remove + +@extension('.d', '.di', '.D') +def d_hook(self, node): + """ + Compile *D* files. 
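+
+    Object files are named from the source file name, the task generator index
+    and the platform object extension, so ``foo.d`` in the first task generator
+    becomes ``foo.d.0.o`` (``foo.d.0.obj`` on 'pe' platforms).
+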
To get .di files as well as .o files, set the following:: + + def build(bld): + bld.program(source='foo.d', target='app', generate_headers=True) + + """ + ext = Utils.destos_to_binfmt(self.env.DEST_OS) == 'pe' and 'obj' or 'o' + out = '%s.%d.%s' % (node.name, self.idx, ext) + def create_compiled_task(self, name, node): + task = self.create_task(name, node, node.parent.find_or_declare(out)) try: self.compiled_tasks.append(task) except AttributeError: - self.compiled_tasks=[task] + self.compiled_tasks = [task] return task - if getattr(self,'generate_headers',None): - tsk=create_compiled_task(self,'d_with_header',node) + + if getattr(self, 'generate_headers', None): + tsk = create_compiled_task(self, 'd_with_header', node) tsk.outputs.append(node.change_ext(self.env.DHEADER_ext)) else: - tsk=create_compiled_task(self,'d',node) + tsk = create_compiled_task(self, 'd', node) return tsk + @taskgen_method -def generate_header(self,filename): +def generate_header(self, filename): + """ + See feature request #104:: + + def build(bld): + tg = bld.program(source='foo.d', target='app') + tg.generate_header('blah.d') + # is equivalent to: + #tg = bld.program(source='foo.d', target='app', header_lst='blah.d') + + :param filename: header to create + :type filename: string + """ try: - self.header_lst.append([filename,self.install_path]) + self.header_lst.append([filename, self.install_path]) except AttributeError: - self.header_lst=[[filename,self.install_path]] + self.header_lst = [[filename, self.install_path]] + @feature('d') def process_header(self): - for i in getattr(self,'header_lst',[]): - node=self.path.find_resource(i[0]) + """ + Process the attribute 'header_lst' to create the d header compilation tasks:: + + def build(bld): + bld.program(source='foo.d', target='app', header_lst='blah.d') + """ + for i in getattr(self, 'header_lst', []): + node = self.path.find_resource(i[0]) if not node: - raise Errors.WafError('file %r not found on d obj'%i[0]) - self.create_task('d_header',node,node.change_ext('.di')) + raise Errors.WafError('file %r not found on d obj' % i[0]) + self.create_task('d_header', node, node.change_ext('.di')) + diff -Nru lilv-0.24.4~dfsg0/waflib/Tools/d_scan.py lilv-0.24.6/waflib/Tools/d_scan.py --- lilv-0.24.4~dfsg0/waflib/Tools/d_scan.py 2018-06-27 05:22:05.000000000 +0000 +++ lilv-0.24.6/waflib/Tools/d_scan.py 2019-06-06 20:19:08.000000000 +0000 @@ -1,90 +1,120 @@ -#! /usr/bin/env python +#!/usr/bin/env python # encoding: utf-8 -# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file +# Thomas Nagy, 2016-2018 (ita) + +""" +Provide a scanner for finding dependencies on d files +""" import re from waflib import Utils + def filter_comments(filename): - txt=Utils.readf(filename) - i=0 - buf=[] - max=len(txt) - begin=0 - while i1: - dupe=True - msg='* Node %r is created more than once%s. The task generators are:'%(k,Logs.verbose==1 and" (full message on 'waf -v -v')"or"") + + dupe = False + for (k, v) in mp.items(): + if len(v) > 1: + dupe = True + msg = '* Node %r is created more than once%s. The task generators are:' % (k, Logs.verbose == 1 and " (full message on 'waf -v -v')" or "") Logs.error(msg) for x in v: - if Logs.verbose>1: - Logs.error(' %d. %r',1+v.index(x),x.generator) + if Logs.verbose > 1: + Logs.error(' %d. %r', 1 + v.index(x), x.generator) else: - Logs.error(' %d. %r in %r',1+v.index(x),x.generator.name,getattr(x.generator,'path',None)) + Logs.error(' %d. 
%r in %r', 1 + v.index(x), x.generator.name, getattr(x.generator, 'path', None)) Logs.error('If you think that this is an error, set no_errcheck_out on the task instance') + if not dupe: - for(k,v)in uids.items(): - if len(v)>1: + for (k, v) in uids.items(): + if len(v) > 1: Logs.error('* Several tasks use the same identifier. Please check the information on\n https://waf.io/apidocs/Task.html?highlight=uid#waflib.Task.Task.uid') - tg_details=tsk.generator.name - if Logs.verbose>2: - tg_details=tsk.generator + tg_details = tsk.generator.name + if Logs.verbose > 2: + tg_details = tsk.generator for tsk in v: - Logs.error(' - object %r (%r) defined in %r',tsk.__class__.__name__,tsk,tg_details) + Logs.error(' - object %r (%r) defined in %r', tsk.__class__.__name__, tsk, tg_details) + def check_invalid_constraints(self): - feat=set() + feat = set() for x in list(TaskGen.feats.values()): feat.union(set(x)) - for(x,y)in TaskGen.task_gen.prec.items(): + for (x, y) in TaskGen.task_gen.prec.items(): feat.add(x) feat.union(set(y)) - ext=set() + ext = set() for x in TaskGen.task_gen.mappings.values(): ext.add(x.__name__) - invalid=ext&feat + invalid = ext & feat if invalid: - Logs.error('The methods %r have invalid annotations: @extension <-> @feature/@before_method/@after_method',list(invalid)) + Logs.error('The methods %r have invalid annotations: @extension <-> @feature/@before_method/@after_method', list(invalid)) + + # the build scripts have been read, so we can check for invalid after/before attributes on task classes for cls in list(Task.classes.values()): - if sys.hexversion>0x3000000 and issubclass(cls,Task.Task)and isinstance(cls.hcode,str): - raise Errors.WafError('Class %r has hcode value %r of type , expecting (use Utils.h_cmd() ?)'%(cls,cls.hcode)) - for x in('before','after'): - for y in Utils.to_list(getattr(cls,x,[])): + if sys.hexversion > 0x3000000 and issubclass(cls, Task.Task) and isinstance(cls.hcode, str): + raise Errors.WafError('Class %r has hcode value %r of type , expecting (use Utils.h_cmd() ?)' % (cls, cls.hcode)) + + for x in ('before', 'after'): + for y in Utils.to_list(getattr(cls, x, [])): if not Task.classes.get(y): - Logs.error('Erroneous order constraint %r=%r on task class %r',x,y,cls.__name__) - if getattr(cls,'rule',None): - Logs.error('Erroneous attribute "rule" on task class %r (rename to "run_str")',cls.__name__) + Logs.error('Erroneous order constraint %r=%r on task class %r', x, y, cls.__name__) + if getattr(cls, 'rule', None): + Logs.error('Erroneous attribute "rule" on task class %r (rename to "run_str")', cls.__name__) + def replace(m): - oldcall=getattr(Build.BuildContext,m) - def call(self,*k,**kw): - ret=oldcall(self,*k,**kw) + """ + Replaces existing BuildContext methods to verify parameter names, + for example ``bld(source=)`` has no ending *s* + """ + oldcall = getattr(Build.BuildContext, m) + def call(self, *k, **kw): + ret = oldcall(self, *k, **kw) for x in typos: if x in kw: - if x=='iscopy'and'subst'in getattr(self,'features',''): + if x == 'iscopy' and 'subst' in getattr(self, 'features', ''): continue - Logs.error('Fix the typo %r -> %r on %r',x,typos[x],ret) + Logs.error('Fix the typo %r -> %r on %r', x, typos[x], ret) return ret - setattr(Build.BuildContext,m,call) + setattr(Build.BuildContext, m, call) + def enhance_lib(): + """ + Modifies existing classes and methods to enable error verification + """ for m in meths_typos: replace(m) - def ant_glob(self,*k,**kw): + + # catch '..' 
in ant_glob patterns + def ant_glob(self, *k, **kw): if k: - lst=Utils.to_list(k[0]) + lst = Utils.to_list(k[0]) for pat in lst: - sp=pat.split('/') - if'..'in sp: - Logs.error("In ant_glob pattern %r: '..' means 'two dots', not 'parent directory'",k[0]) - if'.'in sp: - Logs.error("In ant_glob pattern %r: '.' means 'one dot', not 'current directory'",k[0]) - return self.old_ant_glob(*k,**kw) - Node.Node.old_ant_glob=Node.Node.ant_glob - Node.Node.ant_glob=ant_glob - def ant_iter(self,accept=None,maxdepth=25,pats=[],dir=False,src=True,remove=True,quiet=False): + sp = pat.split('/') + if '..' in sp: + Logs.error("In ant_glob pattern %r: '..' means 'two dots', not 'parent directory'", k[0]) + if '.' in sp: + Logs.error("In ant_glob pattern %r: '.' means 'one dot', not 'current directory'", k[0]) + return self.old_ant_glob(*k, **kw) + Node.Node.old_ant_glob = Node.Node.ant_glob + Node.Node.ant_glob = ant_glob + + # catch ant_glob on build folders + def ant_iter(self, accept=None, maxdepth=25, pats=[], dir=False, src=True, remove=True, quiet=False): if remove: try: - if self.is_child_of(self.ctx.bldnode)and not quiet: - quiet=True - Logs.error('Calling ant_glob on build folders (%r) is dangerous: add quiet=True / remove=False',self) + if self.is_child_of(self.ctx.bldnode) and not quiet: + quiet = True + Logs.error('Calling ant_glob on build folders (%r) is dangerous: add quiet=True / remove=False', self) except AttributeError: pass - return self.old_ant_iter(accept,maxdepth,pats,dir,src,remove,quiet) - Node.Node.old_ant_iter=Node.Node.ant_iter - Node.Node.ant_iter=ant_iter - old=Task.is_before - def is_before(t1,t2): - ret=old(t1,t2) - if ret and old(t2,t1): - Logs.error('Contradictory order constraints in classes %r %r',t1,t2) + return self.old_ant_iter(accept, maxdepth, pats, dir, src, remove, quiet) + Node.Node.old_ant_iter = Node.Node.ant_iter + Node.Node.ant_iter = ant_iter + + # catch conflicting ext_in/ext_out/before/after declarations + old = Task.is_before + def is_before(t1, t2): + ret = old(t1, t2) + if ret and old(t2, t1): + Logs.error('Contradictory order constraints in classes %r %r', t1, t2) return ret - Task.is_before=is_before + Task.is_before = is_before + + # check for bld(feature='cshlib') where no 'c' is given - this can be either a mistake or on purpose + # so we only issue a warning def check_err_features(self): - lst=self.to_list(self.features) - if'shlib'in lst: + lst = self.to_list(self.features) + if 'shlib' in lst: Logs.error('feature shlib -> cshlib, dshlib or cxxshlib') - for x in('c','cxx','d','fc'): - if not x in lst and lst and lst[0]in[x+y for y in('program','shlib','stlib')]: - Logs.error('%r features is probably missing %r',self,x) + for x in ('c', 'cxx', 'd', 'fc'): + if not x in lst and lst and lst[0] in [x+y for y in ('program', 'shlib', 'stlib')]: + Logs.error('%r features is probably missing %r', self, x) TaskGen.feature('*')(check_err_features) + + # check for erroneous order constraints def check_err_order(self): - if not hasattr(self,'rule')and not'subst'in Utils.to_list(self.features): - for x in('before','after','ext_in','ext_out'): - if hasattr(self,x): - Logs.warn('Erroneous order constraint %r on non-rule based task generator %r',x,self) + if not hasattr(self, 'rule') and not 'subst' in Utils.to_list(self.features): + for x in ('before', 'after', 'ext_in', 'ext_out'): + if hasattr(self, x): + Logs.warn('Erroneous order constraint %r on non-rule based task generator %r', x, self) else: - for x in('before','after'): - for y in 
self.to_list(getattr(self,x,[])): + for x in ('before', 'after'): + for y in self.to_list(getattr(self, x, [])): if not Task.classes.get(y): - Logs.error('Erroneous order constraint %s=%r on %r (no such class)',x,y,self) + Logs.error('Erroneous order constraint %s=%r on %r (no such class)', x, y, self) TaskGen.feature('*')(check_err_order) + + # check for @extension used with @feature/@before_method/@after_method def check_compile(self): check_invalid_constraints(self) try: - ret=self.orig_compile() + ret = self.orig_compile() finally: check_same_targets(self) return ret - Build.BuildContext.orig_compile=Build.BuildContext.compile - Build.BuildContext.compile=check_compile - def use_rec(self,name,**kw): + Build.BuildContext.orig_compile = Build.BuildContext.compile + Build.BuildContext.compile = check_compile + + # check for invalid build groups #914 + def use_rec(self, name, **kw): try: - y=self.bld.get_tgen_by_name(name) + y = self.bld.get_tgen_by_name(name) except Errors.WafError: pass else: - idx=self.bld.get_group_idx(self) - odx=self.bld.get_group_idx(y) - if odx>idx: - msg="Invalid 'use' across build groups:" - if Logs.verbose>1: - msg+='\n target %r\n uses:\n %r'%(self,y) + idx = self.bld.get_group_idx(self) + odx = self.bld.get_group_idx(y) + if odx > idx: + msg = "Invalid 'use' across build groups:" + if Logs.verbose > 1: + msg += '\n target %r\n uses:\n %r' % (self, y) else: - msg+=" %r uses %r (try 'waf -v -v' for the full error)"%(self.name,name) + msg += " %r uses %r (try 'waf -v -v' for the full error)" % (self.name, name) raise Errors.WafError(msg) - self.orig_use_rec(name,**kw) - TaskGen.task_gen.orig_use_rec=TaskGen.task_gen.use_rec - TaskGen.task_gen.use_rec=use_rec - def _getattr(self,name,default=None): - if name=='append'or name=='add': + self.orig_use_rec(name, **kw) + TaskGen.task_gen.orig_use_rec = TaskGen.task_gen.use_rec + TaskGen.task_gen.use_rec = use_rec + + # check for env.append + def _getattr(self, name, default=None): + if name == 'append' or name == 'add': raise Errors.WafError('env.append and env.add do not exist: use env.append_value/env.append_unique') - elif name=='prepend': + elif name == 'prepend': raise Errors.WafError('env.prepend does not exist: use env.prepend_value') if name in self.__slots__: - return super(ConfigSet.ConfigSet,self).__getattr__(name,default) + return super(ConfigSet.ConfigSet, self).__getattr__(name, default) else: return self[name] - ConfigSet.ConfigSet.__getattr__=_getattr + ConfigSet.ConfigSet.__getattr__ = _getattr + + def options(opt): + """ + Error verification can be enabled by default (not just on ``waf -v``) by adding to the user script options + """ enhance_lib() + diff -Nru lilv-0.24.4~dfsg0/waflib/Tools/fc_config.py lilv-0.24.6/waflib/Tools/fc_config.py --- lilv-0.24.4~dfsg0/waflib/Tools/fc_config.py 2018-06-27 05:22:05.000000000 +0000 +++ lilv-0.24.6/waflib/Tools/fc_config.py 2019-06-06 20:19:08.000000000 +0000 @@ -1,220 +1,365 @@ #! /usr/bin/env python # encoding: utf-8 -# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file +# DC 2008 +# Thomas Nagy 2016-2018 (ita) -import re,os,sys,shlex +""" +Fortran configuration helpers +""" + +import re, os, sys, shlex from waflib.Configure import conf -from waflib.TaskGen import feature,before_method -FC_FRAGMENT=' program main\n end program main\n' -FC_FRAGMENT2=' PROGRAM MAIN\n END\n' +from waflib.TaskGen import feature, before_method + +FC_FRAGMENT = ' program main\n end program main\n' +FC_FRAGMENT2 = ' PROGRAM MAIN\n END\n' # what's the actual difference between these? + @conf def fc_flags(conf): - v=conf.env - v.FC_SRC_F=[] - v.FC_TGT_F=['-c','-o'] - v.FCINCPATH_ST='-I%s' - v.FCDEFINES_ST='-D%s' + """ + Defines common fortran configuration flags and file extensions + """ + v = conf.env + + v.FC_SRC_F = [] + v.FC_TGT_F = ['-c', '-o'] + v.FCINCPATH_ST = '-I%s' + v.FCDEFINES_ST = '-D%s' + if not v.LINK_FC: - v.LINK_FC=v.FC - v.FCLNK_SRC_F=[] - v.FCLNK_TGT_F=['-o'] - v.FCFLAGS_fcshlib=['-fpic'] - v.LINKFLAGS_fcshlib=['-shared'] - v.fcshlib_PATTERN='lib%s.so' - v.fcstlib_PATTERN='lib%s.a' - v.FCLIB_ST='-l%s' - v.FCLIBPATH_ST='-L%s' - v.FCSTLIB_ST='-l%s' - v.FCSTLIBPATH_ST='-L%s' - v.FCSTLIB_MARKER='-Wl,-Bstatic' - v.FCSHLIB_MARKER='-Wl,-Bdynamic' - v.SONAME_ST='-Wl,-h,%s' + v.LINK_FC = v.FC + + v.FCLNK_SRC_F = [] + v.FCLNK_TGT_F = ['-o'] + + v.FCFLAGS_fcshlib = ['-fpic'] + v.LINKFLAGS_fcshlib = ['-shared'] + v.fcshlib_PATTERN = 'lib%s.so' + + v.fcstlib_PATTERN = 'lib%s.a' + + v.FCLIB_ST = '-l%s' + v.FCLIBPATH_ST = '-L%s' + v.FCSTLIB_ST = '-l%s' + v.FCSTLIBPATH_ST = '-L%s' + v.FCSTLIB_MARKER = '-Wl,-Bstatic' + v.FCSHLIB_MARKER = '-Wl,-Bdynamic' + + v.SONAME_ST = '-Wl,-h,%s' + @conf def fc_add_flags(conf): - conf.add_os_flags('FCPPFLAGS',dup=False) - conf.add_os_flags('FCFLAGS',dup=False) - conf.add_os_flags('LINKFLAGS',dup=False) - conf.add_os_flags('LDFLAGS',dup=False) -@conf -def check_fortran(self,*k,**kw): - self.check_cc(fragment=FC_FRAGMENT,compile_filename='test.f',features='fc fcprogram',msg='Compiling a simple fortran app') -@conf -def check_fc(self,*k,**kw): - kw['compiler']='fc' - if not'compile_mode'in kw: - kw['compile_mode']='fc' - if not'type'in kw: - kw['type']='fcprogram' - if not'compile_filename'in kw: - kw['compile_filename']='test.f90' - if not'code'in kw: - kw['code']=FC_FRAGMENT - return self.check(*k,**kw) + """ + Adds FCFLAGS / LDFLAGS / LINKFLAGS from os.environ to conf.env + """ + conf.add_os_flags('FCPPFLAGS', dup=False) + conf.add_os_flags('FCFLAGS', dup=False) + conf.add_os_flags('LINKFLAGS', dup=False) + conf.add_os_flags('LDFLAGS', dup=False) + +@conf +def check_fortran(self, *k, **kw): + """ + Compiles a Fortran program to ensure that the settings are correct + """ + self.check_cc( + fragment = FC_FRAGMENT, + compile_filename = 'test.f', + features = 'fc fcprogram', + msg = 'Compiling a simple fortran app') + +@conf +def check_fc(self, *k, **kw): + """ + Same as :py:func:`waflib.Tools.c_config.check` but defaults to the *Fortran* programming language + (this overrides the C defaults in :py:func:`waflib.Tools.c_config.validate_c`) + """ + kw['compiler'] = 'fc' + if not 'compile_mode' in kw: + kw['compile_mode'] = 'fc' + if not 'type' in kw: + kw['type'] = 'fcprogram' + if not 'compile_filename' in kw: + kw['compile_filename'] = 'test.f90' + if not 'code' in kw: + kw['code'] = FC_FRAGMENT + return self.check(*k, **kw) + +# ------------------------------------------------------------------------ +# --- These are the default platform modifiers, refactored here for +# convenience. 
gfortran and g95 have much overlap. +# ------------------------------------------------------------------------ + @conf def fortran_modifier_darwin(conf): - v=conf.env - v.FCFLAGS_fcshlib=['-fPIC'] - v.LINKFLAGS_fcshlib=['-dynamiclib'] - v.fcshlib_PATTERN='lib%s.dylib' - v.FRAMEWORKPATH_ST='-F%s' - v.FRAMEWORK_ST=['-framework'] - v.LINKFLAGS_fcstlib=[] - v.FCSHLIB_MARKER='' - v.FCSTLIB_MARKER='' - v.SONAME_ST='' + """ + Defines Fortran flags and extensions for OSX systems + """ + v = conf.env + v.FCFLAGS_fcshlib = ['-fPIC'] + v.LINKFLAGS_fcshlib = ['-dynamiclib'] + v.fcshlib_PATTERN = 'lib%s.dylib' + v.FRAMEWORKPATH_ST = '-F%s' + v.FRAMEWORK_ST = ['-framework'] + + v.LINKFLAGS_fcstlib = [] + + v.FCSHLIB_MARKER = '' + v.FCSTLIB_MARKER = '' + v.SONAME_ST = '' + @conf def fortran_modifier_win32(conf): - v=conf.env - v.fcprogram_PATTERN=v.fcprogram_test_PATTERN='%s.exe' - v.fcshlib_PATTERN='%s.dll' - v.implib_PATTERN='%s.dll.a' - v.IMPLIB_ST='-Wl,--out-implib,%s' - v.FCFLAGS_fcshlib=[] - v.append_value('LINKFLAGS',['-Wl,--enable-auto-import']) + """ + Defines Fortran flags for Windows platforms + """ + v = conf.env + v.fcprogram_PATTERN = v.fcprogram_test_PATTERN = '%s.exe' + + v.fcshlib_PATTERN = '%s.dll' + v.implib_PATTERN = '%s.dll.a' + v.IMPLIB_ST = '-Wl,--out-implib,%s' + + v.FCFLAGS_fcshlib = [] + + # Auto-import is enabled by default even without this option, + # but enabling it explicitly has the nice effect of suppressing the rather boring, debug-level messages + # that the linker emits otherwise. + v.append_value('LINKFLAGS', ['-Wl,--enable-auto-import']) + @conf def fortran_modifier_cygwin(conf): + """ + Defines Fortran flags for use on cygwin + """ fortran_modifier_win32(conf) - v=conf.env - v.fcshlib_PATTERN='cyg%s.dll' - v.append_value('LINKFLAGS_fcshlib',['-Wl,--enable-auto-image-base']) - v.FCFLAGS_fcshlib=[] + v = conf.env + v.fcshlib_PATTERN = 'cyg%s.dll' + v.append_value('LINKFLAGS_fcshlib', ['-Wl,--enable-auto-image-base']) + v.FCFLAGS_fcshlib = [] + +# ------------------------------------------------------------------------ + @conf -def check_fortran_dummy_main(self,*k,**kw): +def check_fortran_dummy_main(self, *k, **kw): + """ + Determines if a main function is needed by compiling a code snippet with + the C compiler and linking it with the Fortran compiler (useful on unix-like systems) + """ if not self.env.CC: self.fatal('A c compiler is required for check_fortran_dummy_main') - lst=['MAIN__','__MAIN','_MAIN','MAIN_','MAIN'] - lst.extend([m.lower()for m in lst]) + + lst = ['MAIN__', '__MAIN', '_MAIN', 'MAIN_', 'MAIN'] + lst.extend([m.lower() for m in lst]) lst.append('') + self.start_msg('Detecting whether we need a dummy main') for main in lst: - kw['fortran_main']=main + kw['fortran_main'] = main try: - self.check_cc(fragment='int %s() { return 0; }\n'%(main or'test'),features='c fcprogram',mandatory=True) + self.check_cc( + fragment = 'int %s() { return 0; }\n' % (main or 'test'), + features = 'c fcprogram', + mandatory = True + ) if not main: - self.env.FC_MAIN=-1 + self.env.FC_MAIN = -1 self.end_msg('no') else: - self.env.FC_MAIN=main - self.end_msg('yes %s'%main) + self.env.FC_MAIN = main + self.end_msg('yes %s' % main) break except self.errors.ConfigurationError: pass else: self.end_msg('not found') self.fatal('could not detect whether fortran requires a dummy main, see the config.log') -GCC_DRIVER_LINE=re.compile('^Driving:') -POSIX_STATIC_EXT=re.compile('\S+\.a') -POSIX_LIB_FLAGS=re.compile('-l\S+') + +# 
------------------------------------------------------------------------ + +GCC_DRIVER_LINE = re.compile('^Driving:') +POSIX_STATIC_EXT = re.compile(r'\S+\.a') +POSIX_LIB_FLAGS = re.compile(r'-l\S+') + @conf -def is_link_verbose(self,txt): - assert isinstance(txt,str) +def is_link_verbose(self, txt): + """Returns True if 'useful' link options can be found in txt""" + assert isinstance(txt, str) for line in txt.splitlines(): if not GCC_DRIVER_LINE.search(line): - if POSIX_STATIC_EXT.search(line)or POSIX_LIB_FLAGS.search(line): + if POSIX_STATIC_EXT.search(line) or POSIX_LIB_FLAGS.search(line): return True return False + @conf -def check_fortran_verbose_flag(self,*k,**kw): +def check_fortran_verbose_flag(self, *k, **kw): + """ + Checks what kind of verbose (-v) flag works, then sets it to env.FC_VERBOSE_FLAG + """ self.start_msg('fortran link verbose flag') - for x in('-v','--verbose','-verbose','-V'): + for x in ('-v', '--verbose', '-verbose', '-V'): try: - self.check_cc(features='fc fcprogram_test',fragment=FC_FRAGMENT2,compile_filename='test.f',linkflags=[x],mandatory=True) + self.check_cc( + features = 'fc fcprogram_test', + fragment = FC_FRAGMENT2, + compile_filename = 'test.f', + linkflags = [x], + mandatory=True) except self.errors.ConfigurationError: pass else: - if self.is_link_verbose(self.test_bld.err)or self.is_link_verbose(self.test_bld.out): + # output is on stderr or stdout (for xlf) + if self.is_link_verbose(self.test_bld.err) or self.is_link_verbose(self.test_bld.out): self.end_msg(x) break else: self.end_msg('failure') self.fatal('Could not obtain the fortran link verbose flag (see config.log)') - self.env.FC_VERBOSE_FLAG=x + + self.env.FC_VERBOSE_FLAG = x return x -LINKFLAGS_IGNORED=[r'-lang*',r'-lcrt[a-zA-Z0-9\.]*\.o',r'-lc$',r'-lSystem',r'-libmil',r'-LIST:*',r'-LNO:*'] -if os.name=='nt': - LINKFLAGS_IGNORED.extend([r'-lfrt*',r'-luser32',r'-lkernel32',r'-ladvapi32',r'-lmsvcrt',r'-lshell32',r'-lmingw',r'-lmoldname']) + +# ------------------------------------------------------------------------ + +# linkflags which match those are ignored +LINKFLAGS_IGNORED = [r'-lang*', r'-lcrt[a-zA-Z0-9\.]*\.o', r'-lc$', r'-lSystem', r'-libmil', r'-LIST:*', r'-LNO:*'] +if os.name == 'nt': + LINKFLAGS_IGNORED.extend([r'-lfrt*', r'-luser32', r'-lkernel32', r'-ladvapi32', r'-lmsvcrt', r'-lshell32', r'-lmingw', r'-lmoldname']) else: LINKFLAGS_IGNORED.append(r'-lgcc*') -RLINKFLAGS_IGNORED=[re.compile(f)for f in LINKFLAGS_IGNORED] +RLINKFLAGS_IGNORED = [re.compile(f) for f in LINKFLAGS_IGNORED] + def _match_ignore(line): + """Returns True if the line should be ignored (Fortran verbose flag test)""" for i in RLINKFLAGS_IGNORED: if i.match(line): return True return False + def parse_fortran_link(lines): - final_flags=[] + """Given the output of verbose link of Fortran compiler, this returns a + list of flags necessary for linking using the standard linker.""" + final_flags = [] for line in lines: if not GCC_DRIVER_LINE.match(line): - _parse_flink_line(line,final_flags) + _parse_flink_line(line, final_flags) return final_flags -SPACE_OPTS=re.compile('^-[LRuYz]$') -NOSPACE_OPTS=re.compile('^-[RL]') -def _parse_flink_token(lexer,token,tmp_flags): + +SPACE_OPTS = re.compile('^-[LRuYz]$') +NOSPACE_OPTS = re.compile('^-[RL]') + +def _parse_flink_token(lexer, token, tmp_flags): + # Here we go (convention for wildcard is shell, not regex !) 
+ # 1 TODO: we first get some root .a libraries + # 2 TODO: take everything starting by -bI:* + # 3 Ignore the following flags: -lang* | -lcrt*.o | -lc | + # -lgcc* | -lSystem | -libmil | -LANG:=* | -LIST:* | -LNO:*) + # 4 take into account -lkernel32 + # 5 For options of the kind -[[LRuYz]], as they take one argument + # after, the actual option is the next token + # 6 For -YP,*: take and replace by -Larg where arg is the old + # argument + # 7 For -[lLR]*: take + + # step 3 if _match_ignore(token): pass - elif token.startswith('-lkernel32')and sys.platform=='cygwin': + # step 4 + elif token.startswith('-lkernel32') and sys.platform == 'cygwin': tmp_flags.append(token) + # step 5 elif SPACE_OPTS.match(token): - t=lexer.get_token() + t = lexer.get_token() if t.startswith('P,'): - t=t[2:] + t = t[2:] for opt in t.split(os.pathsep): - tmp_flags.append('-L%s'%opt) + tmp_flags.append('-L%s' % opt) + # step 6 elif NOSPACE_OPTS.match(token): tmp_flags.append(token) + # step 7 elif POSIX_LIB_FLAGS.match(token): tmp_flags.append(token) else: + # ignore anything not explicitly taken into account pass - t=lexer.get_token() + + t = lexer.get_token() return t -def _parse_flink_line(line,final_flags): - lexer=shlex.shlex(line,posix=True) - lexer.whitespace_split=True - t=lexer.get_token() - tmp_flags=[] + +def _parse_flink_line(line, final_flags): + """private""" + lexer = shlex.shlex(line, posix = True) + lexer.whitespace_split = True + + t = lexer.get_token() + tmp_flags = [] while t: - t=_parse_flink_token(lexer,t,tmp_flags) + t = _parse_flink_token(lexer, t, tmp_flags) + final_flags.extend(tmp_flags) return final_flags + @conf -def check_fortran_clib(self,autoadd=True,*k,**kw): +def check_fortran_clib(self, autoadd=True, *k, **kw): + """ + Obtains the flags for linking with the C library + if this check works, add uselib='CLIB' to your task generators + """ if not self.env.FC_VERBOSE_FLAG: self.fatal('env.FC_VERBOSE_FLAG is not set: execute check_fortran_verbose_flag?') + self.start_msg('Getting fortran runtime link flags') try: - self.check_cc(fragment=FC_FRAGMENT2,compile_filename='test.f',features='fc fcprogram_test',linkflags=[self.env.FC_VERBOSE_FLAG]) + self.check_cc( + fragment = FC_FRAGMENT2, + compile_filename = 'test.f', + features = 'fc fcprogram_test', + linkflags = [self.env.FC_VERBOSE_FLAG] + ) except Exception: self.end_msg(False) - if kw.get('mandatory',True): + if kw.get('mandatory', True): conf.fatal('Could not find the c library flags') else: - out=self.test_bld.err - flags=parse_fortran_link(out.splitlines()) - self.end_msg('ok (%s)'%' '.join(flags)) - self.env.LINKFLAGS_CLIB=flags + out = self.test_bld.err + flags = parse_fortran_link(out.splitlines()) + self.end_msg('ok (%s)' % ' '.join(flags)) + self.env.LINKFLAGS_CLIB = flags return flags - return[] -def getoutput(conf,cmd,stdin=False): + return [] + +def getoutput(conf, cmd, stdin=False): + """ + Obtains Fortran command outputs + """ from waflib import Errors if conf.env.env: - env=conf.env.env + env = conf.env.env else: - env=dict(os.environ) - env['LANG']='C' - input=stdin and'\n'.encode()or None + env = dict(os.environ) + env['LANG'] = 'C' + input = stdin and '\n'.encode() or None try: - out,err=conf.cmd_and_log(cmd,env=env,output=0,input=input) + out, err = conf.cmd_and_log(cmd, env=env, output=0, input=input) except Errors.WafError as e: - if not(hasattr(e,'stderr')and hasattr(e,'stdout')): + # An WafError might indicate an error code during the command + # execution, in this case we still obtain the stderr and stdout, 
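A worked example with hypothetical paths: given one linker line from gfortran's verbose output, the filters above keep only what a plain linker needs. '-lc' is dropped by the '-lc$' pattern and '-lgcc_s' by '-lgcc*'; the '-L' entry is kept by NOSPACE_OPTS and the remaining '-l' flags by POSIX_LIB_FLAGS:

    line = ('/usr/lib/gcc/x86_64-linux-gnu/9/collect2 -o a.out '
            '-L/usr/lib/gcc/x86_64-linux-gnu/9 -lgfortran -lm -lgcc_s -lc')
    parse_fortran_link([line])
    # -> ['-L/usr/lib/gcc/x86_64-linux-gnu/9', '-lgfortran', '-lm']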
+ # which we can use to find the version string. + if not (hasattr(e, 'stderr') and hasattr(e, 'stdout')): raise e else: - out=e.stdout - err=e.stderr + # Ignore the return code and return the original + # stdout and stderr. + out = e.stdout + err = e.stderr except Exception: - conf.fatal('could not determine the compiler version %r'%cmd) - return(out,err) -ROUTINES_CODE="""\ + conf.fatal('could not determine the compiler version %r' % cmd) + return (out, err) + +# ------------------------------------------------------------------------ + +ROUTINES_CODE = """\ subroutine foobar() return end @@ -222,7 +367,8 @@ return end """ -MAIN_CODE=""" + +MAIN_CODE = """ void %(dummy_func_nounder)s(void); void %(dummy_func_under)s(void); int %(main_func_name)s() { @@ -231,68 +377,111 @@ return 0; } """ + @feature('link_main_routines_func') @before_method('process_source') def link_main_routines_tg_method(self): + """ + The configuration test declares a unique task generator, + so we create other task generators from there for fortran link tests + """ def write_test_file(task): task.outputs[0].write(task.generator.code) - bld=self.bld - bld(rule=write_test_file,target='main.c',code=MAIN_CODE%self.__dict__) - bld(rule=write_test_file,target='test.f',code=ROUTINES_CODE) - bld(features='fc fcstlib',source='test.f',target='test') - bld(features='c fcprogram',source='main.c',target='app',use='test') + bld = self.bld + bld(rule=write_test_file, target='main.c', code=MAIN_CODE % self.__dict__) + bld(rule=write_test_file, target='test.f', code=ROUTINES_CODE) + bld(features='fc fcstlib', source='test.f', target='test') + bld(features='c fcprogram', source='main.c', target='app', use='test') + def mangling_schemes(): - for u in('_',''): - for du in('','_'): - for c in("lower","upper"): - yield(u,du,c) -def mangle_name(u,du,c,name): - return getattr(name,c)()+u+(name.find('_')!=-1 and du or'') + """ + Generate triplets for use with mangle_name + (used in check_fortran_mangling) + the order is tuned for gfortan + """ + for u in ('_', ''): + for du in ('', '_'): + for c in ("lower", "upper"): + yield (u, du, c) + +def mangle_name(u, du, c, name): + """Mangle a name from a triplet (used in check_fortran_mangling)""" + return getattr(name, c)() + u + (name.find('_') != -1 and du or '') + @conf -def check_fortran_mangling(self,*k,**kw): +def check_fortran_mangling(self, *k, **kw): + """ + Detect the mangling scheme, sets FORTRAN_MANGLING to the triplet found + + This test will compile a fortran static library, then link a c app against it + """ if not self.env.CC: self.fatal('A c compiler is required for link_main_routines') if not self.env.FC: self.fatal('A fortran compiler is required for link_main_routines') if not self.env.FC_MAIN: self.fatal('Checking for mangling requires self.env.FC_MAIN (execute "check_fortran_dummy_main" first?)') + self.start_msg('Getting fortran mangling scheme') - for(u,du,c)in mangling_schemes(): + for (u, du, c) in mangling_schemes(): try: - self.check_cc(compile_filename=[],features='link_main_routines_func',msg='nomsg',errmsg='nomsg',dummy_func_nounder=mangle_name(u,du,c,'foobar'),dummy_func_under=mangle_name(u,du,c,'foo_bar'),main_func_name=self.env.FC_MAIN) + self.check_cc( + compile_filename = [], + features = 'link_main_routines_func', + msg = 'nomsg', + errmsg = 'nomsg', + dummy_func_nounder = mangle_name(u, du, c, 'foobar'), + dummy_func_under = mangle_name(u, du, c, 'foo_bar'), + main_func_name = self.env.FC_MAIN + ) except self.errors.ConfigurationError: pass else: - 
self.end_msg("ok ('%s', '%s', '%s-case')"%(u,du,c)) - self.env.FORTRAN_MANGLING=(u,du,c) + self.end_msg("ok ('%s', '%s', '%s-case')" % (u, du, c)) + self.env.FORTRAN_MANGLING = (u, du, c) break else: self.end_msg(False) self.fatal('mangler not found') - return(u,du,c) + return (u, du, c) + @feature('pyext') -@before_method('propagate_uselib_vars','apply_link') +@before_method('propagate_uselib_vars', 'apply_link') def set_lib_pat(self): - self.env.fcshlib_PATTERN=self.env.pyext_PATTERN + """Sets the Fortran flags for linking with Python""" + self.env.fcshlib_PATTERN = self.env.pyext_PATTERN + @conf def detect_openmp(self): - for x in('-fopenmp','-openmp','-mp','-xopenmp','-omp','-qsmp=omp'): + """ + Detects openmp flags and sets the OPENMP ``FCFLAGS``/``LINKFLAGS`` + """ + for x in ('-fopenmp','-openmp','-mp','-xopenmp','-omp','-qsmp=omp'): try: - self.check_fc(msg='Checking for OpenMP flag %s'%x,fragment='program main\n call omp_get_num_threads()\nend program main',fcflags=x,linkflags=x,uselib_store='OPENMP') + self.check_fc( + msg = 'Checking for OpenMP flag %s' % x, + fragment = 'program main\n call omp_get_num_threads()\nend program main', + fcflags = x, + linkflags = x, + uselib_store = 'OPENMP' + ) except self.errors.ConfigurationError: pass else: break else: self.fatal('Could not find OpenMP') + @conf def check_gfortran_o_space(self): - if self.env.FC_NAME!='GFORTRAN'or int(self.env.FC_VERSION[0])>4: + if self.env.FC_NAME != 'GFORTRAN' or int(self.env.FC_VERSION[0]) > 4: + # This is for old compilers and only for gfortran. + # No idea how other implementations handle this. Be safe and bail out. return self.env.stash() - self.env.FCLNK_TGT_F=['-o',''] + self.env.FCLNK_TGT_F = ['-o', ''] try: - self.check_fc(msg='Checking if the -o link must be split from arguments',fragment=FC_FRAGMENT,features='fc fcshlib') + self.check_fc(msg='Checking if the -o link must be split from arguments', fragment=FC_FRAGMENT, features='fc fcshlib') except self.errors.ConfigurationError: self.env.revert() else: diff -Nru lilv-0.24.4~dfsg0/waflib/Tools/fc.py lilv-0.24.6/waflib/Tools/fc.py --- lilv-0.24.4~dfsg0/waflib/Tools/fc.py 2018-06-27 05:22:05.000000000 +0000 +++ lilv-0.24.6/waflib/Tools/fc.py 2019-06-06 20:19:08.000000000 +0000 @@ -1,108 +1,203 @@ #! /usr/bin/env python # encoding: utf-8 -# WARNING! Do not edit! 
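The (u, du, c) triplets tried above are easiest to read off examples: u is appended to every symbol, du is appended additionally when the name itself contains an underscore, and c selects the case. A sketch of what mangle_name() yields:

    mangle_name('_', '', 'lower', 'foobar')    # 'foobar_'    (gfortran-style)
    mangle_name('_', '_', 'lower', 'foo_bar')  # 'foo_bar__'  (g77-style double underscore)
    mangle_name('', '', 'upper', 'foo_bar')    # 'FOO_BAR'    (no suffix, upper case)

Note also that the OpenMP flags stored above via uselib_store='OPENMP' are consumed at build time by putting use='OPENMP' on a task generator.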
https://waf.io/book/index.html#_obtaining_the_waf_file +# DC 2008 +# Thomas Nagy 2016-2018 (ita) -from waflib import Utils,Task,Errors -from waflib.Tools import ccroot,fc_config,fc_scan +""" +Fortran support +""" + +from waflib import Utils, Task, Errors +from waflib.Tools import ccroot, fc_config, fc_scan from waflib.TaskGen import extension from waflib.Configure import conf -ccroot.USELIB_VARS['fc']=set(['FCFLAGS','DEFINES','INCLUDES','FCPPFLAGS']) -ccroot.USELIB_VARS['fcprogram_test']=ccroot.USELIB_VARS['fcprogram']=set(['LIB','STLIB','LIBPATH','STLIBPATH','LINKFLAGS','RPATH','LINKDEPS']) -ccroot.USELIB_VARS['fcshlib']=set(['LIB','STLIB','LIBPATH','STLIBPATH','LINKFLAGS','RPATH','LINKDEPS']) -ccroot.USELIB_VARS['fcstlib']=set(['ARFLAGS','LINKDEPS']) + +ccroot.USELIB_VARS['fc'] = set(['FCFLAGS', 'DEFINES', 'INCLUDES', 'FCPPFLAGS']) +ccroot.USELIB_VARS['fcprogram_test'] = ccroot.USELIB_VARS['fcprogram'] = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS']) +ccroot.USELIB_VARS['fcshlib'] = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS']) +ccroot.USELIB_VARS['fcstlib'] = set(['ARFLAGS', 'LINKDEPS']) + @extension('.f','.F','.f90','.F90','.for','.FOR','.f95','.F95','.f03','.F03','.f08','.F08') -def fc_hook(self,node): - return self.create_compiled_task('fc',node) +def fc_hook(self, node): + "Binds the Fortran file extensions create :py:class:`waflib.Tools.fc.fc` instances" + return self.create_compiled_task('fc', node) + @conf -def modfile(conf,name): - return{'lower':name.lower()+'.mod','lower.MOD':name.lower()+'.MOD','UPPER.mod':name.upper()+'.mod','UPPER':name.upper()+'.MOD'}[conf.env.FC_MOD_CAPITALIZATION or'lower'] +def modfile(conf, name): + """ + Turns a module name into the right module file name. + Defaults to all lower case. + """ + if name.find(':') >= 0: + # Depending on a submodule! + separator = conf.env.FC_SUBMOD_SEPARATOR or '@' + # Ancestors of the submodule will be prefixed to the + # submodule name, separated by a colon. + modpath = name.split(':') + # Only the ancestor (actual) module and the submodule name + # will be used for the filename. + modname = modpath[0] + separator + modpath[-1] + suffix = conf.env.FC_SUBMOD_SUFFIX or '.smod' + else: + modname = name + suffix = '.mod' + + return {'lower' :modname.lower() + suffix.lower(), + 'lower.MOD' :modname.lower() + suffix.upper(), + 'UPPER.mod' :modname.upper() + suffix.lower(), + 'UPPER' :modname.upper() + suffix.upper()}[conf.env.FC_MOD_CAPITALIZATION or 'lower'] + def get_fortran_tasks(tsk): - bld=tsk.generator.bld - tasks=bld.get_tasks_group(bld.get_group_idx(tsk.generator)) - return[x for x in tasks if isinstance(x,fc)and not getattr(x,'nomod',None)and not getattr(x,'mod_fortran_done',None)] + """ + Obtains all fortran tasks from the same build group. 
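Concretely (a sketch; FC_SUBMOD_SEPARATOR defaults to '@' as above):

    conf.env.FC_MOD_CAPITALIZATION = 'lower'   # the default
    conf.modfile('FooBar')        # 'foobar.mod'
    conf.modfile('m:mid:leaf')    # 'm@leaf.smod' (ancestor module + last component)
    conf.env.FC_MOD_CAPITALIZATION = 'UPPER.mod'
    conf.modfile('FooBar')        # 'FOOBAR.mod'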
Those tasks must not have + the attribute 'nomod' or 'mod_fortran_done' + + :return: a list of :py:class:`waflib.Tools.fc.fc` instances + """ + bld = tsk.generator.bld + tasks = bld.get_tasks_group(bld.get_group_idx(tsk.generator)) + return [x for x in tasks if isinstance(x, fc) and not getattr(x, 'nomod', None) and not getattr(x, 'mod_fortran_done', None)] + class fc(Task.Task): - color='GREEN' - run_str='${FC} ${FCFLAGS} ${FCINCPATH_ST:INCPATHS} ${FCDEFINES_ST:DEFINES} ${_FCMODOUTFLAGS} ${FC_TGT_F}${TGT[0].abspath()} ${FC_SRC_F}${SRC[0].abspath()} ${FCPPFLAGS}' - vars=["FORTRANMODPATHFLAG"] + """ + Fortran tasks can only run when all fortran tasks in a current task group are ready to be executed + This may cause a deadlock if some fortran task is waiting for something that cannot happen (circular dependency) + Should this ever happen, set the 'nomod=True' on those tasks instances to break the loop + """ + color = 'GREEN' + run_str = '${FC} ${FCFLAGS} ${FCINCPATH_ST:INCPATHS} ${FCDEFINES_ST:DEFINES} ${_FCMODOUTFLAGS} ${FC_TGT_F}${TGT[0].abspath()} ${FC_SRC_F}${SRC[0].abspath()} ${FCPPFLAGS}' + vars = ["FORTRANMODPATHFLAG"] + def scan(self): - tmp=fc_scan.fortran_parser(self.generator.includes_nodes) - tmp.task=self + """Fortran dependency scanner""" + tmp = fc_scan.fortran_parser(self.generator.includes_nodes) + tmp.task = self tmp.start(self.inputs[0]) - return(tmp.nodes,tmp.names) + return (tmp.nodes, tmp.names) + def runnable_status(self): - if getattr(self,'mod_fortran_done',None): - return super(fc,self).runnable_status() - bld=self.generator.bld - lst=get_fortran_tasks(self) + """ + Sets the mod file outputs and the dependencies on the mod files over all Fortran tasks + executed by the main thread so there are no concurrency issues + """ + if getattr(self, 'mod_fortran_done', None): + return super(fc, self).runnable_status() + + # now, if we reach this part it is because this fortran task is the first in the list + bld = self.generator.bld + + # obtain the fortran tasks + lst = get_fortran_tasks(self) + + # disable this method for other tasks for tsk in lst: - tsk.mod_fortran_done=True + tsk.mod_fortran_done = True + + # wait for all the .f tasks to be ready for execution + # and ensure that the scanners are called at least once for tsk in lst: - ret=tsk.runnable_status() - if ret==Task.ASK_LATER: + ret = tsk.runnable_status() + if ret == Task.ASK_LATER: + # we have to wait for one of the other fortran tasks to be ready + # this may deadlock if there are dependencies between fortran tasks + # but this should not happen (we are setting them here!) 
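In practice this is what lets sources be listed in any order: the scanner records MOD@/USE@ markers and the surrounding runnable_status() logic turns them into task ordering through the generated .mod nodes. Sketch, with hypothetical file names:

    # a.f90 contains 'module m'  -> raw_deps has 'MOD@m'
    # b.f90 contains 'use m'     -> raw_deps has 'USE@m'
    def build(bld):
        bld(features='fc fcprogram', source='b.f90 a.f90', target='app')
        # b.f90 is still compiled after a.f90, via the shared m.mod node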
for x in lst: - x.mod_fortran_done=None + x.mod_fortran_done = None + return Task.ASK_LATER - ins=Utils.defaultdict(set) - outs=Utils.defaultdict(set) + + ins = Utils.defaultdict(set) + outs = Utils.defaultdict(set) + + # the .mod files to create for tsk in lst: - key=tsk.uid() + key = tsk.uid() for x in bld.raw_deps[key]: if x.startswith('MOD@'): - name=bld.modfile(x.replace('MOD@','')) - node=bld.srcnode.find_or_declare(name) + name = bld.modfile(x.replace('MOD@', '')) + node = bld.srcnode.find_or_declare(name) tsk.set_outputs(node) outs[node].add(tsk) + + # the .mod files to use for tsk in lst: - key=tsk.uid() + key = tsk.uid() for x in bld.raw_deps[key]: if x.startswith('USE@'): - name=bld.modfile(x.replace('USE@','')) - node=bld.srcnode.find_resource(name) + name = bld.modfile(x.replace('USE@', '')) + node = bld.srcnode.find_resource(name) if node and node not in tsk.outputs: if not node in bld.node_deps[key]: bld.node_deps[key].append(node) ins[node].add(tsk) + + # if the intersection matches, set the order for k in ins.keys(): for a in ins[k]: a.run_after.update(outs[k]) - tmp=[] + for x in outs[k]: + self.generator.bld.producer.revdeps[x].add(a) + + # the scanner cannot output nodes, so we have to set them + # ourselves as task.dep_nodes (additional input nodes) + tmp = [] for t in outs[k]: tmp.extend(t.outputs) a.dep_nodes.extend(tmp) - a.dep_nodes.sort(key=lambda x:x.abspath()) + a.dep_nodes.sort(key=lambda x: x.abspath()) + + # the task objects have changed: clear the signature cache for tsk in lst: try: - delattr(tsk,'cache_sig') + delattr(tsk, 'cache_sig') except AttributeError: pass - return super(fc,self).runnable_status() + + return super(fc, self).runnable_status() + class fcprogram(ccroot.link_task): - color='YELLOW' - run_str='${FC} ${LINKFLAGS} ${FCLNK_SRC_F}${SRC} ${FCLNK_TGT_F}${TGT[0].abspath()} ${RPATH_ST:RPATH} ${FCSTLIB_MARKER} ${FCSTLIBPATH_ST:STLIBPATH} ${FCSTLIB_ST:STLIB} ${FCSHLIB_MARKER} ${FCLIBPATH_ST:LIBPATH} ${FCLIB_ST:LIB} ${LDFLAGS}' - inst_to='${BINDIR}' + """Links Fortran programs""" + color = 'YELLOW' + run_str = '${FC} ${LINKFLAGS} ${FCLNK_SRC_F}${SRC} ${FCLNK_TGT_F}${TGT[0].abspath()} ${RPATH_ST:RPATH} ${FCSTLIB_MARKER} ${FCSTLIBPATH_ST:STLIBPATH} ${FCSTLIB_ST:STLIB} ${FCSHLIB_MARKER} ${FCLIBPATH_ST:LIBPATH} ${FCLIB_ST:LIB} ${LDFLAGS}' + inst_to = '${BINDIR}' + class fcshlib(fcprogram): - inst_to='${LIBDIR}' + """Links Fortran libraries""" + inst_to = '${LIBDIR}' + class fcstlib(ccroot.stlink_task): - pass + """Links Fortran static libraries (uses ar by default)""" + pass # do not remove the pass statement + class fcprogram_test(fcprogram): + """Custom link task to obtain compiler outputs for Fortran configuration tests""" + def runnable_status(self): - ret=super(fcprogram_test,self).runnable_status() - if ret==Task.SKIP_ME: - ret=Task.RUN_ME + """This task is always executed""" + ret = super(fcprogram_test, self).runnable_status() + if ret == Task.SKIP_ME: + ret = Task.RUN_ME return ret - def exec_command(self,cmd,**kw): - bld=self.generator.bld - kw['shell']=isinstance(cmd,str) - kw['stdout']=kw['stderr']=Utils.subprocess.PIPE - kw['cwd']=self.get_cwd() - bld.out=bld.err='' - bld.to_log('command: %s\n'%cmd) - kw['output']=0 + + def exec_command(self, cmd, **kw): + """Stores the compiler std our/err onto the build context, to bld.out + bld.err""" + bld = self.generator.bld + + kw['shell'] = isinstance(cmd, str) + kw['stdout'] = kw['stderr'] = Utils.subprocess.PIPE + kw['cwd'] = self.get_cwd() + bld.out = bld.err = '' + + bld.to_log('command: %s\n' 
% cmd) + + kw['output'] = 0 try: - (bld.out,bld.err)=bld.cmd_and_log(cmd,**kw) + (bld.out, bld.err) = bld.cmd_and_log(cmd, **kw) except Errors.WafError: - return-1 + return -1 + if bld.out: - bld.to_log('out: %s\n'%bld.out) + bld.to_log('out: %s\n' % bld.out) if bld.err: - bld.to_log('err: %s\n'%bld.err) + bld.to_log('err: %s\n' % bld.err) + diff -Nru lilv-0.24.4~dfsg0/waflib/Tools/fc_scan.py lilv-0.24.6/waflib/Tools/fc_scan.py --- lilv-0.24.4~dfsg0/waflib/Tools/fc_scan.py 2018-06-27 05:22:05.000000000 +0000 +++ lilv-0.24.6/waflib/Tools/fc_scan.py 2019-06-06 20:19:08.000000000 +0000 @@ -1,60 +1,115 @@ #! /usr/bin/env python # encoding: utf-8 -# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file +# DC 2008 +# Thomas Nagy 2016-2018 (ita) import re -INC_REGEX="""(?:^|['">]\s*;)\s*(?:|#\s*)INCLUDE\s+(?:\w+_)?[<"'](.+?)(?=["'>])""" -USE_REGEX="""(?:^|;)\s*USE(?:\s+|(?:(?:\s*,\s*(?:NON_)?INTRINSIC)?\s*::))\s*(\w+)""" -MOD_REGEX="""(?:^|;)\s*MODULE(?!\s*PROCEDURE)(?:\s+|(?:(?:\s*,\s*(?:NON_)?INTRINSIC)?\s*::))\s*(\w+)""" -re_inc=re.compile(INC_REGEX,re.I) -re_use=re.compile(USE_REGEX,re.I) -re_mod=re.compile(MOD_REGEX,re.I) + +INC_REGEX = r"""(?:^|['">]\s*;)\s*(?:|#\s*)INCLUDE\s+(?:\w+_)?[<"'](.+?)(?=["'>])""" +USE_REGEX = r"""(?:^|;)\s*USE(?:\s+|(?:(?:\s*,\s*(?:NON_)?INTRINSIC)?\s*::))\s*(\w+)""" +MOD_REGEX = r"""(?:^|;)\s*MODULE(?!\s+(?:PROCEDURE|SUBROUTINE|FUNCTION))\s+(\w+)""" +SMD_REGEX = r"""(?:^|;)\s*SUBMODULE\s*\(([\w:]+)\)\s*(\w+)""" + +re_inc = re.compile(INC_REGEX, re.I) +re_use = re.compile(USE_REGEX, re.I) +re_mod = re.compile(MOD_REGEX, re.I) +re_smd = re.compile(SMD_REGEX, re.I) + class fortran_parser(object): - def __init__(self,incpaths): - self.seen=[] - self.nodes=[] - self.names=[] - self.incpaths=incpaths - def find_deps(self,node): - txt=node.read() - incs=[] - uses=[] - mods=[] + """ + This parser returns: + + * the nodes corresponding to the module names to produce + * the nodes corresponding to the include files used + * the module names used by the fortran files + """ + def __init__(self, incpaths): + self.seen = [] + """Files already parsed""" + + self.nodes = [] + """List of :py:class:`waflib.Node.Node` representing the dependencies to return""" + + self.names = [] + """List of module names to return""" + + self.incpaths = incpaths + """List of :py:class:`waflib.Node.Node` representing the include paths""" + + def find_deps(self, node): + """ + Parses a Fortran file to obtain the dependencies used/provided + + :param node: fortran file to read + :type node: :py:class:`waflib.Node.Node` + :return: lists representing the includes, the modules used, and the modules created by a fortran file + :rtype: tuple of list of strings + """ + txt = node.read() + incs = [] + uses = [] + mods = [] for line in txt.splitlines(): - m=re_inc.search(line) + # line by line regexp search? optimize? + m = re_inc.search(line) if m: incs.append(m.group(1)) - m=re_use.search(line) + m = re_use.search(line) if m: uses.append(m.group(1)) - m=re_mod.search(line) + m = re_mod.search(line) if m: mods.append(m.group(1)) - return(incs,uses,mods) - def start(self,node): - self.waiting=[node] + m = re_smd.search(line) + if m: + uses.append(m.group(1)) + mods.append('{0}:{1}'.format(m.group(1),m.group(2))) + return (incs, uses, mods) + + def start(self, node): + """ + Start parsing. 
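A few concrete matches for the regular expressions above (sketch):

    re_use.search('  use, intrinsic :: iso_c_binding').group(1)   # 'iso_c_binding'
    re_mod.search('module constants').group(1)                    # 'constants'
    re_smd.search('submodule (m) impl').groups()                  # ('m', 'impl')
    re_inc.search("      include 'params.inc'").group(1)          # 'params.inc'
    re_mod.search('module procedure assign')                      # None (excluded by the lookahead)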
Use the stack ``self.waiting`` to hold nodes to iterate on + + :param node: fortran file + :type node: :py:class:`waflib.Node.Node` + """ + self.waiting = [node] while self.waiting: - nd=self.waiting.pop(0) + nd = self.waiting.pop(0) self.iter(nd) - def iter(self,node): - incs,uses,mods=self.find_deps(node) + + def iter(self, node): + """ + Processes a single file during dependency parsing. Extracts files used + modules used and modules provided. + """ + incs, uses, mods = self.find_deps(node) for x in incs: if x in self.seen: continue self.seen.append(x) self.tryfind_header(x) + for x in uses: - name="USE@%s"%x + name = "USE@%s" % x if not name in self.names: self.names.append(name) + for x in mods: - name="MOD@%s"%x + name = "MOD@%s" % x if not name in self.names: self.names.append(name) - def tryfind_header(self,filename): - found=None + + def tryfind_header(self, filename): + """ + Adds an include file to the list of nodes to process + + :param filename: file name + :type filename: string + """ + found = None for n in self.incpaths: - found=n.find_resource(filename) + found = n.find_resource(filename) if found: self.nodes.append(found) self.waiting.append(found) @@ -62,3 +117,4 @@ if not found: if not filename in self.names: self.names.append(filename) + diff -Nru lilv-0.24.4~dfsg0/waflib/Tools/flex.py lilv-0.24.6/waflib/Tools/flex.py --- lilv-0.24.4~dfsg0/waflib/Tools/flex.py 2018-06-27 05:22:05.000000000 +0000 +++ lilv-0.24.6/waflib/Tools/flex.py 2019-06-06 20:19:08.000000000 +0000 @@ -1,38 +1,62 @@ -#! /usr/bin/env python +#!/usr/bin/env python # encoding: utf-8 -# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file +# John O'Meara, 2006 +# Thomas Nagy, 2006-2018 (ita) -import os,re -from waflib import Task,TaskGen +""" +The **flex** program is a code generator which creates C or C++ files. +The generated files are compiled into object files. 
+""" + +import os, re +from waflib import Task, TaskGen from waflib.Tools import ccroot -def decide_ext(self,node): - if'cxx'in self.features: - return['.lex.cc'] - return['.lex.c'] + +def decide_ext(self, node): + if 'cxx' in self.features: + return ['.lex.cc'] + return ['.lex.c'] + def flexfun(tsk): - env=tsk.env - bld=tsk.generator.bld - wd=bld.variant_dir + env = tsk.env + bld = tsk.generator.bld + wd = bld.variant_dir def to_list(xx): - if isinstance(xx,str): - return[xx] + if isinstance(xx, str): + return [xx] return xx - tsk.last_cmd=lst=[] + tsk.last_cmd = lst = [] lst.extend(to_list(env.FLEX)) lst.extend(to_list(env.FLEXFLAGS)) - inputs=[a.path_from(tsk.get_cwd())for a in tsk.inputs] + inputs = [a.path_from(tsk.get_cwd()) for a in tsk.inputs] if env.FLEX_MSYS: - inputs=[x.replace(os.sep,'/')for x in inputs] + inputs = [x.replace(os.sep, '/') for x in inputs] lst.extend(inputs) - lst=[x for x in lst if x] - txt=bld.cmd_and_log(lst,cwd=wd,env=env.env or None,quiet=0) - tsk.outputs[0].write(txt.replace('\r\n','\n').replace('\r','\n')) -TaskGen.declare_chain(name='flex',rule=flexfun,ext_in='.l',decider=decide_ext,) -Task.classes['flex'].vars=['FLEXFLAGS','FLEX'] + lst = [x for x in lst if x] + txt = bld.cmd_and_log(lst, cwd=wd, env=env.env or None, quiet=0) + tsk.outputs[0].write(txt.replace('\r\n', '\n').replace('\r', '\n')) # issue #1207 + +TaskGen.declare_chain( + name = 'flex', + rule = flexfun, # issue #854 + ext_in = '.l', + decider = decide_ext, +) + +# To support the following: +# bld(features='c', flexflags='-P/foo') +Task.classes['flex'].vars = ['FLEXFLAGS', 'FLEX'] ccroot.USELIB_VARS['c'].add('FLEXFLAGS') ccroot.USELIB_VARS['cxx'].add('FLEXFLAGS') + def configure(conf): - conf.find_program('flex',var='FLEX') - conf.env.FLEXFLAGS=['-t'] - if re.search(r"\\msys\\[0-9.]+\\bin\\flex.exe$",conf.env.FLEX[0]): - conf.env.FLEX_MSYS=True + """ + Detect the *flex* program + """ + conf.find_program('flex', var='FLEX') + conf.env.FLEXFLAGS = ['-t'] + + if re.search (r"\\msys\\[0-9.]+\\bin\\flex.exe$", conf.env.FLEX[0]): + # this is the flex shipped with MSYS + conf.env.FLEX_MSYS = True + diff -Nru lilv-0.24.4~dfsg0/waflib/Tools/g95.py lilv-0.24.6/waflib/Tools/g95.py --- lilv-0.24.4~dfsg0/waflib/Tools/g95.py 2018-06-27 05:22:05.000000000 +0000 +++ lilv-0.24.6/waflib/Tools/g95.py 2019-06-06 20:19:08.000000000 +0000 @@ -1,50 +1,61 @@ #! /usr/bin/env python # encoding: utf-8 -# WARNING! Do not edit! 
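Using the chain from a wscript is then a one-liner; the generated .lex.c is compiled like any other C source, and extra options reach the task through FLEXFLAGS as set up below. Sketch with hypothetical names:

    def build(bld):
        bld(features='c cprogram', source='scanner.l main.c', target='app',
            flexflags='-Pfoo_')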
https://waf.io/book/index.html#_obtaining_the_waf_file +# KWS 2010 +# Thomas Nagy 2016-2018 (ita) import re from waflib import Utils -from waflib.Tools import fc,fc_config,fc_scan,ar +from waflib.Tools import fc, fc_config, fc_scan, ar from waflib.Configure import conf + @conf def find_g95(conf): - fc=conf.find_program('g95',var='FC') + fc = conf.find_program('g95', var='FC') conf.get_g95_version(fc) - conf.env.FC_NAME='G95' + conf.env.FC_NAME = 'G95' + @conf def g95_flags(conf): - v=conf.env - v.FCFLAGS_fcshlib=['-fPIC'] - v.FORTRANMODFLAG=['-fmod=',''] - v.FCFLAGS_DEBUG=['-Werror'] + v = conf.env + v.FCFLAGS_fcshlib = ['-fPIC'] + v.FORTRANMODFLAG = ['-fmod=', ''] # template for module path + v.FCFLAGS_DEBUG = ['-Werror'] # why not + @conf def g95_modifier_win32(conf): fc_config.fortran_modifier_win32(conf) + @conf def g95_modifier_cygwin(conf): fc_config.fortran_modifier_cygwin(conf) + @conf def g95_modifier_darwin(conf): fc_config.fortran_modifier_darwin(conf) + @conf def g95_modifier_platform(conf): - dest_os=conf.env.DEST_OS or Utils.unversioned_sys_platform() - g95_modifier_func=getattr(conf,'g95_modifier_'+dest_os,None) + dest_os = conf.env.DEST_OS or Utils.unversioned_sys_platform() + g95_modifier_func = getattr(conf, 'g95_modifier_' + dest_os, None) if g95_modifier_func: g95_modifier_func() + @conf -def get_g95_version(conf,fc): - version_re=re.compile(r"g95\s*(?P<major>\d*)\.(?P<minor>\d*)").search - cmd=fc+['--version'] - out,err=fc_config.getoutput(conf,cmd,stdin=False) +def get_g95_version(conf, fc): + """get the compiler version""" + + version_re = re.compile(r"g95\s*(?P<major>\d*)\.(?P<minor>\d*)").search + cmd = fc + ['--version'] + out, err = fc_config.getoutput(conf, cmd, stdin=False) if out: - match=version_re(out) + match = version_re(out) else: - match=version_re(err) + match = version_re(err) if not match: conf.fatal('cannot determine g95 version') - k=match.groupdict() - conf.env.FC_VERSION=(k['major'],k['minor']) + k = match.groupdict() + conf.env.FC_VERSION = (k['major'], k['minor']) + def configure(conf): conf.find_g95() conf.find_ar() @@ -52,3 +63,4 @@ conf.fc_add_flags() conf.g95_flags() conf.g95_modifier_platform() + diff -Nru lilv-0.24.4~dfsg0/waflib/Tools/gas.py lilv-0.24.6/waflib/Tools/gas.py --- lilv-0.24.4~dfsg0/waflib/Tools/gas.py 2018-06-27 05:22:05.000000000 +0000 +++ lilv-0.24.6/waflib/Tools/gas.py 2019-10-19 17:59:11.000000000 +0000 @@ -1,12 +1,19 @@ -#! /usr/bin/env python +#!/usr/bin/env python # encoding: utf-8 -# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file +# Thomas Nagy, 2008-2018 (ita) -import waflib.Tools.asm +"Detect as/gas/gcc for compiling assembly files" + +import waflib.Tools.asm # - leave this from waflib.Tools import ar + def configure(conf): - conf.find_program(['gas','gcc'],var='AS') - conf.env.AS_TGT_F=['-c','-o'] - conf.env.ASLNK_TGT_F=['-o'] + """ + Find the programs gas/as/gcc and set the variable *AS* + """ + conf.find_program(['gas', 'gcc'], var='AS') + conf.env.AS_TGT_F = ['-c', '-o'] + conf.env.ASLNK_TGT_F = ['-o'] conf.find_ar() conf.load('asm') + conf.env.ASM_NAME = 'gas' diff -Nru lilv-0.24.4~dfsg0/waflib/Tools/gcc.py lilv-0.24.6/waflib/Tools/gcc.py --- lilv-0.24.4~dfsg0/waflib/Tools/gcc.py 2018-06-27 05:22:05.000000000 +0000 +++ lilv-0.24.6/waflib/Tools/gcc.py 2019-06-06 20:19:08.000000000 +0000 @@ -1,99 +1,150 @@ -#! /usr/bin/env python +#!/usr/bin/env python # encoding: utf-8 -# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file +# Thomas Nagy, 2006-2018 (ita) +# Ralf Habacker, 2006 (rh) +# Yinon Ehrlich, 2009 -from waflib.Tools import ccroot,ar +""" +gcc/llvm detection. +""" + +from waflib.Tools import ccroot, ar from waflib.Configure import conf + @conf def find_gcc(conf): - cc=conf.find_program(['gcc','cc'],var='CC') - conf.get_cc_version(cc,gcc=True) - conf.env.CC_NAME='gcc' + """ + Find the program gcc, and if present, try to detect its version number + """ + cc = conf.find_program(['gcc', 'cc'], var='CC') + conf.get_cc_version(cc, gcc=True) + conf.env.CC_NAME = 'gcc' + @conf def gcc_common_flags(conf): - v=conf.env - v.CC_SRC_F=[] - v.CC_TGT_F=['-c','-o'] + """ + Common flags for gcc on nearly all platforms + """ + v = conf.env + + v.CC_SRC_F = [] + v.CC_TGT_F = ['-c', '-o'] + if not v.LINK_CC: - v.LINK_CC=v.CC - v.CCLNK_SRC_F=[] - v.CCLNK_TGT_F=['-o'] - v.CPPPATH_ST='-I%s' - v.DEFINES_ST='-D%s' - v.LIB_ST='-l%s' - v.LIBPATH_ST='-L%s' - v.STLIB_ST='-l%s' - v.STLIBPATH_ST='-L%s' - v.RPATH_ST='-Wl,-rpath,%s' - v.SONAME_ST='-Wl,-h,%s' - v.SHLIB_MARKER='-Wl,-Bdynamic' - v.STLIB_MARKER='-Wl,-Bstatic' - v.cprogram_PATTERN='%s' - v.CFLAGS_cshlib=['-fPIC'] - v.LINKFLAGS_cshlib=['-shared'] - v.cshlib_PATTERN='lib%s.so' - v.LINKFLAGS_cstlib=['-Wl,-Bstatic'] - v.cstlib_PATTERN='lib%s.a' - v.LINKFLAGS_MACBUNDLE=['-bundle','-undefined','dynamic_lookup'] - v.CFLAGS_MACBUNDLE=['-fPIC'] - v.macbundle_PATTERN='%s.bundle' + v.LINK_CC = v.CC + + v.CCLNK_SRC_F = [] + v.CCLNK_TGT_F = ['-o'] + v.CPPPATH_ST = '-I%s' + v.DEFINES_ST = '-D%s' + + v.LIB_ST = '-l%s' # template for adding libs + v.LIBPATH_ST = '-L%s' # template for adding libpaths + v.STLIB_ST = '-l%s' + v.STLIBPATH_ST = '-L%s' + v.RPATH_ST = '-Wl,-rpath,%s' + + v.SONAME_ST = '-Wl,-h,%s' + v.SHLIB_MARKER = '-Wl,-Bdynamic' + v.STLIB_MARKER = '-Wl,-Bstatic' + + v.cprogram_PATTERN = '%s' + + v.CFLAGS_cshlib = ['-fPIC'] + v.LINKFLAGS_cshlib = ['-shared'] + v.cshlib_PATTERN = 'lib%s.so' + + v.LINKFLAGS_cstlib = ['-Wl,-Bstatic'] + v.cstlib_PATTERN = 'lib%s.a' + + v.LINKFLAGS_MACBUNDLE = ['-bundle', '-undefined', 'dynamic_lookup'] + v.CFLAGS_MACBUNDLE = ['-fPIC'] + v.macbundle_PATTERN = '%s.bundle' + @conf def gcc_modifier_win32(conf): - v=conf.env - v.cprogram_PATTERN='%s.exe' - v.cshlib_PATTERN='%s.dll' - v.implib_PATTERN='%s.dll.a' - v.IMPLIB_ST='-Wl,--out-implib,%s' - v.CFLAGS_cshlib=[] - v.append_value('LINKFLAGS',['-Wl,--enable-auto-import']) + """Configuration flags for executing gcc on Windows""" + v = conf.env + v.cprogram_PATTERN = '%s.exe' + + v.cshlib_PATTERN = '%s.dll' + v.implib_PATTERN = '%s.dll.a' + v.IMPLIB_ST = '-Wl,--out-implib,%s' + + v.CFLAGS_cshlib = [] + + # Auto-import is enabled by default even without this option, + # but enabling it explicitly has the nice effect of suppressing the rather boring, debug-level messages + # that the linker emits otherwise. 
+ v.append_value('LINKFLAGS', ['-Wl,--enable-auto-import']) + @conf def gcc_modifier_cygwin(conf): + """Configuration flags for executing gcc on Cygwin""" gcc_modifier_win32(conf) - v=conf.env - v.cshlib_PATTERN='cyg%s.dll' - v.append_value('LINKFLAGS_cshlib',['-Wl,--enable-auto-image-base']) - v.CFLAGS_cshlib=[] + v = conf.env + v.cshlib_PATTERN = 'cyg%s.dll' + v.append_value('LINKFLAGS_cshlib', ['-Wl,--enable-auto-image-base']) + v.CFLAGS_cshlib = [] + @conf def gcc_modifier_darwin(conf): - v=conf.env - v.CFLAGS_cshlib=['-fPIC'] - v.LINKFLAGS_cshlib=['-dynamiclib'] - v.cshlib_PATTERN='lib%s.dylib' - v.FRAMEWORKPATH_ST='-F%s' - v.FRAMEWORK_ST=['-framework'] - v.ARCH_ST=['-arch'] - v.LINKFLAGS_cstlib=[] - v.SHLIB_MARKER=[] - v.STLIB_MARKER=[] - v.SONAME_ST=[] + """Configuration flags for executing gcc on MacOS""" + v = conf.env + v.CFLAGS_cshlib = ['-fPIC'] + v.LINKFLAGS_cshlib = ['-dynamiclib'] + v.cshlib_PATTERN = 'lib%s.dylib' + v.FRAMEWORKPATH_ST = '-F%s' + v.FRAMEWORK_ST = ['-framework'] + v.ARCH_ST = ['-arch'] + + v.LINKFLAGS_cstlib = [] + + v.SHLIB_MARKER = [] + v.STLIB_MARKER = [] + v.SONAME_ST = [] + @conf def gcc_modifier_aix(conf): - v=conf.env - v.LINKFLAGS_cprogram=['-Wl,-brtl'] - v.LINKFLAGS_cshlib=['-shared','-Wl,-brtl,-bexpfull'] - v.SHLIB_MARKER=[] + """Configuration flags for executing gcc on AIX""" + v = conf.env + v.LINKFLAGS_cprogram = ['-Wl,-brtl'] + v.LINKFLAGS_cshlib = ['-shared','-Wl,-brtl,-bexpfull'] + v.SHLIB_MARKER = [] + @conf def gcc_modifier_hpux(conf): - v=conf.env - v.SHLIB_MARKER=[] - v.STLIB_MARKER=[] - v.CFLAGS_cshlib=['-fPIC','-DPIC'] - v.cshlib_PATTERN='lib%s.sl' + v = conf.env + v.SHLIB_MARKER = [] + v.STLIB_MARKER = [] + v.CFLAGS_cshlib = ['-fPIC','-DPIC'] + v.cshlib_PATTERN = 'lib%s.sl' + @conf def gcc_modifier_openbsd(conf): - conf.env.SONAME_ST=[] + conf.env.SONAME_ST = [] + @conf def gcc_modifier_osf1V(conf): - v=conf.env - v.SHLIB_MARKER=[] - v.STLIB_MARKER=[] - v.SONAME_ST=[] + v = conf.env + v.SHLIB_MARKER = [] + v.STLIB_MARKER = [] + v.SONAME_ST = [] + @conf def gcc_modifier_platform(conf): - gcc_modifier_func=getattr(conf,'gcc_modifier_'+conf.env.DEST_OS,None) + """Execute platform-specific functions based on *gcc_modifier_+NAME*""" + # * set configurations specific for a platform. + # * the destination platform is detected automatically by looking at the macros the compiler predefines, + # and if it's not recognised, it fallbacks to sys.platform. + gcc_modifier_func = getattr(conf, 'gcc_modifier_' + conf.env.DEST_OS, None) if gcc_modifier_func: gcc_modifier_func() + def configure(conf): + """ + Configuration for gcc + """ conf.find_gcc() conf.find_ar() conf.gcc_common_flags() @@ -102,3 +153,4 @@ conf.cc_add_flags() conf.link_add_flags() conf.check_gcc_o_space() + diff -Nru lilv-0.24.4~dfsg0/waflib/Tools/gdc.py lilv-0.24.6/waflib/Tools/gdc.py --- lilv-0.24.4~dfsg0/waflib/Tools/gdc.py 2018-06-22 09:25:51.000000000 +0000 +++ lilv-0.24.6/waflib/Tools/gdc.py 2019-06-06 20:19:08.000000000 +0000 @@ -1,35 +1,55 @@ -#! /usr/bin/env python +#!/usr/bin/env python # encoding: utf-8 -# WARNING! Do not edit! 
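The *_ST/*_PATTERN entries above are printf-style templates expanded elsewhere in waf; a quick sketch with made-up values:

    v.LIB_ST % 'm'                  # '-lm'
    v.LIBPATH_ST % '/opt/foo/lib'   # '-L/opt/foo/lib'
    v.cshlib_PATTERN % 'bar'        # 'libbar.so' ('cygbar.dll' after gcc_modifier_cygwin)
    v.SONAME_ST % 'libbar.so.1'     # '-Wl,-h,libbar.so.1'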
https://waf.io/book/index.html#_obtaining_the_waf_file +# Carlos Rafael Giani, 2007 (dv) -from waflib.Tools import ar,d +from waflib.Tools import ar, d from waflib.Configure import conf + @conf def find_gdc(conf): - conf.find_program('gdc',var='D') - out=conf.cmd_and_log(conf.env.D+['--version']) - if out.find("gdc")==-1: + """ + Finds the program gdc and set the variable *D* + """ + conf.find_program('gdc', var='D') + + out = conf.cmd_and_log(conf.env.D + ['--version']) + if out.find("gdc") == -1: conf.fatal("detected compiler is not gdc") + @conf def common_flags_gdc(conf): - v=conf.env - v.DFLAGS=[] - v.D_SRC_F=['-c'] - v.D_TGT_F='-o%s' - v.D_LINKER=v.D - v.DLNK_SRC_F='' - v.DLNK_TGT_F='-o%s' - v.DINC_ST='-I%s' - v.DSHLIB_MARKER=v.DSTLIB_MARKER='' - v.DSTLIB_ST=v.DSHLIB_ST='-l%s' - v.DSTLIBPATH_ST=v.DLIBPATH_ST='-L%s' - v.LINKFLAGS_dshlib=['-shared'] - v.DHEADER_ext='.di' - v.DFLAGS_d_with_header='-fintfc' - v.D_HDR_F='-fintfc-file=%s' + """ + Sets the flags required by *gdc* + """ + v = conf.env + + v.DFLAGS = [] + + v.D_SRC_F = ['-c'] + v.D_TGT_F = '-o%s' + + v.D_LINKER = v.D + v.DLNK_SRC_F = '' + v.DLNK_TGT_F = '-o%s' + v.DINC_ST = '-I%s' + + v.DSHLIB_MARKER = v.DSTLIB_MARKER = '' + v.DSTLIB_ST = v.DSHLIB_ST = '-l%s' + v.DSTLIBPATH_ST = v.DLIBPATH_ST = '-L%s' + + v.LINKFLAGS_dshlib = ['-shared'] + + v.DHEADER_ext = '.di' + v.DFLAGS_d_with_header = '-fintfc' + v.D_HDR_F = '-fintfc-file=%s' + def configure(conf): + """ + Configuration for gdc + """ conf.find_gdc() conf.load('ar') conf.load('d') conf.common_flags_gdc() conf.d_platform_flags() + diff -Nru lilv-0.24.4~dfsg0/waflib/Tools/gfortran.py lilv-0.24.6/waflib/Tools/gfortran.py --- lilv-0.24.4~dfsg0/waflib/Tools/gfortran.py 2018-06-27 05:22:05.000000000 +0000 +++ lilv-0.24.6/waflib/Tools/gfortran.py 2019-06-06 20:19:08.000000000 +0000 @@ -1,66 +1,88 @@ #! /usr/bin/env python # encoding: utf-8 -# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file +# DC 2008 +# Thomas Nagy 2016-2018 (ita) import re from waflib import Utils -from waflib.Tools import fc,fc_config,fc_scan,ar +from waflib.Tools import fc, fc_config, fc_scan, ar from waflib.Configure import conf + @conf def find_gfortran(conf): - fc=conf.find_program(['gfortran','g77'],var='FC') + """Find the gfortran program (will look in the environment variable 'FC')""" + fc = conf.find_program(['gfortran','g77'], var='FC') + # (fallback to g77 for systems, where no gfortran is available) conf.get_gfortran_version(fc) - conf.env.FC_NAME='GFORTRAN' + conf.env.FC_NAME = 'GFORTRAN' + @conf def gfortran_flags(conf): - v=conf.env - v.FCFLAGS_fcshlib=['-fPIC'] - v.FORTRANMODFLAG=['-J',''] - v.FCFLAGS_DEBUG=['-Werror'] + v = conf.env + v.FCFLAGS_fcshlib = ['-fPIC'] + v.FORTRANMODFLAG = ['-J', ''] # template for module path + v.FCFLAGS_DEBUG = ['-Werror'] # why not + @conf def gfortran_modifier_win32(conf): fc_config.fortran_modifier_win32(conf) + @conf def gfortran_modifier_cygwin(conf): fc_config.fortran_modifier_cygwin(conf) + @conf def gfortran_modifier_darwin(conf): fc_config.fortran_modifier_darwin(conf) + @conf def gfortran_modifier_platform(conf): - dest_os=conf.env.DEST_OS or Utils.unversioned_sys_platform() - gfortran_modifier_func=getattr(conf,'gfortran_modifier_'+dest_os,None) + dest_os = conf.env.DEST_OS or Utils.unversioned_sys_platform() + gfortran_modifier_func = getattr(conf, 'gfortran_modifier_' + dest_os, None) if gfortran_modifier_func: gfortran_modifier_func() + @conf -def get_gfortran_version(conf,fc): - version_re=re.compile(r"GNU\s*Fortran",re.I).search - cmd=fc+['--version'] - out,err=fc_config.getoutput(conf,cmd,stdin=False) +def get_gfortran_version(conf, fc): + """Get the compiler version""" + + # ensure this is actually gfortran, not an imposter. + version_re = re.compile(r"GNU\s*Fortran", re.I).search + cmd = fc + ['--version'] + out, err = fc_config.getoutput(conf, cmd, stdin=False) if out: - match=version_re(out) + match = version_re(out) else: - match=version_re(err) + match = version_re(err) if not match: conf.fatal('Could not determine the compiler type') - cmd=fc+['-dM','-E','-'] - out,err=fc_config.getoutput(conf,cmd,stdin=True) - if out.find('__GNUC__')<0: + + # --- now get more detailed info -- see c_config.get_cc_version + cmd = fc + ['-dM', '-E', '-'] + out, err = fc_config.getoutput(conf, cmd, stdin=True) + + if out.find('__GNUC__') < 0: conf.fatal('Could not determine the compiler type') - k={} - out=out.splitlines() + + k = {} + out = out.splitlines() import shlex + for line in out: - lst=shlex.split(line) + lst = shlex.split(line) if len(lst)>2: - key=lst[1] - val=lst[2] - k[key]=val + key = lst[1] + val = lst[2] + k[key] = val + def isD(var): return var in k + def isT(var): - return var in k and k[var]!='0' - conf.env.FC_VERSION=(k['__GNUC__'],k['__GNUC_MINOR__'],k['__GNUC_PATCHLEVEL__']) + return var in k and k[var] != '0' + + conf.env.FC_VERSION = (k['__GNUC__'], k['__GNUC_MINOR__'], k['__GNUC_PATCHLEVEL__']) + def configure(conf): conf.find_gfortran() conf.find_ar() diff -Nru lilv-0.24.4~dfsg0/waflib/Tools/glib2.py lilv-0.24.6/waflib/Tools/glib2.py --- lilv-0.24.4~dfsg0/waflib/Tools/glib2.py 2018-06-27 05:22:05.000000000 +0000 +++ lilv-0.24.6/waflib/Tools/glib2.py 2019-06-06 20:19:08.000000000 +0000 @@ -1,242 +1,489 @@ #! /usr/bin/env python # encoding: utf-8 -# WARNING! Do not edit! 
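The second probe in get_gfortran_version() relies on gfortran accepting gcc's '-dM -E -' to dump its predefined macros, which the shlex loop then reads into a dict. A sketch of the round trip, with hypothetical macro values:

    # gfortran -dM -E -   prints lines such as:
    #   #define __GNUC__ 9
    #   #define __GNUC_MINOR__ 3
    #   #define __GNUC_PATCHLEVEL__ 0
    import shlex
    shlex.split('#define __GNUC__ 9')   # ['#define', '__GNUC__', '9']
    # so k['__GNUC__'] == '9' and FC_VERSION becomes ('9', '3', '0')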
https://waf.io/book/index.html#_obtaining_the_waf_file +# Thomas Nagy, 2006-2018 (ita) + +""" +Support for GLib2 tools: + +* marshal +* enums +* gsettings +* gresource +""" import os import functools -from waflib import Context,Task,Utils,Options,Errors,Logs -from waflib.TaskGen import taskgen_method,before_method,feature,extension +from waflib import Context, Task, Utils, Options, Errors, Logs +from waflib.TaskGen import taskgen_method, before_method, feature, extension from waflib.Configure import conf + +################## marshal files + @taskgen_method -def add_marshal_file(self,filename,prefix): - if not hasattr(self,'marshal_list'): - self.marshal_list=[] +def add_marshal_file(self, filename, prefix): + """ + Adds a file to the list of marshal files to process. Store them in the attribute *marshal_list*. + + :param filename: xml file to compile + :type filename: string + :param prefix: marshal prefix (--prefix=prefix) + :type prefix: string + """ + if not hasattr(self, 'marshal_list'): + self.marshal_list = [] self.meths.append('process_marshal') - self.marshal_list.append((filename,prefix)) + self.marshal_list.append((filename, prefix)) + @before_method('process_source') def process_marshal(self): - for f,prefix in getattr(self,'marshal_list',[]): - node=self.path.find_resource(f) + """ + Processes the marshal files stored in the attribute *marshal_list* to create :py:class:`waflib.Tools.glib2.glib_genmarshal` instances. + Adds the c file created to the list of source to process. + """ + for f, prefix in getattr(self, 'marshal_list', []): + node = self.path.find_resource(f) + if not node: - raise Errors.WafError('file not found %r'%f) - h_node=node.change_ext('.h') - c_node=node.change_ext('.c') - task=self.create_task('glib_genmarshal',node,[h_node,c_node]) - task.env.GLIB_GENMARSHAL_PREFIX=prefix - self.source=self.to_nodes(getattr(self,'source',[])) + raise Errors.WafError('file not found %r' % f) + + h_node = node.change_ext('.h') + c_node = node.change_ext('.c') + + task = self.create_task('glib_genmarshal', node, [h_node, c_node]) + task.env.GLIB_GENMARSHAL_PREFIX = prefix + self.source = self.to_nodes(getattr(self, 'source', [])) self.source.append(c_node) + class glib_genmarshal(Task.Task): - vars=['GLIB_GENMARSHAL_PREFIX','GLIB_GENMARSHAL'] - color='BLUE' - ext_out=['.h'] + vars = ['GLIB_GENMARSHAL_PREFIX', 'GLIB_GENMARSHAL'] + color = 'BLUE' + ext_out = ['.h'] def run(self): - bld=self.generator.bld - get=self.env.get_flat - cmd1="%s %s --prefix=%s --header > %s"%(get('GLIB_GENMARSHAL'),self.inputs[0].srcpath(),get('GLIB_GENMARSHAL_PREFIX'),self.outputs[0].abspath()) - ret=bld.exec_command(cmd1) + bld = self.generator.bld + + get = self.env.get_flat + cmd1 = "%s %s --prefix=%s --header > %s" % ( + get('GLIB_GENMARSHAL'), + self.inputs[0].srcpath(), + get('GLIB_GENMARSHAL_PREFIX'), + self.outputs[0].abspath() + ) + + ret = bld.exec_command(cmd1) if ret: return ret - c='''#include "%s"\n'''%self.outputs[0].name + + #print self.outputs[1].abspath() + c = '''#include "%s"\n''' % self.outputs[0].name self.outputs[1].write(c) - cmd2="%s %s --prefix=%s --body >> %s"%(get('GLIB_GENMARSHAL'),self.inputs[0].srcpath(),get('GLIB_GENMARSHAL_PREFIX'),self.outputs[1].abspath()) + + cmd2 = "%s %s --prefix=%s --body >> %s" % ( + get('GLIB_GENMARSHAL'), + self.inputs[0].srcpath(), + get('GLIB_GENMARSHAL_PREFIX'), + self.outputs[1].abspath() + ) return bld.exec_command(cmd2) + +########################## glib-mkenums + @taskgen_method -def 
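From the user side the marshal machinery is driven through add_marshal_file() above; a minimal sketch with hypothetical file and prefix names:

    def build(bld):
        tg = bld(features='c cshlib', source='plugin.c', target='plugin')
        tg.add_marshal_file('marshal.list', 'plugin_marshal')
        # glib-genmarshal emits marshal.h, and the generated marshal.c
        # is appended to tg.source for normal compilation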
add_enums_from_template(self,source='',target='',template='',comments=''): - if not hasattr(self,'enums_list'): - self.enums_list=[] +def add_enums_from_template(self, source='', target='', template='', comments=''): + """ + Adds a file to the list of enum files to process. Stores them in the attribute *enums_list*. + + :param source: enum file to process + :type source: string + :param target: target file + :type target: string + :param template: template file + :type template: string + :param comments: comments + :type comments: string + """ + if not hasattr(self, 'enums_list'): + self.enums_list = [] self.meths.append('process_enums') - self.enums_list.append({'source':source,'target':target,'template':template,'file-head':'','file-prod':'','file-tail':'','enum-prod':'','value-head':'','value-prod':'','value-tail':'','comments':comments}) + self.enums_list.append({'source': source, + 'target': target, + 'template': template, + 'file-head': '', + 'file-prod': '', + 'file-tail': '', + 'enum-prod': '', + 'value-head': '', + 'value-prod': '', + 'value-tail': '', + 'comments': comments}) + @taskgen_method -def add_enums(self,source='',target='',file_head='',file_prod='',file_tail='',enum_prod='',value_head='',value_prod='',value_tail='',comments=''): - if not hasattr(self,'enums_list'): - self.enums_list=[] +def add_enums(self, source='', target='', + file_head='', file_prod='', file_tail='', enum_prod='', + value_head='', value_prod='', value_tail='', comments=''): + """ + Adds a file to the list of enum files to process. Stores them in the attribute *enums_list*. + + :param source: enum file to process + :type source: string + :param target: target file + :type target: string + :param file_head: unused + :param file_prod: unused + :param file_tail: unused + :param enum_prod: unused + :param value_head: unused + :param value_prod: unused + :param value_tail: unused + :param comments: comments + :type comments: string + """ + if not hasattr(self, 'enums_list'): + self.enums_list = [] self.meths.append('process_enums') - self.enums_list.append({'source':source,'template':'','target':target,'file-head':file_head,'file-prod':file_prod,'file-tail':file_tail,'enum-prod':enum_prod,'value-head':value_head,'value-prod':value_prod,'value-tail':value_tail,'comments':comments}) + self.enums_list.append({'source': source, + 'template': '', + 'target': target, + 'file-head': file_head, + 'file-prod': file_prod, + 'file-tail': file_tail, + 'enum-prod': enum_prod, + 'value-head': value_head, + 'value-prod': value_prod, + 'value-tail': value_tail, + 'comments': comments}) + @before_method('process_source') def process_enums(self): - for enum in getattr(self,'enums_list',[]): - task=self.create_task('glib_mkenums') - env=task.env - inputs=[] - source_list=self.to_list(enum['source']) + """ + Processes the enum files stored in the attribute *enum_list* to create :py:class:`waflib.Tools.glib2.glib_mkenums` instances. 
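A sketch of driving the enum generator from a wscript, with hypothetical names; add_enums() works the same way but passes the --fhead/--fprod/... snippets directly instead of a template file:

    def build(bld):
        tg = bld(features='c cshlib', source='app.c', target='app')
        tg.add_enums_from_template(source='colors.h', target='colors-enums.c',
                                   template='colors-enums.c.template')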
+ """ + for enum in getattr(self, 'enums_list', []): + task = self.create_task('glib_mkenums') + env = task.env + + inputs = [] + + # process the source + source_list = self.to_list(enum['source']) if not source_list: - raise Errors.WafError('missing source '+str(enum)) - source_list=[self.path.find_resource(k)for k in source_list] - inputs+=source_list - env.GLIB_MKENUMS_SOURCE=[k.abspath()for k in source_list] + raise Errors.WafError('missing source ' + str(enum)) + source_list = [self.path.find_resource(k) for k in source_list] + inputs += source_list + env.GLIB_MKENUMS_SOURCE = [k.abspath() for k in source_list] + + # find the target if not enum['target']: - raise Errors.WafError('missing target '+str(enum)) - tgt_node=self.path.find_or_declare(enum['target']) + raise Errors.WafError('missing target ' + str(enum)) + tgt_node = self.path.find_or_declare(enum['target']) if tgt_node.name.endswith('.c'): self.source.append(tgt_node) - env.GLIB_MKENUMS_TARGET=tgt_node.abspath() - options=[] - if enum['template']: - template_node=self.path.find_resource(enum['template']) - options.append('--template %s'%(template_node.abspath())) + env.GLIB_MKENUMS_TARGET = tgt_node.abspath() + + + options = [] + + if enum['template']: # template, if provided + template_node = self.path.find_resource(enum['template']) + options.append('--template %s' % (template_node.abspath())) inputs.append(template_node) - params={'file-head':'--fhead','file-prod':'--fprod','file-tail':'--ftail','enum-prod':'--eprod','value-head':'--vhead','value-prod':'--vprod','value-tail':'--vtail','comments':'--comments'} - for param,option in params.items(): + params = {'file-head' : '--fhead', + 'file-prod' : '--fprod', + 'file-tail' : '--ftail', + 'enum-prod' : '--eprod', + 'value-head' : '--vhead', + 'value-prod' : '--vprod', + 'value-tail' : '--vtail', + 'comments': '--comments'} + for param, option in params.items(): if enum[param]: - options.append('%s %r'%(option,enum[param])) - env.GLIB_MKENUMS_OPTIONS=' '.join(options) + options.append('%s %r' % (option, enum[param])) + + env.GLIB_MKENUMS_OPTIONS = ' '.join(options) + + # update the task instance task.set_inputs(inputs) task.set_outputs(tgt_node) + class glib_mkenums(Task.Task): - run_str='${GLIB_MKENUMS} ${GLIB_MKENUMS_OPTIONS} ${GLIB_MKENUMS_SOURCE} > ${GLIB_MKENUMS_TARGET}' - color='PINK' - ext_out=['.h'] + """ + Processes enum files + """ + run_str = '${GLIB_MKENUMS} ${GLIB_MKENUMS_OPTIONS} ${GLIB_MKENUMS_SOURCE} > ${GLIB_MKENUMS_TARGET}' + color = 'PINK' + ext_out = ['.h'] + +######################################### gsettings + @taskgen_method -def add_settings_schemas(self,filename_list): - if not hasattr(self,'settings_schema_files'): - self.settings_schema_files=[] - if not isinstance(filename_list,list): - filename_list=[filename_list] +def add_settings_schemas(self, filename_list): + """ + Adds settings files to process to *settings_schema_files* + + :param filename_list: files + :type filename_list: list of string + """ + if not hasattr(self, 'settings_schema_files'): + self.settings_schema_files = [] + + if not isinstance(filename_list, list): + filename_list = [filename_list] + self.settings_schema_files.extend(filename_list) + @taskgen_method -def add_settings_enums(self,namespace,filename_list): - if hasattr(self,'settings_enum_namespace'): - raise Errors.WafError("Tried to add gsettings enums to %r more than once"%self.name) - self.settings_enum_namespace=namespace - if not isinstance(filename_list,list): - filename_list=[filename_list] - 
self.settings_enum_files=filename_list +def add_settings_enums(self, namespace, filename_list): + """ + Called only once by task generator to set the enums namespace. + + :param namespace: namespace + :type namespace: string + :param filename_list: enum files to process + :type filename_list: file list + """ + if hasattr(self, 'settings_enum_namespace'): + raise Errors.WafError("Tried to add gsettings enums to %r more than once" % self.name) + self.settings_enum_namespace = namespace + + if not isinstance(filename_list, list): + filename_list = [filename_list] + self.settings_enum_files = filename_list + @feature('glib2') def process_settings(self): - enums_tgt_node=[] - install_files=[] - settings_schema_files=getattr(self,'settings_schema_files',[]) + """ + Processes the schema files in *settings_schema_files* to create :py:class:`waflib.Tools.glib2.glib_mkenums` instances. The + same files are validated through :py:class:`waflib.Tools.glib2.glib_validate_schema` tasks. + + """ + enums_tgt_node = [] + install_files = [] + + settings_schema_files = getattr(self, 'settings_schema_files', []) if settings_schema_files and not self.env.GLIB_COMPILE_SCHEMAS: - raise Errors.WafError("Unable to process GSettings schemas - glib-compile-schemas was not found during configure") - if hasattr(self,'settings_enum_files'): - enums_task=self.create_task('glib_mkenums') - source_list=self.settings_enum_files - source_list=[self.path.find_resource(k)for k in source_list] + raise Errors.WafError ("Unable to process GSettings schemas - glib-compile-schemas was not found during configure") + + # 1. process gsettings_enum_files (generate .enums.xml) + # + if hasattr(self, 'settings_enum_files'): + enums_task = self.create_task('glib_mkenums') + + source_list = self.settings_enum_files + source_list = [self.path.find_resource(k) for k in source_list] enums_task.set_inputs(source_list) - enums_task.env.GLIB_MKENUMS_SOURCE=[k.abspath()for k in source_list] - target=self.settings_enum_namespace+'.enums.xml' - tgt_node=self.path.find_or_declare(target) + enums_task.env.GLIB_MKENUMS_SOURCE = [k.abspath() for k in source_list] + + target = self.settings_enum_namespace + '.enums.xml' + tgt_node = self.path.find_or_declare(target) enums_task.set_outputs(tgt_node) - enums_task.env.GLIB_MKENUMS_TARGET=tgt_node.abspath() - enums_tgt_node=[tgt_node] + enums_task.env.GLIB_MKENUMS_TARGET = tgt_node.abspath() + enums_tgt_node = [tgt_node] + install_files.append(tgt_node) - options='--comments "<!-- @comment@ -->" --fhead "<schemalist>" --vhead "  <@type@ id=\\"%s.@EnumName@\\">" --vprod "    <value nick=\\"@valuenick@\\" value=\\"@valuenum@\\"/>" --vtail "  </@type@>" --ftail "</schemalist>" '%(self.settings_enum_namespace) - enums_task.env.GLIB_MKENUMS_OPTIONS=options + + options = '--comments "<!-- @comment@ -->" --fhead "<schemalist>" --vhead "  <@type@ id=\\"%s.@EnumName@\\">" --vprod "    <value nick=\\"@valuenick@\\" value=\\"@valuenum@\\"/>" --vtail "  </@type@>" --ftail "</schemalist>" ' % (self.settings_enum_namespace) + enums_task.env.GLIB_MKENUMS_OPTIONS = options + + # 2. 
process gsettings_schema_files (validate .gschema.xml files) + # for schema in settings_schema_files: - schema_task=self.create_task('glib_validate_schema') - schema_node=self.path.find_resource(schema) + schema_task = self.create_task ('glib_validate_schema') + + schema_node = self.path.find_resource(schema) if not schema_node: - raise Errors.WafError("Cannot find the schema file %r"%schema) + raise Errors.WafError("Cannot find the schema file %r" % schema) install_files.append(schema_node) - source_list=enums_tgt_node+[schema_node] - schema_task.set_inputs(source_list) - schema_task.env.GLIB_COMPILE_SCHEMAS_OPTIONS=[("--schema-file="+k.abspath())for k in source_list] - target_node=schema_node.change_ext('.xml.valid') - schema_task.set_outputs(target_node) - schema_task.env.GLIB_VALIDATE_SCHEMA_OUTPUT=target_node.abspath() + source_list = enums_tgt_node + [schema_node] + + schema_task.set_inputs (source_list) + schema_task.env.GLIB_COMPILE_SCHEMAS_OPTIONS = [("--schema-file=" + k.abspath()) for k in source_list] + + target_node = schema_node.change_ext('.xml.valid') + schema_task.set_outputs (target_node) + schema_task.env.GLIB_VALIDATE_SCHEMA_OUTPUT = target_node.abspath() + + # 3. schemas install task def compile_schemas_callback(bld): if not bld.is_install: return - compile_schemas=Utils.to_list(bld.env.GLIB_COMPILE_SCHEMAS) - destdir=Options.options.destdir - paths=bld._compile_schemas_registered + compile_schemas = Utils.to_list(bld.env.GLIB_COMPILE_SCHEMAS) + destdir = Options.options.destdir + paths = bld._compile_schemas_registered if destdir: - paths=(os.path.join(destdir,path.lstrip(os.sep))for path in paths) + paths = (os.path.join(destdir, path.lstrip(os.sep)) for path in paths) for path in paths: - Logs.pprint('YELLOW','Updating GSettings schema cache %r'%path) - if self.bld.exec_command(compile_schemas+[path]): - Logs.warn('Could not update GSettings schema cache %r'%path) + Logs.pprint('YELLOW', 'Updating GSettings schema cache %r' % path) + if self.bld.exec_command(compile_schemas + [path]): + Logs.warn('Could not update GSettings schema cache %r' % path) + if self.bld.is_install: - schemadir=self.env.GSETTINGSSCHEMADIR + schemadir = self.env.GSETTINGSSCHEMADIR if not schemadir: - raise Errors.WafError('GSETTINGSSCHEMADIR not defined (should have been set up automatically during configure)') + raise Errors.WafError ('GSETTINGSSCHEMADIR not defined (should have been set up automatically during configure)') + if install_files: - self.add_install_files(install_to=schemadir,install_from=install_files) - registered_schemas=getattr(self.bld,'_compile_schemas_registered',None) + self.add_install_files(install_to=schemadir, install_from=install_files) + registered_schemas = getattr(self.bld, '_compile_schemas_registered', None) if not registered_schemas: - registered_schemas=self.bld._compile_schemas_registered=set() + registered_schemas = self.bld._compile_schemas_registered = set() self.bld.add_post_fun(compile_schemas_callback) registered_schemas.add(schemadir) + class glib_validate_schema(Task.Task): - run_str='rm -f ${GLIB_VALIDATE_SCHEMA_OUTPUT} && ${GLIB_COMPILE_SCHEMAS} --dry-run ${GLIB_COMPILE_SCHEMAS_OPTIONS} && touch ${GLIB_VALIDATE_SCHEMA_OUTPUT}' - color='PINK' + """ + Validates schema files + """ + run_str = 'rm -f ${GLIB_VALIDATE_SCHEMA_OUTPUT} && ${GLIB_COMPILE_SCHEMAS} --dry-run ${GLIB_COMPILE_SCHEMAS_OPTIONS} && touch ${GLIB_VALIDATE_SCHEMA_OUTPUT}' + color = 'PINK' + +################## gresource + @extension('.gresource.xml') -def 
process_gresource_source(self,node): +def process_gresource_source(self, node): + """ + Creates tasks that turn ``.gresource.xml`` files to C code + """ if not self.env.GLIB_COMPILE_RESOURCES: - raise Errors.WafError("Unable to process GResource file - glib-compile-resources was not found during configure") - if'gresource'in self.features: + raise Errors.WafError ("Unable to process GResource file - glib-compile-resources was not found during configure") + + if 'gresource' in self.features: return - h_node=node.change_ext('_xml.h') - c_node=node.change_ext('_xml.c') - self.create_task('glib_gresource_source',node,[h_node,c_node]) + + h_node = node.change_ext('_xml.h') + c_node = node.change_ext('_xml.c') + self.create_task('glib_gresource_source', node, [h_node, c_node]) self.source.append(c_node) + @feature('gresource') def process_gresource_bundle(self): + """ + Creates tasks to turn ``.gresource`` files from ``.gresource.xml`` files:: + + def build(bld): + bld( + features='gresource', + source=['resources1.gresource.xml', 'resources2.gresource.xml'], + install_path='${LIBDIR}/${PACKAGE}' + ) + + :param source: XML files to process + :type source: list of string + :param install_path: installation path + :type install_path: string + """ for i in self.to_list(self.source): - node=self.path.find_resource(i) - task=self.create_task('glib_gresource_bundle',node,node.change_ext('')) - inst_to=getattr(self,'install_path',None) + node = self.path.find_resource(i) + + task = self.create_task('glib_gresource_bundle', node, node.change_ext('')) + inst_to = getattr(self, 'install_path', None) if inst_to: - self.add_install_files(install_to=inst_to,install_from=task.outputs) + self.add_install_files(install_to=inst_to, install_from=task.outputs) + class glib_gresource_base(Task.Task): - color='BLUE' - base_cmd='${GLIB_COMPILE_RESOURCES} --sourcedir=${SRC[0].parent.srcpath()} --sourcedir=${SRC[0].bld_dir()}' + """ + Base class for gresource based tasks + """ + color = 'BLUE' + base_cmd = '${GLIB_COMPILE_RESOURCES} --sourcedir=${SRC[0].parent.srcpath()} --sourcedir=${SRC[0].bld_dir()}' + def scan(self): - bld=self.generator.bld - kw={} - kw['cwd']=self.get_cwd() - kw['quiet']=Context.BOTH - cmd=Utils.subst_vars('${GLIB_COMPILE_RESOURCES} --sourcedir=%s --sourcedir=%s --generate-dependencies %s'%(self.inputs[0].parent.srcpath(),self.inputs[0].bld_dir(),self.inputs[0].bldpath()),self.env) - output=bld.cmd_and_log(cmd,**kw) - nodes=[] - names=[] + """ + Scans gresource dependencies through ``glib-compile-resources --generate-dependencies command`` + """ + bld = self.generator.bld + kw = {} + kw['cwd'] = self.get_cwd() + kw['quiet'] = Context.BOTH + + cmd = Utils.subst_vars('${GLIB_COMPILE_RESOURCES} --sourcedir=%s --sourcedir=%s --generate-dependencies %s' % ( + self.inputs[0].parent.srcpath(), + self.inputs[0].bld_dir(), + self.inputs[0].bldpath() + ), self.env) + + output = bld.cmd_and_log(cmd, **kw) + + nodes = [] + names = [] for dep in output.splitlines(): if dep: - node=bld.bldnode.find_node(dep) + node = bld.bldnode.find_node(dep) if node: nodes.append(node) else: names.append(dep) - return(nodes,names) + + return (nodes, names) + class glib_gresource_source(glib_gresource_base): - vars=['GLIB_COMPILE_RESOURCES'] - fun_h=Task.compile_fun_shell(glib_gresource_base.base_cmd+' --target=${TGT[0].abspath()} --generate-header ${SRC}') - fun_c=Task.compile_fun_shell(glib_gresource_base.base_cmd+' --target=${TGT[1].abspath()} --generate-source ${SRC}') - ext_out=['.h'] + """ + Task to generate C source 
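
For reference, the ``scan()`` method above parses the plain listing printed by ``glib-compile-resources --generate-dependencies``, roughly of this shape (sample output, paths illustrative)::

	$ glib-compile-resources --sourcedir=. --generate-dependencies app.gresource.xml
	icons/app.png
	ui/main-window.ui

Each non-empty line is resolved to a build node when possible and kept as a bare name otherwise.
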
code (.h and .c files) from a gresource.xml file + """ + vars = ['GLIB_COMPILE_RESOURCES'] + fun_h = Task.compile_fun_shell(glib_gresource_base.base_cmd + ' --target=${TGT[0].abspath()} --generate-header ${SRC}') + fun_c = Task.compile_fun_shell(glib_gresource_base.base_cmd + ' --target=${TGT[1].abspath()} --generate-source ${SRC}') + ext_out = ['.h'] + def run(self): - return self.fun_h[0](self)or self.fun_c[0](self) + return self.fun_h[0](self) or self.fun_c[0](self) + class glib_gresource_bundle(glib_gresource_base): - run_str=glib_gresource_base.base_cmd+' --target=${TGT} ${SRC}' - shell=True + """ + Task to generate a .gresource binary file from a gresource.xml file + """ + run_str = glib_gresource_base.base_cmd + ' --target=${TGT} ${SRC}' + shell = True # temporary workaround for #795 + @conf def find_glib_genmarshal(conf): - conf.find_program('glib-genmarshal',var='GLIB_GENMARSHAL') + conf.find_program('glib-genmarshal', var='GLIB_GENMARSHAL') + @conf def find_glib_mkenums(conf): if not conf.env.PERL: - conf.find_program('perl',var='PERL') - conf.find_program('glib-mkenums',interpreter='PERL',var='GLIB_MKENUMS') + conf.find_program('perl', var='PERL') + conf.find_program('glib-mkenums', interpreter='PERL', var='GLIB_MKENUMS') + @conf def find_glib_compile_schemas(conf): - conf.find_program('glib-compile-schemas',var='GLIB_COMPILE_SCHEMAS') + # when cross-compiling, gsettings.m4 locates the program with the following: + # pkg-config --variable glib_compile_schemas gio-2.0 + conf.find_program('glib-compile-schemas', var='GLIB_COMPILE_SCHEMAS') + def getstr(varname): - return getattr(Options.options,varname,getattr(conf.env,varname,'')) - gsettingsschemadir=getstr('GSETTINGSSCHEMADIR') + return getattr(Options.options, varname, getattr(conf.env,varname, '')) + + gsettingsschemadir = getstr('GSETTINGSSCHEMADIR') if not gsettingsschemadir: - datadir=getstr('DATADIR') + datadir = getstr('DATADIR') if not datadir: - prefix=conf.env.PREFIX - datadir=os.path.join(prefix,'share') - gsettingsschemadir=os.path.join(datadir,'glib-2.0','schemas') - conf.env.GSETTINGSSCHEMADIR=gsettingsschemadir + prefix = conf.env.PREFIX + datadir = os.path.join(prefix, 'share') + gsettingsschemadir = os.path.join(datadir, 'glib-2.0', 'schemas') + + conf.env.GSETTINGSSCHEMADIR = gsettingsschemadir + @conf def find_glib_compile_resources(conf): - conf.find_program('glib-compile-resources',var='GLIB_COMPILE_RESOURCES') + conf.find_program('glib-compile-resources', var='GLIB_COMPILE_RESOURCES') + def configure(conf): + """ + Finds the following programs: + + * *glib-genmarshal* and set *GLIB_GENMARSHAL* + * *glib-mkenums* and set *GLIB_MKENUMS* + * *glib-compile-schemas* and set *GLIB_COMPILE_SCHEMAS* (not mandatory) + * *glib-compile-resources* and set *GLIB_COMPILE_RESOURCES* (not mandatory) + """ conf.find_glib_genmarshal() conf.find_glib_mkenums() conf.find_glib_compile_schemas(mandatory=False) conf.find_glib_compile_resources(mandatory=False) + def options(opt): - gr=opt.add_option_group('Installation directories') - gr.add_option('--gsettingsschemadir',help='GSettings schema location [DATADIR/glib-2.0/schemas]',default='',dest='GSETTINGSSCHEMADIR') + """ + Adds the ``--gsettingsschemadir`` command-line option + """ + gr = opt.add_option_group('Installation directories') + gr.add_option('--gsettingsschemadir', help='GSettings schema location [DATADIR/glib-2.0/schemas]', default='', dest='GSETTINGSSCHEMADIR') + diff -Nru lilv-0.24.4~dfsg0/waflib/Tools/gnu_dirs.py lilv-0.24.6/waflib/Tools/gnu_dirs.py --- 
lilv-0.24.4~dfsg0/waflib/Tools/gnu_dirs.py 2018-06-22 09:25:51.000000000 +0000 +++ lilv-0.24.6/waflib/Tools/gnu_dirs.py 2019-06-06 20:19:08.000000000 +0000 @@ -1,10 +1,51 @@ -#! /usr/bin/env python +#!/usr/bin/env python # encoding: utf-8 -# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file +# Ali Sabil, 2007 -import os,re -from waflib import Utils,Options,Context -gnuopts=''' +""" +Sets various standard variables such as INCLUDEDIR, SBINDIR and others. To use this module just call:: + + opt.load('gnu_dirs') + +and:: + + conf.load('gnu_dirs') + +Add options for the standard GNU directories: this tool will add the options +found in autotools, and will update the environment with the following +installation variables: + +============== ========================================= ======================= +Variable Description Default Value +============== ========================================= ======================= +PREFIX installation prefix /usr/local +EXEC_PREFIX installation prefix for binaries PREFIX +BINDIR user commands EXEC_PREFIX/bin +SBINDIR system binaries EXEC_PREFIX/sbin +LIBEXECDIR program-specific binaries EXEC_PREFIX/libexec +SYSCONFDIR host-specific configuration PREFIX/etc +SHAREDSTATEDIR architecture-independent variable data PREFIX/com +LOCALSTATEDIR variable data PREFIX/var +LIBDIR object code libraries EXEC_PREFIX/lib +INCLUDEDIR header files PREFIX/include +OLDINCLUDEDIR header files for non-GCC compilers /usr/include +DATAROOTDIR architecture-independent data root PREFIX/share +DATADIR architecture-independent data DATAROOTDIR +INFODIR GNU "info" documentation DATAROOTDIR/info +LOCALEDIR locale-dependent data DATAROOTDIR/locale +MANDIR manual pages DATAROOTDIR/man +DOCDIR documentation root DATAROOTDIR/doc/APPNAME +HTMLDIR HTML documentation DOCDIR +DVIDIR DVI documentation DOCDIR +PDFDIR PDF documentation DOCDIR +PSDIR PostScript documentation DOCDIR +============== ========================================= ======================= +""" + +import os, re +from waflib import Utils, Options, Context + +gnuopts = ''' bindir, user commands, ${EXEC_PREFIX}/bin sbindir, system binaries, ${EXEC_PREFIX}/sbin libexecdir, program-specific binaries, ${EXEC_PREFIX}/libexec @@ -24,43 +65,67 @@ dvidir, DVI documentation, ${DOCDIR} pdfdir, PDF documentation, ${DOCDIR} psdir, PostScript documentation, ${DOCDIR} -'''%Utils.lib64() -_options=[x.split(', ')for x in gnuopts.splitlines()if x] +''' % Utils.lib64() + +_options = [x.split(', ') for x in gnuopts.splitlines() if x] + def configure(conf): - def get_param(varname,default): - return getattr(Options.options,varname,'')or default - env=conf.env - env.LIBDIR=env.BINDIR=[] - env.EXEC_PREFIX=get_param('EXEC_PREFIX',env.PREFIX) - env.PACKAGE=getattr(Context.g_module,'APPNAME',None)or env.PACKAGE - complete=False - iter=0 - while not complete and iter<len(_options)+1: - version_re=re.compile(r"\bIntel\b.*\bVersion\s*(?P<major>\d*)\.(?P<minor>\d*)",re.I).search +def get_ifort_version(conf, fc): + """ + Detects the compiler version and sets ``conf.env.FC_VERSION`` + """ + version_re = re.compile(r"\bIntel\b.*\bVersion\s*(?P<major>\d*)\.(?P<minor>\d*)",re.I).search if Utils.is_win32: - cmd=fc + cmd = fc else: - cmd=fc+['-logo'] - out,err=fc_config.getoutput(conf,cmd,stdin=False) - match=version_re(out)or version_re(err) + cmd = fc + ['-logo'] + + out, err = fc_config.getoutput(conf, cmd, stdin=False) + match = version_re(out) or version_re(err) if not match: conf.fatal('cannot determine ifort version.') - k=match.groupdict() - conf.env.FC_VERSION=(k['major'],k['minor']) + k = match.groupdict()
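
As a worked example for the version regex above: a banner line such as the following (sample text, not captured output) matches with ``major='19'`` and ``minor='0'``::

	Intel(R) Visual Fortran Compiler for applications running on Intel(R) 64, Version 19.0.1.144 Build 20181018

+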
conf.env.FC_VERSION = (k['major'], k['minor']) + def configure(conf): + """ + Detects the Intel Fortran compilers + """ if Utils.is_win32: - compiler,version,path,includes,libdirs,arch=conf.detect_ifort() - v=conf.env - v.DEST_CPU=arch - v.PATH=path - v.INCLUDES=includes - v.LIBPATH=libdirs - v.MSVC_COMPILER=compiler + compiler, version, path, includes, libdirs, arch = conf.detect_ifort() + v = conf.env + v.DEST_CPU = arch + v.PATH = path + v.INCLUDES = includes + v.LIBPATH = libdirs + v.MSVC_COMPILER = compiler try: - v.MSVC_VERSION=float(version) + v.MSVC_VERSION = float(version) except ValueError: - v.MSVC_VERSION=float(version[:-3]) + v.MSVC_VERSION = float(version[:-3]) + conf.find_ifort_win32() conf.ifort_modifier_win32() else: conf.find_ifort() - conf.find_program('xiar',var='AR') + conf.find_program('xiar', var='AR') conf.find_ar() conf.fc_flags() conf.fc_add_flags() conf.ifort_modifier_platform() -all_ifort_platforms=[('intel64','amd64'),('em64t','amd64'),('ia32','x86'),('Itanium','ia64')] + + +all_ifort_platforms = [ ('intel64', 'amd64'), ('em64t', 'amd64'), ('ia32', 'x86'), ('Itanium', 'ia64')] +"""List of icl platforms""" + @conf -def gather_ifort_versions(conf,versions): - version_pattern=re.compile('^...?.?\....?.?') +def gather_ifort_versions(conf, versions): + """ + List compiler versions by looking up registry keys + """ + version_pattern = re.compile(r'^...?.?\....?.?') try: - all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Wow6432node\\Intel\\Compilers\\Fortran') + all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Intel\\Compilers\\Fortran') except OSError: try: - all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Intel\\Compilers\\Fortran') + all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Intel\\Compilers\\Fortran') except OSError: return - index=0 + index = 0 while 1: try: - version=Utils.winreg.EnumKey(all_versions,index) + version = Utils.winreg.EnumKey(all_versions, index) except OSError: break - index+=1 + index += 1 if not version_pattern.match(version): continue - targets={} + targets = {} for target,arch in all_ifort_platforms: if target=='intel64': targetDir='EM64T_NATIVE' @@ -113,45 +139,58 @@ else: batch_file=os.path.join(path,'bin','ifortvars.bat') if os.path.isfile(batch_file): - targets[target]=target_compiler(conf,'intel',arch,version,target,batch_file) + targets[target] = target_compiler(conf, 'intel', arch, version, target, batch_file) + for target,arch in all_ifort_platforms: try: - icl_version=Utils.winreg.OpenKey(all_versions,version+'\\'+target) - path,type=Utils.winreg.QueryValueEx(icl_version,'ProductDir') + icl_version = Utils.winreg.OpenKey(all_versions, version+'\\'+target) + path,type = Utils.winreg.QueryValueEx(icl_version,'ProductDir') except OSError: continue else: batch_file=os.path.join(path,'bin','ifortvars.bat') if os.path.isfile(batch_file): - targets[target]=target_compiler(conf,'intel',arch,version,target,batch_file) - major=version[0:2] - versions['intel '+major]=targets -@conf -def setup_ifort(conf,versiondict): - platforms=Utils.to_list(conf.env.MSVC_TARGETS)or[i for i,j in all_ifort_platforms] - desired_versions=conf.env.MSVC_VERSIONS or list(reversed(list(versiondict.keys()))) + targets[target] = target_compiler(conf, 'intel', arch, version, target, batch_file) + major = version[0:2] + versions['intel ' + major] = targets + +@conf +def setup_ifort(conf, versiondict): + """ + Checks installed 
compilers and targets and returns the first combination from the user's + options, env, or the global supported lists that checks. + + :param versiondict: dict(platform -> dict(architecture -> configuration)) + :type versiondict: dict(string -> dict(string -> target_compiler) + :return: the compiler, revision, path, include dirs, library paths and target architecture + :rtype: tuple of strings + """ + platforms = Utils.to_list(conf.env.MSVC_TARGETS) or [i for i,j in all_ifort_platforms] + desired_versions = conf.env.MSVC_VERSIONS or list(reversed(list(versiondict.keys()))) for version in desired_versions: try: - targets=versiondict[version] + targets = versiondict[version] except KeyError: continue for arch in platforms: try: - cfg=targets[arch] + cfg = targets[arch] except KeyError: continue cfg.evaluate() if cfg.is_valid: - compiler,revision=version.rsplit(' ',1) + compiler,revision = version.rsplit(' ', 1) return compiler,revision,cfg.bindirs,cfg.incdirs,cfg.libdirs,cfg.cpu - conf.fatal('ifort: Impossible to find a valid architecture for building %r - %r'%(desired_versions,list(versiondict.keys()))) + conf.fatal('ifort: Impossible to find a valid architecture for building %r - %r' % (desired_versions, list(versiondict.keys()))) + @conf -def get_ifort_version_win32(conf,compiler,version,target,vcvars): +def get_ifort_version_win32(conf, compiler, version, target, vcvars): + # FIXME hack try: - conf.msvc_cnt+=1 + conf.msvc_cnt += 1 except AttributeError: - conf.msvc_cnt=1 - batfile=conf.bldnode.make_node('waf-print-msvc-%d.bat'%conf.msvc_cnt) + conf.msvc_cnt = 1 + batfile = conf.bldnode.make_node('waf-print-msvc-%d.bat' % conf.msvc_cnt) batfile.write("""@echo off set INCLUDE= set LIB= @@ -159,145 +198,216 @@ echo PATH=%%PATH%% echo INCLUDE=%%INCLUDE%% echo LIB=%%LIB%%;%%LIBPATH%% -"""%(vcvars,target)) - sout=conf.cmd_and_log(['cmd.exe','/E:on','/V:on','/C',batfile.abspath()]) +""" % (vcvars,target)) + sout = conf.cmd_and_log(['cmd.exe', '/E:on', '/V:on', '/C', batfile.abspath()]) batfile.delete() - lines=sout.splitlines() + lines = sout.splitlines() + if not lines[0]: lines.pop(0) - MSVC_PATH=MSVC_INCDIR=MSVC_LIBDIR=None + + MSVC_PATH = MSVC_INCDIR = MSVC_LIBDIR = None for line in lines: if line.startswith('PATH='): - path=line[5:] - MSVC_PATH=path.split(';') + path = line[5:] + MSVC_PATH = path.split(';') elif line.startswith('INCLUDE='): - MSVC_INCDIR=[i for i in line[8:].split(';')if i] + MSVC_INCDIR = [i for i in line[8:].split(';') if i] elif line.startswith('LIB='): - MSVC_LIBDIR=[i for i in line[4:].split(';')if i] - if None in(MSVC_PATH,MSVC_INCDIR,MSVC_LIBDIR): + MSVC_LIBDIR = [i for i in line[4:].split(';') if i] + if None in (MSVC_PATH, MSVC_INCDIR, MSVC_LIBDIR): conf.fatal('ifort: Could not find a valid architecture for building (get_ifort_version_win32)') - env=dict(os.environ) - env.update(PATH=path) - compiler_name,linker_name,lib_name=_get_prog_names(conf,compiler) - fc=conf.find_program(compiler_name,path_list=MSVC_PATH) - if'CL'in env: + + # Check if the compiler is usable at all. + # The detection may return 64-bit versions even on 32-bit systems, and these would fail to run. + env = dict(os.environ) + env.update(PATH = path) + compiler_name, linker_name, lib_name = _get_prog_names(conf, compiler) + fc = conf.find_program(compiler_name, path_list=MSVC_PATH) + + # delete CL if exists. because it could contain parameters which can change cl's behaviour rather catastrophically. 
+ if 'CL' in env: del(env['CL']) + try: - conf.cmd_and_log(fc+['/help'],env=env) + conf.cmd_and_log(fc + ['/help'], env=env) except UnicodeError: - st=traceback.format_exc() + st = traceback.format_exc() if conf.logger: conf.logger.error(st) conf.fatal('ifort: Unicode error - check the code page?') except Exception as e: - Logs.debug('ifort: get_ifort_version: %r %r %r -> failure %s',compiler,version,target,str(e)) + Logs.debug('ifort: get_ifort_version: %r %r %r -> failure %s', compiler, version, target, str(e)) conf.fatal('ifort: cannot run the compiler in get_ifort_version (run with -v to display errors)') else: - Logs.debug('ifort: get_ifort_version: %r %r %r -> OK',compiler,version,target) + Logs.debug('ifort: get_ifort_version: %r %r %r -> OK', compiler, version, target) finally: - conf.env[compiler_name]='' - return(MSVC_PATH,MSVC_INCDIR,MSVC_LIBDIR) + conf.env[compiler_name] = '' + + return (MSVC_PATH, MSVC_INCDIR, MSVC_LIBDIR) + class target_compiler(object): - def __init__(self,ctx,compiler,cpu,version,bat_target,bat,callback=None): - self.conf=ctx - self.name=None - self.is_valid=False - self.is_done=False - self.compiler=compiler - self.cpu=cpu - self.version=version - self.bat_target=bat_target - self.bat=bat - self.callback=callback + """ + Wraps a compiler configuration; call evaluate() to determine + whether the configuration is usable. + """ + def __init__(self, ctx, compiler, cpu, version, bat_target, bat, callback=None): + """ + :param ctx: configuration context to use to eventually get the version environment + :param compiler: compiler name + :param cpu: target cpu + :param version: compiler version number + :param bat_target: ? + :param bat: path to the batch file to run + :param callback: optional function to take the realized environment variables tup and map it (e.g. 
to combine other constant paths) + """ + self.conf = ctx + self.name = None + self.is_valid = False + self.is_done = False + + self.compiler = compiler + self.cpu = cpu + self.version = version + self.bat_target = bat_target + self.bat = bat + self.callback = callback + def evaluate(self): if self.is_done: return - self.is_done=True + self.is_done = True try: - vs=self.conf.get_ifort_version_win32(self.compiler,self.version,self.bat_target,self.bat) + vs = self.conf.get_ifort_version_win32(self.compiler, self.version, self.bat_target, self.bat) except Errors.ConfigurationError: - self.is_valid=False + self.is_valid = False return if self.callback: - vs=self.callback(self,vs) - self.is_valid=True - (self.bindirs,self.incdirs,self.libdirs)=vs + vs = self.callback(self, vs) + self.is_valid = True + (self.bindirs, self.incdirs, self.libdirs) = vs + def __str__(self): - return str((self.bindirs,self.incdirs,self.libdirs)) + return str((self.bindirs, self.incdirs, self.libdirs)) + def __repr__(self): - return repr((self.bindirs,self.incdirs,self.libdirs)) + return repr((self.bindirs, self.incdirs, self.libdirs)) + @conf def detect_ifort(self): return self.setup_ifort(self.get_ifort_versions(False)) + @conf -def get_ifort_versions(self,eval_and_save=True): - dct={} +def get_ifort_versions(self, eval_and_save=True): + """ + :return: platforms to compiler configurations + :rtype: dict + """ + dct = {} self.gather_ifort_versions(dct) return dct -def _get_prog_names(self,compiler): + +def _get_prog_names(self, compiler): if compiler=='intel': - compiler_name='ifort' - linker_name='XILINK' - lib_name='XILIB' + compiler_name = 'ifort' + linker_name = 'XILINK' + lib_name = 'XILIB' else: - compiler_name='CL' - linker_name='LINK' - lib_name='LIB' - return compiler_name,linker_name,lib_name + # assumes CL.exe + compiler_name = 'CL' + linker_name = 'LINK' + lib_name = 'LIB' + return compiler_name, linker_name, lib_name + @conf def find_ifort_win32(conf): - v=conf.env - path=v.PATH - compiler=v.MSVC_COMPILER - version=v.MSVC_VERSION - compiler_name,linker_name,lib_name=_get_prog_names(conf,compiler) - v.IFORT_MANIFEST=(compiler=='intel'and version>=11) - fc=conf.find_program(compiler_name,var='FC',path_list=path) - env=dict(conf.environ) + # the autodetection is supposed to be performed before entering this method + v = conf.env + path = v.PATH + compiler = v.MSVC_COMPILER + version = v.MSVC_VERSION + + compiler_name, linker_name, lib_name = _get_prog_names(conf, compiler) + v.IFORT_MANIFEST = (compiler == 'intel' and version >= 11) + + # compiler + fc = conf.find_program(compiler_name, var='FC', path_list=path) + + # before setting anything, check if the compiler is really intel fortran + env = dict(conf.environ) if path: - env.update(PATH=';'.join(path)) - if not conf.cmd_and_log(fc+['/nologo','/help'],env=env): + env.update(PATH = ';'.join(path)) + if not conf.cmd_and_log(fc + ['/nologo', '/help'], env=env): conf.fatal('the intel fortran compiler could not be identified') - v.FC_NAME='IFORT' + + v.FC_NAME = 'IFORT' + if not v.LINK_FC: - conf.find_program(linker_name,var='LINK_FC',path_list=path,mandatory=True) + conf.find_program(linker_name, var='LINK_FC', path_list=path, mandatory=True) + if not v.AR: - conf.find_program(lib_name,path_list=path,var='AR',mandatory=True) - v.ARFLAGS=['/nologo'] + conf.find_program(lib_name, path_list=path, var='AR', mandatory=True) + v.ARFLAGS = ['/nologo'] + +
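
Stepping back to ``setup_ifort`` above: the search can be narrowed through the same variables the msvc tool uses; a configure sketch (the version and target strings follow the naming produced by the registry scan)::

	def configure(conf):
		# optional filters; both default to everything that was detected
		conf.env.MSVC_VERSIONS = ['intel 19']
		conf.env.MSVC_TARGETS = ['intel64']
		conf.load('ifort')

+	# manifest tool. Not required for VS 2003 and below.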
Must have for VS 2005 and later if v.IFORT_MANIFEST: - conf.find_program('MT',path_list=path,var='MT') - v.MTFLAGS=['/nologo'] + conf.find_program('MT', path_list=path, var='MT') + v.MTFLAGS = ['/nologo'] + try: conf.load('winres') except Errors.WafError: Logs.warn('Resource compiler not found. Compiling resource file is disabled') + +####################################################################################################### +##### conf above, build below + @after_method('apply_link') @feature('fc') def apply_flags_ifort(self): - if not self.env.IFORT_WIN32 or not getattr(self,'link_task',None): + """ + Adds additional flags implied by msvc, such as subsystems and pdb files:: + + def build(bld): + bld.stlib(source='main.c', target='bar', subsystem='gruik') + """ + if not self.env.IFORT_WIN32 or not getattr(self, 'link_task', None): return - is_static=isinstance(self.link_task,ccroot.stlink_task) - subsystem=getattr(self,'subsystem','') + + is_static = isinstance(self.link_task, ccroot.stlink_task) + + subsystem = getattr(self, 'subsystem', '') if subsystem: - subsystem='/subsystem:%s'%subsystem - flags=is_static and'ARFLAGS'or'LINKFLAGS' - self.env.append_value(flags,subsystem) + subsystem = '/subsystem:%s' % subsystem + flags = is_static and 'ARFLAGS' or 'LINKFLAGS' + self.env.append_value(flags, subsystem) + if not is_static: for f in self.env.LINKFLAGS: - d=f.lower() - if d[1:]=='debug': - pdbnode=self.link_task.outputs[0].change_ext('.pdb') + d = f.lower() + if d[1:] == 'debug': + pdbnode = self.link_task.outputs[0].change_ext('.pdb') self.link_task.outputs.append(pdbnode) - if getattr(self,'install_task',None): - self.pdb_install_task=self.add_install_files(install_to=self.install_task.install_to,install_from=pdbnode) + + if getattr(self, 'install_task', None): + self.pdb_install_task = self.add_install_files(install_to=self.install_task.install_to, install_from=pdbnode) + break -@feature('fcprogram','fcshlib','fcprogram_test') + +@feature('fcprogram', 'fcshlib', 'fcprogram_test') @after_method('apply_link') def apply_manifest_ifort(self): - if self.env.IFORT_WIN32 and getattr(self,'link_task',None): - self.link_task.env.FC=self.env.LINK_FC - if self.env.IFORT_WIN32 and self.env.IFORT_MANIFEST and getattr(self,'link_task',None): - out_node=self.link_task.outputs[0] - man_node=out_node.parent.find_or_declare(out_node.name+'.manifest') + """ + Enables manifest embedding in Fortran DLLs when using ifort on Windows + See: http://msdn2.microsoft.com/en-us/library/ms235542(VS.80).aspx + """ + if self.env.IFORT_WIN32 and getattr(self, 'link_task', None): + # it seems ifort.exe cannot be called for linking + self.link_task.env.FC = self.env.LINK_FC + + if self.env.IFORT_WIN32 and self.env.IFORT_MANIFEST and getattr(self, 'link_task', None): + out_node = self.link_task.outputs[0] + man_node = out_node.parent.find_or_declare(out_node.name + '.manifest') self.link_task.outputs.append(man_node) - self.env.DO_MANIFEST=True + self.env.DO_MANIFEST = True + diff -Nru lilv-0.24.4~dfsg0/waflib/Tools/__init__.py lilv-0.24.6/waflib/Tools/__init__.py --- lilv-0.24.4~dfsg0/waflib/Tools/__init__.py 2018-06-27 05:22:05.000000000 +0000 +++ lilv-0.24.6/waflib/Tools/__init__.py 2019-06-06 20:19:08.000000000 +0000 @@ -1,4 +1,3 @@ -#! /usr/bin/env python +#!/usr/bin/env python # encoding: utf-8 -# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file - +# Thomas Nagy, 2005-2018 (ita) diff -Nru lilv-0.24.4~dfsg0/waflib/Tools/intltool.py lilv-0.24.6/waflib/Tools/intltool.py --- lilv-0.24.4~dfsg0/waflib/Tools/intltool.py 2018-06-27 05:22:05.000000000 +0000 +++ lilv-0.24.6/waflib/Tools/intltool.py 2019-06-06 20:19:08.000000000 +0000 @@ -1,101 +1,231 @@ -#! /usr/bin/env python +#!/usr/bin/env python # encoding: utf-8 -# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file +# Thomas Nagy, 2006-2018 (ita) + +""" +Support for translation tools such as msgfmt and intltool + +Usage:: + + def configure(conf): + conf.load('gnu_dirs intltool') + + def build(bld): + # process the .po files into .gmo files, and install them in LOCALEDIR + bld(features='intltool_po', appname='myapp', podir='po', install_path="${LOCALEDIR}") + + # process an input file, substituting the translations from the po dir + bld( + features = "intltool_in", + podir = "../po", + style = "desktop", + flags = ["-u"], + source = 'kupfer.desktop.in', + install_path = "${DATADIR}/applications", + ) + +Usage of the :py:mod:`waflib.Tools.gnu_dirs` is recommended, but not obligatory. +""" from __future__ import with_statement -import os,re -from waflib import Context,Task,Utils,Logs + +import os, re +from waflib import Context, Task, Utils, Logs import waflib.Tools.ccroot -from waflib.TaskGen import feature,before_method,taskgen_method +from waflib.TaskGen import feature, before_method, taskgen_method from waflib.Logs import error from waflib.Configure import conf -_style_flags={'ba':'-b','desktop':'-d','keys':'-k','quoted':'--quoted-style','quotedxml':'--quotedxml-style','rfc822deb':'-r','schemas':'-s','xml':'-x',} + +_style_flags = { + 'ba': '-b', + 'desktop': '-d', + 'keys': '-k', + 'quoted': '--quoted-style', + 'quotedxml': '--quotedxml-style', + 'rfc822deb': '-r', + 'schemas': '-s', + 'xml': '-x', +} + @taskgen_method def ensure_localedir(self): + """ + Expands LOCALEDIR from DATAROOTDIR/locale if possible, or falls back to PREFIX/share/locale + """ + # use the tool gnu_dirs to provide options to define this if not self.env.LOCALEDIR: if self.env.DATAROOTDIR: - self.env.LOCALEDIR=os.path.join(self.env.DATAROOTDIR,'locale') + self.env.LOCALEDIR = os.path.join(self.env.DATAROOTDIR, 'locale') else: - self.env.LOCALEDIR=os.path.join(self.env.PREFIX,'share','locale') + self.env.LOCALEDIR = os.path.join(self.env.PREFIX, 'share', 'locale') + @before_method('process_source') @feature('intltool_in') def apply_intltool_in_f(self): + """ + Creates tasks to translate files by intltool-merge:: + + def build(bld): + bld( + features = "intltool_in", + podir = "../po", + style = "desktop", + flags = ["-u"], + source = 'kupfer.desktop.in', + install_path = "${DATADIR}/applications", + ) + + :param podir: location of the .po files + :type podir: string + :param source: source files to process + :type source: list of string + :param style: the intltool-merge mode of operation, can be one of the following values: + ``ba``, ``desktop``, ``keys``, ``quoted``, ``quotedxml``, ``rfc822deb``, ``schemas`` and ``xml``. + See the ``intltool-merge`` man page for more information about supported modes of operation. 
+ :type style: string + :param flags: compilation flags ("-quc" by default) + :type flags: list of string + :param install_path: installation path + :type install_path: string + """ try: self.meths.remove('process_source') except ValueError: pass + self.ensure_localedir() - podir=getattr(self,'podir','.') - podirnode=self.path.find_dir(podir) + + podir = getattr(self, 'podir', '.') + podirnode = self.path.find_dir(podir) if not podirnode: - error("could not find the podir %r"%podir) + error("could not find the podir %r" % podir) return - cache=getattr(self,'intlcache','.intlcache') - self.env.INTLCACHE=[os.path.join(str(self.path.get_bld()),podir,cache)] - self.env.INTLPODIR=podirnode.bldpath() - self.env.append_value('INTLFLAGS',getattr(self,'flags',self.env.INTLFLAGS_DEFAULT)) - if'-c'in self.env.INTLFLAGS: - self.bld.fatal('Redundant -c flag in intltool task %r'%self) - style=getattr(self,'style',None) + + cache = getattr(self, 'intlcache', '.intlcache') + self.env.INTLCACHE = [os.path.join(str(self.path.get_bld()), podir, cache)] + self.env.INTLPODIR = podirnode.bldpath() + self.env.append_value('INTLFLAGS', getattr(self, 'flags', self.env.INTLFLAGS_DEFAULT)) + + if '-c' in self.env.INTLFLAGS: + self.bld.fatal('Redundant -c flag in intltool task %r' % self) + + style = getattr(self, 'style', None) if style: try: - style_flag=_style_flags[style] + style_flag = _style_flags[style] except KeyError: - self.bld.fatal('intltool_in style "%s" is not valid'%style) - self.env.append_unique('INTLFLAGS',[style_flag]) + self.bld.fatal('intltool_in style "%s" is not valid' % style) + + self.env.append_unique('INTLFLAGS', [style_flag]) + for i in self.to_list(self.source): - node=self.path.find_resource(i) - task=self.create_task('intltool',node,node.change_ext('')) - inst=getattr(self,'install_path',None) + node = self.path.find_resource(i) + + task = self.create_task('intltool', node, node.change_ext('')) + inst = getattr(self, 'install_path', None) if inst: - self.add_install_files(install_to=inst,install_from=task.outputs) + self.add_install_files(install_to=inst, install_from=task.outputs) + @feature('intltool_po') def apply_intltool_po(self): + """ + Creates tasks to process po files:: + + def build(bld): + bld(features='intltool_po', appname='myapp', podir='po', install_path="${LOCALEDIR}") + + The relevant task generator arguments are: + + :param podir: directory of the .po files + :type podir: string + :param appname: name of the application + :type appname: string + :param install_path: installation directory + :type install_path: string + + The file LINGUAS must be present in the directory pointed by *podir* and list the translation files to process. 
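
For reference, the LINGUAS file mentioned above is a plain list of locales with ``#`` comments (contents illustrative)::

	# po/LINGUAS
	de
	fr
	pt_BR

Each entry maps ``po/<lang>.po`` to a ``<lang>.mo`` file installed as ``${LOCALEDIR}/<lang>/LC_MESSAGES/<appname>.mo``.
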
+ """ try: self.meths.remove('process_source') except ValueError: pass + self.ensure_localedir() - appname=getattr(self,'appname',getattr(Context.g_module,Context.APPNAME,'set_your_app_name')) - podir=getattr(self,'podir','.') - inst=getattr(self,'install_path','${LOCALEDIR}') - linguas=self.path.find_node(os.path.join(podir,'LINGUAS')) + + appname = getattr(self, 'appname', getattr(Context.g_module, Context.APPNAME, 'set_your_app_name')) + podir = getattr(self, 'podir', '.') + inst = getattr(self, 'install_path', '${LOCALEDIR}') + + linguas = self.path.find_node(os.path.join(podir, 'LINGUAS')) if linguas: - with open(linguas.abspath())as f: - langs=[] + # scan LINGUAS file for locales to process + with open(linguas.abspath()) as f: + langs = [] for line in f.readlines(): + # ignore lines containing comments if not line.startswith('#'): - langs+=line.split() - re_linguas=re.compile('[-a-zA-Z_@.]+') + langs += line.split() + re_linguas = re.compile('[-a-zA-Z_@.]+') for lang in langs: + # Make sure that we only process lines which contain locales if re_linguas.match(lang): - node=self.path.find_resource(os.path.join(podir,re_linguas.match(lang).group()+'.po')) - task=self.create_task('po',node,node.change_ext('.mo')) + node = self.path.find_resource(os.path.join(podir, re_linguas.match(lang).group() + '.po')) + task = self.create_task('po', node, node.change_ext('.mo')) + if inst: - filename=task.outputs[0].name - (langname,ext)=os.path.splitext(filename) - inst_file=inst+os.sep+langname+os.sep+'LC_MESSAGES'+os.sep+appname+'.mo' - self.add_install_as(install_to=inst_file,install_from=task.outputs[0],chmod=getattr(self,'chmod',Utils.O644)) + filename = task.outputs[0].name + (langname, ext) = os.path.splitext(filename) + inst_file = inst + os.sep + langname + os.sep + 'LC_MESSAGES' + os.sep + appname + '.mo' + self.add_install_as(install_to=inst_file, install_from=task.outputs[0], + chmod=getattr(self, 'chmod', Utils.O644)) + else: - Logs.pprint('RED',"Error no LINGUAS file found in po directory") + Logs.pprint('RED', "Error no LINGUAS file found in po directory") + class po(Task.Task): - run_str='${MSGFMT} -o ${TGT} ${SRC}' - color='BLUE' + """ + Compiles .po files into .gmo files + """ + run_str = '${MSGFMT} -o ${TGT} ${SRC}' + color = 'BLUE' + class intltool(Task.Task): - run_str='${INTLTOOL} ${INTLFLAGS} ${INTLCACHE_ST:INTLCACHE} ${INTLPODIR} ${SRC} ${TGT}' - color='BLUE' + """ + Calls intltool-merge to update translation files + """ + run_str = '${INTLTOOL} ${INTLFLAGS} ${INTLCACHE_ST:INTLCACHE} ${INTLPODIR} ${SRC} ${TGT}' + color = 'BLUE' + @conf def find_msgfmt(conf): - conf.find_program('msgfmt',var='MSGFMT') + """ + Detects msgfmt and sets the ``MSGFMT`` variable + """ + conf.find_program('msgfmt', var='MSGFMT') + @conf def find_intltool_merge(conf): + """ + Detects intltool-merge + """ if not conf.env.PERL: - conf.find_program('perl',var='PERL') - conf.env.INTLCACHE_ST='--cache=%s' - conf.env.INTLFLAGS_DEFAULT=['-q','-u'] - conf.find_program('intltool-merge',interpreter='PERL',var='INTLTOOL') + conf.find_program('perl', var='PERL') + conf.env.INTLCACHE_ST = '--cache=%s' + conf.env.INTLFLAGS_DEFAULT = ['-q', '-u'] + conf.find_program('intltool-merge', interpreter='PERL', var='INTLTOOL') + def configure(conf): + """ + Detects the program *msgfmt* and set *conf.env.MSGFMT*. + Detects the program *intltool-merge* and set *conf.env.INTLTOOL*. 
+ It is possible to set INTLTOOL in the environment, but it must not have spaces in it:: + + $ INTLTOOL="/path/to/the program/intltool" waf configure + + If a C/C++ compiler is present, execute a compilation test to find the header *locale.h*. + """ conf.find_msgfmt() conf.find_intltool_merge() if conf.env.CC or conf.env.CXX: conf.check(header_name='locale.h') + diff -Nru lilv-0.24.4~dfsg0/waflib/Tools/irixcc.py lilv-0.24.6/waflib/Tools/irixcc.py --- lilv-0.24.4~dfsg0/waflib/Tools/irixcc.py 2018-06-22 09:25:51.000000000 +0000 +++ lilv-0.24.6/waflib/Tools/irixcc.py 2019-06-06 20:19:08.000000000 +0000 @@ -1,46 +1,60 @@ #! /usr/bin/env python # encoding: utf-8 -# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file +# imported from samba + +""" +Compiler definition for irix/MIPSpro cc compiler +""" from waflib import Errors -from waflib.Tools import ccroot,ar +from waflib.Tools import ccroot, ar from waflib.Configure import conf + @conf def find_irixcc(conf): - v=conf.env - cc=None + v = conf.env + cc = None if v.CC: - cc=v.CC - elif'CC'in conf.environ: - cc=conf.environ['CC'] + cc = v.CC + elif 'CC' in conf.environ: + cc = conf.environ['CC'] if not cc: - cc=conf.find_program('cc',var='CC') + cc = conf.find_program('cc', var='CC') if not cc: conf.fatal('irixcc was not found') + try: - conf.cmd_and_log(cc+['-version']) + conf.cmd_and_log(cc + ['-version']) except Errors.WafError: - conf.fatal('%r -version could not be executed'%cc) - v.CC=cc - v.CC_NAME='irix' + conf.fatal('%r -version could not be executed' % cc) + + v.CC = cc + v.CC_NAME = 'irix' + @conf def irixcc_common_flags(conf): - v=conf.env - v.CC_SRC_F='' - v.CC_TGT_F=['-c','-o'] - v.CPPPATH_ST='-I%s' - v.DEFINES_ST='-D%s' + v = conf.env + + v.CC_SRC_F = '' + v.CC_TGT_F = ['-c', '-o'] + v.CPPPATH_ST = '-I%s' + v.DEFINES_ST = '-D%s' + if not v.LINK_CC: - v.LINK_CC=v.CC - v.CCLNK_SRC_F='' - v.CCLNK_TGT_F=['-o'] - v.LIB_ST='-l%s' - v.LIBPATH_ST='-L%s' - v.STLIB_ST='-l%s' - v.STLIBPATH_ST='-L%s' - v.cprogram_PATTERN='%s' - v.cshlib_PATTERN='lib%s.so' - v.cstlib_PATTERN='lib%s.a' + v.LINK_CC = v.CC + + v.CCLNK_SRC_F = '' + v.CCLNK_TGT_F = ['-o'] + + v.LIB_ST = '-l%s' # template for adding libs + v.LIBPATH_ST = '-L%s' # template for adding libpaths + v.STLIB_ST = '-l%s' + v.STLIBPATH_ST = '-L%s' + + v.cprogram_PATTERN = '%s' + v.cshlib_PATTERN = 'lib%s.so' + v.cstlib_PATTERN = 'lib%s.a' + def configure(conf): conf.find_irixcc() conf.find_cpp() @@ -49,3 +63,4 @@ conf.cc_load_tools() conf.cc_add_flags() conf.link_add_flags() + diff -Nru lilv-0.24.4~dfsg0/waflib/Tools/javaw.py lilv-0.24.6/waflib/Tools/javaw.py --- lilv-0.24.4~dfsg0/waflib/Tools/javaw.py 2018-06-27 05:22:05.000000000 +0000 +++ lilv-0.24.6/waflib/Tools/javaw.py 2019-10-19 17:59:11.000000000 +0000 @@ -1,16 +1,126 @@ -#! /usr/bin/env python +#!/usr/bin/env python # encoding: utf-8 -# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file +# Thomas Nagy, 2006-2018 (ita) -import os,shutil -from waflib import Task,Utils,Errors,Node +""" +Java support + +Javac is one of the few compilers that behaves very badly: + +#. it outputs files where it wants to (-d is only for the package root) + +#. it recompiles files silently behind your back + +#. it outputs an undefined amount of files (inner classes) + +Remember that the compilation can be performed using Jython[1] rather than regular Python. 
Instead of +running one of the following commands:: + + ./waf configure + python waf configure + +You would have to run:: + + java -jar /path/to/jython.jar waf configure + +[1] http://www.jython.org/ + +Usage +===== + +Load the "java" tool. + +def configure(conf): + conf.load('java') + +Java tools will be autodetected and eventually, if present, the quite +standard JAVA_HOME environment variable will be used. The also standard +CLASSPATH variable is used for library searching. + +In configuration phase checks can be done on the system environment, for +example to check if a class is known in the classpath:: + + conf.check_java_class('java.io.FileOutputStream') + +or if the system supports JNI applications building:: + + conf.check_jni_headers() + + +The java tool supports compiling java code, creating jar files and +creating javadoc documentation. This can be either done separately or +together in a single definition. For example to manage them separately:: + + bld(features = 'javac', + srcdir = 'src', + compat = '1.7', + use = 'animals', + name = 'cats-src', + ) + + bld(features = 'jar', + basedir = '.', + destfile = '../cats.jar', + name = 'cats', + use = 'cats-src' + ) + + +Or together by defining all the needed attributes:: + + bld(features = 'javac jar javadoc', + srcdir = 'src/', # folder containing the sources to compile + outdir = 'src', # folder where to output the classes (in the build directory) + compat = '1.6', # java compatibility version number + classpath = ['.', '..'], + + # jar + basedir = 'src', # folder containing the classes and other files to package (must match outdir) + destfile = 'foo.jar', # do not put the destfile in the folder of the java classes! + use = 'NNN', + jaropts = ['-C', 'default/src/', '.'], # can be used to give files + manifest = 'src/Manifest.mf', # Manifest file to include + + # javadoc + javadoc_package = ['com.meow' , 'com.meow.truc.bar', 'com.meow.truc.foo'], + javadoc_output = 'javadoc', + ) + +External jar dependencies can be mapped to a standard waf "use" dependency by +setting an environment variable with a CLASSPATH prefix in the configuration, +for example:: + + conf.env.CLASSPATH_NNN = ['aaaa.jar', 'bbbb.jar'] + +and then NNN can be freely used in rules as:: + + use = 'NNN', + +In the java tool the dependencies via use are not transitive by default, as +this necessity depends on the code. To enable recursive dependency scanning +use on a specific rule: + + recurse_use = True + +Or build-wise by setting RECURSE_JAVA: + + bld.env.RECURSE_JAVA = True + +Unit tests can be integrated in the waf unit test environment using the javatest extra. 
+""" + +import os, shutil +from waflib import Task, Utils, Errors, Node from waflib.Configure import conf -from waflib.TaskGen import feature,before_method,after_method +from waflib.TaskGen import feature, before_method, after_method, taskgen_method + from waflib.Tools import ccroot -ccroot.USELIB_VARS['javac']=set(['CLASSPATH','JAVACFLAGS']) -SOURCE_RE='**/*.java' -JAR_RE='**/*' -class_check_source=''' +ccroot.USELIB_VARS['javac'] = set(['CLASSPATH', 'JAVACFLAGS']) + +SOURCE_RE = '**/*.java' +JAR_RE = '**/*' + +class_check_source = ''' public class Test { public static void main(String[] argv) { Class lib; @@ -29,271 +139,455 @@ } } ''' + @feature('javac') @before_method('process_source') def apply_java(self): - Utils.def_attrs(self,jarname='',classpath='',sourcepath='.',srcdir='.',jar_mf_attributes={},jar_mf_classpath=[]) - outdir=getattr(self,'outdir',None) + """ + Create a javac task for compiling *.java files*. There can be + only one javac task by task generator. + """ + Utils.def_attrs(self, jarname='', classpath='', + sourcepath='.', srcdir='.', + jar_mf_attributes={}, jar_mf_classpath=[]) + + outdir = getattr(self, 'outdir', None) if outdir: - if not isinstance(outdir,Node.Node): - outdir=self.path.get_bld().make_node(self.outdir) + if not isinstance(outdir, Node.Node): + outdir = self.path.get_bld().make_node(self.outdir) else: - outdir=self.path.get_bld() + outdir = self.path.get_bld() outdir.mkdir() - self.outdir=outdir - self.env.OUTDIR=outdir.abspath() - self.javac_task=tsk=self.create_task('javac') - tmp=[] - srcdir=getattr(self,'srcdir','') - if isinstance(srcdir,Node.Node): - srcdir=[srcdir] + self.outdir = outdir + self.env.OUTDIR = outdir.abspath() + + self.javac_task = tsk = self.create_task('javac') + tmp = [] + + srcdir = getattr(self, 'srcdir', '') + if isinstance(srcdir, Node.Node): + srcdir = [srcdir] for x in Utils.to_list(srcdir): - if isinstance(x,Node.Node): - y=x + if isinstance(x, Node.Node): + y = x else: - y=self.path.find_dir(x) + y = self.path.find_dir(x) if not y: - self.bld.fatal('Could not find the folder %s from %s'%(x,self.path)) + self.bld.fatal('Could not find the folder %s from %s' % (x, self.path)) tmp.append(y) - tsk.srcdir=tmp - if getattr(self,'compat',None): - tsk.env.append_value('JAVACFLAGS',['-source',str(self.compat)]) - if hasattr(self,'sourcepath'): - fold=[isinstance(x,Node.Node)and x or self.path.find_dir(x)for x in self.to_list(self.sourcepath)] - names=os.pathsep.join([x.srcpath()for x in fold]) + + tsk.srcdir = tmp + + if getattr(self, 'compat', None): + tsk.env.append_value('JAVACFLAGS', ['-source', str(self.compat)]) + + if hasattr(self, 'sourcepath'): + fold = [isinstance(x, Node.Node) and x or self.path.find_dir(x) for x in self.to_list(self.sourcepath)] + names = os.pathsep.join([x.srcpath() for x in fold]) else: - names=[x.srcpath()for x in tsk.srcdir] + names = [x.srcpath() for x in tsk.srcdir] + if names: - tsk.env.append_value('JAVACFLAGS',['-sourcepath',names]) + tsk.env.append_value('JAVACFLAGS', ['-sourcepath', names]) + + +@taskgen_method +def java_use_rec(self, name, **kw): + """ + Processes recursively the *use* attribute for each referred java compilation + """ + if name in self.tmp_use_seen: + return + + self.tmp_use_seen.append(name) + + try: + y = self.bld.get_tgen_by_name(name) + except Errors.WafError: + self.uselib.append(name) + return + else: + y.post() + # Add generated JAR name for CLASSPATH. 
Task ordering (set_run_after) + # is already guaranteed by ordering done between the single tasks + if hasattr(y, 'jar_task'): + self.use_lst.append(y.jar_task.outputs[0].abspath()) + else: + if hasattr(y,'outdir'): + self.use_lst.append(y.outdir.abspath()) + else: + self.use_lst.append(y.path.get_bld().abspath()) + + for x in self.to_list(getattr(y, 'use', [])): + self.java_use_rec(x) + @feature('javac') @before_method('propagate_uselib_vars') @after_method('apply_java') def use_javac_files(self): - lst=[] - self.uselib=self.to_list(getattr(self,'uselib',[])) - names=self.to_list(getattr(self,'use',[])) - get=self.bld.get_tgen_by_name + """ + Processes the *use* attribute referring to other java compilations + """ + self.use_lst = [] + self.tmp_use_seen = [] + self.uselib = self.to_list(getattr(self, 'uselib', [])) + names = self.to_list(getattr(self, 'use', [])) + get = self.bld.get_tgen_by_name for x in names: try: - y=get(x) + tg = get(x) except Errors.WafError: self.uselib.append(x) else: - y.post() - if hasattr(y,'jar_task'): - lst.append(y.jar_task.outputs[0].abspath()) - self.javac_task.set_run_after(y.jar_task) + tg.post() + if hasattr(tg, 'jar_task'): + self.use_lst.append(tg.jar_task.outputs[0].abspath()) + self.javac_task.set_run_after(tg.jar_task) + self.javac_task.dep_nodes.extend(tg.jar_task.outputs) else: - for tsk in y.tasks: + if hasattr(tg, 'outdir'): + base_node = tg.outdir + else: + base_node = tg.path.get_bld() + + self.use_lst.append(base_node.abspath()) + self.javac_task.dep_nodes.extend([x for x in base_node.ant_glob(JAR_RE, remove=False, quiet=True)]) + + for tsk in tg.tasks: self.javac_task.set_run_after(tsk) - self.env.append_value('CLASSPATH',lst) + + # If recurse use scan is enabled recursively add use attribute for each used one + if getattr(self, 'recurse_use', False) or self.bld.env.RECURSE_JAVA: + self.java_use_rec(x) + + self.env.append_value('CLASSPATH', self.use_lst) + @feature('javac') -@after_method('apply_java','propagate_uselib_vars','use_javac_files') +@after_method('apply_java', 'propagate_uselib_vars', 'use_javac_files') def set_classpath(self): - if getattr(self,'classpath',None): - self.env.append_unique('CLASSPATH',getattr(self,'classpath',[])) + """ + Sets the CLASSPATH value on the *javac* task previously created. 
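
A sketch of the *use* processing above, including the optional recursive scan (target names are illustrative)::

	def build(bld):
		bld(features='javac', srcdir='libsrc', name='lib-src')
		bld(features='jar', basedir='.', destfile='lib.jar', name='lib', use='lib-src')
		# with recurse_use=True (or bld.env.RECURSE_JAVA = True), 'app' also
		# picks up lib.jar on its CLASSPATH through the transitive 'use'
		bld(features='javac', srcdir='src', name='app', use='lib', recurse_use=True)
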
+ """ + if getattr(self, 'classpath', None): + self.env.append_unique('CLASSPATH', getattr(self, 'classpath', [])) for x in self.tasks: - x.env.CLASSPATH=os.pathsep.join(self.env.CLASSPATH)+os.pathsep + x.env.CLASSPATH = os.pathsep.join(self.env.CLASSPATH) + os.pathsep + @feature('jar') -@after_method('apply_java','use_javac_files') +@after_method('apply_java', 'use_javac_files') @before_method('process_source') def jar_files(self): - destfile=getattr(self,'destfile','test.jar') - jaropts=getattr(self,'jaropts',[]) - manifest=getattr(self,'manifest',None) - basedir=getattr(self,'basedir',None) + """ + Creates a jar task (one maximum per task generator) + """ + destfile = getattr(self, 'destfile', 'test.jar') + jaropts = getattr(self, 'jaropts', []) + manifest = getattr(self, 'manifest', None) + + basedir = getattr(self, 'basedir', None) if basedir: - if not isinstance(self.basedir,Node.Node): - basedir=self.path.get_bld().make_node(basedir) + if not isinstance(self.basedir, Node.Node): + basedir = self.path.get_bld().make_node(basedir) else: - basedir=self.path.get_bld() + basedir = self.path.get_bld() if not basedir: - self.bld.fatal('Could not find the basedir %r for %r'%(self.basedir,self)) - self.jar_task=tsk=self.create_task('jar_create') + self.bld.fatal('Could not find the basedir %r for %r' % (self.basedir, self)) + + self.jar_task = tsk = self.create_task('jar_create') if manifest: - jarcreate=getattr(self,'jarcreate','cfm') + jarcreate = getattr(self, 'jarcreate', 'cfm') if not isinstance(manifest,Node.Node): - node=self.path.find_resource(manifest) + node = self.path.find_resource(manifest) else: - node=manifest + node = manifest if not node: - self.bld.fatal('invalid manifest file %r for %r'%(manifest,self)) + self.bld.fatal('invalid manifest file %r for %r' % (manifest, self)) tsk.dep_nodes.append(node) - jaropts.insert(0,node.abspath()) + jaropts.insert(0, node.abspath()) else: - jarcreate=getattr(self,'jarcreate','cf') - if not isinstance(destfile,Node.Node): - destfile=self.path.find_or_declare(destfile) + jarcreate = getattr(self, 'jarcreate', 'cf') + if not isinstance(destfile, Node.Node): + destfile = self.path.find_or_declare(destfile) if not destfile: - self.bld.fatal('invalid destfile %r for %r'%(destfile,self)) + self.bld.fatal('invalid destfile %r for %r' % (destfile, self)) tsk.set_outputs(destfile) - tsk.basedir=basedir + tsk.basedir = basedir + jaropts.append('-C') jaropts.append(basedir.bldpath()) jaropts.append('.') - tsk.env.JAROPTS=jaropts - tsk.env.JARCREATE=jarcreate - if getattr(self,'javac_task',None): + + tsk.env.JAROPTS = jaropts + tsk.env.JARCREATE = jarcreate + + if getattr(self, 'javac_task', None): tsk.set_run_after(self.javac_task) + @feature('jar') @after_method('jar_files') def use_jar_files(self): - self.uselib=self.to_list(getattr(self,'uselib',[])) - names=self.to_list(getattr(self,'use',[])) - get=self.bld.get_tgen_by_name + """ + Processes the *use* attribute to set the build order on the + tasks created by another task generator. 
+ """ + self.uselib = self.to_list(getattr(self, 'uselib', [])) + names = self.to_list(getattr(self, 'use', [])) + get = self.bld.get_tgen_by_name for x in names: try: - y=get(x) + y = get(x) except Errors.WafError: self.uselib.append(x) else: y.post() self.jar_task.run_after.update(y.tasks) + class JTask(Task.Task): - def split_argfile(self,cmd): - inline=[cmd[0]] - infile=[] + """ + Base class for java and jar tasks; provides functionality to run long commands + """ + def split_argfile(self, cmd): + inline = [cmd[0]] + infile = [] for x in cmd[1:]: + # jar and javac do not want -J flags in @file if x.startswith('-J'): inline.append(x) else: infile.append(self.quote_flag(x)) - return(inline,infile) + return (inline, infile) + class jar_create(JTask): - color='GREEN' - run_str='${JAR} ${JARCREATE} ${TGT} ${JAROPTS}' + """ + Creates a jar file + """ + color = 'GREEN' + run_str = '${JAR} ${JARCREATE} ${TGT} ${JAROPTS}' + def runnable_status(self): + """ + Wait for dependent tasks to be executed, then read the + files to update the list of inputs. + """ for t in self.run_after: if not t.hasrun: return Task.ASK_LATER if not self.inputs: try: - self.inputs=[x for x in self.basedir.ant_glob(JAR_RE,remove=False)if id(x)!=id(self.outputs[0])] + self.inputs = [x for x in self.basedir.ant_glob(JAR_RE, remove=False, quiet=True) if id(x) != id(self.outputs[0])] except Exception: - raise Errors.WafError('Could not find the basedir %r for %r'%(self.basedir,self)) - return super(jar_create,self).runnable_status() + raise Errors.WafError('Could not find the basedir %r for %r' % (self.basedir, self)) + return super(jar_create, self).runnable_status() + class javac(JTask): - color='BLUE' - run_str='${JAVAC} -classpath ${CLASSPATH} -d ${OUTDIR} ${JAVACFLAGS} ${SRC}' - vars=['CLASSPATH','JAVACFLAGS','JAVAC','OUTDIR'] + """ + Compiles java files + """ + color = 'BLUE' + run_str = '${JAVAC} -classpath ${CLASSPATH} -d ${OUTDIR} ${JAVACFLAGS} ${SRC}' + vars = ['CLASSPATH', 'JAVACFLAGS', 'JAVAC', 'OUTDIR'] + """ + The javac task will be executed again if the variables CLASSPATH, JAVACFLAGS, JAVAC or OUTDIR change. + """ def uid(self): - lst=[self.__class__.__name__,self.generator.outdir.abspath()] + """Identify java tasks by input&output folder""" + lst = [self.__class__.__name__, self.generator.outdir.abspath()] for x in self.srcdir: lst.append(x.abspath()) return Utils.h_list(lst) + def runnable_status(self): + """ + Waits for dependent tasks to be complete, then read the file system to find the input nodes. 
+ """ for t in self.run_after: if not t.hasrun: return Task.ASK_LATER + if not self.inputs: - self.inputs=[] + self.inputs = [] for x in self.srcdir: if x.exists(): - self.inputs.extend(x.ant_glob(SOURCE_RE,remove=False)) - return super(javac,self).runnable_status() + self.inputs.extend(x.ant_glob(SOURCE_RE, remove=False, quiet=True)) + return super(javac, self).runnable_status() + def post_run(self): - for node in self.generator.outdir.ant_glob('**/*.class'): - self.generator.bld.node_sigs[node]=self.uid() - self.generator.bld.task_sigs[self.uid()]=self.cache_sig + """ + List class files created + """ + for node in self.generator.outdir.ant_glob('**/*.class', quiet=True): + self.generator.bld.node_sigs[node] = self.uid() + self.generator.bld.task_sigs[self.uid()] = self.cache_sig + @feature('javadoc') @after_method('process_rule') def create_javadoc(self): - tsk=self.create_task('javadoc') - tsk.classpath=getattr(self,'classpath',[]) - self.javadoc_package=Utils.to_list(self.javadoc_package) - if not isinstance(self.javadoc_output,Node.Node): - self.javadoc_output=self.bld.path.find_or_declare(self.javadoc_output) + """ + Creates a javadoc task (feature 'javadoc') + """ + tsk = self.create_task('javadoc') + tsk.classpath = getattr(self, 'classpath', []) + self.javadoc_package = Utils.to_list(self.javadoc_package) + if not isinstance(self.javadoc_output, Node.Node): + self.javadoc_output = self.bld.path.find_or_declare(self.javadoc_output) + class javadoc(Task.Task): - color='BLUE' + """ + Builds java documentation + """ + color = 'BLUE' + def __str__(self): - return'%s: %s -> %s\n'%(self.__class__.__name__,self.generator.srcdir,self.generator.javadoc_output) + return '%s: %s -> %s\n' % (self.__class__.__name__, self.generator.srcdir, self.generator.javadoc_output) + def run(self): - env=self.env - bld=self.generator.bld - wd=bld.bldnode - srcpath=self.generator.path.abspath()+os.sep+self.generator.srcdir - srcpath+=os.pathsep - srcpath+=self.generator.path.get_bld().abspath()+os.sep+self.generator.srcdir - classpath=env.CLASSPATH - classpath+=os.pathsep - classpath+=os.pathsep.join(self.classpath) - classpath="".join(classpath) - self.last_cmd=lst=[] + env = self.env + bld = self.generator.bld + wd = bld.bldnode + + #add src node + bld node (for generated java code) + srcpath = self.generator.path.abspath() + os.sep + self.generator.srcdir + srcpath += os.pathsep + srcpath += self.generator.path.get_bld().abspath() + os.sep + self.generator.srcdir + + classpath = env.CLASSPATH + classpath += os.pathsep + classpath += os.pathsep.join(self.classpath) + classpath = "".join(classpath) + + self.last_cmd = lst = [] lst.extend(Utils.to_list(env.JAVADOC)) - lst.extend(['-d',self.generator.javadoc_output.abspath()]) - lst.extend(['-sourcepath',srcpath]) - lst.extend(['-classpath',classpath]) + lst.extend(['-d', self.generator.javadoc_output.abspath()]) + lst.extend(['-sourcepath', srcpath]) + lst.extend(['-classpath', classpath]) lst.extend(['-subpackages']) lst.extend(self.generator.javadoc_package) - lst=[x for x in lst if x] - self.generator.bld.cmd_and_log(lst,cwd=wd,env=env.env or None,quiet=0) + lst = [x for x in lst if x] + + self.generator.bld.cmd_and_log(lst, cwd=wd, env=env.env or None, quiet=0) + def post_run(self): - nodes=self.generator.javadoc_output.ant_glob('**') + nodes = self.generator.javadoc_output.ant_glob('**', quiet=True) for node in nodes: - self.generator.bld.node_sigs[node]=self.uid() - self.generator.bld.task_sigs[self.uid()]=self.cache_sig + 
self.generator.bld.node_sigs[node] = self.uid() + self.generator.bld.task_sigs[self.uid()] = self.cache_sig + def configure(self): - java_path=self.environ['PATH'].split(os.pathsep) - v=self.env - if'JAVA_HOME'in self.environ: - java_path=[os.path.join(self.environ['JAVA_HOME'],'bin')]+java_path - self.env.JAVA_HOME=[self.environ['JAVA_HOME']] - for x in'javac java jar javadoc'.split(): - self.find_program(x,var=x.upper(),path_list=java_path) - if'CLASSPATH'in self.environ: - v.CLASSPATH=self.environ['CLASSPATH'] + """ + Detects the javac, java and jar programs + """ + # If JAVA_HOME is set, prepend its bin directory to the path list + java_path = self.environ['PATH'].split(os.pathsep) + v = self.env + + if 'JAVA_HOME' in self.environ: + java_path = [os.path.join(self.environ['JAVA_HOME'], 'bin')] + java_path + self.env.JAVA_HOME = [self.environ['JAVA_HOME']] + + for x in 'javac java jar javadoc'.split(): + self.find_program(x, var=x.upper(), path_list=java_path, mandatory=(x not in ('javadoc',))) + + if 'CLASSPATH' in self.environ: + v.CLASSPATH = self.environ['CLASSPATH'] + if not v.JAR: self.fatal('jar is required for making java packages') if not v.JAVAC: self.fatal('javac is required for compiling java classes') - v.JARCREATE='cf' - v.JAVACFLAGS=[] + + v.JARCREATE = 'cf' # can use cvf + v.JAVACFLAGS = [] + @conf -def check_java_class(self,classname,with_classpath=None): - javatestdir='.waf-javatest' - classpath=javatestdir +def check_java_class(self, classname, with_classpath=None): + """ + Checks if the specified java class exists + + :param classname: class to check, like java.util.HashMap + :type classname: string + :param with_classpath: additional classpath to give + :type with_classpath: string + """ + javatestdir = '.waf-javatest' + + classpath = javatestdir if self.env.CLASSPATH: - classpath+=os.pathsep+self.env.CLASSPATH - if isinstance(with_classpath,str): - classpath+=os.pathsep+with_classpath - shutil.rmtree(javatestdir,True) + classpath += os.pathsep + self.env.CLASSPATH + if isinstance(with_classpath, str): + classpath += os.pathsep + with_classpath + + shutil.rmtree(javatestdir, True) os.mkdir(javatestdir) - Utils.writef(os.path.join(javatestdir,'Test.java'),class_check_source) - self.exec_command(self.env.JAVAC+[os.path.join(javatestdir,'Test.java')],shell=False) - cmd=self.env.JAVA+['-cp',classpath,'Test',classname] - self.to_log("%s\n"%str(cmd)) - found=self.exec_command(cmd,shell=False) - self.msg('Checking for java class %s'%classname,not found) - shutil.rmtree(javatestdir,True) + + Utils.writef(os.path.join(javatestdir, 'Test.java'), class_check_source) + + # Compile the source + self.exec_command(self.env.JAVAC + [os.path.join(javatestdir, 'Test.java')], shell=False) + + # Try to run the app + cmd = self.env.JAVA + ['-cp', classpath, 'Test', classname] + self.to_log("%s\n" % str(cmd)) + found = self.exec_command(cmd, shell=False) + + self.msg('Checking for java class %s' % classname, not found) + + shutil.rmtree(javatestdir, True) + return found + @conf def check_jni_headers(conf): + """ + Checks for jni headers and libraries.
On success the conf.env variables xxx_JAVA are added for use in C/C++ targets:: + + def options(opt): + opt.load('compiler_c') + + def configure(conf): + conf.load('compiler_c java') + conf.check_jni_headers() + + def build(bld): + bld.shlib(source='a.c', target='app', use='JAVA') + """ if not conf.env.CC_NAME and not conf.env.CXX_NAME: conf.fatal('load a compiler first (gcc, g++, ..)') + if not conf.env.JAVA_HOME: conf.fatal('set JAVA_HOME in the system environment') - javaHome=conf.env.JAVA_HOME[0] - dir=conf.root.find_dir(conf.env.JAVA_HOME[0]+'/include') + + # jni requires the jvm + javaHome = conf.env.JAVA_HOME[0] + + dir = conf.root.find_dir(conf.env.JAVA_HOME[0] + '/include') if dir is None: - dir=conf.root.find_dir(conf.env.JAVA_HOME[0]+'/../Headers') + dir = conf.root.find_dir(conf.env.JAVA_HOME[0] + '/../Headers') # think different?! if dir is None: conf.fatal('JAVA_HOME does not seem to be set properly') - f=dir.ant_glob('**/(jni|jni_md).h') - incDirs=[x.parent.abspath()for x in f] - dir=conf.root.find_dir(conf.env.JAVA_HOME[0]) - f=dir.ant_glob('**/*jvm.(so|dll|dylib)') - libDirs=[x.parent.abspath()for x in f]or[javaHome] - f=dir.ant_glob('**/*jvm.(lib)') + + f = dir.ant_glob('**/(jni|jni_md).h') + incDirs = [x.parent.abspath() for x in f] + + dir = conf.root.find_dir(conf.env.JAVA_HOME[0]) + f = dir.ant_glob('**/*jvm.(so|dll|dylib)') + libDirs = [x.parent.abspath() for x in f] or [javaHome] + + # On windows, we need both the .dll and .lib to link. On my JDK, they are + # in different directories... + f = dir.ant_glob('**/*jvm.(lib)') if f: - libDirs=[[x,y.parent.abspath()]for x in libDirs for y in f] - if conf.env.DEST_OS=='freebsd': - conf.env.append_unique('LINKFLAGS_JAVA','-pthread') + libDirs = [[x, y.parent.abspath()] for x in libDirs for y in f] + + if conf.env.DEST_OS == 'freebsd': + conf.env.append_unique('LINKFLAGS_JAVA', '-pthread') for d in libDirs: try: - conf.check(header_name='jni.h',define_name='HAVE_JNI_H',lib='jvm',libpath=d,includes=incDirs,uselib_store='JAVA',uselib='JAVA') + conf.check(header_name='jni.h', define_name='HAVE_JNI_H', lib='jvm', + libpath=d, includes=incDirs, uselib_store='JAVA', uselib='JAVA') except Exception: pass else: break else: - conf.fatal('could not find lib jvm in %r (see config.log)'%libDirs) + conf.fatal('could not find lib jvm in %r (see config.log)' % libDirs) + diff -Nru lilv-0.24.4~dfsg0/waflib/Tools/ldc2.py lilv-0.24.6/waflib/Tools/ldc2.py --- lilv-0.24.4~dfsg0/waflib/Tools/ldc2.py 2018-06-22 09:25:51.000000000 +0000 +++ lilv-0.24.6/waflib/Tools/ldc2.py 2019-06-06 20:19:08.000000000 +0000 @@ -1,36 +1,56 @@ -#! /usr/bin/env python +#!/usr/bin/env python # encoding: utf-8 -# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file +# Alex Rønne Petersen, 2012 (alexrp/Zor) -from waflib.Tools import ar,d +from waflib.Tools import ar, d from waflib.Configure import conf + @conf def find_ldc2(conf): - conf.find_program(['ldc2'],var='D') - out=conf.cmd_and_log(conf.env.D+['-version']) - if out.find("based on DMD v2.")==-1: + """ + Finds the program *ldc2* and set the variable *D* + """ + conf.find_program(['ldc2'], var='D') + + out = conf.cmd_and_log(conf.env.D + ['-version']) + if out.find("based on DMD v2.") == -1: conf.fatal("detected compiler is not ldc2") + @conf def common_flags_ldc2(conf): - v=conf.env - v.D_SRC_F=['-c'] - v.D_TGT_F='-of%s' - v.D_LINKER=v.D - v.DLNK_SRC_F='' - v.DLNK_TGT_F='-of%s' - v.DINC_ST='-I%s' - v.DSHLIB_MARKER=v.DSTLIB_MARKER='' - v.DSTLIB_ST=v.DSHLIB_ST='-L-l%s' - v.DSTLIBPATH_ST=v.DLIBPATH_ST='-L-L%s' - v.LINKFLAGS_dshlib=['-L-shared'] - v.DHEADER_ext='.di' - v.DFLAGS_d_with_header=['-H','-Hf'] - v.D_HDR_F='%s' - v.LINKFLAGS=[] - v.DFLAGS_dshlib=['-relocation-model=pic'] + """ + Sets the D flags required by *ldc2* + """ + v = conf.env + + v.D_SRC_F = ['-c'] + v.D_TGT_F = '-of%s' + + v.D_LINKER = v.D + v.DLNK_SRC_F = '' + v.DLNK_TGT_F = '-of%s' + v.DINC_ST = '-I%s' + + v.DSHLIB_MARKER = v.DSTLIB_MARKER = '' + v.DSTLIB_ST = v.DSHLIB_ST = '-L-l%s' + v.DSTLIBPATH_ST = v.DLIBPATH_ST = '-L-L%s' + + v.LINKFLAGS_dshlib = ['-L-shared'] + + v.DHEADER_ext = '.di' + v.DFLAGS_d_with_header = ['-H', '-Hf'] + v.D_HDR_F = '%s' + + v.LINKFLAGS = [] + v.DFLAGS_dshlib = ['-relocation-model=pic'] + def configure(conf): + """ + Configuration for *ldc2* + """ conf.find_ldc2() conf.load('ar') conf.load('d') conf.common_flags_ldc2() conf.d_platform_flags() + diff -Nru lilv-0.24.4~dfsg0/waflib/Tools/lua.py lilv-0.24.6/waflib/Tools/lua.py --- lilv-0.24.4~dfsg0/waflib/Tools/lua.py 2018-06-27 05:22:05.000000000 +0000 +++ lilv-0.24.6/waflib/Tools/lua.py 2019-06-06 20:19:08.000000000 +0000 @@ -1,18 +1,38 @@ -#! /usr/bin/env python +#!/usr/bin/env python # encoding: utf-8 -# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file +# Sebastian Schlingmann, 2008 +# Thomas Nagy, 2008-2018 (ita) + +""" +Lua support. + +Compile *.lua* files into *.luac*:: + + def configure(conf): + conf.load('lua') + conf.env.LUADIR = '/usr/local/share/myapp/scripts/' + def build(bld): + bld(source='foo.lua') +""" from waflib.TaskGen import extension from waflib import Task + @extension('.lua') -def add_lua(self,node): - tsk=self.create_task('luac',node,node.change_ext('.luac')) - inst_to=getattr(self,'install_path',self.env.LUADIR and'${LUADIR}'or None) +def add_lua(self, node): + tsk = self.create_task('luac', node, node.change_ext('.luac')) + inst_to = getattr(self, 'install_path', self.env.LUADIR and '${LUADIR}' or None) if inst_to: - self.add_install_files(install_to=inst_to,install_from=tsk.outputs) + self.add_install_files(install_to=inst_to, install_from=tsk.outputs) return tsk + class luac(Task.Task): - run_str='${LUAC} -s -o ${TGT} ${SRC}' - color='PINK' + run_str = '${LUAC} -s -o ${TGT} ${SRC}' + color = 'PINK' + def configure(conf): - conf.find_program('luac',var='LUAC') + """ + Detect the luac compiler and set *conf.env.LUAC* + """ + conf.find_program('luac', var='LUAC') + diff -Nru lilv-0.24.4~dfsg0/waflib/Tools/md5_tstamp.py lilv-0.24.6/waflib/Tools/md5_tstamp.py --- lilv-0.24.4~dfsg0/waflib/Tools/md5_tstamp.py 2018-06-22 09:25:51.000000000 +0000 +++ lilv-0.24.6/waflib/Tools/md5_tstamp.py 2019-10-19 17:59:11.000000000 +0000 @@ -1,24 +1,41 @@ #! 
/usr/bin/env python # encoding: utf-8 -# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file -import os,stat -from waflib import Utils,Build,Node -STRONGEST=True +""" +Re-calculate md5 hashes of files only when the file times have changed:: + + def options(opt): + opt.load('md5_tstamp') + +The hashes can reflect either the file contents (STRONGEST=True) or the +file time and file size. + +The performance benefits of this module are usually insignificant. +""" + +import os, stat +from waflib import Utils, Build, Node + +STRONGEST = True + Build.SAVED_ATTRS.append('hashes_md5_tstamp') def h_file(self): - filename=self.abspath() - st=os.stat(filename) - cache=self.ctx.hashes_md5_tstamp - if filename in cache and cache[filename][0]==st.st_mtime: + filename = self.abspath() + st = os.stat(filename) + + cache = self.ctx.hashes_md5_tstamp + if filename in cache and cache[filename][0] == st.st_mtime: return cache[filename][1] + if STRONGEST: - ret=Utils.h_file(filename) + ret = Utils.h_file(filename) else: if stat.S_ISDIR(st[stat.ST_MODE]): raise IOError('Not a file') - ret=Utils.md5(str((st.st_mtime,st.st_size)).encode()).digest() - cache[filename]=(st.st_mtime,ret) + ret = Utils.md5(str((st.st_mtime, st.st_size)).encode()).digest() + + cache[filename] = (st.st_mtime, ret) return ret -h_file.__doc__=Node.Node.h_file.__doc__ -Node.Node.h_file=h_file +h_file.__doc__ = Node.Node.h_file.__doc__ +Node.Node.h_file = h_file + diff -Nru lilv-0.24.4~dfsg0/waflib/Tools/msvc.py lilv-0.24.6/waflib/Tools/msvc.py --- lilv-0.24.4~dfsg0/waflib/Tools/msvc.py 2018-06-27 05:22:05.000000000 +0000 +++ lilv-0.24.6/waflib/Tools/msvc.py 2019-10-19 17:59:11.000000000 +0000 @@ -1,13 +1,71 @@ -#! /usr/bin/env python +#!/usr/bin/env python # encoding: utf-8 -# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file +# Carlos Rafael Giani, 2006 (dv) +# Tamas Pal, 2007 (folti) +# Nicolas Mercier, 2009 +# Matt Clarkson, 2012 + +""" +Microsoft Visual C++/Intel C++ compiler support + +If you get detection problems, first try any of the following:: + + chcp 65001 + set PYTHONIOENCODING=... + set PYTHONLEGACYWINDOWSSTDIO=1 + +Usage:: + + $ waf configure --msvc_version="msvc 10.0,msvc 9.0" --msvc_targets="x64" + +or:: + + def configure(conf): + conf.env.MSVC_VERSIONS = ['msvc 10.0', 'msvc 9.0', 'msvc 8.0', 'msvc 7.1', 'msvc 7.0', 'msvc 6.0', 'wsdk 7.0', 'intel 11', 'PocketPC 9.0', 'Smartphone 8.0'] + conf.env.MSVC_TARGETS = ['x64'] + conf.load('msvc') + +or:: + + def configure(conf): + conf.load('msvc', funs='no_autodetect') + conf.check_lib_msvc('gdi32') + conf.check_libs_msvc('kernel32 user32') + def build(bld): + tg = bld.program(source='main.c', target='app', use='KERNEL32 USER32 GDI32') + +Platforms and targets will be tested in the order they appear; +the first good configuration will be used. + +To force testing all the configurations that are not used, use the ``--no-msvc-lazy`` option +or set ``conf.env.MSVC_LAZY_AUTODETECT=False``.
+ +Supported platforms: ia64, x64, x86, x86_amd64, x86_ia64, x86_arm, amd64_x86, amd64_arm + +Compilers supported: + +* msvc => Visual Studio, versions 6.0 (VC 98, VC .NET 2002) to 15 (Visual Studio 2017) +* wsdk => Windows SDK, versions 6.0, 6.1, 7.0, 7.1, 8.0 +* icl => Intel compiler, versions 9, 10, 11, 13 +* winphone => Visual Studio to target Windows Phone 8 native (version 8.0 for now) +* Smartphone => Compiler/SDK for Smartphone devices (armv4/v4i) +* PocketPC => Compiler/SDK for PocketPC devices (armv4/v4i) + +To use WAF in a VS2008 Makefile project (see http://code.google.com/p/waf/issues/detail?id=894), +consider setting the environment variable "VS_UNICODE_OUTPUT" to nothing before calling waf. +In your project settings, use something like 'cmd.exe /C "set VS_UNICODE_OUTPUT=& set PYTHONUNBUFFERED=true & waf build"'. +cmd.exe /C "chcp 1252 & set PYTHONUNBUFFERED=true && set && waf configure" +Setting PYTHONUNBUFFERED gives unbuffered output. +""" + +import os, sys, re, traceback +from waflib import Utils, Logs, Options, Errors +from waflib.TaskGen import after_method, feature -import os,sys,re,traceback -from waflib import Utils,Logs,Options,Errors -from waflib.TaskGen import after_method,feature from waflib.Configure import conf -from waflib.Tools import ccroot,c,cxx,ar -g_msvc_systemlibs=''' +from waflib.Tools import ccroot, c, cxx, ar + +g_msvc_systemlibs = ''' aclui activeds ad1 adptif adsiid advapi32 asycfilt authz bhsupp bits bufferoverflowu cabinet cap certadm certidl ciuuid clusapi comctl32 comdlg32 comsupp comsuppd comsuppw comsuppwd comsvcs credui crypt32 cryptnet cryptui d3d8thk daouuid dbgeng dbghelp dciman32 ddao35 ddao35d @@ -27,61 +85,100 @@ version vfw32 wbemuuid webpost wiaguid wininet winmm winscard winspool winstrm wintrust wldap32 wmiutils wow32 ws2_32 wsnmp32 wsock32 wst wtsapi32 xaswitch xolehlp '''.split() -all_msvc_platforms=[('x64','amd64'),('x86','x86'),('ia64','ia64'),('x86_amd64','amd64'),('x86_ia64','ia64'),('x86_arm','arm'),('x86_arm64','arm64'),('amd64_x86','x86'),('amd64_arm','arm'),('amd64_arm64','arm64')] -all_wince_platforms=[('armv4','arm'),('armv4i','arm'),('mipsii','mips'),('mipsii_fp','mips'),('mipsiv','mips'),('mipsiv_fp','mips'),('sh4','sh'),('x86','cex86')] -all_icl_platforms=[('intel64','amd64'),('em64t','amd64'),('ia32','x86'),('Itanium','ia64')] +"""importlibs provided by MSVC/Platform SDK.
Do NOT search them""" + +all_msvc_platforms = [ ('x64', 'amd64'), ('x86', 'x86'), ('ia64', 'ia64'), + ('x86_amd64', 'amd64'), ('x86_ia64', 'ia64'), ('x86_arm', 'arm'), ('x86_arm64', 'arm64'), + ('amd64_x86', 'x86'), ('amd64_arm', 'arm'), ('amd64_arm64', 'arm64') ] +"""List of msvc platforms""" + +all_wince_platforms = [ ('armv4', 'arm'), ('armv4i', 'arm'), ('mipsii', 'mips'), ('mipsii_fp', 'mips'), ('mipsiv', 'mips'), ('mipsiv_fp', 'mips'), ('sh4', 'sh'), ('x86', 'cex86') ] +"""List of wince platforms""" + +all_icl_platforms = [ ('intel64', 'amd64'), ('em64t', 'amd64'), ('ia32', 'x86'), ('Itanium', 'ia64')] +"""List of icl platforms""" + def options(opt): - opt.add_option('--msvc_version',type='string',help='msvc version, eg: "msvc 10.0,msvc 9.0"',default='') - opt.add_option('--msvc_targets',type='string',help='msvc targets, eg: "x64,arm"',default='') - opt.add_option('--no-msvc-lazy',action='store_false',help='lazily check msvc target environments',default=True,dest='msvc_lazy') -@conf -def setup_msvc(conf,versiondict): - platforms=getattr(Options.options,'msvc_targets','').split(',') - if platforms==['']: - platforms=Utils.to_list(conf.env.MSVC_TARGETS)or[i for i,j in all_msvc_platforms+all_icl_platforms+all_wince_platforms] - desired_versions=getattr(Options.options,'msvc_version','').split(',') - if desired_versions==['']: - desired_versions=conf.env.MSVC_VERSIONS or list(reversed(sorted(versiondict.keys()))) - lazy_detect=getattr(Options.options,'msvc_lazy',True) + opt.add_option('--msvc_version', type='string', help = 'msvc version, eg: "msvc 10.0,msvc 9.0"', default='') + opt.add_option('--msvc_targets', type='string', help = 'msvc targets, eg: "x64,arm"', default='') + opt.add_option('--no-msvc-lazy', action='store_false', help = 'lazily check msvc target environments', default=True, dest='msvc_lazy') + +@conf +def setup_msvc(conf, versiondict): + """ + Checks installed compilers and targets and returns the first combination from the user's + options, env, or the global supported lists that checks. + + :param versiondict: dict(platform -> dict(architecture -> configuration)) + :type versiondict: dict(string -> dict(string -> target_compiler) + :return: the compiler, revision, path, include dirs, library paths and target architecture + :rtype: tuple of strings + """ + platforms = getattr(Options.options, 'msvc_targets', '').split(',') + if platforms == ['']: + platforms=Utils.to_list(conf.env.MSVC_TARGETS) or [i for i,j in all_msvc_platforms+all_icl_platforms+all_wince_platforms] + desired_versions = getattr(Options.options, 'msvc_version', '').split(',') + if desired_versions == ['']: + desired_versions = conf.env.MSVC_VERSIONS or list(reversed(sorted(versiondict.keys()))) + + # Override lazy detection by evaluating after the fact. 
+ lazy_detect = getattr(Options.options, 'msvc_lazy', True) if conf.env.MSVC_LAZY_AUTODETECT is False: - lazy_detect=False + lazy_detect = False + if not lazy_detect: for val in versiondict.values(): for arch in list(val.keys()): - cfg=val[arch] + cfg = val[arch] cfg.evaluate() if not cfg.is_valid: del val[arch] - conf.env.MSVC_INSTALLED_VERSIONS=versiondict + conf.env.MSVC_INSTALLED_VERSIONS = versiondict + for version in desired_versions: - Logs.debug('msvc: detecting %r - %r',version,desired_versions) + Logs.debug('msvc: detecting %r - %r', version, desired_versions) try: - targets=versiondict[version] + targets = versiondict[version] except KeyError: continue - seen=set() + + seen = set() for arch in platforms: if arch in seen: continue else: seen.add(arch) try: - cfg=targets[arch] + cfg = targets[arch] except KeyError: continue + cfg.evaluate() if cfg.is_valid: - compiler,revision=version.rsplit(' ',1) + compiler,revision = version.rsplit(' ', 1) return compiler,revision,cfg.bindirs,cfg.incdirs,cfg.libdirs,cfg.cpu - conf.fatal('msvc: Impossible to find a valid architecture for building %r - %r'%(desired_versions,list(versiondict.keys()))) + conf.fatal('msvc: Impossible to find a valid architecture for building %r - %r' % (desired_versions, list(versiondict.keys()))) + @conf -def get_msvc_version(conf,compiler,version,target,vcvars): - Logs.debug('msvc: get_msvc_version: %r %r %r',compiler,version,target) +def get_msvc_version(conf, compiler, version, target, vcvars): + """ + Checks that an installed compiler actually runs and uses vcvars to obtain the + environment needed by the compiler. + + :param compiler: compiler type, for looking up the executable name + :param version: compiler version, for debugging only + :param target: target architecture + :param vcvars: batch file to run to check the environment + :return: the location of the compiler executable, the location of include dirs, and the library paths + :rtype: tuple of strings + """ + Logs.debug('msvc: get_msvc_version: %r %r %r', compiler, version, target) + try: - conf.msvc_cnt+=1 + conf.msvc_cnt += 1 except AttributeError: - conf.msvc_cnt=1 - batfile=conf.bldnode.make_node('waf-print-msvc-%d.bat'%conf.msvc_cnt) + conf.msvc_cnt = 1 + batfile = conf.bldnode.make_node('waf-print-msvc-%d.bat' % conf.msvc_cnt) batfile.write("""@echo off set INCLUDE= set LIB= @@ -89,290 +186,363 @@ echo PATH=%%PATH%% echo INCLUDE=%%INCLUDE%% echo LIB=%%LIB%%;%%LIBPATH%% -"""%(vcvars,target)) - sout=conf.cmd_and_log(['cmd.exe','/E:on','/V:on','/C',batfile.abspath()]) - lines=sout.splitlines() +""" % (vcvars,target)) + sout = conf.cmd_and_log(['cmd.exe', '/E:on', '/V:on', '/C', batfile.abspath()]) + lines = sout.splitlines() + if not lines[0]: lines.pop(0) - MSVC_PATH=MSVC_INCDIR=MSVC_LIBDIR=None + + MSVC_PATH = MSVC_INCDIR = MSVC_LIBDIR = None for line in lines: if line.startswith('PATH='): - path=line[5:] - MSVC_PATH=path.split(';') + path = line[5:] + MSVC_PATH = path.split(';') elif line.startswith('INCLUDE='): - MSVC_INCDIR=[i for i in line[8:].split(';')if i] + MSVC_INCDIR = [i for i in line[8:].split(';') if i] elif line.startswith('LIB='): - MSVC_LIBDIR=[i for i in line[4:].split(';')if i] - if None in(MSVC_PATH,MSVC_INCDIR,MSVC_LIBDIR): + MSVC_LIBDIR = [i for i in line[4:].split(';') if i] + if None in (MSVC_PATH, MSVC_INCDIR, MSVC_LIBDIR): conf.fatal('msvc: Could not find a valid architecture for building (get_msvc_version_3)') - env=dict(os.environ) - env.update(PATH=path) - 
compiler_name,linker_name,lib_name=_get_prog_names(conf,compiler) - cxx=conf.find_program(compiler_name,path_list=MSVC_PATH) - if'CL'in env: + + # Check if the compiler is usable at all. + # The detection may return 64-bit versions even on 32-bit systems, and these would fail to run. + env = dict(os.environ) + env.update(PATH = path) + compiler_name, linker_name, lib_name = _get_prog_names(conf, compiler) + cxx = conf.find_program(compiler_name, path_list=MSVC_PATH) + + # delete CL if exists. because it could contain parameters which can change cl's behaviour rather catastrophically. + if 'CL' in env: del(env['CL']) + try: - conf.cmd_and_log(cxx+['/help'],env=env) + conf.cmd_and_log(cxx + ['/help'], env=env) except UnicodeError: - st=traceback.format_exc() + st = traceback.format_exc() if conf.logger: conf.logger.error(st) conf.fatal('msvc: Unicode error - check the code page?') except Exception as e: - Logs.debug('msvc: get_msvc_version: %r %r %r -> failure %s',compiler,version,target,str(e)) + Logs.debug('msvc: get_msvc_version: %r %r %r -> failure %s', compiler, version, target, str(e)) conf.fatal('msvc: cannot run the compiler in get_msvc_version (run with -v to display errors)') else: - Logs.debug('msvc: get_msvc_version: %r %r %r -> OK',compiler,version,target) + Logs.debug('msvc: get_msvc_version: %r %r %r -> OK', compiler, version, target) finally: - conf.env[compiler_name]='' - return(MSVC_PATH,MSVC_INCDIR,MSVC_LIBDIR) + conf.env[compiler_name] = '' + + return (MSVC_PATH, MSVC_INCDIR, MSVC_LIBDIR) + def gather_wince_supported_platforms(): - supported_wince_platforms=[] + """ + Checks SmartPhones SDKs + + :param versions: list to modify + :type versions: list + """ + supported_wince_platforms = [] try: - ce_sdk=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Wow6432node\\Microsoft\\Windows CE Tools\\SDKs') + ce_sdk = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Microsoft\\Windows CE Tools\\SDKs') except OSError: try: - ce_sdk=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Microsoft\\Windows CE Tools\\SDKs') + ce_sdk = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Microsoft\\Windows CE Tools\\SDKs') except OSError: - ce_sdk='' + ce_sdk = '' if not ce_sdk: return supported_wince_platforms - index=0 + + index = 0 while 1: try: - sdk_device=Utils.winreg.EnumKey(ce_sdk,index) - sdk=Utils.winreg.OpenKey(ce_sdk,sdk_device) + sdk_device = Utils.winreg.EnumKey(ce_sdk, index) + sdk = Utils.winreg.OpenKey(ce_sdk, sdk_device) except OSError: break - index+=1 + index += 1 try: - path,type=Utils.winreg.QueryValueEx(sdk,'SDKRootDir') + path,type = Utils.winreg.QueryValueEx(sdk, 'SDKRootDir') except OSError: try: - path,type=Utils.winreg.QueryValueEx(sdk,'SDKInformation') + path,type = Utils.winreg.QueryValueEx(sdk,'SDKInformation') except OSError: continue - path,xml=os.path.split(path) - path=str(path) - path,device=os.path.split(path) + path,xml = os.path.split(path) + path = str(path) + path,device = os.path.split(path) if not device: - path,device=os.path.split(path) - platforms=[] + path,device = os.path.split(path) + platforms = [] for arch,compiler in all_wince_platforms: - if os.path.isdir(os.path.join(path,device,'Lib',arch)): - platforms.append((arch,compiler,os.path.join(path,device,'Include',arch),os.path.join(path,device,'Lib',arch))) + if os.path.isdir(os.path.join(path, device, 'Lib', arch)): + platforms.append((arch, compiler, os.path.join(path, device, 'Include', arch), os.path.join(path, device, 
'Lib', arch))) if platforms: - supported_wince_platforms.append((device,platforms)) + supported_wince_platforms.append((device, platforms)) return supported_wince_platforms + def gather_msvc_detected_versions(): - version_pattern=re.compile('^(\d\d?\.\d\d?)(Exp)?$') - detected_versions=[] - for vcver,vcvar in(('VCExpress','Exp'),('VisualStudio','')): - prefix='SOFTWARE\\Wow6432node\\Microsoft\\'+vcver + #Detected MSVC versions! + version_pattern = re.compile(r'^(\d\d?\.\d\d?)(Exp)?$') + detected_versions = [] + for vcver,vcvar in (('VCExpress','Exp'), ('VisualStudio','')): + prefix = 'SOFTWARE\\Wow6432node\\Microsoft\\' + vcver try: - all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,prefix) + all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, prefix) except OSError: - prefix='SOFTWARE\\Microsoft\\'+vcver + prefix = 'SOFTWARE\\Microsoft\\' + vcver try: - all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,prefix) + all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, prefix) except OSError: continue - index=0 + + index = 0 while 1: try: - version=Utils.winreg.EnumKey(all_versions,index) + version = Utils.winreg.EnumKey(all_versions, index) except OSError: break - index+=1 - match=version_pattern.match(version) + index += 1 + match = version_pattern.match(version) if match: - versionnumber=float(match.group(1)) + versionnumber = float(match.group(1)) else: continue - detected_versions.append((versionnumber,version+vcvar,prefix+'\\'+version)) + detected_versions.append((versionnumber, version+vcvar, prefix+'\\'+version)) def fun(tup): return tup[0] - detected_versions.sort(key=fun) + + detected_versions.sort(key = fun) return detected_versions + class target_compiler(object): - def __init__(self,ctx,compiler,cpu,version,bat_target,bat,callback=None): - self.conf=ctx - self.name=None - self.is_valid=False - self.is_done=False - self.compiler=compiler - self.cpu=cpu - self.version=version - self.bat_target=bat_target - self.bat=bat - self.callback=callback + """ + Wrap a compiler configuration; call evaluate() to determine + whether the configuration is usable. + """ + def __init__(self, ctx, compiler, cpu, version, bat_target, bat, callback=None): + """ + :param ctx: configuration context to use to eventually get the version environment + :param compiler: compiler name + :param cpu: target cpu + :param version: compiler version number + :param bat_target: ? 
+ :param bat: path to the batch file to run + """ + self.conf = ctx + self.name = None + self.is_valid = False + self.is_done = False + + self.compiler = compiler + self.cpu = cpu + self.version = version + self.bat_target = bat_target + self.bat = bat + self.callback = callback + def evaluate(self): if self.is_done: return - self.is_done=True + self.is_done = True try: - vs=self.conf.get_msvc_version(self.compiler,self.version,self.bat_target,self.bat) + vs = self.conf.get_msvc_version(self.compiler, self.version, self.bat_target, self.bat) except Errors.ConfigurationError: - self.is_valid=False + self.is_valid = False return if self.callback: - vs=self.callback(self,vs) - self.is_valid=True - (self.bindirs,self.incdirs,self.libdirs)=vs + vs = self.callback(self, vs) + self.is_valid = True + (self.bindirs, self.incdirs, self.libdirs) = vs + def __str__(self): - return str((self.compiler,self.cpu,self.version,self.bat_target,self.bat)) + return str((self.compiler, self.cpu, self.version, self.bat_target, self.bat)) + def __repr__(self): - return repr((self.compiler,self.cpu,self.version,self.bat_target,self.bat)) + return repr((self.compiler, self.cpu, self.version, self.bat_target, self.bat)) + @conf -def gather_wsdk_versions(conf,versions): - version_pattern=re.compile('^v..?.?\...?.?') +def gather_wsdk_versions(conf, versions): + """ + Use winreg to add the msvc versions to the input list + + :param versions: list to modify + :type versions: list + """ + version_pattern = re.compile(r'^v..?.?\...?.?') try: - all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Wow6432node\\Microsoft\\Microsoft SDKs\\Windows') + all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Microsoft\\Microsoft SDKs\\Windows') except OSError: try: - all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Microsoft\\Microsoft SDKs\\Windows') + all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Microsoft\\Microsoft SDKs\\Windows') except OSError: return - index=0 + index = 0 while 1: try: - version=Utils.winreg.EnumKey(all_versions,index) + version = Utils.winreg.EnumKey(all_versions, index) except OSError: break - index+=1 + index += 1 if not version_pattern.match(version): continue try: - msvc_version=Utils.winreg.OpenKey(all_versions,version) - path,type=Utils.winreg.QueryValueEx(msvc_version,'InstallationFolder') + msvc_version = Utils.winreg.OpenKey(all_versions, version) + path,type = Utils.winreg.QueryValueEx(msvc_version,'InstallationFolder') except OSError: continue - if path and os.path.isfile(os.path.join(path,'bin','SetEnv.cmd')): - targets={} + if path and os.path.isfile(os.path.join(path, 'bin', 'SetEnv.cmd')): + targets = {} for target,arch in all_msvc_platforms: - targets[target]=target_compiler(conf,'wsdk',arch,version,'/'+target,os.path.join(path,'bin','SetEnv.cmd')) - versions['wsdk '+version[1:]]=targets + targets[target] = target_compiler(conf, 'wsdk', arch, version, '/'+target, os.path.join(path, 'bin', 'SetEnv.cmd')) + versions['wsdk ' + version[1:]] = targets + @conf -def gather_msvc_targets(conf,versions,version,vc_path): - targets={} - if os.path.isfile(os.path.join(vc_path,'VC','Auxiliary','Build','vcvarsall.bat')): +def gather_msvc_targets(conf, versions, version, vc_path): + #Looking for normal MSVC compilers! 
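# Editor's summary of the probing order implemented below (approximate
# Visual Studio generations; not part of the upstream diff):
#
#   <vc_path>/VC/Auxiliary/Build/vcvarsall.bat   -> VS 2017 and later
#   <vc_path>/vcvarsall.bat                      -> roughly VS 2005..2015
#   <vc_path>/Common7/Tools/vsvars32.bat         -> older installs, x86 only
#   <vc_path>/Bin/vcvars32.bat                   -> VC6-era installs, x86 only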
+ targets = {} + + if os.path.isfile(os.path.join(vc_path, 'VC', 'Auxiliary', 'Build', 'vcvarsall.bat')): for target,realtarget in all_msvc_platforms[::-1]: - targets[target]=target_compiler(conf,'msvc',realtarget,version,target,os.path.join(vc_path,'VC','Auxiliary','Build','vcvarsall.bat')) - elif os.path.isfile(os.path.join(vc_path,'vcvarsall.bat')): + targets[target] = target_compiler(conf, 'msvc', realtarget, version, target, os.path.join(vc_path, 'VC', 'Auxiliary', 'Build', 'vcvarsall.bat')) + elif os.path.isfile(os.path.join(vc_path, 'vcvarsall.bat')): for target,realtarget in all_msvc_platforms[::-1]: - targets[target]=target_compiler(conf,'msvc',realtarget,version,target,os.path.join(vc_path,'vcvarsall.bat')) - elif os.path.isfile(os.path.join(vc_path,'Common7','Tools','vsvars32.bat')): - targets['x86']=target_compiler(conf,'msvc','x86',version,'x86',os.path.join(vc_path,'Common7','Tools','vsvars32.bat')) - elif os.path.isfile(os.path.join(vc_path,'Bin','vcvars32.bat')): - targets['x86']=target_compiler(conf,'msvc','x86',version,'',os.path.join(vc_path,'Bin','vcvars32.bat')) + targets[target] = target_compiler(conf, 'msvc', realtarget, version, target, os.path.join(vc_path, 'vcvarsall.bat')) + elif os.path.isfile(os.path.join(vc_path, 'Common7', 'Tools', 'vsvars32.bat')): + targets['x86'] = target_compiler(conf, 'msvc', 'x86', version, 'x86', os.path.join(vc_path, 'Common7', 'Tools', 'vsvars32.bat')) + elif os.path.isfile(os.path.join(vc_path, 'Bin', 'vcvars32.bat')): + targets['x86'] = target_compiler(conf, 'msvc', 'x86', version, '', os.path.join(vc_path, 'Bin', 'vcvars32.bat')) if targets: - versions['msvc %s'%version]=targets + versions['msvc %s' % version] = targets + @conf -def gather_wince_targets(conf,versions,version,vc_path,vsvars,supported_platforms): +def gather_wince_targets(conf, versions, version, vc_path, vsvars, supported_platforms): + #Looking for Win CE compilers! 
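# Editor's illustration (hypothetical device name, not part of the upstream
# diff): each Windows CE SDK found is registered under a 'device version' key,
# so it can later be selected via --msvc_version, e.g.
#
#   versions['STANDARDSDK_500 9.0'] = {'armv4i': target_compiler(...), ...}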
for device,platforms in supported_platforms: - targets={} + targets = {} for platform,compiler,include,lib in platforms: - winCEpath=os.path.join(vc_path,'ce') + winCEpath = os.path.join(vc_path, 'ce') if not os.path.isdir(winCEpath): continue - if os.path.isdir(os.path.join(winCEpath,'lib',platform)): - bindirs=[os.path.join(winCEpath,'bin',compiler),os.path.join(winCEpath,'bin','x86_'+compiler)] - incdirs=[os.path.join(winCEpath,'include'),os.path.join(winCEpath,'atlmfc','include'),include] - libdirs=[os.path.join(winCEpath,'lib',platform),os.path.join(winCEpath,'atlmfc','lib',platform),lib] - def combine_common(obj,compiler_env): - (common_bindirs,_1,_2)=compiler_env - return(bindirs+common_bindirs,incdirs,libdirs) - targets[platform]=target_compiler(conf,'msvc',platform,version,'x86',vsvars,combine_common) + + if os.path.isdir(os.path.join(winCEpath, 'lib', platform)): + bindirs = [os.path.join(winCEpath, 'bin', compiler), os.path.join(winCEpath, 'bin', 'x86_'+compiler)] + incdirs = [os.path.join(winCEpath, 'include'), os.path.join(winCEpath, 'atlmfc', 'include'), include] + libdirs = [os.path.join(winCEpath, 'lib', platform), os.path.join(winCEpath, 'atlmfc', 'lib', platform), lib] + def combine_common(obj, compiler_env): + # TODO this is likely broken, remove in waf 2.1 + (common_bindirs,_1,_2) = compiler_env + return (bindirs + common_bindirs, incdirs, libdirs) + targets[platform] = target_compiler(conf, 'msvc', platform, version, 'x86', vsvars, combine_common) if targets: - versions[device+' '+version]=targets + versions[device + ' ' + version] = targets + @conf -def gather_winphone_targets(conf,versions,version,vc_path,vsvars): - targets={} +def gather_winphone_targets(conf, versions, version, vc_path, vsvars): + #Looking for WinPhone compilers + targets = {} for target,realtarget in all_msvc_platforms[::-1]: - targets[target]=target_compiler(conf,'winphone',realtarget,version,target,vsvars) + targets[target] = target_compiler(conf, 'winphone', realtarget, version, target, vsvars) if targets: - versions['winphone '+version]=targets + versions['winphone ' + version] = targets + @conf -def gather_vswhere_versions(conf,versions): +def gather_vswhere_versions(conf, versions): try: import json except ImportError: Logs.error('Visual Studio 2017 detection requires Python 2.6') return - prg_path=os.environ.get('ProgramFiles(x86)',os.environ.get('ProgramFiles','C:\\Program Files (x86)')) - vswhere=os.path.join(prg_path,'Microsoft Visual Studio','Installer','vswhere.exe') - args=[vswhere,'-products','*','-legacy','-format','json'] + + prg_path = os.environ.get('ProgramFiles(x86)', os.environ.get('ProgramFiles', 'C:\\Program Files (x86)')) + + vswhere = os.path.join(prg_path, 'Microsoft Visual Studio', 'Installer', 'vswhere.exe') + args = [vswhere, '-products', '*', '-legacy', '-format', 'json'] try: - txt=conf.cmd_and_log(args) + txt = conf.cmd_and_log(args) except Errors.WafError as e: - Logs.debug('msvc: vswhere.exe failed %s',e) + Logs.debug('msvc: vswhere.exe failed %s', e) return - if sys.version_info[0]<3: - txt=txt.decode(Utils.console_encoding()) - arr=json.loads(txt) - arr.sort(key=lambda x:x['installationVersion']) + + if sys.version_info[0] < 3: + txt = txt.decode(Utils.console_encoding()) + + arr = json.loads(txt) + arr.sort(key=lambda x: x['installationVersion']) for entry in arr: - ver=entry['installationVersion'] - ver=str('.'.join(ver.split('.')[:2])) - path=str(os.path.abspath(entry['installationPath'])) - if os.path.exists(path)and('msvc %s'%ver)not in versions: - 
conf.gather_msvc_targets(versions,ver,path) -@conf -def gather_msvc_versions(conf,versions): - vc_paths=[] - for(v,version,reg)in gather_msvc_detected_versions(): + ver = entry['installationVersion'] + ver = str('.'.join(ver.split('.')[:2])) + path = str(os.path.abspath(entry['installationPath'])) + if os.path.exists(path) and ('msvc %s' % ver) not in versions: + conf.gather_msvc_targets(versions, ver, path) + +@conf +def gather_msvc_versions(conf, versions): + vc_paths = [] + for (v,version,reg) in gather_msvc_detected_versions(): try: try: - msvc_version=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,reg+"\\Setup\\VC") + msvc_version = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, reg + "\\Setup\\VC") except OSError: - msvc_version=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,reg+"\\Setup\\Microsoft Visual C++") - path,type=Utils.winreg.QueryValueEx(msvc_version,'ProductDir') + msvc_version = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, reg + "\\Setup\\Microsoft Visual C++") + path,type = Utils.winreg.QueryValueEx(msvc_version, 'ProductDir') except OSError: try: - msvc_version=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,"SOFTWARE\\Wow6432node\\Microsoft\\VisualStudio\\SxS\\VS7") - path,type=Utils.winreg.QueryValueEx(msvc_version,version) + msvc_version = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, "SOFTWARE\\Wow6432node\\Microsoft\\VisualStudio\\SxS\\VS7") + path,type = Utils.winreg.QueryValueEx(msvc_version, version) except OSError: continue else: - vc_paths.append((version,os.path.abspath(str(path)))) + vc_paths.append((version, os.path.abspath(str(path)))) continue else: - vc_paths.append((version,os.path.abspath(str(path)))) - wince_supported_platforms=gather_wince_supported_platforms() + vc_paths.append((version, os.path.abspath(str(path)))) + + wince_supported_platforms = gather_wince_supported_platforms() + for version,vc_path in vc_paths: - vs_path=os.path.dirname(vc_path) - vsvars=os.path.join(vs_path,'Common7','Tools','vsvars32.bat') + vs_path = os.path.dirname(vc_path) + vsvars = os.path.join(vs_path, 'Common7', 'Tools', 'vsvars32.bat') if wince_supported_platforms and os.path.isfile(vsvars): - conf.gather_wince_targets(versions,version,vc_path,vsvars,wince_supported_platforms) + conf.gather_wince_targets(versions, version, vc_path, vsvars, wince_supported_platforms) + + # WP80 works with 11.0Exp and 11.0, both of which resolve to the same vc_path. + # Stop after one is found. 
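# Editor's illustrative sketch, assuming the selection mechanism shown in the
# module docstring: once a 'winphone 8.0' entry is registered here, it can be
# requested like any other toolchain:
#
#   def configure(conf):
#       conf.env.MSVC_VERSIONS = ['winphone 8.0']
#       conf.env.MSVC_TARGETS = ['x86']
#       conf.load('msvc')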
for version,vc_path in vc_paths: - vs_path=os.path.dirname(vc_path) - vsvars=os.path.join(vs_path,'VC','WPSDK','WP80','vcvarsphoneall.bat') + vs_path = os.path.dirname(vc_path) + vsvars = os.path.join(vs_path, 'VC', 'WPSDK', 'WP80', 'vcvarsphoneall.bat') if os.path.isfile(vsvars): - conf.gather_winphone_targets(versions,'8.0',vc_path,vsvars) + conf.gather_winphone_targets(versions, '8.0', vc_path, vsvars) break + for version,vc_path in vc_paths: - vs_path=os.path.dirname(vc_path) - conf.gather_msvc_targets(versions,version,vc_path) + vs_path = os.path.dirname(vc_path) + conf.gather_msvc_targets(versions, version, vc_path) + @conf -def gather_icl_versions(conf,versions): - version_pattern=re.compile('^...?.?\....?.?') +def gather_icl_versions(conf, versions): + """ + Checks ICL compilers + + :param versions: list to modify + :type versions: list + """ + version_pattern = re.compile(r'^...?.?\....?.?') try: - all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Wow6432node\\Intel\\Compilers\\C++') + all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Intel\\Compilers\\C++') except OSError: try: - all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Intel\\Compilers\\C++') + all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Intel\\Compilers\\C++') except OSError: return - index=0 + index = 0 while 1: try: - version=Utils.winreg.EnumKey(all_versions,index) + version = Utils.winreg.EnumKey(all_versions, index) except OSError: break - index+=1 + index += 1 if not version_pattern.match(version): continue - targets={} + targets = {} for target,arch in all_icl_platforms: if target=='intel64': targetDir='EM64T_NATIVE' @@ -387,39 +557,46 @@ else: batch_file=os.path.join(path,'bin','iclvars.bat') if os.path.isfile(batch_file): - targets[target]=target_compiler(conf,'intel',arch,version,target,batch_file) + targets[target] = target_compiler(conf, 'intel', arch, version, target, batch_file) for target,arch in all_icl_platforms: try: - icl_version=Utils.winreg.OpenKey(all_versions,version+'\\'+target) - path,type=Utils.winreg.QueryValueEx(icl_version,'ProductDir') + icl_version = Utils.winreg.OpenKey(all_versions, version+'\\'+target) + path,type = Utils.winreg.QueryValueEx(icl_version,'ProductDir') except OSError: continue else: batch_file=os.path.join(path,'bin','iclvars.bat') if os.path.isfile(batch_file): - targets[target]=target_compiler(conf,'intel',arch,version,target,batch_file) - major=version[0:2] - versions['intel '+major]=targets + targets[target] = target_compiler(conf, 'intel', arch, version, target, batch_file) + major = version[0:2] + versions['intel ' + major] = targets + @conf -def gather_intel_composer_versions(conf,versions): - version_pattern=re.compile('^...?.?\...?.?.?') +def gather_intel_composer_versions(conf, versions): + """ + Checks ICL compilers that are part of Intel Composer Suites + + :param versions: list to modify + :type versions: list + """ + version_pattern = re.compile(r'^...?.?\...?.?.?') try: - all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Wow6432node\\Intel\\Suites') + all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Intel\\Suites') except OSError: try: - all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Intel\\Suites') + all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Intel\\Suites') except OSError: return - 
index=0 + index = 0 while 1: try: - version=Utils.winreg.EnumKey(all_versions,index) + version = Utils.winreg.EnumKey(all_versions, index) except OSError: break - index+=1 + index += 1 if not version_pattern.match(version): continue - targets={} + targets = {} for target,arch in all_icl_platforms: if target=='intel64': targetDir='EM64T_NATIVE' @@ -427,13 +604,13 @@ targetDir=target try: try: - defaults=Utils.winreg.OpenKey(all_versions,version+'\\Defaults\\C++\\'+targetDir) + defaults = Utils.winreg.OpenKey(all_versions,version+'\\Defaults\\C++\\'+targetDir) except OSError: - if targetDir=='EM64T_NATIVE': - defaults=Utils.winreg.OpenKey(all_versions,version+'\\Defaults\\C++\\EM64T') + if targetDir == 'EM64T_NATIVE': + defaults = Utils.winreg.OpenKey(all_versions,version+'\\Defaults\\C++\\EM64T') else: raise - uid,type=Utils.winreg.QueryValueEx(defaults,'SubKey') + uid,type = Utils.winreg.QueryValueEx(defaults, 'SubKey') Utils.winreg.OpenKey(all_versions,version+'\\'+uid+'\\C++\\'+targetDir) icl_version=Utils.winreg.OpenKey(all_versions,version+'\\'+uid+'\\C++') path,type=Utils.winreg.QueryValueEx(icl_version,'ProductDir') @@ -442,104 +619,166 @@ else: batch_file=os.path.join(path,'bin','iclvars.bat') if os.path.isfile(batch_file): - targets[target]=target_compiler(conf,'intel',arch,version,target,batch_file) - compilervars_warning_attr='_compilervars_warning_key' - if version[0:2]=='13'and getattr(conf,compilervars_warning_attr,True): - setattr(conf,compilervars_warning_attr,False) - patch_url='http://software.intel.com/en-us/forums/topic/328487' - compilervars_arch=os.path.join(path,'bin','compilervars_arch.bat') - for vscomntool in('VS110COMNTOOLS','VS100COMNTOOLS'): + targets[target] = target_compiler(conf, 'intel', arch, version, target, batch_file) + # The intel compilervar_arch.bat is broken when used with Visual Studio Express 2012 + # http://software.intel.com/en-us/forums/topic/328487 + compilervars_warning_attr = '_compilervars_warning_key' + if version[0:2] == '13' and getattr(conf, compilervars_warning_attr, True): + setattr(conf, compilervars_warning_attr, False) + patch_url = 'http://software.intel.com/en-us/forums/topic/328487' + compilervars_arch = os.path.join(path, 'bin', 'compilervars_arch.bat') + for vscomntool in ('VS110COMNTOOLS', 'VS100COMNTOOLS'): if vscomntool in os.environ: - vs_express_path=os.environ[vscomntool]+r'..\IDE\VSWinExpress.exe' - dev_env_path=os.environ[vscomntool]+r'..\IDE\devenv.exe' - if(r'if exist "%VS110COMNTOOLS%..\IDE\VSWinExpress.exe"'in Utils.readf(compilervars_arch)and not os.path.exists(vs_express_path)and not os.path.exists(dev_env_path)): - Logs.warn(('The Intel compilervar_arch.bat only checks for one Visual Studio SKU ''(VSWinExpress.exe) but it does not seem to be installed at %r. ''The intel command line set up will fail to configure unless the file %r''is patched. See: %s')%(vs_express_path,compilervars_arch,patch_url)) - major=version[0:2] - versions['intel '+major]=targets + vs_express_path = os.environ[vscomntool] + r'..\IDE\VSWinExpress.exe' + dev_env_path = os.environ[vscomntool] + r'..\IDE\devenv.exe' + if (r'if exist "%VS110COMNTOOLS%..\IDE\VSWinExpress.exe"' in Utils.readf(compilervars_arch) and + not os.path.exists(vs_express_path) and not os.path.exists(dev_env_path)): + Logs.warn(('The Intel compilervar_arch.bat only checks for one Visual Studio SKU ' + '(VSWinExpress.exe) but it does not seem to be installed at %r. ' + 'The intel command line set up will fail to configure unless the file %r' + 'is patched. 
See: %s') % (vs_express_path, compilervars_arch, patch_url)) + major = version[0:2] + versions['intel ' + major] = targets + @conf def detect_msvc(self): return self.setup_msvc(self.get_msvc_versions()) + @conf def get_msvc_versions(self): - dct=Utils.ordered_iter_dict() + """ + :return: platform to compiler configurations + :rtype: dict + """ + dct = Utils.ordered_iter_dict() self.gather_icl_versions(dct) self.gather_intel_composer_versions(dct) self.gather_wsdk_versions(dct) self.gather_msvc_versions(dct) self.gather_vswhere_versions(dct) - Logs.debug('msvc: detected versions %r',list(dct.keys())) + Logs.debug('msvc: detected versions %r', list(dct.keys())) return dct + @conf -def find_lt_names_msvc(self,libname,is_static=False): - lt_names=['lib%s.la'%libname,'%s.la'%libname,] +def find_lt_names_msvc(self, libname, is_static=False): + """ + Win32/MSVC specific code to glean out information from libtool la files. + this function is not attached to the task_gen class. Returns a triplet: + (library absolute path, library name without extension, whether the library is static) + """ + lt_names=[ + 'lib%s.la' % libname, + '%s.la' % libname, + ] + for path in self.env.LIBPATH: for la in lt_names: laf=os.path.join(path,la) dll=None if os.path.exists(laf): - ltdict=Utils.read_la_file(laf) + ltdict = Utils.read_la_file(laf) lt_libdir=None - if ltdict.get('libdir',''): - lt_libdir=ltdict['libdir'] - if not is_static and ltdict.get('library_names',''): + if ltdict.get('libdir', ''): + lt_libdir = ltdict['libdir'] + if not is_static and ltdict.get('library_names', ''): dllnames=ltdict['library_names'].split() dll=dllnames[0].lower() - dll=re.sub('\.dll$','',dll) - return(lt_libdir,dll,False) - elif ltdict.get('old_library',''): + dll=re.sub(r'\.dll$', '', dll) + return (lt_libdir, dll, False) + elif ltdict.get('old_library', ''): olib=ltdict['old_library'] if os.path.exists(os.path.join(path,olib)): - return(path,olib,True) - elif lt_libdir!=''and os.path.exists(os.path.join(lt_libdir,olib)): - return(lt_libdir,olib,True) + return (path, olib, True) + elif lt_libdir != '' and os.path.exists(os.path.join(lt_libdir,olib)): + return (lt_libdir, olib, True) else: - return(None,olib,True) + return (None, olib, True) else: - raise self.errors.WafError('invalid libtool object file: %s'%laf) - return(None,None,None) + raise self.errors.WafError('invalid libtool object file: %s' % laf) + return (None, None, None) + @conf -def libname_msvc(self,libname,is_static=False): - lib=libname.lower() - lib=re.sub('\.lib$','',lib) +def libname_msvc(self, libname, is_static=False): + lib = libname.lower() + lib = re.sub(r'\.lib$','',lib) + if lib in g_msvc_systemlibs: return lib + lib=re.sub('^lib','',lib) - if lib=='m': + + if lib == 'm': return None - (lt_path,lt_libname,lt_static)=self.find_lt_names_msvc(lib,is_static) - if lt_path!=None and lt_libname!=None: + + (lt_path, lt_libname, lt_static) = self.find_lt_names_msvc(lib, is_static) + + if lt_path != None and lt_libname != None: if lt_static: + # file existence check has been made by find_lt_names return os.path.join(lt_path,lt_libname) - if lt_path!=None: - _libpaths=[lt_path]+self.env.LIBPATH + + if lt_path != None: + _libpaths = [lt_path] + self.env.LIBPATH else: - _libpaths=self.env.LIBPATH - static_libs=['lib%ss.lib'%lib,'lib%s.lib'%lib,'%ss.lib'%lib,'%s.lib'%lib,] - dynamic_libs=['lib%s.dll.lib'%lib,'lib%s.dll.a'%lib,'%s.dll.lib'%lib,'%s.dll.a'%lib,'lib%s_d.lib'%lib,'%s_d.lib'%lib,'%s.lib'%lib,] + _libpaths = self.env.LIBPATH + + static_libs=[ + 
'lib%ss.lib' % lib, + 'lib%s.lib' % lib, + '%ss.lib' % lib, + '%s.lib' %lib, + ] + + dynamic_libs=[ + 'lib%s.dll.lib' % lib, + 'lib%s.dll.a' % lib, + '%s.dll.lib' % lib, + '%s.dll.a' % lib, + 'lib%s_d.lib' % lib, + '%s_d.lib' % lib, + '%s.lib' %lib, + ] + libnames=static_libs if not is_static: - libnames=dynamic_libs+static_libs + libnames=dynamic_libs + static_libs + for path in _libpaths: for libn in libnames: - if os.path.exists(os.path.join(path,libn)): - Logs.debug('msvc: lib found: %s',os.path.join(path,libn)) - return re.sub('\.lib$','',libn) - self.fatal('The library %r could not be found'%libname) - return re.sub('\.lib$','',libname) + if os.path.exists(os.path.join(path, libn)): + Logs.debug('msvc: lib found: %s', os.path.join(path,libn)) + return re.sub(r'\.lib$', '',libn) + + #if no lib can be found, just return the libname as msvc expects it + self.fatal('The library %r could not be found' % libname) + return re.sub(r'\.lib$', '', libname) + @conf -def check_lib_msvc(self,libname,is_static=False,uselib_store=None): - libn=self.libname_msvc(libname,is_static) +def check_lib_msvc(self, libname, is_static=False, uselib_store=None): + """ + Ideally we should be able to place the lib in the right env var, either STLIB or LIB, + but we don't distinguish static libs from shared libs. + This is ok since msvc doesn't have any special linker flag to select static libs (no env.STLIB_MARKER) + """ + libn = self.libname_msvc(libname, is_static) + if not uselib_store: - uselib_store=libname.upper() - if False and is_static: - self.env['STLIB_'+uselib_store]=[libn] + uselib_store = libname.upper() + + if False and is_static: # disabled + self.env['STLIB_' + uselib_store] = [libn] else: - self.env['LIB_'+uselib_store]=[libn] + self.env['LIB_' + uselib_store] = [libn] + @conf -def check_libs_msvc(self,libnames,is_static=False): +def check_libs_msvc(self, libnames, is_static=False): for libname in Utils.to_list(libnames): - self.check_lib_msvc(libname,is_static) + self.check_lib_msvc(libname, is_static) + def configure(conf): + """ + Configuration methods to call for detecting msvc + """ conf.autodetect(True) conf.find_msvc() conf.msvc_common_flags() @@ -549,156 +788,233 @@ conf.cxx_add_flags() conf.link_add_flags() conf.visual_studio_add_flags() + @conf def no_autodetect(conf): - conf.env.NO_MSVC_DETECT=1 + conf.env.NO_MSVC_DETECT = 1 configure(conf) + @conf -def autodetect(conf,arch=False): - v=conf.env +def autodetect(conf, arch=False): + v = conf.env if v.NO_MSVC_DETECT: return - compiler,version,path,includes,libdirs,cpu=conf.detect_msvc() + + compiler, version, path, includes, libdirs, cpu = conf.detect_msvc() if arch: - v.DEST_CPU=cpu - v.PATH=path - v.INCLUDES=includes - v.LIBPATH=libdirs - v.MSVC_COMPILER=compiler + v.DEST_CPU = cpu + + v.PATH = path + v.INCLUDES = includes + v.LIBPATH = libdirs + v.MSVC_COMPILER = compiler try: - v.MSVC_VERSION=float(version) + v.MSVC_VERSION = float(version) except ValueError: - v.MSVC_VERSION=float(version[:-3]) -def _get_prog_names(conf,compiler): - if compiler=='intel': - compiler_name='ICL' - linker_name='XILINK' - lib_name='XILIB' + v.MSVC_VERSION = float(version[:-3]) + +def _get_prog_names(conf, compiler): + if compiler == 'intel': + compiler_name = 'ICL' + linker_name = 'XILINK' + lib_name = 'XILIB' else: - compiler_name='CL' - linker_name='LINK' - lib_name='LIB' - return compiler_name,linker_name,lib_name + # assumes CL.exe + compiler_name = 'CL' + linker_name = 'LINK' + lib_name = 'LIB' + return compiler_name, linker_name, lib_name + @conf 
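# Editor's note: mirroring the module docstring, the no_autodetect/autodetect
# pair above supports skipping compiler detection entirely:
#
#   def configure(conf):
#       conf.load('msvc', funs='no_autodetect')
#       conf.check_lib_msvc('gdi32')
#       conf.check_libs_msvc('kernel32 user32')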
def find_msvc(conf): - if sys.platform=='cygwin': + """Due to path format limitations, limit operation only to native Win32. Yeah it sucks.""" + if sys.platform == 'cygwin': conf.fatal('MSVC module does not work under cygwin Python!') - v=conf.env - path=v.PATH - compiler=v.MSVC_COMPILER - version=v.MSVC_VERSION - compiler_name,linker_name,lib_name=_get_prog_names(conf,compiler) - v.MSVC_MANIFEST=(compiler=='msvc'and version>=8)or(compiler=='wsdk'and version>=6)or(compiler=='intel'and version>=11) - cxx=conf.find_program(compiler_name,var='CXX',path_list=path) - env=dict(conf.environ) + + # the autodetection is supposed to be performed before entering in this method + v = conf.env + path = v.PATH + compiler = v.MSVC_COMPILER + version = v.MSVC_VERSION + + compiler_name, linker_name, lib_name = _get_prog_names(conf, compiler) + v.MSVC_MANIFEST = (compiler == 'msvc' and version >= 8) or (compiler == 'wsdk' and version >= 6) or (compiler == 'intel' and version >= 11) + + # compiler + cxx = conf.find_program(compiler_name, var='CXX', path_list=path) + + # before setting anything, check if the compiler is really msvc + env = dict(conf.environ) if path: - env.update(PATH=';'.join(path)) - if not conf.cmd_and_log(cxx+['/nologo','/help'],env=env): + env.update(PATH = ';'.join(path)) + if not conf.cmd_and_log(cxx + ['/nologo', '/help'], env=env): conf.fatal('the msvc compiler could not be identified') - v.CC=v.CXX=cxx - v.CC_NAME=v.CXX_NAME='msvc' + + # c/c++ compiler + v.CC = v.CXX = cxx + v.CC_NAME = v.CXX_NAME = 'msvc' + + # linker if not v.LINK_CXX: - conf.find_program(linker_name,path_list=path,errmsg='%s was not found (linker)'%linker_name,var='LINK_CXX') + conf.find_program(linker_name, path_list=path, errmsg='%s was not found (linker)' % linker_name, var='LINK_CXX') + if not v.LINK_CC: - v.LINK_CC=v.LINK_CXX + v.LINK_CC = v.LINK_CXX + + # staticlib linker if not v.AR: - stliblink=conf.find_program(lib_name,path_list=path,var='AR') + stliblink = conf.find_program(lib_name, path_list=path, var='AR') if not stliblink: return - v.ARFLAGS=['/nologo'] + v.ARFLAGS = ['/nologo'] + + # manifest tool. Not required for VS 2003 and below. Must have for VS 2005 and later if v.MSVC_MANIFEST: - conf.find_program('MT',path_list=path,var='MT') - v.MTFLAGS=['/nologo'] + conf.find_program('MT', path_list=path, var='MT') + v.MTFLAGS = ['/nologo'] + try: conf.load('winres') except Errors.ConfigurationError: Logs.warn('Resource compiler not found. 
Compiling resource file is disabled') + @conf def visual_studio_add_flags(self): - v=self.env + """visual studio flags found in the system environment""" + v = self.env if self.environ.get('INCLUDE'): - v.prepend_value('INCLUDES',[x for x in self.environ['INCLUDE'].split(';')if x]) + v.prepend_value('INCLUDES', [x for x in self.environ['INCLUDE'].split(';') if x]) # notice the 'S' if self.environ.get('LIB'): - v.prepend_value('LIBPATH',[x for x in self.environ['LIB'].split(';')if x]) + v.prepend_value('LIBPATH', [x for x in self.environ['LIB'].split(';') if x]) + @conf def msvc_common_flags(conf): - v=conf.env - v.DEST_BINFMT='pe' - v.append_value('CFLAGS',['/nologo']) - v.append_value('CXXFLAGS',['/nologo']) - v.append_value('LINKFLAGS',['/nologo']) - v.DEFINES_ST='/D%s' - v.CC_SRC_F='' - v.CC_TGT_F=['/c','/Fo'] - v.CXX_SRC_F='' - v.CXX_TGT_F=['/c','/Fo'] - if(v.MSVC_COMPILER=='msvc'and v.MSVC_VERSION>=8)or(v.MSVC_COMPILER=='wsdk'and v.MSVC_VERSION>=6): - v.CC_TGT_F=['/FC']+v.CC_TGT_F - v.CXX_TGT_F=['/FC']+v.CXX_TGT_F - v.CPPPATH_ST='/I%s' - v.AR_TGT_F=v.CCLNK_TGT_F=v.CXXLNK_TGT_F='/OUT:' - v.CFLAGS_CRT_MULTITHREADED=v.CXXFLAGS_CRT_MULTITHREADED=['/MT'] - v.CFLAGS_CRT_MULTITHREADED_DLL=v.CXXFLAGS_CRT_MULTITHREADED_DLL=['/MD'] - v.CFLAGS_CRT_MULTITHREADED_DBG=v.CXXFLAGS_CRT_MULTITHREADED_DBG=['/MTd'] - v.CFLAGS_CRT_MULTITHREADED_DLL_DBG=v.CXXFLAGS_CRT_MULTITHREADED_DLL_DBG=['/MDd'] - v.LIB_ST='%s.lib' - v.LIBPATH_ST='/LIBPATH:%s' - v.STLIB_ST='%s.lib' - v.STLIBPATH_ST='/LIBPATH:%s' + """ + Setup the flags required for executing the msvc compiler + """ + v = conf.env + + v.DEST_BINFMT = 'pe' + v.append_value('CFLAGS', ['/nologo']) + v.append_value('CXXFLAGS', ['/nologo']) + v.append_value('LINKFLAGS', ['/nologo']) + v.DEFINES_ST = '/D%s' + + v.CC_SRC_F = '' + v.CC_TGT_F = ['/c', '/Fo'] + v.CXX_SRC_F = '' + v.CXX_TGT_F = ['/c', '/Fo'] + + if (v.MSVC_COMPILER == 'msvc' and v.MSVC_VERSION >= 8) or (v.MSVC_COMPILER == 'wsdk' and v.MSVC_VERSION >= 6): + v.CC_TGT_F = ['/FC'] + v.CC_TGT_F + v.CXX_TGT_F = ['/FC'] + v.CXX_TGT_F + + v.CPPPATH_ST = '/I%s' # template for adding include paths + + v.AR_TGT_F = v.CCLNK_TGT_F = v.CXXLNK_TGT_F = '/OUT:' + + # CRT specific flags + v.CFLAGS_CRT_MULTITHREADED = v.CXXFLAGS_CRT_MULTITHREADED = ['/MT'] + v.CFLAGS_CRT_MULTITHREADED_DLL = v.CXXFLAGS_CRT_MULTITHREADED_DLL = ['/MD'] + + v.CFLAGS_CRT_MULTITHREADED_DBG = v.CXXFLAGS_CRT_MULTITHREADED_DBG = ['/MTd'] + v.CFLAGS_CRT_MULTITHREADED_DLL_DBG = v.CXXFLAGS_CRT_MULTITHREADED_DLL_DBG = ['/MDd'] + + v.LIB_ST = '%s.lib' + v.LIBPATH_ST = '/LIBPATH:%s' + v.STLIB_ST = '%s.lib' + v.STLIBPATH_ST = '/LIBPATH:%s' + if v.MSVC_MANIFEST: - v.append_value('LINKFLAGS',['/MANIFEST']) - v.CFLAGS_cshlib=[] - v.CXXFLAGS_cxxshlib=[] - v.LINKFLAGS_cshlib=v.LINKFLAGS_cxxshlib=['/DLL'] - v.cshlib_PATTERN=v.cxxshlib_PATTERN='%s.dll' - v.implib_PATTERN='%s.lib' - v.IMPLIB_ST='/IMPLIB:%s' - v.LINKFLAGS_cstlib=[] - v.cstlib_PATTERN=v.cxxstlib_PATTERN='%s.lib' - v.cprogram_PATTERN=v.cxxprogram_PATTERN='%s.exe' - v.def_PATTERN='/def:%s' + v.append_value('LINKFLAGS', ['/MANIFEST']) + + v.CFLAGS_cshlib = [] + v.CXXFLAGS_cxxshlib = [] + v.LINKFLAGS_cshlib = v.LINKFLAGS_cxxshlib = ['/DLL'] + v.cshlib_PATTERN = v.cxxshlib_PATTERN = '%s.dll' + v.implib_PATTERN = '%s.lib' + v.IMPLIB_ST = '/IMPLIB:%s' + + v.LINKFLAGS_cstlib = [] + v.cstlib_PATTERN = v.cxxstlib_PATTERN = '%s.lib' + + v.cprogram_PATTERN = v.cxxprogram_PATTERN = '%s.exe' + + v.def_PATTERN = '/def:%s' + + 
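# Editor's sketch, assuming waf's usual uselib convention (CFLAGS_X /
# CXXFLAGS_X are applied to task generators with use='X'); not part of the
# upstream diff:
#
#   def build(bld):
#       bld.program(source='main.c', target='app', use='CRT_MULTITHREADED_DLL')  # adds /MD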
+####################################################################################################### +##### conf above, build below + @after_method('apply_link') -@feature('c','cxx') +@feature('c', 'cxx') def apply_flags_msvc(self): - if self.env.CC_NAME!='msvc'or not getattr(self,'link_task',None): + """ + Add additional flags implied by msvc, such as subsystems and pdb files:: + + def build(bld): + bld.stlib(source='main.c', target='bar', subsystem='gruik') + """ + if self.env.CC_NAME != 'msvc' or not getattr(self, 'link_task', None): return - is_static=isinstance(self.link_task,ccroot.stlink_task) - subsystem=getattr(self,'subsystem','') + + is_static = isinstance(self.link_task, ccroot.stlink_task) + + subsystem = getattr(self, 'subsystem', '') if subsystem: - subsystem='/subsystem:%s'%subsystem - flags=is_static and'ARFLAGS'or'LINKFLAGS' - self.env.append_value(flags,subsystem) + subsystem = '/subsystem:%s' % subsystem + flags = is_static and 'ARFLAGS' or 'LINKFLAGS' + self.env.append_value(flags, subsystem) + if not is_static: for f in self.env.LINKFLAGS: - d=f.lower() - if d[1:]=='debug': - pdbnode=self.link_task.outputs[0].change_ext('.pdb') + d = f.lower() + if d[1:] in ('debug', 'debug:full', 'debug:fastlink'): + pdbnode = self.link_task.outputs[0].change_ext('.pdb') self.link_task.outputs.append(pdbnode) - if getattr(self,'install_task',None): - self.pdb_install_task=self.add_install_files(install_to=self.install_task.install_to,install_from=pdbnode) + + if getattr(self, 'install_task', None): + self.pdb_install_task = self.add_install_files( + install_to=self.install_task.install_to, install_from=pdbnode) break -@feature('cprogram','cshlib','cxxprogram','cxxshlib') + +@feature('cprogram', 'cshlib', 'cxxprogram', 'cxxshlib') @after_method('apply_link') def apply_manifest(self): - if self.env.CC_NAME=='msvc'and self.env.MSVC_MANIFEST and getattr(self,'link_task',None): - out_node=self.link_task.outputs[0] - man_node=out_node.parent.find_or_declare(out_node.name+'.manifest') + """ + Special linker for MSVC with support for embedding manifests into DLL's + and executables compiled by Visual Studio 2005 or probably later. Without + the manifest file, the binaries are unusable. + See: http://msdn2.microsoft.com/en-us/library/ms235542(VS.80).aspx + """ + if self.env.CC_NAME == 'msvc' and self.env.MSVC_MANIFEST and getattr(self, 'link_task', None): + out_node = self.link_task.outputs[0] + man_node = out_node.parent.find_or_declare(out_node.name + '.manifest') self.link_task.outputs.append(man_node) - self.env.DO_MANIFEST=True -def make_winapp(self,family): - append=self.env.append_unique - append('DEFINES','WINAPI_FAMILY=%s'%family) - append('CXXFLAGS',['/ZW','/TP']) + self.env.DO_MANIFEST = True + +def make_winapp(self, family): + append = self.env.append_unique + append('DEFINES', 'WINAPI_FAMILY=%s' % family) + append('CXXFLAGS', ['/ZW', '/TP']) for lib_path in self.env.LIBPATH: append('CXXFLAGS','/AI%s'%lib_path) + @feature('winphoneapp') @after_method('process_use') @after_method('propagate_uselib_vars') def make_winphone_app(self): - make_winapp(self,'WINAPI_FAMILY_PHONE_APP') - self.env.append_unique('LINKFLAGS',['/NODEFAULTLIB:ole32.lib','PhoneAppModelHost.lib']) + """ + Insert configuration flags for windows phone applications (adds /ZW, /TP...) 
+ """ + make_winapp(self, 'WINAPI_FAMILY_PHONE_APP') + self.env.append_unique('LINKFLAGS', ['/NODEFAULTLIB:ole32.lib', 'PhoneAppModelHost.lib']) + @feature('winapp') @after_method('process_use') @after_method('propagate_uselib_vars') def make_windows_app(self): - make_winapp(self,'WINAPI_FAMILY_DESKTOP_APP') + """ + Insert configuration flags for windows applications (adds /ZW, /TP...) + """ + make_winapp(self, 'WINAPI_FAMILY_DESKTOP_APP') diff -Nru lilv-0.24.4~dfsg0/waflib/Tools/nasm.py lilv-0.24.6/waflib/Tools/nasm.py --- lilv-0.24.4~dfsg0/waflib/Tools/nasm.py 2018-06-27 05:22:05.000000000 +0000 +++ lilv-0.24.6/waflib/Tools/nasm.py 2019-10-19 17:59:11.000000000 +0000 @@ -1,16 +1,31 @@ -#! /usr/bin/env python +#!/usr/bin/env python # encoding: utf-8 -# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file +# Thomas Nagy, 2008-2018 (ita) + +""" +Nasm tool (asm processing) +""" import os -import waflib.Tools.asm +import waflib.Tools.asm # leave this from waflib.TaskGen import feature + @feature('asm') def apply_nasm_vars(self): - self.env.append_value('ASFLAGS',self.to_list(getattr(self,'nasm_flags',[]))) + """provided for compatibility""" + self.env.append_value('ASFLAGS', self.to_list(getattr(self, 'nasm_flags', []))) + def configure(conf): - conf.find_program(['nasm','yasm'],var='AS') - conf.env.AS_TGT_F=['-o'] - conf.env.ASLNK_TGT_F=['-o'] + """ + Detect nasm/yasm and set the variable *AS* + """ + conf.find_program(['nasm', 'yasm'], var='AS') + conf.env.AS_TGT_F = ['-o'] + conf.env.ASLNK_TGT_F = ['-o'] conf.load('asm') - conf.env.ASMPATH_ST='-I%s'+os.sep + conf.env.ASMPATH_ST = '-I%s' + os.sep + txt = conf.cmd_and_log(conf.env.AS + ['--version']) + if 'yasm' in txt.lower(): + conf.env.ASM_NAME = 'yasm' + else: + conf.env.ASM_NAME = 'nasm' diff -Nru lilv-0.24.4~dfsg0/waflib/Tools/nobuild.py lilv-0.24.6/waflib/Tools/nobuild.py --- lilv-0.24.4~dfsg0/waflib/Tools/nobuild.py 2018-06-22 09:25:51.000000000 +0000 +++ lilv-0.24.6/waflib/Tools/nobuild.py 2019-06-06 20:19:08.000000000 +0000 @@ -1,11 +1,24 @@ #! /usr/bin/env python # encoding: utf-8 -# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file +# Thomas Nagy, 2015 (ita) + +""" +Override the build commands to write empty files. +This is useful for profiling and evaluating the Python overhead. + +To use:: + + def build(bld): + ... + bld.load('nobuild') + +""" from waflib import Task def build(bld): def run(self): for x in self.outputs: x.write('') - for(name,cls)in Task.classes.items(): - cls.run=run + for (name, cls) in Task.classes.items(): + cls.run = run + diff -Nru lilv-0.24.4~dfsg0/waflib/Tools/perl.py lilv-0.24.6/waflib/Tools/perl.py --- lilv-0.24.4~dfsg0/waflib/Tools/perl.py 2018-06-27 05:22:05.000000000 +0000 +++ lilv-0.24.6/waflib/Tools/perl.py 2019-06-06 20:19:08.000000000 +0000 @@ -1,85 +1,156 @@ -#! /usr/bin/env python +#!/usr/bin/env python # encoding: utf-8 -# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file +# andersg at 0x63.nu 2007 +# Thomas Nagy 2016-2018 (ita) + +""" +Support for Perl extensions. 
A C/C++ compiler is required:: + + def options(opt): + opt.load('compiler_c perl') + def configure(conf): + conf.load('compiler_c perl') + conf.check_perl_version((5,6,0)) + conf.check_perl_ext_devel() + conf.check_perl_module('Cairo') + conf.check_perl_module('Devel::PPPort 4.89') + def build(bld): + bld( + features = 'c cshlib perlext', + source = 'Mytest.xs', + target = 'Mytest', + install_path = '${ARCHDIR_PERL}/auto') + bld.install_files('${ARCHDIR_PERL}', 'Mytest.pm') +""" import os -from waflib import Task,Options,Utils,Errors +from waflib import Task, Options, Utils, Errors from waflib.Configure import conf -from waflib.TaskGen import extension,feature,before_method -@before_method('apply_incpaths','apply_link','propagate_uselib_vars') +from waflib.TaskGen import extension, feature, before_method + +@before_method('apply_incpaths', 'apply_link', 'propagate_uselib_vars') @feature('perlext') def init_perlext(self): - self.uselib=self.to_list(getattr(self,'uselib',[])) - if not'PERLEXT'in self.uselib: + """ + Change the values of *cshlib_PATTERN* and *cxxshlib_PATTERN* to remove the + *lib* prefix from library names. + """ + self.uselib = self.to_list(getattr(self, 'uselib', [])) + if not 'PERLEXT' in self.uselib: self.uselib.append('PERLEXT') - self.env.cshlib_PATTERN=self.env.cxxshlib_PATTERN=self.env.perlext_PATTERN + self.env.cshlib_PATTERN = self.env.cxxshlib_PATTERN = self.env.perlext_PATTERN + @extension('.xs') -def xsubpp_file(self,node): - outnode=node.change_ext('.c') - self.create_task('xsubpp',node,outnode) +def xsubpp_file(self, node): + """ + Create :py:class:`waflib.Tools.perl.xsubpp` tasks to process *.xs* files + """ + outnode = node.change_ext('.c') + self.create_task('xsubpp', node, outnode) self.source.append(outnode) + class xsubpp(Task.Task): - run_str='${PERL} ${XSUBPP} -noprototypes -typemap ${EXTUTILS_TYPEMAP} ${SRC} > ${TGT}' - color='BLUE' - ext_out=['.h'] + """ + Process *.xs* files + """ + run_str = '${PERL} ${XSUBPP} -noprototypes -typemap ${EXTUTILS_TYPEMAP} ${SRC} > ${TGT}' + color = 'BLUE' + ext_out = ['.h'] + @conf -def check_perl_version(self,minver=None): - res=True +def check_perl_version(self, minver=None): + """ + Check if Perl is installed, and set the variable PERL. 
+ minver is supposed to be a tuple + """ + res = True if minver: - cver='.'.join(map(str,minver)) + cver = '.'.join(map(str,minver)) else: - cver='' - self.start_msg('Checking for minimum perl version %s'%cver) - perl=self.find_program('perl',var='PERL',value=getattr(Options.options,'perlbinary',None)) - version=self.cmd_and_log(perl+["-e",'printf \"%vd\", $^V']) + cver = '' + + self.start_msg('Checking for minimum perl version %s' % cver) + + perl = self.find_program('perl', var='PERL', value=getattr(Options.options, 'perlbinary', None)) + version = self.cmd_and_log(perl + ["-e", 'printf \"%vd\", $^V']) if not version: - res=False - version="Unknown" + res = False + version = "Unknown" elif not minver is None: - ver=tuple(map(int,version.split("."))) - if ver #ifdef __cplusplus extern "C" { @@ -24,324 +41,509 @@ return 0; } ''' -INST=''' +""" +Piece of C/C++ code used in :py:func:`waflib.Tools.python.check_python_headers` +""" + +INST = ''' import sys, py_compile py_compile.compile(sys.argv[1], sys.argv[2], sys.argv[3], True) ''' -DISTUTILS_IMP=['from distutils.sysconfig import get_config_var, get_python_lib'] +""" +Piece of Python code used in :py:class:`waflib.Tools.python.pyo` and :py:class:`waflib.Tools.python.pyc` for byte-compiling python files +""" + +DISTUTILS_IMP = ['from distutils.sysconfig import get_config_var, get_python_lib'] + @before_method('process_source') @feature('py') def feature_py(self): - self.install_path=getattr(self,'install_path','${PYTHONDIR}') - install_from=getattr(self,'install_from',None) - if install_from and not isinstance(install_from,Node.Node): - install_from=self.path.find_dir(install_from) - self.install_from=install_from - ver=self.env.PYTHON_VERSION + """ + Create tasks to byte-compile .py files and install them, if requested + """ + self.install_path = getattr(self, 'install_path', '${PYTHONDIR}') + install_from = getattr(self, 'install_from', None) + if install_from and not isinstance(install_from, Node.Node): + install_from = self.path.find_dir(install_from) + self.install_from = install_from + + ver = self.env.PYTHON_VERSION if not ver: self.bld.fatal('Installing python files requires PYTHON_VERSION, try conf.check_python_version') - if int(ver.replace('.',''))>31: - self.install_32=True + + if int(ver.replace('.', '')) > 31: + self.install_32 = True + @extension('.py') -def process_py(self,node): - assert(hasattr(self,'install_path')),'add features="py"' +def process_py(self, node): + """ + Add signature of .py file, so it will be byte-compiled when necessary + """ + assert(hasattr(self, 'install_path')), 'add features="py" for target "%s" in "%s/wscript".' % (self.target, self.path.nice_path()) + self.install_from = getattr(self, 'install_from', None) + relative_trick = getattr(self, 'relative_trick', True) + if self.install_from: + assert isinstance(self.install_from, Node.Node), \ + 'add features="py" for target "%s" in "%s/wscript" (%s).' 
% (self.target, self.path.nice_path(), type(self.install_from)) + + # where to install the python file if self.install_path: if self.install_from: - self.add_install_files(install_to=self.install_path,install_from=node,cwd=self.install_from,relative_trick=True) + self.add_install_files(install_to=self.install_path, install_from=node, cwd=self.install_from, relative_trick=relative_trick) else: - self.add_install_files(install_to=self.install_path,install_from=node,relative_trick=True) - lst=[] + self.add_install_files(install_to=self.install_path, install_from=node, relative_trick=relative_trick) + + lst = [] if self.env.PYC: lst.append('pyc') if self.env.PYO: lst.append('pyo') + if self.install_path: if self.install_from: - pyd=Utils.subst_vars("%s/%s"%(self.install_path,node.path_from(self.install_from)),self.env) + target_dir = node.path_from(self.install_from) if relative_trick else node.name + pyd = Utils.subst_vars("%s/%s" % (self.install_path, target_dir), self.env) else: - pyd=Utils.subst_vars("%s/%s"%(self.install_path,node.path_from(self.path)),self.env) + target_dir = node.path_from(self.path) if relative_trick else node.name + pyd = Utils.subst_vars("%s/%s" % (self.install_path, target_dir), self.env) else: - pyd=node.abspath() + pyd = node.abspath() + for ext in lst: if self.env.PYTAG and not self.env.NOPYCACHE: - name=node.name[:-3] - pyobj=node.parent.get_bld().make_node('__pycache__').make_node("%s.%s.%s"%(name,self.env.PYTAG,ext)) + # __pycache__ installation for python 3.2 - PEP 3147 + name = node.name[:-3] + pyobj = node.parent.get_bld().make_node('__pycache__').make_node("%s.%s.%s" % (name, self.env.PYTAG, ext)) pyobj.parent.mkdir() else: - pyobj=node.change_ext(".%s"%ext) - tsk=self.create_task(ext,node,pyobj) - tsk.pyd=pyd + pyobj = node.change_ext(".%s" % ext) + + tsk = self.create_task(ext, node, pyobj) + tsk.pyd = pyd + if self.install_path: - self.add_install_files(install_to=os.path.dirname(pyd),install_from=pyobj,cwd=node.parent.get_bld(),relative_trick=True) + self.add_install_files(install_to=os.path.dirname(pyd), install_from=pyobj, cwd=node.parent.get_bld(), relative_trick=relative_trick) + class pyc(Task.Task): - color='PINK' + """ + Byte-compiling python files + """ + color = 'PINK' def __str__(self): - node=self.outputs[0] + node = self.outputs[0] return node.path_from(node.ctx.launch_node()) def run(self): - cmd=[Utils.subst_vars('${PYTHON}',self.env),'-c',INST,self.inputs[0].abspath(),self.outputs[0].abspath(),self.pyd] - ret=self.generator.bld.exec_command(cmd) + cmd = [Utils.subst_vars('${PYTHON}', self.env), '-c', INST, self.inputs[0].abspath(), self.outputs[0].abspath(), self.pyd] + ret = self.generator.bld.exec_command(cmd) return ret + class pyo(Task.Task): - color='PINK' + """ + Byte-compiling python files + """ + color = 'PINK' def __str__(self): - node=self.outputs[0] + node = self.outputs[0] return node.path_from(node.ctx.launch_node()) def run(self): - cmd=[Utils.subst_vars('${PYTHON}',self.env),Utils.subst_vars('${PYFLAGS_OPT}',self.env),'-c',INST,self.inputs[0].abspath(),self.outputs[0].abspath(),self.pyd] - ret=self.generator.bld.exec_command(cmd) + cmd = [Utils.subst_vars('${PYTHON}', self.env), Utils.subst_vars('${PYFLAGS_OPT}', self.env), '-c', INST, self.inputs[0].abspath(), self.outputs[0].abspath(), self.pyd] + ret = self.generator.bld.exec_command(cmd) return ret + @feature('pyext') -@before_method('propagate_uselib_vars','apply_link') +@before_method('propagate_uselib_vars', 'apply_link') @after_method('apply_bundle') def 
init_pyext(self): - self.uselib=self.to_list(getattr(self,'uselib',[])) - if not'PYEXT'in self.uselib: + """ + Change the values of *cshlib_PATTERN* and *cxxshlib_PATTERN* to remove the + *lib* prefix from library names. + """ + self.uselib = self.to_list(getattr(self, 'uselib', [])) + if not 'PYEXT' in self.uselib: self.uselib.append('PYEXT') - self.env.cshlib_PATTERN=self.env.cxxshlib_PATTERN=self.env.macbundle_PATTERN=self.env.pyext_PATTERN - self.env.fcshlib_PATTERN=self.env.dshlib_PATTERN=self.env.pyext_PATTERN + # override shlib_PATTERN set by the osx module + self.env.cshlib_PATTERN = self.env.cxxshlib_PATTERN = self.env.macbundle_PATTERN = self.env.pyext_PATTERN + self.env.fcshlib_PATTERN = self.env.dshlib_PATTERN = self.env.pyext_PATTERN + try: if not self.install_path: return except AttributeError: - self.install_path='${PYTHONARCHDIR}' + self.install_path = '${PYTHONARCHDIR}' + @feature('pyext') -@before_method('apply_link','apply_bundle') +@before_method('apply_link', 'apply_bundle') def set_bundle(self): - if Utils.unversioned_sys_platform()=='darwin': - self.mac_bundle=True + """Mac-specific pyext extension that enables bundles from c_osx.py""" + if Utils.unversioned_sys_platform() == 'darwin': + self.mac_bundle = True + @before_method('propagate_uselib_vars') @feature('pyembed') def init_pyembed(self): - self.uselib=self.to_list(getattr(self,'uselib',[])) - if not'PYEMBED'in self.uselib: + """ + Add the PYEMBED variable. + """ + self.uselib = self.to_list(getattr(self, 'uselib', [])) + if not 'PYEMBED' in self.uselib: self.uselib.append('PYEMBED') + @conf -def get_python_variables(self,variables,imports=None): +def get_python_variables(self, variables, imports=None): + """ + Spawn a new python process to dump configuration variables + + :param variables: variables to print + :type variables: list of string + :param imports: one import by element + :type imports: list of string + :return: the variable values + :rtype: list of string + """ if not imports: try: - imports=self.python_imports + imports = self.python_imports except AttributeError: - imports=DISTUTILS_IMP - program=list(imports) + imports = DISTUTILS_IMP + + program = list(imports) # copy program.append('') for v in variables: - program.append("print(repr(%s))"%v) - os_env=dict(os.environ) + program.append("print(repr(%s))" % v) + os_env = dict(os.environ) try: - del os_env['MACOSX_DEPLOYMENT_TARGET'] + del os_env['MACOSX_DEPLOYMENT_TARGET'] # see comments in the OSX tool except KeyError: pass + try: - out=self.cmd_and_log(self.env.PYTHON+['-c','\n'.join(program)],env=os_env) + out = self.cmd_and_log(self.env.PYTHON + ['-c', '\n'.join(program)], env=os_env) except Errors.WafError: self.fatal('The distutils module is unusable: install "python-devel"?') self.to_log(out) - return_values=[] + return_values = [] for s in out.splitlines(): - s=s.strip() + s = s.strip() if not s: continue - if s=='None': + if s == 'None': return_values.append(None) - elif(s[0]=="'"and s[-1]=="'")or(s[0]=='"'and s[-1]=='"'): + elif (s[0] == "'" and s[-1] == "'") or (s[0] == '"' and s[-1] == '"'): return_values.append(eval(s)) elif s[0].isdigit(): return_values.append(int(s)) - else:break + else: break return return_values + @conf -def test_pyembed(self,mode,msg='Testing pyembed configuration'): - self.check(header_name='Python.h',define_name='HAVE_PYEMBED',msg=msg,fragment=FRAG,errmsg='Could not build a python embedded interpreter',features='%s %sprogram pyembed'%(mode,mode)) +def test_pyembed(self, mode, msg='Testing pyembed 
configuration'): + self.check(header_name='Python.h', define_name='HAVE_PYEMBED', msg=msg, + fragment=FRAG, errmsg='Could not build a python embedded interpreter', + features='%s %sprogram pyembed' % (mode, mode)) + @conf -def test_pyext(self,mode,msg='Testing pyext configuration'): - self.check(header_name='Python.h',define_name='HAVE_PYEXT',msg=msg,fragment=FRAG,errmsg='Could not build python extensions',features='%s %sshlib pyext'%(mode,mode)) +def test_pyext(self, mode, msg='Testing pyext configuration'): + self.check(header_name='Python.h', define_name='HAVE_PYEXT', msg=msg, + fragment=FRAG, errmsg='Could not build python extensions', + features='%s %sshlib pyext' % (mode, mode)) + @conf -def python_cross_compile(self,features='pyembed pyext'): - features=Utils.to_list(features) - if not('PYTHON_LDFLAGS'in self.environ or'PYTHON_PYEXT_LDFLAGS'in self.environ or'PYTHON_PYEMBED_LDFLAGS'in self.environ): +def python_cross_compile(self, features='pyembed pyext'): + """ + For cross-compilation purposes, it is possible to bypass the normal detection and set the flags that you want: + PYTHON_VERSION='3.4' PYTAG='cpython34' pyext_PATTERN="%s.so" PYTHON_LDFLAGS='-lpthread -ldl' waf configure + + The following variables are used: + PYTHON_VERSION required + PYTAG required + PYTHON_LDFLAGS required + pyext_PATTERN required + PYTHON_PYEXT_LDFLAGS + PYTHON_PYEMBED_LDFLAGS + """ + features = Utils.to_list(features) + if not ('PYTHON_LDFLAGS' in self.environ or 'PYTHON_PYEXT_LDFLAGS' in self.environ or 'PYTHON_PYEMBED_LDFLAGS' in self.environ): return False - for x in'PYTHON_VERSION PYTAG pyext_PATTERN'.split(): + + for x in 'PYTHON_VERSION PYTAG pyext_PATTERN'.split(): if not x in self.environ: - self.fatal('Please set %s in the os environment'%x) + self.fatal('Please set %s in the os environment' % x) else: - self.env[x]=self.environ[x] - xx=self.env.CXX_NAME and'cxx'or'c' - if'pyext'in features: - flags=self.environ.get('PYTHON_PYEXT_LDFLAGS',self.environ.get('PYTHON_LDFLAGS')) + self.env[x] = self.environ[x] + + xx = self.env.CXX_NAME and 'cxx' or 'c' + if 'pyext' in features: + flags = self.environ.get('PYTHON_PYEXT_LDFLAGS', self.environ.get('PYTHON_LDFLAGS')) if flags is None: self.fatal('No flags provided through PYTHON_PYEXT_LDFLAGS as required') else: - self.parse_flags(flags,'PYEXT') + self.parse_flags(flags, 'PYEXT') self.test_pyext(xx) - if'pyembed'in features: - flags=self.environ.get('PYTHON_PYEMBED_LDFLAGS',self.environ.get('PYTHON_LDFLAGS')) + if 'pyembed' in features: + flags = self.environ.get('PYTHON_PYEMBED_LDFLAGS', self.environ.get('PYTHON_LDFLAGS')) if flags is None: self.fatal('No flags provided through PYTHON_PYEMBED_LDFLAGS as required') else: - self.parse_flags(flags,'PYEMBED') + self.parse_flags(flags, 'PYEMBED') self.test_pyembed(xx) return True + @conf -def check_python_headers(conf,features='pyembed pyext'): - features=Utils.to_list(features) - assert('pyembed'in features)or('pyext'in features),"check_python_headers features must include 'pyembed' and/or 'pyext'" - env=conf.env +def check_python_headers(conf, features='pyembed pyext'): + """ + Check for headers and libraries necessary to extend or embed python by using the module *distutils*. 
+ On success the environment variables xxx_PYEXT and xxx_PYEMBED are added: + + * PYEXT: for compiling python extensions + * PYEMBED: for embedding a python interpreter + """ + features = Utils.to_list(features) + assert ('pyembed' in features) or ('pyext' in features), "check_python_headers features must include 'pyembed' and/or 'pyext'" + env = conf.env if not env.CC_NAME and not env.CXX_NAME: conf.fatal('load a compiler first (gcc, g++, ..)') + + # bypass all the code below for cross-compilation if conf.python_cross_compile(features): return + if not env.PYTHON_VERSION: conf.check_python_version() - pybin=env.PYTHON + + pybin = env.PYTHON if not pybin: conf.fatal('Could not find the python executable') - v='prefix SO LDFLAGS LIBDIR LIBPL INCLUDEPY Py_ENABLE_SHARED MACOSX_DEPLOYMENT_TARGET LDSHARED CFLAGS LDVERSION'.split() + + # so we actually do all this for compatibility reasons and for obtaining pyext_PATTERN below + v = 'prefix SO LDFLAGS LIBDIR LIBPL INCLUDEPY Py_ENABLE_SHARED MACOSX_DEPLOYMENT_TARGET LDSHARED CFLAGS LDVERSION'.split() try: - lst=conf.get_python_variables(["get_config_var('%s') or ''"%x for x in v]) + lst = conf.get_python_variables(["get_config_var('%s') or ''" % x for x in v]) except RuntimeError: conf.fatal("Python development headers not found (-v for details).") - vals=['%s = %r'%(x,y)for(x,y)in zip(v,lst)] - conf.to_log("Configuration returned from %r:\n%s\n"%(pybin,'\n'.join(vals))) - dct=dict(zip(v,lst)) - x='MACOSX_DEPLOYMENT_TARGET' + + vals = ['%s = %r' % (x, y) for (x, y) in zip(v, lst)] + conf.to_log("Configuration returned from %r:\n%s\n" % (pybin, '\n'.join(vals))) + + dct = dict(zip(v, lst)) + x = 'MACOSX_DEPLOYMENT_TARGET' if dct[x]: - env[x]=conf.environ[x]=dct[x] - env.pyext_PATTERN='%s'+dct['SO'] - num='.'.join(env.PYTHON_VERSION.split('.')[:2]) - conf.find_program([''.join(pybin)+'-config','python%s-config'%num,'python-config-%s'%num,'python%sm-config'%num],var='PYTHON_CONFIG',msg="python-config",mandatory=False) + env[x] = conf.environ[x] = dct[x] + env.pyext_PATTERN = '%s' + dct['SO'] # not a mistake + + + # Try to get pythonX.Y-config + num = '.'.join(env.PYTHON_VERSION.split('.')[:2]) + conf.find_program([''.join(pybin) + '-config', 'python%s-config' % num, 'python-config-%s' % num, 'python%sm-config' % num], var='PYTHON_CONFIG', msg="python-config", mandatory=False) + if env.PYTHON_CONFIG: - all_flags=[['--cflags','--libs','--ldflags']] - if sys.hexversion<0x2070000: - all_flags=[[k]for k in all_flags[0]] - xx=env.CXX_NAME and'cxx'or'c' - if'pyembed'in features: + # check python-config output only once + if conf.env.HAVE_PYTHON_H: + return + + # python2.6-config requires 3 runs + all_flags = [['--cflags', '--libs', '--ldflags']] + if sys.hexversion < 0x2070000: + all_flags = [[k] for k in all_flags[0]] + + xx = env.CXX_NAME and 'cxx' or 'c' + + if 'pyembed' in features: for flags in all_flags: - conf.check_cfg(msg='Asking python-config for pyembed %r flags'%' '.join(flags),path=env.PYTHON_CONFIG,package='',uselib_store='PYEMBED',args=flags) + # Python 3.8 has different flags for pyembed, needs --embed + embedflags = flags + ['--embed'] + try: + conf.check_cfg(msg='Asking python-config for pyembed %r flags' % ' '.join(embedflags), path=env.PYTHON_CONFIG, package='', uselib_store='PYEMBED', args=embedflags) + except conf.errors.ConfigurationError: + # However Python < 3.8 doesn't accept --embed, so we need a fallback + conf.check_cfg(msg='Asking python-config for pyembed %r flags' % ' '.join(flags), path=env.PYTHON_CONFIG, package='', 
uselib_store='PYEMBED', args=flags) + try: conf.test_pyembed(xx) except conf.errors.ConfigurationError: - if dct['Py_ENABLE_SHARED']and dct['LIBDIR']: - env.append_unique('LIBPATH_PYEMBED',[dct['LIBDIR']]) + # python bug 7352 + if dct['Py_ENABLE_SHARED'] and dct['LIBDIR']: + env.append_unique('LIBPATH_PYEMBED', [dct['LIBDIR']]) conf.test_pyembed(xx) else: raise - if'pyext'in features: + + if 'pyext' in features: for flags in all_flags: - conf.check_cfg(msg='Asking python-config for pyext %r flags'%' '.join(flags),path=env.PYTHON_CONFIG,package='',uselib_store='PYEXT',args=flags) + conf.check_cfg(msg='Asking python-config for pyext %r flags' % ' '.join(flags), path=env.PYTHON_CONFIG, package='', uselib_store='PYEXT', args=flags) + try: conf.test_pyext(xx) except conf.errors.ConfigurationError: - if dct['Py_ENABLE_SHARED']and dct['LIBDIR']: - env.append_unique('LIBPATH_PYEXT',[dct['LIBDIR']]) + # python bug 7352 + if dct['Py_ENABLE_SHARED'] and dct['LIBDIR']: + env.append_unique('LIBPATH_PYEXT', [dct['LIBDIR']]) conf.test_pyext(xx) else: raise - conf.define('HAVE_PYTHON_H',1) + + conf.define('HAVE_PYTHON_H', 1) return - all_flags=dct['LDFLAGS']+' '+dct['CFLAGS'] - conf.parse_flags(all_flags,'PYEMBED') - all_flags=dct['LDFLAGS']+' '+dct['LDSHARED']+' '+dct['CFLAGS'] - conf.parse_flags(all_flags,'PYEXT') - result=None + + # No python-config, do something else on windows systems + all_flags = dct['LDFLAGS'] + ' ' + dct['CFLAGS'] + conf.parse_flags(all_flags, 'PYEMBED') + + all_flags = dct['LDFLAGS'] + ' ' + dct['LDSHARED'] + ' ' + dct['CFLAGS'] + conf.parse_flags(all_flags, 'PYEXT') + + result = None if not dct["LDVERSION"]: - dct["LDVERSION"]=env.PYTHON_VERSION - for name in('python'+dct['LDVERSION'],'python'+env.PYTHON_VERSION+'m','python'+env.PYTHON_VERSION.replace('.','')): + dct["LDVERSION"] = env.PYTHON_VERSION + + # further simplification will be complicated + for name in ('python' + dct['LDVERSION'], 'python' + env.PYTHON_VERSION + 'm', 'python' + env.PYTHON_VERSION.replace('.', '')): + + # LIBPATH_PYEMBED is already set; see if it works. 
if not result and env.LIBPATH_PYEMBED: - path=env.LIBPATH_PYEMBED - conf.to_log("\n\n# Trying default LIBPATH_PYEMBED: %r\n"%path) - result=conf.check(lib=name,uselib='PYEMBED',libpath=path,mandatory=False,msg='Checking for library %s in LIBPATH_PYEMBED'%name) + path = env.LIBPATH_PYEMBED + conf.to_log("\n\n# Trying default LIBPATH_PYEMBED: %r\n" % path) + result = conf.check(lib=name, uselib='PYEMBED', libpath=path, mandatory=False, msg='Checking for library %s in LIBPATH_PYEMBED' % name) + if not result and dct['LIBDIR']: - path=[dct['LIBDIR']] - conf.to_log("\n\n# try again with -L$python_LIBDIR: %r\n"%path) - result=conf.check(lib=name,uselib='PYEMBED',libpath=path,mandatory=False,msg='Checking for library %s in LIBDIR'%name) + path = [dct['LIBDIR']] + conf.to_log("\n\n# try again with -L$python_LIBDIR: %r\n" % path) + result = conf.check(lib=name, uselib='PYEMBED', libpath=path, mandatory=False, msg='Checking for library %s in LIBDIR' % name) + if not result and dct['LIBPL']: - path=[dct['LIBPL']] + path = [dct['LIBPL']] conf.to_log("\n\n# try again with -L$python_LIBPL (some systems don't install the python library in $prefix/lib)\n") - result=conf.check(lib=name,uselib='PYEMBED',libpath=path,mandatory=False,msg='Checking for library %s in python_LIBPL'%name) + result = conf.check(lib=name, uselib='PYEMBED', libpath=path, mandatory=False, msg='Checking for library %s in python_LIBPL' % name) + if not result: - path=[os.path.join(dct['prefix'],"libs")] + path = [os.path.join(dct['prefix'], "libs")] conf.to_log("\n\n# try again with -L$prefix/libs, and pythonXY name rather than pythonX.Y (win32)\n") - result=conf.check(lib=name,uselib='PYEMBED',libpath=path,mandatory=False,msg='Checking for library %s in $prefix/libs'%name) + result = conf.check(lib=name, uselib='PYEMBED', libpath=path, mandatory=False, msg='Checking for library %s in $prefix/libs' % name) + if result: - break + break # do not forget to set LIBPATH_PYEMBED + if result: - env.LIBPATH_PYEMBED=path - env.append_value('LIB_PYEMBED',[name]) + env.LIBPATH_PYEMBED = path + env.append_value('LIB_PYEMBED', [name]) else: conf.to_log("\n\n### LIB NOT FOUND\n") + + # under certain conditions, python extensions must link to + # python libraries, not just python embedding programs. 
if Utils.is_win32 or dct['Py_ENABLE_SHARED']: - env.LIBPATH_PYEXT=env.LIBPATH_PYEMBED - env.LIB_PYEXT=env.LIB_PYEMBED - conf.to_log("Include path for Python extensions (found via distutils module): %r\n"%(dct['INCLUDEPY'],)) - env.INCLUDES_PYEXT=[dct['INCLUDEPY']] - env.INCLUDES_PYEMBED=[dct['INCLUDEPY']] - if env.CC_NAME=='gcc': - env.append_value('CFLAGS_PYEMBED',['-fno-strict-aliasing']) - env.append_value('CFLAGS_PYEXT',['-fno-strict-aliasing']) - if env.CXX_NAME=='gcc': - env.append_value('CXXFLAGS_PYEMBED',['-fno-strict-aliasing']) - env.append_value('CXXFLAGS_PYEXT',['-fno-strict-aliasing']) - if env.CC_NAME=="msvc": + env.LIBPATH_PYEXT = env.LIBPATH_PYEMBED + env.LIB_PYEXT = env.LIB_PYEMBED + + conf.to_log("Include path for Python extensions (found via distutils module): %r\n" % (dct['INCLUDEPY'],)) + env.INCLUDES_PYEXT = [dct['INCLUDEPY']] + env.INCLUDES_PYEMBED = [dct['INCLUDEPY']] + + # Code using the Python API needs to be compiled with -fno-strict-aliasing + if env.CC_NAME == 'gcc': + env.append_unique('CFLAGS_PYEMBED', ['-fno-strict-aliasing']) + env.append_unique('CFLAGS_PYEXT', ['-fno-strict-aliasing']) + if env.CXX_NAME == 'gcc': + env.append_unique('CXXFLAGS_PYEMBED', ['-fno-strict-aliasing']) + env.append_unique('CXXFLAGS_PYEXT', ['-fno-strict-aliasing']) + + if env.CC_NAME == "msvc": from distutils.msvccompiler import MSVCCompiler - dist_compiler=MSVCCompiler() + dist_compiler = MSVCCompiler() dist_compiler.initialize() - env.append_value('CFLAGS_PYEXT',dist_compiler.compile_options) - env.append_value('CXXFLAGS_PYEXT',dist_compiler.compile_options) - env.append_value('LINKFLAGS_PYEXT',dist_compiler.ldflags_shared) - conf.check(header_name='Python.h',define_name='HAVE_PYTHON_H',uselib='PYEMBED',fragment=FRAG,errmsg='Distutils not installed? Broken python installation? Get python-config now!') + env.append_value('CFLAGS_PYEXT', dist_compiler.compile_options) + env.append_value('CXXFLAGS_PYEXT', dist_compiler.compile_options) + env.append_value('LINKFLAGS_PYEXT', dist_compiler.ldflags_shared) + + # See if it compiles + conf.check(header_name='Python.h', define_name='HAVE_PYTHON_H', uselib='PYEMBED', fragment=FRAG, errmsg='Distutils not installed? Broken python installation? Get python-config now!') + @conf -def check_python_version(conf,minver=None): - assert minver is None or isinstance(minver,tuple) - pybin=conf.env.PYTHON +def check_python_version(conf, minver=None): + """ + Check if the python interpreter is found matching a given minimum version. + minver should be a tuple, eg. to check for python >= 2.4.2 pass (2,4,2) as minver. + + If successful, PYTHON_VERSION is defined as 'MAJOR.MINOR' (eg. '2.4') + of the actual python version found, and PYTHONDIR and PYTHONARCHDIR + are defined, pointing to the site-packages directories appropriate for + this python version, where modules/packages/extensions should be + installed. 
+ + :param minver: minimum version + :type minver: tuple of int + """ + assert minver is None or isinstance(minver, tuple) + pybin = conf.env.PYTHON if not pybin: conf.fatal('could not find the python executable') - cmd=pybin+['-c','import sys\nfor x in sys.version_info: print(str(x))'] - Logs.debug('python: Running python command %r',cmd) - lines=conf.cmd_and_log(cmd).split() - assert len(lines)==5,"found %r lines, expected 5: %r"%(len(lines),lines) - pyver_tuple=(int(lines[0]),int(lines[1]),int(lines[2]),lines[3],int(lines[4])) - result=(minver is None)or(pyver_tuple>=minver) + + # Get python version string + cmd = pybin + ['-c', 'import sys\nfor x in sys.version_info: print(str(x))'] + Logs.debug('python: Running python command %r', cmd) + lines = conf.cmd_and_log(cmd).split() + assert len(lines) == 5, "found %r lines, expected 5: %r" % (len(lines), lines) + pyver_tuple = (int(lines[0]), int(lines[1]), int(lines[2]), lines[3], int(lines[4])) + + # Compare python version with the minimum required + result = (minver is None) or (pyver_tuple >= minver) + if result: - pyver='.'.join([str(x)for x in pyver_tuple[:2]]) - conf.env.PYTHON_VERSION=pyver - if'PYTHONDIR'in conf.env: - pydir=conf.env.PYTHONDIR - elif'PYTHONDIR'in conf.environ: - pydir=conf.environ['PYTHONDIR'] + # define useful environment variables + pyver = '.'.join([str(x) for x in pyver_tuple[:2]]) + conf.env.PYTHON_VERSION = pyver + + if 'PYTHONDIR' in conf.env: + # Check if --pythondir was specified + pydir = conf.env.PYTHONDIR + elif 'PYTHONDIR' in conf.environ: + # Check environment for PYTHONDIR + pydir = conf.environ['PYTHONDIR'] else: + # Finally, try to guess if Utils.is_win32: - (python_LIBDEST,pydir)=conf.get_python_variables(["get_config_var('LIBDEST') or ''","get_python_lib(standard_lib=0) or ''"]) + (python_LIBDEST, pydir) = conf.get_python_variables( + ["get_config_var('LIBDEST') or ''", + "get_python_lib(standard_lib=0) or ''"]) else: - python_LIBDEST=None - (pydir,)=conf.get_python_variables(["get_python_lib(standard_lib=0, prefix=%r) or ''"%conf.env.PREFIX]) + python_LIBDEST = None + (pydir,) = conf.get_python_variables( ["get_python_lib(standard_lib=0, prefix=%r) or ''" % conf.env.PREFIX]) if python_LIBDEST is None: if conf.env.LIBDIR: - python_LIBDEST=os.path.join(conf.env.LIBDIR,'python'+pyver) + python_LIBDEST = os.path.join(conf.env.LIBDIR, 'python' + pyver) else: - python_LIBDEST=os.path.join(conf.env.PREFIX,'lib','python'+pyver) - if'PYTHONARCHDIR'in conf.env: - pyarchdir=conf.env.PYTHONARCHDIR - elif'PYTHONARCHDIR'in conf.environ: - pyarchdir=conf.environ['PYTHONARCHDIR'] + python_LIBDEST = os.path.join(conf.env.PREFIX, 'lib', 'python' + pyver) + + if 'PYTHONARCHDIR' in conf.env: + # Check if --pythonarchdir was specified + pyarchdir = conf.env.PYTHONARCHDIR + elif 'PYTHONARCHDIR' in conf.environ: + # Check environment for PYTHONDIR + pyarchdir = conf.environ['PYTHONARCHDIR'] else: - (pyarchdir,)=conf.get_python_variables(["get_python_lib(plat_specific=1, standard_lib=0, prefix=%r) or ''"%conf.env.PREFIX]) + # Finally, try to guess + (pyarchdir, ) = conf.get_python_variables( ["get_python_lib(plat_specific=1, standard_lib=0, prefix=%r) or ''" % conf.env.PREFIX]) if not pyarchdir: - pyarchdir=pydir - if hasattr(conf,'define'): - conf.define('PYTHONDIR',pydir) - conf.define('PYTHONARCHDIR',pyarchdir) - conf.env.PYTHONDIR=pydir - conf.env.PYTHONARCHDIR=pyarchdir - pyver_full='.'.join(map(str,pyver_tuple[:3])) + pyarchdir = pydir + + if hasattr(conf, 'define'): # conf.define is added by the C tool, so may 
not exist + conf.define('PYTHONDIR', pydir) + conf.define('PYTHONARCHDIR', pyarchdir) + + conf.env.PYTHONDIR = pydir + conf.env.PYTHONARCHDIR = pyarchdir + + # Feedback + pyver_full = '.'.join(map(str, pyver_tuple[:3])) if minver is None: - conf.msg('Checking for python version',pyver_full) + conf.msg('Checking for python version', pyver_full) else: - minver_str='.'.join(map(str,minver)) - conf.msg('Checking for python version >= %s'%(minver_str,),pyver_full,color=result and'GREEN'or'YELLOW') + minver_str = '.'.join(map(str, minver)) + conf.msg('Checking for python version >= %s' % (minver_str,), pyver_full, color=result and 'GREEN' or 'YELLOW') + if not result: - conf.fatal('The python version is too old, expecting %r'%(minver,)) -PYTHON_MODULE_TEMPLATE=''' + conf.fatal('The python version is too old, expecting %r' % (minver,)) + +PYTHON_MODULE_TEMPLATE = ''' import %s as current_module version = getattr(current_module, '__version__', None) if version is not None: @@ -349,62 +551,94 @@ else: print('unknown version') ''' + @conf -def check_python_module(conf,module_name,condition=''): - msg="Checking for python module %r"%module_name +def check_python_module(conf, module_name, condition=''): + """ + Check if the selected python interpreter can import the given python module:: + + def configure(conf): + conf.check_python_module('pygccxml') + conf.check_python_module('re', condition="ver > num(2, 0, 4) and ver <= num(3, 0, 0)") + + :param module_name: module + :type module_name: string + """ + msg = "Checking for python module %r" % module_name if condition: - msg='%s (%s)'%(msg,condition) + msg = '%s (%s)' % (msg, condition) conf.start_msg(msg) try: - ret=conf.cmd_and_log(conf.env.PYTHON+['-c',PYTHON_MODULE_TEMPLATE%module_name]) + ret = conf.cmd_and_log(conf.env.PYTHON + ['-c', PYTHON_MODULE_TEMPLATE % module_name]) except Errors.WafError: conf.end_msg(False) - conf.fatal('Could not find the python module %r'%module_name) - ret=ret.strip() + conf.fatal('Could not find the python module %r' % module_name) + + ret = ret.strip() if condition: conf.end_msg(ret) - if ret=='unknown version': - conf.fatal('Could not check the %s version'%module_name) + if ret == 'unknown version': + conf.fatal('Could not check the %s version' % module_name) + from distutils.version import LooseVersion def num(*k): - if isinstance(k[0],int): - return LooseVersion('.'.join([str(x)for x in k])) + if isinstance(k[0], int): + return LooseVersion('.'.join([str(x) for x in k])) else: return LooseVersion(k[0]) - d={'num':num,'ver':LooseVersion(ret)} - ev=eval(condition,{},d) + d = {'num': num, 'ver': LooseVersion(ret)} + ev = eval(condition, {}, d) if not ev: - conf.fatal('The %s version does not satisfy the requirements'%module_name) + conf.fatal('The %s version does not satisfy the requirements' % module_name) else: - if ret=='unknown version': + if ret == 'unknown version': conf.end_msg(True) else: conf.end_msg(ret) + def configure(conf): - v=conf.env - if getattr(Options.options,'pythondir',None): - v.PYTHONDIR=Options.options.pythondir - if getattr(Options.options,'pythonarchdir',None): - v.PYTHONARCHDIR=Options.options.pythonarchdir - if getattr(Options.options,'nopycache',None): + """ + Detect the python interpreter + """ + v = conf.env + if getattr(Options.options, 'pythondir', None): + v.PYTHONDIR = Options.options.pythondir + if getattr(Options.options, 'pythonarchdir', None): + v.PYTHONARCHDIR = Options.options.pythonarchdir + if getattr(Options.options, 'nopycache', None): 
v.NOPYCACHE=Options.options.nopycache + if not v.PYTHON: - v.PYTHON=[getattr(Options.options,'python',None)or sys.executable] - v.PYTHON=Utils.to_list(v.PYTHON) - conf.find_program('python',var='PYTHON') - v.PYFLAGS='' - v.PYFLAGS_OPT='-O' - v.PYC=getattr(Options.options,'pyc',1) - v.PYO=getattr(Options.options,'pyo',1) + v.PYTHON = [getattr(Options.options, 'python', None) or sys.executable] + v.PYTHON = Utils.to_list(v.PYTHON) + conf.find_program('python', var='PYTHON') + + v.PYFLAGS = '' + v.PYFLAGS_OPT = '-O' + + v.PYC = getattr(Options.options, 'pyc', 1) + v.PYO = getattr(Options.options, 'pyo', 1) + try: - v.PYTAG=conf.cmd_and_log(conf.env.PYTHON+['-c',"import imp;print(imp.get_tag())"]).strip() + v.PYTAG = conf.cmd_and_log(conf.env.PYTHON + ['-c', "import imp;print(imp.get_tag())"]).strip() except Errors.WafError: pass + def options(opt): + """ + Add python-specific options + """ pyopt=opt.add_option_group("Python Options") - pyopt.add_option('--nopyc',dest='pyc',action='store_false',default=1,help='Do not install bytecode compiled .pyc files (configuration) [Default:install]') - pyopt.add_option('--nopyo',dest='pyo',action='store_false',default=1,help='Do not install optimised compiled .pyo files (configuration) [Default:install]') - pyopt.add_option('--nopycache',dest='nopycache',action='store_true',help='Do not use __pycache__ directory to install objects [Default:auto]') - pyopt.add_option('--python',dest="python",help='python binary to be used [Default: %s]'%sys.executable) - pyopt.add_option('--pythondir',dest='pythondir',help='Installation path for python modules (py, platform-independent .py and .pyc files)') - pyopt.add_option('--pythonarchdir',dest='pythonarchdir',help='Installation path for python extension (pyext, platform-dependent .so or .dylib files)') + pyopt.add_option('--nopyc', dest = 'pyc', action='store_false', default=1, + help = 'Do not install bytecode compiled .pyc files (configuration) [Default:install]') + pyopt.add_option('--nopyo', dest='pyo', action='store_false', default=1, + help='Do not install optimised compiled .pyo files (configuration) [Default:install]') + pyopt.add_option('--nopycache',dest='nopycache', action='store_true', + help='Do not use __pycache__ directory to install objects [Default:auto]') + pyopt.add_option('--python', dest="python", + help='python binary to be used [Default: %s]' % sys.executable) + pyopt.add_option('--pythondir', dest='pythondir', + help='Installation path for python modules (py, platform-independent .py and .pyc files)') + pyopt.add_option('--pythonarchdir', dest='pythonarchdir', + help='Installation path for python extension (pyext, platform-dependent .so or .dylib files)') + diff -Nru lilv-0.24.4~dfsg0/waflib/Tools/qt5.py lilv-0.24.6/waflib/Tools/qt5.py --- lilv-0.24.4~dfsg0/waflib/Tools/qt5.py 2018-06-27 05:22:05.000000000 +0000 +++ lilv-0.24.6/waflib/Tools/qt5.py 2019-10-19 17:59:11.000000000 +0000 @@ -1,31 +1,123 @@ -#! /usr/bin/env python +#!/usr/bin/env python # encoding: utf-8 -# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file +# Thomas Nagy, 2006-2018 (ita) + +""" +This tool helps with finding Qt5 tools and libraries, +and also provides syntactic sugar for using Qt5 tools. 
+ +The following snippet illustrates the tool usage:: + + def options(opt): + opt.load('compiler_cxx qt5') + + def configure(conf): + conf.load('compiler_cxx qt5') + + def build(bld): + bld( + features = 'qt5 cxx cxxprogram', + uselib = 'QT5CORE QT5GUI QT5OPENGL QT5SVG', + source = 'main.cpp textures.qrc aboutDialog.ui', + target = 'window', + ) + +Here, the UI description and resource files will be processed +to generate code. + +Usage +===== + +Load the "qt5" tool. + +You also need to edit your sources accordingly: + +- the normal way of doing things is to have your C++ files + include the .moc file. + This is regarded as the best practice (and provides much faster + compilations). + It also implies that the include paths have beenset properly. + +- to have the include paths added automatically, use the following:: + + from waflib.TaskGen import feature, before_method, after_method + @feature('cxx') + @after_method('process_source') + @before_method('apply_incpaths') + def add_includes_paths(self): + incs = set(self.to_list(getattr(self, 'includes', ''))) + for x in self.compiled_tasks: + incs.add(x.inputs[0].parent.path_from(self.path)) + self.includes = sorted(incs) + +Note: another tool provides Qt processing that does not require +.moc includes, see 'playground/slow_qt/'. + +A few options (--qt{dir,bin,...}) and environment variables +(QT5_{ROOT,DIR,MOC,UIC,XCOMPILE}) allow finer tuning of the tool, +tool path selection, etc; please read the source for more info. + +The detection uses pkg-config on Linux by default. To force static library detection use: +QT5_XCOMPILE=1 QT5_FORCE_STATIC=1 waf configure +""" from __future__ import with_statement + try: from xml.sax import make_parser from xml.sax.handler import ContentHandler except ImportError: - has_xml=False - ContentHandler=object + has_xml = False + ContentHandler = object else: - has_xml=True -import os,sys,re + has_xml = True + +import os, sys, re from waflib.Tools import cxx -from waflib import Task,Utils,Options,Errors,Context -from waflib.TaskGen import feature,after_method,extension,before_method +from waflib import Build, Task, Utils, Options, Errors, Context +from waflib.TaskGen import feature, after_method, extension, before_method from waflib.Configure import conf from waflib import Logs -MOC_H=['.h','.hpp','.hxx','.hh'] -EXT_RCC=['.qrc'] -EXT_UI=['.ui'] -EXT_QT5=['.cpp','.cc','.cxx','.C'] + +MOC_H = ['.h', '.hpp', '.hxx', '.hh'] +""" +File extensions associated to .moc files +""" + +EXT_RCC = ['.qrc'] +""" +File extension for the resource (.qrc) files +""" + +EXT_UI = ['.ui'] +""" +File extension for the user interface (.ui) files +""" + +EXT_QT5 = ['.cpp', '.cc', '.cxx', '.C'] +""" +File extensions of C++ files that may require a .moc processing +""" + class qxx(Task.classes['cxx']): - def __init__(self,*k,**kw): - Task.Task.__init__(self,*k,**kw) - self.moc_done=0 + """ + Each C++ file can have zero or several .moc files to create. + They are known only when the files are scanned (preprocessor) + To avoid scanning the c++ files each time (parsing C/C++), the results + are retrieved from the task cache (bld.node_deps/bld.raw_deps). + The moc tasks are also created *dynamically* during the build. + """ + + def __init__(self, *k, **kw): + Task.Task.__init__(self, *k, **kw) + self.moc_done = 0 + def runnable_status(self): + """ + Compute the task signature to make sure the scanner was executed. 
Create the + moc tasks by using :py:meth:`waflib.Tools.qt5.qxx.add_moc_tasks` (if necessary), + then postpone the task execution (there is no need to recompute the task signature). + """ if self.moc_done: return Task.Task.runnable_status(self) else: @@ -34,197 +126,346 @@ return Task.ASK_LATER self.add_moc_tasks() return Task.Task.runnable_status(self) - def create_moc_task(self,h_node,m_node): + + def create_moc_task(self, h_node, m_node): + """ + If several libraries use the same classes, it is possible that moc will run several times (Issue 1318) + It is not possible to change the file names, but we can assume that the moc transformation will be identical, + and the moc tasks can be shared in a global cache. + """ try: - moc_cache=self.generator.bld.moc_cache + moc_cache = self.generator.bld.moc_cache except AttributeError: - moc_cache=self.generator.bld.moc_cache={} + moc_cache = self.generator.bld.moc_cache = {} + try: return moc_cache[h_node] except KeyError: - tsk=moc_cache[h_node]=Task.classes['moc'](env=self.env,generator=self.generator) + tsk = moc_cache[h_node] = Task.classes['moc'](env=self.env, generator=self.generator) tsk.set_inputs(h_node) tsk.set_outputs(m_node) - tsk.env.append_unique('MOC_FLAGS','-i') + tsk.env.append_unique('MOC_FLAGS', '-i') + if self.generator: self.generator.tasks.append(tsk) - gen=self.generator.bld.producer + + # direct injection in the build phase (safe because called from the main thread) + gen = self.generator.bld.producer gen.outstanding.append(tsk) - gen.total+=1 + gen.total += 1 + return tsk + else: - delattr(self,'cache_sig') + # remove the signature, it must be recomputed with the moc task + delattr(self, 'cache_sig') + def add_moc_tasks(self): - node=self.inputs[0] - bld=self.generator.bld + """ + Creates moc tasks by looking in the list of file dependencies ``bld.raw_deps[self.uid()]`` + """ + node = self.inputs[0] + bld = self.generator.bld + + # skip on uninstall due to generated files + if bld.is_install == Build.UNINSTALL: + return + try: + # compute the signature once to know if there is a moc file to create self.signature() except KeyError: + # the moc file may be referenced somewhere else pass else: - delattr(self,'cache_sig') - include_nodes=[node.parent]+self.generator.includes_nodes - moctasks=[] - mocfiles=set() - for d in bld.raw_deps.get(self.uid(),[]): + # remove the signature, it must be recomputed with the moc task + delattr(self, 'cache_sig') + + include_nodes = [node.parent] + self.generator.includes_nodes + + moctasks = [] + mocfiles = set() + for d in bld.raw_deps.get(self.uid(), []): if not d.endswith('.moc'): continue + + # process that base.moc only once if d in mocfiles: continue mocfiles.add(d) - h_node=None - base2=d[:-4] - prefix=node.name[:node.name.rfind('.')] - if base2==prefix: - h_node=node + + # find the source associated with the moc file + h_node = None + base2 = d[:-4] + + # foo.moc from foo.cpp + prefix = node.name[:node.name.rfind('.')] + if base2 == prefix: + h_node = node else: + # this deviates from the standard + # if bar.cpp includes foo.moc, then assume it is from foo.h for x in include_nodes: for e in MOC_H: - h_node=x.find_node(base2+e) + h_node = x.find_node(base2 + e) if h_node: break else: continue break if h_node: - m_node=h_node.change_ext('.moc') + m_node = h_node.change_ext('.moc') else: - raise Errors.WafError('No source found for %r which is a moc file'%d) - task=self.create_moc_task(h_node,m_node) + raise Errors.WafError('No source found for %r which is a moc file' % d) + + # create 
the moc task + task = self.create_moc_task(h_node, m_node) moctasks.append(task) + + # simple scheduler dependency: run the moc task before others self.run_after.update(set(moctasks)) - self.moc_done=1 + self.moc_done = 1 + class trans_update(Task.Task): - run_str='${QT_LUPDATE} ${SRC} -ts ${TGT}' - color='BLUE' + """Updates a .ts files from a list of C++ files""" + run_str = '${QT_LUPDATE} ${SRC} -ts ${TGT}' + color = 'BLUE' + class XMLHandler(ContentHandler): + """ + Parses ``.qrc`` files + """ def __init__(self): ContentHandler.__init__(self) - self.buf=[] - self.files=[] - def startElement(self,name,attrs): - if name=='file': - self.buf=[] - def endElement(self,name): - if name=='file': + self.buf = [] + self.files = [] + def startElement(self, name, attrs): + if name == 'file': + self.buf = [] + def endElement(self, name): + if name == 'file': self.files.append(str(''.join(self.buf))) - def characters(self,cars): + def characters(self, cars): self.buf.append(cars) + @extension(*EXT_RCC) -def create_rcc_task(self,node): - rcnode=node.change_ext('_rc.%d.cpp'%self.idx) - self.create_task('rcc',node,rcnode) - cpptask=self.create_task('cxx',rcnode,rcnode.change_ext('.o')) +def create_rcc_task(self, node): + "Creates rcc and cxx tasks for ``.qrc`` files" + rcnode = node.change_ext('_rc.%d.cpp' % self.idx) + self.create_task('rcc', node, rcnode) + cpptask = self.create_task('cxx', rcnode, rcnode.change_ext('.o')) try: self.compiled_tasks.append(cpptask) except AttributeError: - self.compiled_tasks=[cpptask] + self.compiled_tasks = [cpptask] return cpptask + @extension(*EXT_UI) -def create_uic_task(self,node): +def create_uic_task(self, node): + "Create uic tasks for user interface ``.ui`` definition files" + + """ + If UIC file is used in more than one bld, we would have a conflict in parallel execution + It is not possible to change the file names (like .self.idx. as for objects) as they have + to be referenced by the source file, but we can assume that the transformation will be identical + and the tasks can be shared in a global cache. + """ try: - uic_cache=self.bld.uic_cache + uic_cache = self.bld.uic_cache except AttributeError: - uic_cache=self.bld.uic_cache={} + uic_cache = self.bld.uic_cache = {} + if node not in uic_cache: - uictask=uic_cache[node]=self.create_task('ui5',node) - uictask.outputs=[node.parent.find_or_declare(self.env.ui_PATTERN%node.name[:-3])] + uictask = uic_cache[node] = self.create_task('ui5', node) + uictask.outputs = [node.parent.find_or_declare(self.env.ui_PATTERN % node.name[:-3])] + @extension('.ts') -def add_lang(self,node): - self.lang=self.to_list(getattr(self,'lang',[]))+[node] +def add_lang(self, node): + """Adds all the .ts file into ``self.lang``""" + self.lang = self.to_list(getattr(self, 'lang', [])) + [node] + @feature('qt5') @before_method('process_source') def process_mocs(self): - lst=self.to_nodes(getattr(self,'moc',[])) - self.source=self.to_list(getattr(self,'source',[])) + """ + Processes MOC files included in headers:: + + def build(bld): + bld.program(features='qt5', source='main.cpp', target='app', use='QT5CORE', moc='foo.h') + + The build will run moc on foo.h to create moc_foo.n.cpp. The number in the file name + is provided to avoid name clashes when the same headers are used by several targets. 
+ """ + lst = self.to_nodes(getattr(self, 'moc', [])) + self.source = self.to_list(getattr(self, 'source', [])) for x in lst: - prefix=x.name[:x.name.rfind('.')] - moc_target='moc_%s.%d.cpp'%(prefix,self.idx) - moc_node=x.parent.find_or_declare(moc_target) + prefix = x.name[:x.name.rfind('.')] # foo.h -> foo + moc_target = 'moc_%s.%d.cpp' % (prefix, self.idx) + moc_node = x.parent.find_or_declare(moc_target) self.source.append(moc_node) - self.create_task('moc',x,moc_node) + + self.create_task('moc', x, moc_node) + @feature('qt5') @after_method('apply_link') def apply_qt5(self): - if getattr(self,'lang',None): - qmtasks=[] + """ + Adds MOC_FLAGS which may be necessary for moc:: + + def build(bld): + bld.program(features='qt5', source='main.cpp', target='app', use='QT5CORE') + + The additional parameters are: + + :param lang: list of translation files (\\*.ts) to process + :type lang: list of :py:class:`waflib.Node.Node` or string without the .ts extension + :param update: whether to process the C++ files to update the \\*.ts files (use **waf --translate**) + :type update: bool + :param langname: if given, transform the \\*.ts files into a .qrc files to include in the binary file + :type langname: :py:class:`waflib.Node.Node` or string without the .qrc extension + """ + if getattr(self, 'lang', None): + qmtasks = [] for x in self.to_list(self.lang): - if isinstance(x,str): - x=self.path.find_resource(x+'.ts') - qmtasks.append(self.create_task('ts2qm',x,x.change_ext('.%d.qm'%self.idx))) - if getattr(self,'update',None)and Options.options.trans_qt5: - cxxnodes=[a.inputs[0]for a in self.compiled_tasks]+[a.inputs[0]for a in self.tasks if a.inputs and a.inputs[0].name.endswith('.ui')] + if isinstance(x, str): + x = self.path.find_resource(x + '.ts') + qmtasks.append(self.create_task('ts2qm', x, x.change_ext('.%d.qm' % self.idx))) + + if getattr(self, 'update', None) and Options.options.trans_qt5: + cxxnodes = [a.inputs[0] for a in self.compiled_tasks] + [ + a.inputs[0] for a in self.tasks if a.inputs and a.inputs[0].name.endswith('.ui')] for x in qmtasks: - self.create_task('trans_update',cxxnodes,x.inputs) - if getattr(self,'langname',None): - qmnodes=[x.outputs[0]for x in qmtasks] - rcnode=self.langname - if isinstance(rcnode,str): - rcnode=self.path.find_or_declare(rcnode+('.%d.qrc'%self.idx)) - t=self.create_task('qm2rcc',qmnodes,rcnode) - k=create_rcc_task(self,t.outputs[0]) + self.create_task('trans_update', cxxnodes, x.inputs) + + if getattr(self, 'langname', None): + qmnodes = [x.outputs[0] for x in qmtasks] + rcnode = self.langname + if isinstance(rcnode, str): + rcnode = self.path.find_or_declare(rcnode + ('.%d.qrc' % self.idx)) + t = self.create_task('qm2rcc', qmnodes, rcnode) + k = create_rcc_task(self, t.outputs[0]) self.link_task.inputs.append(k.outputs[0]) - lst=[] + + lst = [] for flag in self.to_list(self.env.CXXFLAGS): - if len(flag)<2: + if len(flag) < 2: continue - f=flag[0:2] - if f in('-D','-I','/D','/I'): - if(f[0]=='/'): - lst.append('-'+flag[1:]) + f = flag[0:2] + if f in ('-D', '-I', '/D', '/I'): + if (f[0] == '/'): + lst.append('-' + flag[1:]) else: lst.append(flag) - self.env.append_value('MOC_FLAGS',lst) + self.env.append_value('MOC_FLAGS', lst) + @extension(*EXT_QT5) -def cxx_hook(self,node): - return self.create_compiled_task('qxx',node) +def cxx_hook(self, node): + """ + Re-maps C++ file extensions to the :py:class:`waflib.Tools.qt5.qxx` task. 
+	"""
+	return self.create_compiled_task('qxx', node)
+
class rcc(Task.Task):
-	color='BLUE'
-	run_str='${QT_RCC} -name ${tsk.rcname()} ${SRC[0].abspath()} ${RCC_ST} -o ${TGT}'
-	ext_out=['.h']
+	"""
+	Processes ``.qrc`` files
+	"""
+	color = 'BLUE'
+	run_str = '${QT_RCC} -name ${tsk.rcname()} ${SRC[0].abspath()} ${RCC_ST} -o ${TGT}'
+	ext_out = ['.h']
+
	def rcname(self):
		return os.path.splitext(self.inputs[0].name)[0]
+
	def scan(self):
+		"""Parse the *.qrc* files"""
		if not has_xml:
			Logs.error('No xml.sax support was found, rcc dependencies will be incomplete!')
-			return([],[])
-		parser=make_parser()
-		curHandler=XMLHandler()
+			return ([], [])
+
+		parser = make_parser()
+		curHandler = XMLHandler()
		parser.setContentHandler(curHandler)
-		with open(self.inputs[0].abspath(),'r')as f:
+		with open(self.inputs[0].abspath(), 'r') as f:
			parser.parse(f)
-		nodes=[]
-		names=[]
-		root=self.inputs[0].parent
+
+		nodes = []
+		names = []
+		root = self.inputs[0].parent
		for x in curHandler.files:
-			nd=root.find_resource(x)
+			nd = root.find_resource(x)
			if nd:
				nodes.append(nd)
			else:
				names.append(x)
-		return(nodes,names)
-	def quote_flag(self,x):
+		return (nodes, names)
+
+	def quote_flag(self, x):
+		"""
+		Override Task.quote_flag. QT parses the argument files
+		differently than cl.exe and link.exe
+
+		:param x: flag
+		:type x: string
+		:return: quoted flag
+		:rtype: string
+		"""
		return x
+
+
class moc(Task.Task):
-	color='BLUE'
-	run_str='${QT_MOC} ${MOC_FLAGS} ${MOCCPPPATH_ST:INCPATHS} ${MOCDEFINES_ST:DEFINES} ${SRC} ${MOC_ST} ${TGT}'
-	def quote_flag(self,x):
+	"""
+	Creates ``.moc`` files
+	"""
+	color = 'BLUE'
+	run_str = '${QT_MOC} ${MOC_FLAGS} ${MOCCPPPATH_ST:INCPATHS} ${MOCDEFINES_ST:DEFINES} ${SRC} ${MOC_ST} ${TGT}'
+
+	def quote_flag(self, x):
+		"""
+		Override Task.quote_flag. QT parses the argument files
+		differently than cl.exe and link.exe
+
+		:param x: flag
+		:type x: string
+		:return: quoted flag
+		:rtype: string
+		"""
		return x
+
+
class ui5(Task.Task):
-	color='BLUE'
-	run_str='${QT_UIC} ${SRC} -o ${TGT}'
-	ext_out=['.h']
+	"""
+	Processes ``.ui`` files
+	"""
+	color = 'BLUE'
+	run_str = '${QT_UIC} ${SRC} -o ${TGT}'
+	ext_out = ['.h']
+
class ts2qm(Task.Task):
-	color='BLUE'
-	run_str='${QT_LRELEASE} ${QT_LRELEASE_FLAGS} ${SRC} -qm ${TGT}'
+	"""
+	Generates ``.qm`` files from ``.ts`` files
+	"""
+	color = 'BLUE'
+	run_str = '${QT_LRELEASE} ${QT_LRELEASE_FLAGS} ${SRC} -qm ${TGT}'
+
class qm2rcc(Task.Task):
-	color='BLUE'
-	after='ts2qm'
+	"""
+	Generates ``.qrc`` files from ``.qm`` files
+	"""
+	color = 'BLUE'
+	after = 'ts2qm'
	def run(self):
-		txt='\n'.join(['<file>%s</file>'%k.path_from(self.outputs[0].parent)for k in self.inputs])
-		code='<!DOCTYPE RCC><RCC version="1.0">\n<qresource>\n%s\n</qresource>\n</RCC>'%txt
+		"""Create a qrc file including the inputs"""
+		txt = '\n'.join(['<file>%s</file>' % k.path_from(self.outputs[0].parent) for k in self.inputs])
+		code = '<!DOCTYPE RCC><RCC version="1.0">\n<qresource>\n%s\n</qresource>\n</RCC>' % txt
		self.outputs[0].write(code)
+
def configure(self):
+	"""
+	Besides the configuration options, the environment variable QT5_ROOT may be used
+	to give the location of the qt5 libraries (absolute path).
+ + The detection uses the program ``pkg-config`` through :py:func:`waflib.Tools.config_c.check_cfg` + """ self.find_qt5_binaries() self.set_qt5_libs_dir() self.set_qt5_libs_to_check() @@ -232,266 +473,328 @@ self.find_qt5_libraries() self.add_qt5_rpath() self.simplify_qt5_libs() + + # warn about this during the configuration too if not has_xml: Logs.error('No xml.sax support was found, rcc dependencies will be incomplete!') - if'COMPILER_CXX'not in self.env: + + if 'COMPILER_CXX' not in self.env: self.fatal('No CXX compiler defined: did you forget to configure compiler_cxx first?') - frag='#include <QApplication>\nint main(int argc, char **argv) {return 0;}\n' - uses='QT5CORE QT5WIDGETS QT5GUI' - for flag in[[],'-fPIE','-fPIC','-std=c++11',['-std=c++11','-fPIE'],['-std=c++11','-fPIC']]: - msg='See if Qt files compile ' + + # Qt5 may be compiled with '-reduce-relocations' which requires dependent programs to have -fPIE or -fPIC? + frag = '#include <QApplication>\nint main(int argc, char **argv) {return 0;}\n' + uses = 'QT5CORE QT5WIDGETS QT5GUI' + for flag in [[], '-fPIE', '-fPIC', '-std=c++11' , ['-std=c++11', '-fPIE'], ['-std=c++11', '-fPIC']]: + msg = 'See if Qt files compile ' if flag: - msg+='with %s'%flag + msg += 'with %s' % flag try: - self.check(features='qt5 cxx',use=uses,uselib_store='qt5',cxxflags=flag,fragment=frag,msg=msg) + self.check(features='qt5 cxx', use=uses, uselib_store='qt5', cxxflags=flag, fragment=frag, msg=msg) except self.errors.ConfigurationError: pass else: break else: self.fatal('Could not build a simple Qt application') - if Utils.unversioned_sys_platform()=='freebsd': - frag='#include <QApplication>\nint main(int argc, char **argv) { QApplication app(argc, argv); return NULL != (void*) (&app);}\n' + + # FreeBSD does not add /usr/local/lib and the pkg-config files do not provide it either :-/ + if Utils.unversioned_sys_platform() == 'freebsd': + frag = '#include <QApplication>\nint main(int argc, char **argv) { QApplication app(argc, argv); return NULL != (void*) (&app);}\n' try: - self.check(features='qt5 cxx cxxprogram',use=uses,fragment=frag,msg='Can we link Qt programs on FreeBSD directly?') + self.check(features='qt5 cxx cxxprogram', use=uses, fragment=frag, msg='Can we link Qt programs on FreeBSD directly?') except self.errors.ConfigurationError: - self.check(features='qt5 cxx cxxprogram',use=uses,uselib_store='qt5',libpath='/usr/local/lib',fragment=frag,msg='Is /usr/local/lib required?') + self.check(features='qt5 cxx cxxprogram', use=uses, uselib_store='qt5', libpath='/usr/local/lib', fragment=frag, msg='Is /usr/local/lib required?') + @conf def find_qt5_binaries(self): - env=self.env - opt=Options.options - qtdir=getattr(opt,'qtdir','') - qtbin=getattr(opt,'qtbin','') - paths=[] + """ + Detects Qt programs such as qmake, moc, uic, lrelease + """ + env = self.env + opt = Options.options + + qtdir = getattr(opt, 'qtdir', '') + qtbin = getattr(opt, 'qtbin', '') + + paths = [] + if qtdir: - qtbin=os.path.join(qtdir,'bin') + qtbin = os.path.join(qtdir, 'bin') + + # the qt directory has been given from QT5_ROOT - deduce the qt binary path if not qtdir: - qtdir=self.environ.get('QT5_ROOT','') - qtbin=self.environ.get('QT5_BIN')or os.path.join(qtdir,'bin') + qtdir = self.environ.get('QT5_ROOT', '') + qtbin = self.environ.get('QT5_BIN') or os.path.join(qtdir, 'bin') + if qtbin: - paths=[qtbin] + paths = [qtbin] + + # no qtdir, look in the path and in /usr/local/Trolltech if not qtdir: - paths=self.environ.get('PATH','').split(os.pathsep) - paths.extend(['/usr/share/qt5/bin','/usr/local/lib/qt5/bin']) + paths = 
self.environ.get('PATH', '').split(os.pathsep) + paths.extend(['/usr/share/qt5/bin', '/usr/local/lib/qt5/bin']) try: - lst=Utils.listdir('/usr/local/Trolltech/') + lst = Utils.listdir('/usr/local/Trolltech/') except OSError: pass else: if lst: lst.sort() lst.reverse() - qtdir='/usr/local/Trolltech/%s/'%lst[0] - qtbin=os.path.join(qtdir,'bin') + + # keep the highest version + qtdir = '/usr/local/Trolltech/%s/' % lst[0] + qtbin = os.path.join(qtdir, 'bin') paths.append(qtbin) - cand=None - prev_ver=['5','0','0'] - for qmk in('qmake-qt5','qmake5','qmake'): + + # at the end, try to find qmake in the paths given + # keep the one with the highest version + cand = None + prev_ver = ['5', '0', '0'] + for qmk in ('qmake-qt5', 'qmake5', 'qmake'): try: - qmake=self.find_program(qmk,path_list=paths) + qmake = self.find_program(qmk, path_list=paths) except self.errors.ConfigurationError: pass else: try: - version=self.cmd_and_log(qmake+['-query','QT_VERSION']).strip() + version = self.cmd_and_log(qmake + ['-query', 'QT_VERSION']).strip() except self.errors.WafError: pass else: if version: - new_ver=version.split('.') - if new_ver>prev_ver: - cand=qmake - prev_ver=new_ver + new_ver = version.split('.') + if new_ver > prev_ver: + cand = qmake + prev_ver = new_ver + + # qmake could not be found easily, rely on qtchooser if not cand: try: self.find_program('qtchooser') except self.errors.ConfigurationError: pass else: - cmd=self.env.QTCHOOSER+['-qt=5','-run-tool=qmake'] + cmd = self.env.QTCHOOSER + ['-qt=5', '-run-tool=qmake'] try: - version=self.cmd_and_log(cmd+['-query','QT_VERSION']) + version = self.cmd_and_log(cmd + ['-query', 'QT_VERSION']) except self.errors.WafError: pass else: - cand=cmd + cand = cmd + if cand: - self.env.QMAKE=cand + self.env.QMAKE = cand else: self.fatal('Could not find qmake for qt5') - self.env.QT_HOST_BINS=qtbin=self.cmd_and_log(self.env.QMAKE+['-query','QT_HOST_BINS']).strip() - paths.insert(0,qtbin) - def find_bin(lst,var): + + self.env.QT_HOST_BINS = qtbin = self.cmd_and_log(self.env.QMAKE + ['-query', 'QT_HOST_BINS']).strip() + paths.insert(0, qtbin) + + def find_bin(lst, var): if var in env: return for f in lst: try: - ret=self.find_program(f,path_list=paths) + ret = self.find_program(f, path_list=paths) except self.errors.ConfigurationError: pass else: env[var]=ret break - find_bin(['uic-qt5','uic'],'QT_UIC') + + find_bin(['uic-qt5', 'uic'], 'QT_UIC') if not env.QT_UIC: self.fatal('cannot find the uic compiler for qt5') + self.start_msg('Checking for uic version') - uicver=self.cmd_and_log(env.QT_UIC+['-version'],output=Context.BOTH) - uicver=''.join(uicver).strip() - uicver=uicver.replace('Qt User Interface Compiler ','').replace('User Interface Compiler for Qt','') + uicver = self.cmd_and_log(env.QT_UIC + ['-version'], output=Context.BOTH) + uicver = ''.join(uicver).strip() + uicver = uicver.replace('Qt User Interface Compiler ','').replace('User Interface Compiler for Qt', '') self.end_msg(uicver) - if uicver.find(' 3.')!=-1 or uicver.find(' 4.')!=-1: + if uicver.find(' 3.') != -1 or uicver.find(' 4.') != -1: self.fatal('this uic compiler is for qt3 or qt4, add uic for qt5 to your path') - find_bin(['moc-qt5','moc'],'QT_MOC') - find_bin(['rcc-qt5','rcc'],'QT_RCC') - find_bin(['lrelease-qt5','lrelease'],'QT_LRELEASE') - find_bin(['lupdate-qt5','lupdate'],'QT_LUPDATE') - env.UIC_ST='%s -o %s' - env.MOC_ST='-o' - env.ui_PATTERN='ui_%s.h' - env.QT_LRELEASE_FLAGS=['-silent'] - env.MOCCPPPATH_ST='-I%s' - env.MOCDEFINES_ST='-D%s' + + find_bin(['moc-qt5', 'moc'], 'QT_MOC') + 
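[Editor's observation, not part of the patch] The qmake selection loop above ranks candidates by comparing lists of version *strings*, so double-digit components sort incorrectly; for example Qt 5.10 ranks below Qt 5.9. A small demonstration of the pitfall and the usual numeric fix:

    # string comparison (what the loop above does) misorders 5.10 vs 5.9
    assert ['5', '9', '1'] > ['5', '10', '0']
    # integer tuples compare numerically and give the expected order
    assert tuple(map(int, '5.10.0'.split('.'))) > tuple(map(int, '5.9.1'.split('.')))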
find_bin(['rcc-qt5', 'rcc'], 'QT_RCC') + find_bin(['lrelease-qt5', 'lrelease'], 'QT_LRELEASE') + find_bin(['lupdate-qt5', 'lupdate'], 'QT_LUPDATE') + + env.UIC_ST = '%s -o %s' + env.MOC_ST = '-o' + env.ui_PATTERN = 'ui_%s.h' + env.QT_LRELEASE_FLAGS = ['-silent'] + env.MOCCPPPATH_ST = '-I%s' + env.MOCDEFINES_ST = '-D%s' + @conf def set_qt5_libs_dir(self): - env=self.env - qtlibs=getattr(Options.options,'qtlibs',None)or self.environ.get('QT5_LIBDIR') + env = self.env + qtlibs = getattr(Options.options, 'qtlibs', None) or self.environ.get('QT5_LIBDIR') if not qtlibs: try: - qtlibs=self.cmd_and_log(env.QMAKE+['-query','QT_INSTALL_LIBS']).strip() + qtlibs = self.cmd_and_log(env.QMAKE + ['-query', 'QT_INSTALL_LIBS']).strip() except Errors.WafError: - qtdir=self.cmd_and_log(env.QMAKE+['-query','QT_INSTALL_PREFIX']).strip() - qtlibs=os.path.join(qtdir,'lib') - self.msg('Found the Qt5 libraries in',qtlibs) - env.QTLIBS=qtlibs + qtdir = self.cmd_and_log(env.QMAKE + ['-query', 'QT_INSTALL_PREFIX']).strip() + qtlibs = os.path.join(qtdir, 'lib') + self.msg('Found the Qt5 libraries in', qtlibs) + env.QTLIBS = qtlibs + @conf -def find_single_qt5_lib(self,name,uselib,qtlibs,qtincludes,force_static): - env=self.env +def find_single_qt5_lib(self, name, uselib, qtlibs, qtincludes, force_static): + env = self.env if force_static: - exts=('.a','.lib') - prefix='STLIB' + exts = ('.a', '.lib') + prefix = 'STLIB' else: - exts=('.so','.lib') - prefix='LIB' + exts = ('.so', '.lib') + prefix = 'LIB' + def lib_names(): for x in exts: - for k in('','5')if Utils.is_win32 else['']: - for p in('lib',''): - yield(p,name,k,x) + for k in ('', '5') if Utils.is_win32 else ['']: + for p in ('lib', ''): + yield (p, name, k, x) + for tup in lib_names(): - k=''.join(tup) - path=os.path.join(qtlibs,k) + k = ''.join(tup) + path = os.path.join(qtlibs, k) if os.path.exists(path): - if env.DEST_OS=='win32': - libval=''.join(tup[:-1]) + if env.DEST_OS == 'win32': + libval = ''.join(tup[:-1]) else: - libval=name - env.append_unique(prefix+'_'+uselib,libval) - env.append_unique('%sPATH_%s'%(prefix,uselib),qtlibs) - env.append_unique('INCLUDES_'+uselib,qtincludes) - env.append_unique('INCLUDES_'+uselib,os.path.join(qtincludes,name.replace('Qt5','Qt'))) + libval = name + env.append_unique(prefix + '_' + uselib, libval) + env.append_unique('%sPATH_%s' % (prefix, uselib), qtlibs) + env.append_unique('INCLUDES_' + uselib, qtincludes) + env.append_unique('INCLUDES_' + uselib, os.path.join(qtincludes, name.replace('Qt5', 'Qt'))) return k return False + @conf def find_qt5_libraries(self): - env=self.env - qtincludes=self.environ.get('QT5_INCLUDES')or self.cmd_and_log(env.QMAKE+['-query','QT_INSTALL_HEADERS']).strip() - force_static=self.environ.get('QT5_FORCE_STATIC') + env = self.env + + qtincludes = self.environ.get('QT5_INCLUDES') or self.cmd_and_log(env.QMAKE + ['-query', 'QT_INSTALL_HEADERS']).strip() + force_static = self.environ.get('QT5_FORCE_STATIC') try: if self.environ.get('QT5_XCOMPILE'): self.fatal('QT5_XCOMPILE Disables pkg-config detection') self.check_cfg(atleast_pkgconfig_version='0.1') except self.errors.ConfigurationError: for i in self.qt5_vars: - uselib=i.upper() - if Utils.unversioned_sys_platform()=='darwin': - fwk=i.replace('Qt5','Qt') - frameworkName=fwk+'.framework' - qtDynamicLib=os.path.join(env.QTLIBS,frameworkName,fwk) + uselib = i.upper() + if Utils.unversioned_sys_platform() == 'darwin': + # Since at least qt 4.7.3 each library locates in separate directory + fwk = i.replace('Qt5', 'Qt') + frameworkName = fwk + 
'.framework' + + qtDynamicLib = os.path.join(env.QTLIBS, frameworkName, fwk) if os.path.exists(qtDynamicLib): - env.append_unique('FRAMEWORK_'+uselib,fwk) - env.append_unique('FRAMEWORKPATH_'+uselib,env.QTLIBS) - self.msg('Checking for %s'%i,qtDynamicLib,'GREEN') + env.append_unique('FRAMEWORK_' + uselib, fwk) + env.append_unique('FRAMEWORKPATH_' + uselib, env.QTLIBS) + self.msg('Checking for %s' % i, qtDynamicLib, 'GREEN') else: - self.msg('Checking for %s'%i,False,'YELLOW') - env.append_unique('INCLUDES_'+uselib,os.path.join(env.QTLIBS,frameworkName,'Headers')) + self.msg('Checking for %s' % i, False, 'YELLOW') + env.append_unique('INCLUDES_' + uselib, os.path.join(env.QTLIBS, frameworkName, 'Headers')) else: - ret=self.find_single_qt5_lib(i,uselib,env.QTLIBS,qtincludes,force_static) + ret = self.find_single_qt5_lib(i, uselib, env.QTLIBS, qtincludes, force_static) if not force_static and not ret: - ret=self.find_single_qt5_lib(i,uselib,env.QTLIBS,qtincludes,True) - self.msg('Checking for %s'%i,ret,'GREEN'if ret else'YELLOW') + ret = self.find_single_qt5_lib(i, uselib, env.QTLIBS, qtincludes, True) + self.msg('Checking for %s' % i, ret, 'GREEN' if ret else 'YELLOW') else: - path='%s:%s:%s/pkgconfig:/usr/lib/qt5/lib/pkgconfig:/opt/qt5/lib/pkgconfig:/usr/lib/qt5/lib:/opt/qt5/lib'%(self.environ.get('PKG_CONFIG_PATH',''),env.QTLIBS,env.QTLIBS) + path = '%s:%s:%s/pkgconfig:/usr/lib/qt5/lib/pkgconfig:/opt/qt5/lib/pkgconfig:/usr/lib/qt5/lib:/opt/qt5/lib' % ( + self.environ.get('PKG_CONFIG_PATH', ''), env.QTLIBS, env.QTLIBS) for i in self.qt5_vars: - self.check_cfg(package=i,args='--cflags --libs',mandatory=False,force_static=force_static,pkg_config_path=path) + self.check_cfg(package=i, args='--cflags --libs', mandatory=False, force_static=force_static, pkg_config_path=path) + @conf def simplify_qt5_libs(self): - env=self.env - def process_lib(vars_,coreval): + """ + Since library paths make really long command-lines, + and since everything depends on qtcore, remove the qtcore ones from qtgui, etc + """ + env = self.env + def process_lib(vars_, coreval): for d in vars_: - var=d.upper() - if var=='QTCORE': + var = d.upper() + if var == 'QTCORE': continue - value=env['LIBPATH_'+var] + + value = env['LIBPATH_'+var] if value: - core=env[coreval] - accu=[] + core = env[coreval] + accu = [] for lib in value: if lib in core: continue accu.append(lib) - env['LIBPATH_'+var]=accu - process_lib(self.qt5_vars,'LIBPATH_QTCORE') + env['LIBPATH_'+var] = accu + process_lib(self.qt5_vars, 'LIBPATH_QTCORE') + @conf def add_qt5_rpath(self): - env=self.env - if getattr(Options.options,'want_rpath',False): - def process_rpath(vars_,coreval): + """ + Defines rpath entries for Qt libraries + """ + env = self.env + if getattr(Options.options, 'want_rpath', False): + def process_rpath(vars_, coreval): for d in vars_: - var=d.upper() - value=env['LIBPATH_'+var] + var = d.upper() + value = env['LIBPATH_' + var] if value: - core=env[coreval] - accu=[] + core = env[coreval] + accu = [] for lib in value: - if var!='QTCORE': + if var != 'QTCORE': if lib in core: continue accu.append('-Wl,--rpath='+lib) - env['RPATH_'+var]=accu - process_rpath(self.qt5_vars,'LIBPATH_QTCORE') + env['RPATH_' + var] = accu + process_rpath(self.qt5_vars, 'LIBPATH_QTCORE') + @conf def set_qt5_libs_to_check(self): - self.qt5_vars=Utils.to_list(getattr(self,'qt5_vars',[])) + self.qt5_vars = Utils.to_list(getattr(self, 'qt5_vars', [])) if not self.qt5_vars: - dirlst=Utils.listdir(self.env.QTLIBS) - pat=self.env.cxxshlib_PATTERN + dirlst = 
Utils.listdir(self.env.QTLIBS) + + pat = self.env.cxxshlib_PATTERN if Utils.is_win32: - pat=pat.replace('.dll','.lib') + pat = pat.replace('.dll', '.lib') if self.environ.get('QT5_FORCE_STATIC'): - pat=self.env.cxxstlib_PATTERN - if Utils.unversioned_sys_platform()=='darwin': - pat="%s\.framework" - re_qt=re.compile(pat%'Qt5?(?P<name>.*)'+'$') + pat = self.env.cxxstlib_PATTERN + if Utils.unversioned_sys_platform() == 'darwin': + pat = r"%s\.framework" + re_qt = re.compile(pat%'Qt5?(?P<name>.*)'+'$') for x in dirlst: - m=re_qt.match(x) + m = re_qt.match(x) if m: - self.qt5_vars.append("Qt5%s"%m.group('name')) + self.qt5_vars.append("Qt5%s" % m.group('name')) if not self.qt5_vars: - self.fatal('cannot find any Qt5 library (%r)'%self.env.QTLIBS) + self.fatal('cannot find any Qt5 library (%r)' % self.env.QTLIBS) + + qtextralibs = getattr(Options.options, 'qtextralibs', None) if qtextralibs: self.qt5_vars.extend(qtextralibs.split(',')) + @conf def set_qt5_defines(self): - if sys.platform!='win32': + if sys.platform != 'win32': return for x in self.qt5_vars: - y=x.replace('Qt5','Qt')[2:].upper() - self.env.append_unique('DEFINES_%s'%x.upper(),'QT_%s_LIB'%y) + y=x.replace('Qt5', 'Qt')[2:].upper() + self.env.append_unique('DEFINES_%s' % x.upper(), 'QT_%s_LIB' % y) + def options(opt): - opt.add_option('--want-rpath',action='store_true',default=False,dest='want_rpath',help='enable the rpath for qt libraries') - for i in'qtdir qtbin qtlibs'.split(): - opt.add_option('--'+i,type='string',default='',dest=i) - opt.add_option('--translate',action='store_true',help='collect translation strings',dest='trans_qt5',default=False) - opt.add_option('--qtextralibs',type='string',default='',dest='qtextralibs',help='additional qt libraries on the system to add to default ones, comma separated') + """ + Command-line options + """ + opt.add_option('--want-rpath', action='store_true', default=False, dest='want_rpath', help='enable the rpath for qt libraries') + for i in 'qtdir qtbin qtlibs'.split(): + opt.add_option('--'+i, type='string', default='', dest=i) + + opt.add_option('--translate', action='store_true', help='collect translation strings', dest='trans_qt5', default=False) + opt.add_option('--qtextralibs', type='string', default='', dest='qtextralibs', help='additional qt libraries on the system to add to default ones, comma separated') + diff -Nru lilv-0.24.4~dfsg0/waflib/Tools/ruby.py lilv-0.24.6/waflib/Tools/ruby.py --- lilv-0.24.4~dfsg0/waflib/Tools/ruby.py 2018-06-27 05:22:05.000000000 +0000 +++ lilv-0.24.6/waflib/Tools/ruby.py 2019-06-06 20:19:08.000000000 +0000 @@ -1,97 +1,186 @@ -#! /usr/bin/env python +#!/usr/bin/env python # encoding: utf-8 -# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file +# daniel.svensson at purplescout.se 2008 +# Thomas Nagy 2016-2018 (ita) + +""" +Support for Ruby extensions.
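(Editor's aside before the ruby docstring continues; not part of the patch.) To make the named group in set_qt5_libs_to_check() above concrete, this is how re_qt derives the library list on a typical Linux layout; the file names are invented:

    import re
    pat = 'lib%s.so'  # cxxshlib_PATTERN on Linux
    re_qt = re.compile(pat % r'Qt5?(?P<name>.*)' + '$')
    for x in ('libQt5Core.so', 'libQt5Widgets.so', 'libm.so'):
        m = re_qt.match(x)
        if m:
            print('Qt5%s' % m.group('name'))  # -> Qt5Core, Qt5Widgets; libm.so is skipped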
A C/C++ compiler is required:: + + def options(opt): + opt.load('compiler_c ruby') + def configure(conf): + conf.load('compiler_c ruby') + conf.check_ruby_version((1,8,0)) + conf.check_ruby_ext_devel() + conf.check_ruby_module('libxml') + def build(bld): + bld( + features = 'c cshlib rubyext', + source = 'rb_mytest.c', + target = 'mytest_ext', + install_path = '${ARCHDIR_RUBY}') + bld.install_files('${LIBDIR_RUBY}', 'Mytest.rb') +""" import os -from waflib import Errors,Options,Task,Utils -from waflib.TaskGen import before_method,feature,extension +from waflib import Errors, Options, Task, Utils +from waflib.TaskGen import before_method, feature, extension from waflib.Configure import conf + @feature('rubyext') -@before_method('apply_incpaths','process_source','apply_bundle','apply_link') +@before_method('apply_incpaths', 'process_source', 'apply_bundle', 'apply_link') def init_rubyext(self): - self.install_path='${ARCHDIR_RUBY}' - self.uselib=self.to_list(getattr(self,'uselib','')) - if not'RUBY'in self.uselib: + """ + Add required variables for ruby extensions + """ + self.install_path = '${ARCHDIR_RUBY}' + self.uselib = self.to_list(getattr(self, 'uselib', '')) + if not 'RUBY' in self.uselib: self.uselib.append('RUBY') - if not'RUBYEXT'in self.uselib: + if not 'RUBYEXT' in self.uselib: self.uselib.append('RUBYEXT') + @feature('rubyext') -@before_method('apply_link','propagate_uselib_vars') +@before_method('apply_link', 'propagate_uselib_vars') def apply_ruby_so_name(self): - self.env.cshlib_PATTERN=self.env.cxxshlib_PATTERN=self.env.rubyext_PATTERN + """ + Strip the *lib* prefix from ruby extensions + """ + self.env.cshlib_PATTERN = self.env.cxxshlib_PATTERN = self.env.rubyext_PATTERN + @conf -def check_ruby_version(self,minver=()): - ruby=self.find_program('ruby',var='RUBY',value=Options.options.rubybinary) +def check_ruby_version(self, minver=()): + """ + Checks if ruby is installed. + If installed the variable RUBY will be set in environment. + The ruby binary can be overridden by ``--with-ruby-binary`` command-line option. + """ + + ruby = self.find_program('ruby', var='RUBY', value=Options.options.rubybinary) + try: - version=self.cmd_and_log(ruby+['-e','puts defined?(VERSION) ? VERSION : RUBY_VERSION']).strip() + version = self.cmd_and_log(ruby + ['-e', 'puts defined?(VERSION) ? 
VERSION : RUBY_VERSION']).strip() except Errors.WafError: self.fatal('could not determine ruby version') - self.env.RUBY_VERSION=version + self.env.RUBY_VERSION = version + try: - ver=tuple(map(int,version.split('.'))) + ver = tuple(map(int, version.split('.'))) except Errors.WafError: - self.fatal('unsupported ruby version %r'%version) - cver='' + self.fatal('unsupported ruby version %r' % version) + + cver = '' if minver: - cver='> '+'.'.join(str(x)for x in minver) - if ver<minver: - self.fatal('ruby is too old %r'%version) - self.msg('Checking for ruby version %s'%cver,version) + cver = '> ' + '.'.join(str(x) for x in minver) + if ver < minver: + self.fatal('ruby is too old %r' % version) + + self.msg('Checking for ruby version %s' % cver, version) + @conf def check_ruby_ext_devel(self): + """ + Check if a ruby extension can be created + """ if not self.env.RUBY: self.fatal('ruby detection is required first') + if not self.env.CC_NAME and not self.env.CXX_NAME: self.fatal('load a c/c++ compiler first') - version=tuple(map(int,self.env.RUBY_VERSION.split("."))) + + version = tuple(map(int, self.env.RUBY_VERSION.split("."))) + def read_out(cmd): - return Utils.to_list(self.cmd_and_log(self.env.RUBY+['-rrbconfig','-e',cmd])) + return Utils.to_list(self.cmd_and_log(self.env.RUBY + ['-rrbconfig', '-e', cmd])) + def read_config(key): - return read_out('puts RbConfig::CONFIG[%r]'%key) - cpppath=archdir=read_config('archdir') - if version>=(1,9,0): - ruby_hdrdir=read_config('rubyhdrdir') - cpppath+=ruby_hdrdir - if version>=(2,0,0): - cpppath+=read_config('rubyarchhdrdir') - cpppath+=[os.path.join(ruby_hdrdir[0],read_config('arch')[0])] - self.check(header_name='ruby.h',includes=cpppath,errmsg='could not find ruby header file',link_header_test=False) - self.env.LIBPATH_RUBYEXT=read_config('libdir') - self.env.LIBPATH_RUBYEXT+=archdir - self.env.INCLUDES_RUBYEXT=cpppath - self.env.CFLAGS_RUBYEXT=read_config('CCDLFLAGS') - self.env.rubyext_PATTERN='%s.'+read_config('DLEXT')[0] - flags=read_config('LDSHARED') - while flags and flags[0][0]!='-': - flags=flags[1:] - if len(flags)>1 and flags[1]=="ppc": - flags=flags[2:] - self.env.LINKFLAGS_RUBYEXT=flags - self.env.LINKFLAGS_RUBYEXT+=read_config('LIBS') - self.env.LINKFLAGS_RUBYEXT+=read_config('LIBRUBYARG_SHARED') + return read_out('puts RbConfig::CONFIG[%r]' % key) + + cpppath = archdir = read_config('archdir') + + if version >= (1, 9, 0): + ruby_hdrdir = read_config('rubyhdrdir') + cpppath += ruby_hdrdir + if version >= (2, 0, 0): + cpppath += read_config('rubyarchhdrdir') + cpppath += [os.path.join(ruby_hdrdir[0], read_config('arch')[0])] + + self.check(header_name='ruby.h', includes=cpppath, errmsg='could not find ruby header file', link_header_test=False) + + self.env.LIBPATH_RUBYEXT = read_config('libdir') + self.env.LIBPATH_RUBYEXT += archdir + self.env.INCLUDES_RUBYEXT = cpppath + self.env.CFLAGS_RUBYEXT = read_config('CCDLFLAGS') + self.env.rubyext_PATTERN = '%s.' + read_config('DLEXT')[0] + + # ok this is really stupid, but the command and flags are combined. + # so we try to find the first argument...
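[Editor's sketch, not part of the patch] read_config() above queries the interpreter's own build configuration; a rough standalone equivalent is below, and the returned token lists vary per installation:

    import subprocess

    def read_config(key):
        # same query that check_ruby_ext_devel() issues via cmd_and_log()
        out = subprocess.check_output(
            ['ruby', '-rrbconfig', '-e', 'puts RbConfig::CONFIG[%r]' % key])
        return out.decode().split()

    # read_config('LDSHARED') might yield e.g. ['gcc', '-shared']; the loop
    # that follows skips leading tokens until the first '-flag', dropping the
    # compiler name from the combined command-and-flags value.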
+ flags = read_config('LDSHARED') + while flags and flags[0][0] != '-': + flags = flags[1:] + + # we also want to strip out the deprecated ppc flags + if len(flags) > 1 and flags[1] == "ppc": + flags = flags[2:] + + self.env.LINKFLAGS_RUBYEXT = flags + self.env.LINKFLAGS_RUBYEXT += read_config('LIBS') + self.env.LINKFLAGS_RUBYEXT += read_config('LIBRUBYARG_SHARED') + if Options.options.rubyarchdir: - self.env.ARCHDIR_RUBY=Options.options.rubyarchdir + self.env.ARCHDIR_RUBY = Options.options.rubyarchdir else: - self.env.ARCHDIR_RUBY=read_config('sitearchdir')[0] + self.env.ARCHDIR_RUBY = read_config('sitearchdir')[0] + if Options.options.rubylibdir: - self.env.LIBDIR_RUBY=Options.options.rubylibdir + self.env.LIBDIR_RUBY = Options.options.rubylibdir else: - self.env.LIBDIR_RUBY=read_config('sitelibdir')[0] + self.env.LIBDIR_RUBY = read_config('sitelibdir')[0] + @conf -def check_ruby_module(self,module_name): - self.start_msg('Ruby module %s'%module_name) +def check_ruby_module(self, module_name): + """ + Check if the selected ruby interpreter can require the given ruby module:: + + def configure(conf): + conf.check_ruby_module('libxml') + + :param module_name: module + :type module_name: string + """ + self.start_msg('Ruby module %s' % module_name) try: - self.cmd_and_log(self.env.RUBY+['-e','require \'%s\';puts 1'%module_name]) + self.cmd_and_log(self.env.RUBY + ['-e', 'require \'%s\';puts 1' % module_name]) except Errors.WafError: self.end_msg(False) - self.fatal('Could not find the ruby module %r'%module_name) + self.fatal('Could not find the ruby module %r' % module_name) self.end_msg(True) + @extension('.rb') -def process(self,node): - return self.create_task('run_ruby',node) +def process(self, node): + return self.create_task('run_ruby', node) + class run_ruby(Task.Task): - run_str='${RUBY} ${RBFLAGS} -I ${SRC[0].parent.abspath()} ${SRC}' + """ + Task to run ruby files detected by file extension .rb:: + + def options(opt): + opt.load('ruby') + + def configure(ctx): + ctx.check_ruby_version() + + def build(bld): + bld.env.RBFLAGS = '-e puts "hello world"' + bld(source='a_ruby_file.rb') + """ + run_str = '${RUBY} ${RBFLAGS} -I ${SRC[0].parent.abspath()} ${SRC}' + def options(opt): - opt.add_option('--with-ruby-archdir',type='string',dest='rubyarchdir',help='Specify directory where to install arch specific files') - opt.add_option('--with-ruby-libdir',type='string',dest='rubylibdir',help='Specify alternate ruby library path') - opt.add_option('--with-ruby-binary',type='string',dest='rubybinary',help='Specify alternate ruby binary') + """ + Add the ``--with-ruby-archdir``, ``--with-ruby-libdir`` and ``--with-ruby-binary`` options + """ + opt.add_option('--with-ruby-archdir', type='string', dest='rubyarchdir', help='Specify directory where to install arch specific files') + opt.add_option('--with-ruby-libdir', type='string', dest='rubylibdir', help='Specify alternate ruby library path') + opt.add_option('--with-ruby-binary', type='string', dest='rubybinary', help='Specify alternate ruby binary') + diff -Nru lilv-0.24.4~dfsg0/waflib/Tools/suncc.py lilv-0.24.6/waflib/Tools/suncc.py --- lilv-0.24.4~dfsg0/waflib/Tools/suncc.py 2018-06-27 05:22:05.000000000 +0000 +++ lilv-0.24.6/waflib/Tools/suncc.py 2019-06-06 20:19:08.000000000 +0000 @@ -1,44 +1,62 @@ -#! /usr/bin/env python +#!/usr/bin/env python # encoding: utf-8 -# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file +# Thomas Nagy, 2006-2018 (ita) +# Ralf Habacker, 2006 (rh) from waflib import Errors -from waflib.Tools import ccroot,ar +from waflib.Tools import ccroot, ar from waflib.Configure import conf + @conf def find_scc(conf): - v=conf.env - cc=conf.find_program('cc',var='CC') + """ + Detects the Sun C compiler + """ + v = conf.env + cc = conf.find_program('cc', var='CC') try: - conf.cmd_and_log(cc+['-flags']) + conf.cmd_and_log(cc + ['-flags']) except Errors.WafError: - conf.fatal('%r is not a Sun compiler'%cc) - v.CC_NAME='sun' + conf.fatal('%r is not a Sun compiler' % cc) + v.CC_NAME = 'sun' conf.get_suncc_version(cc) + @conf def scc_common_flags(conf): - v=conf.env - v.CC_SRC_F=[] - v.CC_TGT_F=['-c','-o',''] + """ + Flags required for executing the sun C compiler + """ + v = conf.env + + v.CC_SRC_F = [] + v.CC_TGT_F = ['-c', '-o', ''] + if not v.LINK_CC: - v.LINK_CC=v.CC - v.CCLNK_SRC_F='' - v.CCLNK_TGT_F=['-o',''] - v.CPPPATH_ST='-I%s' - v.DEFINES_ST='-D%s' - v.LIB_ST='-l%s' - v.LIBPATH_ST='-L%s' - v.STLIB_ST='-l%s' - v.STLIBPATH_ST='-L%s' - v.SONAME_ST='-Wl,-h,%s' - v.SHLIB_MARKER='-Bdynamic' - v.STLIB_MARKER='-Bstatic' - v.cprogram_PATTERN='%s' - v.CFLAGS_cshlib=['-xcode=pic32','-DPIC'] - v.LINKFLAGS_cshlib=['-G'] - v.cshlib_PATTERN='lib%s.so' - v.LINKFLAGS_cstlib=['-Bstatic'] - v.cstlib_PATTERN='lib%s.a' + v.LINK_CC = v.CC + + v.CCLNK_SRC_F = '' + v.CCLNK_TGT_F = ['-o', ''] + v.CPPPATH_ST = '-I%s' + v.DEFINES_ST = '-D%s' + + v.LIB_ST = '-l%s' # template for adding libs + v.LIBPATH_ST = '-L%s' # template for adding libpaths + v.STLIB_ST = '-l%s' + v.STLIBPATH_ST = '-L%s' + + v.SONAME_ST = '-Wl,-h,%s' + v.SHLIB_MARKER = '-Bdynamic' + v.STLIB_MARKER = '-Bstatic' + + v.cprogram_PATTERN = '%s' + + v.CFLAGS_cshlib = ['-xcode=pic32', '-DPIC'] + v.LINKFLAGS_cshlib = ['-G'] + v.cshlib_PATTERN = 'lib%s.so' + + v.LINKFLAGS_cstlib = ['-Bstatic'] + v.cstlib_PATTERN = 'lib%s.a' + def configure(conf): conf.find_scc() conf.find_ar() @@ -46,3 +64,4 @@ conf.cc_load_tools() conf.cc_add_flags() conf.link_add_flags() + diff -Nru lilv-0.24.4~dfsg0/waflib/Tools/suncxx.py lilv-0.24.6/waflib/Tools/suncxx.py --- lilv-0.24.4~dfsg0/waflib/Tools/suncxx.py 2018-06-27 05:22:05.000000000 +0000 +++ lilv-0.24.6/waflib/Tools/suncxx.py 2019-06-06 20:19:08.000000000 +0000 @@ -1,44 +1,62 @@ -#! /usr/bin/env python +#!/usr/bin/env python # encoding: utf-8 -# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file +# Thomas Nagy, 2006-2018 (ita) +# Ralf Habacker, 2006 (rh) from waflib import Errors -from waflib.Tools import ccroot,ar +from waflib.Tools import ccroot, ar from waflib.Configure import conf + @conf def find_sxx(conf): - v=conf.env - cc=conf.find_program(['CC','c++'],var='CXX') + """ + Detects the sun C++ compiler + """ + v = conf.env + cc = conf.find_program(['CC', 'c++'], var='CXX') try: - conf.cmd_and_log(cc+['-flags']) + conf.cmd_and_log(cc + ['-flags']) except Errors.WafError: - conf.fatal('%r is not a Sun compiler'%cc) - v.CXX_NAME='sun' + conf.fatal('%r is not a Sun compiler' % cc) + v.CXX_NAME = 'sun' conf.get_suncc_version(cc) + @conf def sxx_common_flags(conf): - v=conf.env - v.CXX_SRC_F=[] - v.CXX_TGT_F=['-c','-o',''] + """ + Flags required for executing the sun C++ compiler + """ + v = conf.env + + v.CXX_SRC_F = [] + v.CXX_TGT_F = ['-c', '-o', ''] + if not v.LINK_CXX: - v.LINK_CXX=v.CXX - v.CXXLNK_SRC_F=[] - v.CXXLNK_TGT_F=['-o',''] - v.CPPPATH_ST='-I%s' - v.DEFINES_ST='-D%s' - v.LIB_ST='-l%s' - v.LIBPATH_ST='-L%s' - v.STLIB_ST='-l%s' - v.STLIBPATH_ST='-L%s' - v.SONAME_ST='-Wl,-h,%s' - v.SHLIB_MARKER='-Bdynamic' - v.STLIB_MARKER='-Bstatic' - v.cxxprogram_PATTERN='%s' - v.CXXFLAGS_cxxshlib=['-xcode=pic32','-DPIC'] - v.LINKFLAGS_cxxshlib=['-G'] - v.cxxshlib_PATTERN='lib%s.so' - v.LINKFLAGS_cxxstlib=['-Bstatic'] - v.cxxstlib_PATTERN='lib%s.a' + v.LINK_CXX = v.CXX + + v.CXXLNK_SRC_F = [] + v.CXXLNK_TGT_F = ['-o', ''] + v.CPPPATH_ST = '-I%s' + v.DEFINES_ST = '-D%s' + + v.LIB_ST = '-l%s' # template for adding libs + v.LIBPATH_ST = '-L%s' # template for adding libpaths + v.STLIB_ST = '-l%s' + v.STLIBPATH_ST = '-L%s' + + v.SONAME_ST = '-Wl,-h,%s' + v.SHLIB_MARKER = '-Bdynamic' + v.STLIB_MARKER = '-Bstatic' + + v.cxxprogram_PATTERN = '%s' + + v.CXXFLAGS_cxxshlib = ['-xcode=pic32', '-DPIC'] + v.LINKFLAGS_cxxshlib = ['-G'] + v.cxxshlib_PATTERN = 'lib%s.so' + + v.LINKFLAGS_cxxstlib = ['-Bstatic'] + v.cxxstlib_PATTERN = 'lib%s.a' + def configure(conf): conf.find_sxx() conf.find_ar() @@ -46,3 +64,4 @@ conf.cxx_load_tools() conf.cxx_add_flags() conf.link_add_flags() + diff -Nru lilv-0.24.4~dfsg0/waflib/Tools/tex.py lilv-0.24.6/waflib/Tools/tex.py --- lilv-0.24.4~dfsg0/waflib/Tools/tex.py 2018-06-27 05:22:05.000000000 +0000 +++ lilv-0.24.6/waflib/Tools/tex.py 2019-06-06 20:19:08.000000000 +0000 @@ -1,327 +1,543 @@ -#! /usr/bin/env python +#!/usr/bin/env python # encoding: utf-8 -# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file +# Thomas Nagy, 2006-2018 (ita) -import os,re -from waflib import Utils,Task,Errors,Logs,Node -from waflib.TaskGen import feature,before_method -re_bibunit=re.compile(r'\\(?P<type>putbib)\[(?P<file>[^\[\]]*)\]',re.M) +""" +TeX/LaTeX/PDFLaTeX/XeLaTeX support + +Example:: + + def configure(conf): + conf.load('tex') + if not conf.env.LATEX: + conf.fatal('The program LaTex is required') + + def build(bld): + bld( + features = 'tex', + type = 'latex', # pdflatex or xelatex + source = 'document.ltx', # mandatory, the source + outs = 'ps', # 'pdf' or 'ps pdf' + deps = 'crossreferencing.lst', # to give dependencies directly + prompt = 1, # 0 for the batch mode + ) + +Notes: + +- To configure with a special program, use:: + + $ PDFLATEX=luatex waf configure + +- This tool does not use the target attribute of the task generator + (``bld(target=...)``); the target file name is built from the source + base name and the output type(s) +""" + +import os, re +from waflib import Utils, Task, Errors, Logs, Node +from waflib.TaskGen import feature, before_method + +re_bibunit = re.compile(r'\\(?P<type>putbib)\[(?P<file>[^\[\]]*)\]',re.M) def bibunitscan(self): - node=self.inputs[0] - nodes=[] + """ + Parses TeX inputs and try to find the *bibunit* file dependencies + + :return: list of bibunit files + :rtype: list of :py:class:`waflib.Node.Node` + """ + node = self.inputs[0] + + nodes = [] if not node: return nodes - code=node.read() + + code = node.read() for match in re_bibunit.finditer(code): - path=match.group('file') + path = match.group('file') if path: - found=None - for k in('','.bib'): - Logs.debug('tex: trying %s%s',path,k) - fi=node.parent.find_resource(path+k) + found = None + for k in ('', '.bib'): + # add another loop for the tex include paths? + Logs.debug('tex: trying %s%s', path, k) + fi = node.parent.find_resource(path + k) if fi: - found=True + found = True nodes.append(fi) + # no break if not found: - Logs.debug('tex: could not find %s',path) - Logs.debug('tex: found the following bibunit files: %s',nodes) + Logs.debug('tex: could not find %s', path) + + Logs.debug('tex: found the following bibunit files: %s', nodes) return nodes -exts_deps_tex=['','.ltx','.tex','.bib','.pdf','.png','.eps','.ps','.sty'] -exts_tex=['.ltx','.tex'] -re_tex=re.compile(r'\\(?P<type>usepackage|RequirePackage|include|bibliography([^\[\]{}]*)|putbib|includegraphics|input|import|bringin|lstinputlisting)(\[[^\[\]]*\])?{(?P<file>[^{}]*)}',re.M) -g_bibtex_re=re.compile('bibdata',re.M) -g_glossaries_re=re.compile('\\@newglossary',re.M) + +exts_deps_tex = ['', '.ltx', '.tex', '.bib', '.pdf', '.png', '.eps', '.ps', '.sty'] +"""List of typical file extensions included in latex files""" + +exts_tex = ['.ltx', '.tex'] +"""List of typical file extensions that contain latex""" + +re_tex = re.compile(r'\\(?P<type>usepackage|RequirePackage|include|bibliography([^\[\]{}]*)|putbib|includegraphics|input|import|bringin|lstinputlisting)(\[[^\[\]]*\])?{(?P<file>[^{}]*)}',re.M) +"""Regexp for expressions that may include latex files""" + +g_bibtex_re = re.compile('bibdata', re.M) +"""Regexp for bibtex files""" + +g_glossaries_re = re.compile('\\@newglossary', re.M) +"""Regexp for expressions that create glossaries""" + class tex(Task.Task): - bibtex_fun,_=Task.compile_fun('${BIBTEX} ${BIBTEXFLAGS} ${SRCFILE}',shell=False) - bibtex_fun.__doc__=""" + """ + Compiles a tex/latex file. + + .. 
inheritance-diagram:: waflib.Tools.tex.latex waflib.Tools.tex.xelatex waflib.Tools.tex.pdflatex + """ + + bibtex_fun, _ = Task.compile_fun('${BIBTEX} ${BIBTEXFLAGS} ${SRCFILE}', shell=False) + bibtex_fun.__doc__ = """ Execute the program **bibtex** """ - makeindex_fun,_=Task.compile_fun('${MAKEINDEX} ${MAKEINDEXFLAGS} ${SRCFILE}',shell=False) - makeindex_fun.__doc__=""" + + makeindex_fun, _ = Task.compile_fun('${MAKEINDEX} ${MAKEINDEXFLAGS} ${SRCFILE}', shell=False) + makeindex_fun.__doc__ = """ Execute the program **makeindex** """ - makeglossaries_fun,_=Task.compile_fun('${MAKEGLOSSARIES} ${SRCFILE}',shell=False) - makeglossaries_fun.__doc__=""" + + makeglossaries_fun, _ = Task.compile_fun('${MAKEGLOSSARIES} ${SRCFILE}', shell=False) + makeglossaries_fun.__doc__ = """ Execute the program **makeglossaries** """ - def exec_command(self,cmd,**kw): + + def exec_command(self, cmd, **kw): + """ + Executes TeX commands without buffering (latex may prompt for inputs) + + :return: the return code + :rtype: int + """ if self.env.PROMPT_LATEX: - kw['stdout']=kw['stderr']=None - return super(tex,self).exec_command(cmd,**kw) - def scan_aux(self,node): - nodes=[node] - re_aux=re.compile(r'\\@input{(?P<file>[^{}]*)}',re.M) + # capture the outputs in configuration tests + kw['stdout'] = kw['stderr'] = None + return super(tex, self).exec_command(cmd, **kw) + + def scan_aux(self, node): + """ + Recursive regex-based scanner that finds included auxiliary files. + """ + nodes = [node] + re_aux = re.compile(r'\\@input{(?P<file>[^{}]*)}', re.M) + def parse_node(node): - code=node.read() + code = node.read() for match in re_aux.finditer(code): - path=match.group('file') - found=node.parent.find_or_declare(path) + path = match.group('file') + found = node.parent.find_or_declare(path) if found and found not in nodes: - Logs.debug('tex: found aux node %r',found) + Logs.debug('tex: found aux node %r', found) nodes.append(found) parse_node(found) parse_node(node) return nodes + def scan(self): - node=self.inputs[0] - nodes=[] - names=[] - seen=[] + """ + Recursive regex-based scanner that finds latex dependencies. 
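[Editor's sketch with an invented input line, not part of the patch] A quick check of what the scanner's regex extracts from a piece of LaTeX source:

    import re
    re_tex = re.compile(r'\\(?P<type>usepackage|RequirePackage|include|'
        r'bibliography([^\[\]{}]*)|putbib|includegraphics|input|import|'
        r'bringin|lstinputlisting)(\[[^\[\]]*\])?{(?P<file>[^{}]*)}', re.M)
    m = re_tex.search(r'\includegraphics[width=\textwidth]{figs/plot}')
    print(m.group('type'), m.group('file'))  # -> includegraphics figs/plot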
It uses :py:attr:`waflib.Tools.tex.re_tex` + + Depending on your needs you might want: + + * to change re_tex:: + + from waflib.Tools import tex + tex.re_tex = myregex + + * or to change the method scan from the latex tasks:: + + from waflib.Task import classes + classes['latex'].scan = myscanfunction + """ + node = self.inputs[0] + + nodes = [] + names = [] + seen = [] if not node: - return(nodes,names) + return (nodes, names) + def parse_node(node): if node in seen: return seen.append(node) - code=node.read() + code = node.read() for match in re_tex.finditer(code): - multibib=match.group('type') + + multibib = match.group('type') if multibib and multibib.startswith('bibliography'): - multibib=multibib[len('bibliography'):] + multibib = multibib[len('bibliography'):] if multibib.startswith('style'): continue else: - multibib=None + multibib = None + for path in match.group('file').split(','): if path: - add_name=True - found=None + add_name = True + found = None for k in exts_deps_tex: + + # issue 1067, scan in all texinputs folders for up in self.texinputs_nodes: - Logs.debug('tex: trying %s%s',path,k) - found=up.find_resource(path+k) + Logs.debug('tex: trying %s%s', path, k) + found = up.find_resource(path + k) if found: break + + for tsk in self.generator.tasks: if not found or found in tsk.outputs: break else: nodes.append(found) - add_name=False + add_name = False for ext in exts_tex: if found.name.endswith(ext): parse_node(found) break + + # multibib stuff if found and multibib and found.name.endswith('.bib'): try: self.multibibs.append(found) except AttributeError: - self.multibibs=[found] + self.multibibs = [found] + + # no break, people are crazy if add_name: names.append(path) parse_node(node) + for x in nodes: x.parent.get_bld().mkdir() - Logs.debug("tex: found the following : %s and names %s",nodes,names) - return(nodes,names) - def check_status(self,msg,retcode): - if retcode!=0: - raise Errors.WafError('%r command exit status %r'%(msg,retcode)) - def info(self,*k,**kw): + + Logs.debug("tex: found the following : %s and names %s", nodes, names) + return (nodes, names) + + def check_status(self, msg, retcode): + """ + Checks an exit status and raise an error with a particular message + + :param msg: message to display if the code is non-zero + :type msg: string + :param retcode: condition + :type retcode: boolean + """ + if retcode != 0: + raise Errors.WafError('%r command exit status %r' % (msg, retcode)) + + def info(self, *k, **kw): try: - info=self.generator.bld.conf.logger.info + info = self.generator.bld.conf.logger.info except AttributeError: - info=Logs.info - info(*k,**kw) + info = Logs.info + info(*k, **kw) + def bibfile(self): + """ + Parses *.aux* files to find bibfiles to process. 
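[Editor's sketch; the helper name run_bibtex and its arguments are invented] The environment handling performed by bibfile() here reduces to this pattern: bibtex is pointed at the same search path that TEXINPUTS uses, once per .aux file mentioning \bibdata:

    import os, subprocess

    def run_bibtex(aux_basename, texinputs_path):
        env = dict(os.environ)
        # bibtex resolves .bib and .bst files through these two variables
        env.update({'BIBINPUTS': texinputs_path, 'BSTINPUTS': texinputs_path})
        subprocess.check_call(['bibtex', aux_basename], env=env)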
+ If present, execute :py:meth:`waflib.Tools.tex.tex.bibtex_fun` + """ for aux_node in self.aux_nodes: try: - ct=aux_node.read() + ct = aux_node.read() except EnvironmentError: - Logs.error('Error reading %s: %r',aux_node.abspath()) + Logs.error('Error reading %s: %r', aux_node.abspath()) continue + if g_bibtex_re.findall(ct): self.info('calling bibtex') - self.env.env={} + + self.env.env = {} self.env.env.update(os.environ) - self.env.env.update({'BIBINPUTS':self.texinputs(),'BSTINPUTS':self.texinputs()}) - self.env.SRCFILE=aux_node.name[:-4] - self.check_status('error when calling bibtex',self.bibtex_fun()) - for node in getattr(self,'multibibs',[]): - self.env.env={} + self.env.env.update({'BIBINPUTS': self.texinputs(), 'BSTINPUTS': self.texinputs()}) + self.env.SRCFILE = aux_node.name[:-4] + self.check_status('error when calling bibtex', self.bibtex_fun()) + + for node in getattr(self, 'multibibs', []): + self.env.env = {} self.env.env.update(os.environ) - self.env.env.update({'BIBINPUTS':self.texinputs(),'BSTINPUTS':self.texinputs()}) - self.env.SRCFILE=node.name[:-4] - self.check_status('error when calling bibtex',self.bibtex_fun()) + self.env.env.update({'BIBINPUTS': self.texinputs(), 'BSTINPUTS': self.texinputs()}) + self.env.SRCFILE = node.name[:-4] + self.check_status('error when calling bibtex', self.bibtex_fun()) + def bibunits(self): + """ + Parses *.aux* file to find bibunit files. If there are bibunit files, + runs :py:meth:`waflib.Tools.tex.tex.bibtex_fun`. + """ try: - bibunits=bibunitscan(self) + bibunits = bibunitscan(self) except OSError: Logs.error('error bibunitscan') else: if bibunits: - fn=['bu'+str(i)for i in range(1,len(bibunits)+1)] + fn = ['bu' + str(i) for i in range(1, len(bibunits) + 1)] if fn: self.info('calling bibtex on bibunits') + for f in fn: - self.env.env={'BIBINPUTS':self.texinputs(),'BSTINPUTS':self.texinputs()} - self.env.SRCFILE=f - self.check_status('error when calling bibtex',self.bibtex_fun()) + self.env.env = {'BIBINPUTS': self.texinputs(), 'BSTINPUTS': self.texinputs()} + self.env.SRCFILE = f + self.check_status('error when calling bibtex', self.bibtex_fun()) + def makeindex(self): - self.idx_node=self.inputs[0].change_ext('.idx') + """ + Searches the filesystem for *.idx* files to process. If present, + runs :py:meth:`waflib.Tools.tex.tex.makeindex_fun` + """ + self.idx_node = self.inputs[0].change_ext('.idx') try: - idx_path=self.idx_node.abspath() + idx_path = self.idx_node.abspath() os.stat(idx_path) except OSError: - self.info('index file %s absent, not calling makeindex',idx_path) + self.info('index file %s absent, not calling makeindex', idx_path) else: self.info('calling makeindex') - self.env.SRCFILE=self.idx_node.name - self.env.env={} - self.check_status('error when calling makeindex %s'%idx_path,self.makeindex_fun()) + + self.env.SRCFILE = self.idx_node.name + self.env.env = {} + self.check_status('error when calling makeindex %s' % idx_path, self.makeindex_fun()) + def bibtopic(self): - p=self.inputs[0].parent.get_bld() - if os.path.exists(os.path.join(p.abspath(),'btaux.aux')): - self.aux_nodes+=p.ant_glob('*[0-9].aux') + """ + Lists additional .aux files from the bibtopic package + """ + p = self.inputs[0].parent.get_bld() + if os.path.exists(os.path.join(p.abspath(), 'btaux.aux')): + self.aux_nodes += p.ant_glob('*[0-9].aux') + def makeglossaries(self): - src_file=self.inputs[0].abspath() - base_file=os.path.basename(src_file) - base,_=os.path.splitext(base_file) + """ + Lists additional glossaries from .aux files. 
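[Editor's illustration with invented data, not part of the patch] Note that bibunits() above derives the bibtex job names purely from the number of bibunit files found:

    bibunits = ['chap1.bib', 'chap2.bib', 'chap3.bib']  # pretend scan result
    fn = ['bu' + str(i) for i in range(1, len(bibunits) + 1)]
    print(fn)  # ['bu1', 'bu2', 'bu3'], matching bu1.aux, bu2.aux, ... on disk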
If present, runs the makeglossaries program. + """ + src_file = self.inputs[0].abspath() + base_file = os.path.basename(src_file) + base, _ = os.path.splitext(base_file) for aux_node in self.aux_nodes: try: - ct=aux_node.read() + ct = aux_node.read() except EnvironmentError: - Logs.error('Error reading %s: %r',aux_node.abspath()) + Logs.error('Error reading %s: %r', aux_node.abspath()) continue + if g_glossaries_re.findall(ct): if not self.env.MAKEGLOSSARIES: raise Errors.WafError("The program 'makeglossaries' is missing!") Logs.warn('calling makeglossaries') - self.env.SRCFILE=base - self.check_status('error when calling makeglossaries %s'%base,self.makeglossaries_fun()) + self.env.SRCFILE = base + self.check_status('error when calling makeglossaries %s' % base, self.makeglossaries_fun()) return + def texinputs(self): - return os.pathsep.join([k.abspath()for k in self.texinputs_nodes])+os.pathsep + """ + Returns the list of texinput nodes as a string suitable for the TEXINPUTS environment variables + + :rtype: string + """ + return os.pathsep.join([k.abspath() for k in self.texinputs_nodes]) + os.pathsep + def run(self): - env=self.env + """ + Runs the whole TeX build process + + Multiple passes are required depending on the usage of cross-references, + bibliographies, glossaries, indexes and additional contents + The appropriate TeX compiler is called until the *.aux* files stop changing. + """ + env = self.env + if not env.PROMPT_LATEX: - env.append_value('LATEXFLAGS','-interaction=batchmode') - env.append_value('PDFLATEXFLAGS','-interaction=batchmode') - env.append_value('XELATEXFLAGS','-interaction=batchmode') - self.cwd=self.inputs[0].parent.get_bld() - self.info('first pass on %s',self.__class__.__name__) - cur_hash=self.hash_aux_nodes() + env.append_value('LATEXFLAGS', '-interaction=batchmode') + env.append_value('PDFLATEXFLAGS', '-interaction=batchmode') + env.append_value('XELATEXFLAGS', '-interaction=batchmode') + + # important, set the cwd for everybody + self.cwd = self.inputs[0].parent.get_bld() + + self.info('first pass on %s', self.__class__.__name__) + + # Hash .aux files before even calling the LaTeX compiler + cur_hash = self.hash_aux_nodes() + self.call_latex() + + # Find the .aux files again since bibtex processing can require it self.hash_aux_nodes() + self.bibtopic() self.bibfile() self.bibunits() self.makeindex() self.makeglossaries() + for i in range(10): - prev_hash=cur_hash - cur_hash=self.hash_aux_nodes() + # There is no need to call latex again if the .aux hash value has not changed + prev_hash = cur_hash + cur_hash = self.hash_aux_nodes() if not cur_hash: Logs.error('No aux.h to process') - if cur_hash and cur_hash==prev_hash: + if cur_hash and cur_hash == prev_hash: break - self.info('calling %s',self.__class__.__name__) + + # run the command + self.info('calling %s', self.__class__.__name__) self.call_latex() + def hash_aux_nodes(self): + """ + Returns a hash of the .aux file contents + + :rtype: string or bytes + """ try: self.aux_nodes except AttributeError: try: - self.aux_nodes=self.scan_aux(self.inputs[0].change_ext('.aux')) + self.aux_nodes = self.scan_aux(self.inputs[0].change_ext('.aux')) except IOError: return None - return Utils.h_list([Utils.h_file(x.abspath())for x in self.aux_nodes]) + return Utils.h_list([Utils.h_file(x.abspath()) for x in self.aux_nodes]) + def call_latex(self): - self.env.env={} + """ + Runs the TeX compiler once + """ + self.env.env = {} self.env.env.update(os.environ) - self.env.env.update({'TEXINPUTS':self.texinputs()}) - 
self.env.SRCFILE=self.inputs[0].abspath() - self.check_status('error when calling latex',self.texfun()) + self.env.env.update({'TEXINPUTS': self.texinputs()}) + self.env.SRCFILE = self.inputs[0].abspath() + self.check_status('error when calling latex', self.texfun()) + class latex(tex): - texfun,vars=Task.compile_fun('${LATEX} ${LATEXFLAGS} ${SRCFILE}',shell=False) + "Compiles LaTeX files" + texfun, vars = Task.compile_fun('${LATEX} ${LATEXFLAGS} ${SRCFILE}', shell=False) + class pdflatex(tex): - texfun,vars=Task.compile_fun('${PDFLATEX} ${PDFLATEXFLAGS} ${SRCFILE}',shell=False) + "Compiles PdfLaTeX files" + texfun, vars = Task.compile_fun('${PDFLATEX} ${PDFLATEXFLAGS} ${SRCFILE}', shell=False) + class xelatex(tex): - texfun,vars=Task.compile_fun('${XELATEX} ${XELATEXFLAGS} ${SRCFILE}',shell=False) + "XeLaTeX files" + texfun, vars = Task.compile_fun('${XELATEX} ${XELATEXFLAGS} ${SRCFILE}', shell=False) + class dvips(Task.Task): - run_str='${DVIPS} ${DVIPSFLAGS} ${SRC} -o ${TGT}' - color='BLUE' - after=['latex','pdflatex','xelatex'] + "Converts dvi files to postscript" + run_str = '${DVIPS} ${DVIPSFLAGS} ${SRC} -o ${TGT}' + color = 'BLUE' + after = ['latex', 'pdflatex', 'xelatex'] + class dvipdf(Task.Task): - run_str='${DVIPDF} ${DVIPDFFLAGS} ${SRC} ${TGT}' - color='BLUE' - after=['latex','pdflatex','xelatex'] + "Converts dvi files to pdf" + run_str = '${DVIPDF} ${DVIPDFFLAGS} ${SRC} ${TGT}' + color = 'BLUE' + after = ['latex', 'pdflatex', 'xelatex'] + class pdf2ps(Task.Task): - run_str='${PDF2PS} ${PDF2PSFLAGS} ${SRC} ${TGT}' - color='BLUE' - after=['latex','pdflatex','xelatex'] + "Converts pdf files to postscript" + run_str = '${PDF2PS} ${PDF2PSFLAGS} ${SRC} ${TGT}' + color = 'BLUE' + after = ['latex', 'pdflatex', 'xelatex'] + @feature('tex') @before_method('process_source') def apply_tex(self): - if not getattr(self,'type',None)in('latex','pdflatex','xelatex'): - self.type='pdflatex' - outs=Utils.to_list(getattr(self,'outs',[])) + """ + Creates :py:class:`waflib.Tools.tex.tex` objects, and + dvips/dvipdf/pdf2ps tasks if necessary (outs='ps', etc). 
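[Editor's paraphrase, not the literal implementation] The convergence logic in run() above boils down to the following fixed-point sketch:

    def build_until_stable(compile_once, hash_aux, max_passes=10):
        cur = hash_aux()      # hash the .aux files before the first pass
        compile_once()
        hash_aux()            # bibtex and friends may need fresh .aux files
        # ... bibtopic/bibfile/bibunits/makeindex/makeglossaries run here ...
        for _ in range(max_passes):
            prev, cur = cur, hash_aux()
            if cur and cur == prev:
                break         # cross-references have stabilised
            compile_once()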
+ """ + if not getattr(self, 'type', None) in ('latex', 'pdflatex', 'xelatex'): + self.type = 'pdflatex' + + outs = Utils.to_list(getattr(self, 'outs', [])) + + # prompt for incomplete files (else the batchmode is used) try: self.generator.bld.conf except AttributeError: - default_prompt=False + default_prompt = False else: - default_prompt=True - self.env.PROMPT_LATEX=getattr(self,'prompt',default_prompt) - deps_lst=[] - if getattr(self,'deps',None): - deps=self.to_list(self.deps) + default_prompt = True + self.env.PROMPT_LATEX = getattr(self, 'prompt', default_prompt) + + deps_lst = [] + + if getattr(self, 'deps', None): + deps = self.to_list(self.deps) for dep in deps: - if isinstance(dep,str): - n=self.path.find_resource(dep) + if isinstance(dep, str): + n = self.path.find_resource(dep) if not n: - self.bld.fatal('Could not find %r for %r'%(dep,self)) + self.bld.fatal('Could not find %r for %r' % (dep, self)) if not n in deps_lst: deps_lst.append(n) - elif isinstance(dep,Node.Node): + elif isinstance(dep, Node.Node): deps_lst.append(dep) + for node in self.to_nodes(self.source): - if self.type=='latex': - task=self.create_task('latex',node,node.change_ext('.dvi')) - elif self.type=='pdflatex': - task=self.create_task('pdflatex',node,node.change_ext('.pdf')) - elif self.type=='xelatex': - task=self.create_task('xelatex',node,node.change_ext('.pdf')) - task.env=self.env + if self.type == 'latex': + task = self.create_task('latex', node, node.change_ext('.dvi')) + elif self.type == 'pdflatex': + task = self.create_task('pdflatex', node, node.change_ext('.pdf')) + elif self.type == 'xelatex': + task = self.create_task('xelatex', node, node.change_ext('.pdf')) + + task.env = self.env + + # add the manual dependencies if deps_lst: for n in deps_lst: if not n in task.dep_nodes: task.dep_nodes.append(n) - if hasattr(self,'texinputs_nodes'): - task.texinputs_nodes=self.texinputs_nodes + + # texinputs is a nasty beast + if hasattr(self, 'texinputs_nodes'): + task.texinputs_nodes = self.texinputs_nodes else: - task.texinputs_nodes=[node.parent,node.parent.get_bld(),self.path,self.path.get_bld()] - lst=os.environ.get('TEXINPUTS','') + task.texinputs_nodes = [node.parent, node.parent.get_bld(), self.path, self.path.get_bld()] + lst = os.environ.get('TEXINPUTS', '') if self.env.TEXINPUTS: - lst+=os.pathsep+self.env.TEXINPUTS + lst += os.pathsep + self.env.TEXINPUTS if lst: - lst=lst.split(os.pathsep) + lst = lst.split(os.pathsep) for x in lst: if x: if os.path.isabs(x): - p=self.bld.root.find_node(x) + p = self.bld.root.find_node(x) if p: task.texinputs_nodes.append(p) else: - Logs.error('Invalid TEXINPUTS folder %s',x) + Logs.error('Invalid TEXINPUTS folder %s', x) else: - Logs.error('Cannot resolve relative paths in TEXINPUTS %s',x) - if self.type=='latex': - if'ps'in outs: - tsk=self.create_task('dvips',task.outputs,node.change_ext('.ps')) - tsk.env.env=dict(os.environ) - if'pdf'in outs: - tsk=self.create_task('dvipdf',task.outputs,node.change_ext('.pdf')) - tsk.env.env=dict(os.environ) - elif self.type=='pdflatex': - if'ps'in outs: - self.create_task('pdf2ps',task.outputs,node.change_ext('.ps')) - self.source=[] + Logs.error('Cannot resolve relative paths in TEXINPUTS %s', x) + + if self.type == 'latex': + if 'ps' in outs: + tsk = self.create_task('dvips', task.outputs, node.change_ext('.ps')) + tsk.env.env = dict(os.environ) + if 'pdf' in outs: + tsk = self.create_task('dvipdf', task.outputs, node.change_ext('.pdf')) + tsk.env.env = dict(os.environ) + elif self.type == 'pdflatex': + if 'ps' in 
outs: + self.create_task('pdf2ps', task.outputs, node.change_ext('.ps')) + self.source = [] + def configure(self): - v=self.env - for p in'tex latex pdflatex xelatex bibtex dvips dvipdf ps2pdf makeindex pdf2ps makeglossaries'.split(): + """ + Find the programs tex, latex and others without raising errors. + """ + v = self.env + for p in 'tex latex pdflatex xelatex bibtex dvips dvipdf ps2pdf makeindex pdf2ps makeglossaries'.split(): try: - self.find_program(p,var=p.upper()) + self.find_program(p, var=p.upper()) except self.errors.ConfigurationError: pass - v.DVIPSFLAGS='-Ppdf' + v.DVIPSFLAGS = '-Ppdf' + diff -Nru lilv-0.24.4~dfsg0/waflib/Tools/vala.py lilv-0.24.6/waflib/Tools/vala.py --- lilv-0.24.4~dfsg0/waflib/Tools/vala.py 2018-06-22 09:25:51.000000000 +0000 +++ lilv-0.24.6/waflib/Tools/vala.py 2019-06-06 20:19:08.000000000 +0000 @@ -1,111 +1,159 @@ -#! /usr/bin/env python +#!/usr/bin/env python # encoding: utf-8 -# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file +# Ali Sabil, 2007 +# Radosław Szkodziński, 2010 + +""" +At this point, vala is still unstable, so do not expect +this tool to be too stable either (apis, etc) +""" import re -from waflib import Build,Context,Errors,Logs,Node,Options,Task,Utils -from waflib.TaskGen import extension,taskgen_method +from waflib import Build, Context, Errors, Logs, Node, Options, Task, Utils +from waflib.TaskGen import extension, taskgen_method from waflib.Configure import conf + class valac(Task.Task): - vars=["VALAC","VALAC_VERSION","VALAFLAGS"] - ext_out=['.h'] + """ + Compiles vala files + """ + #run_str = "${VALAC} ${VALAFLAGS}" # ideally + #vars = ['VALAC_VERSION'] + vars = ["VALAC", "VALAC_VERSION", "VALAFLAGS"] + ext_out = ['.h'] + def run(self): - cmd=self.env.VALAC+self.env.VALAFLAGS - resources=getattr(self,'vala_exclude',[]) - cmd.extend([a.abspath()for a in self.inputs if a not in resources]) - ret=self.exec_command(cmd,cwd=self.vala_dir_node.abspath()) + cmd = self.env.VALAC + self.env.VALAFLAGS + resources = getattr(self, 'vala_exclude', []) + cmd.extend([a.abspath() for a in self.inputs if a not in resources]) + ret = self.exec_command(cmd, cwd=self.vala_dir_node.abspath()) + if ret: return ret + if self.generator.dump_deps_node: self.generator.dump_deps_node.write('\n'.join(self.generator.packages)) + return ret + @taskgen_method def init_vala_task(self): - self.profile=getattr(self,'profile','gobject') - self.packages=packages=Utils.to_list(getattr(self,'packages',[])) - self.use=Utils.to_list(getattr(self,'use',[])) + """ + Initializes the vala task with the relevant data (acts as a constructor) + """ + self.profile = getattr(self, 'profile', 'gobject') + + self.packages = packages = Utils.to_list(getattr(self, 'packages', [])) + self.use = Utils.to_list(getattr(self, 'use', [])) if packages and not self.use: - self.use=packages[:] - if self.profile=='gobject': - if not'GOBJECT'in self.use: + self.use = packages[:] # copy + + if self.profile == 'gobject': + if not 'GOBJECT' in self.use: self.use.append('GOBJECT') + def addflags(flags): - self.env.append_value('VALAFLAGS',flags) + self.env.append_value('VALAFLAGS', flags) + if self.profile: - addflags('--profile=%s'%self.profile) - valatask=self.valatask - if hasattr(self,'vala_dir'): - if isinstance(self.vala_dir,str): - valatask.vala_dir_node=self.path.get_bld().make_node(self.vala_dir) + addflags('--profile=%s' % self.profile) + + valatask = self.valatask + + # output directory + if hasattr(self, 'vala_dir'): + if isinstance(self.vala_dir, str): + 
valatask.vala_dir_node = self.path.get_bld().make_node(self.vala_dir) try: valatask.vala_dir_node.mkdir() except OSError: - raise self.bld.fatal('Cannot create the vala dir %r'%valatask.vala_dir_node) + raise self.bld.fatal('Cannot create the vala dir %r' % valatask.vala_dir_node) else: - valatask.vala_dir_node=self.vala_dir + valatask.vala_dir_node = self.vala_dir else: - valatask.vala_dir_node=self.path.get_bld() - addflags('--directory=%s'%valatask.vala_dir_node.abspath()) - if hasattr(self,'thread'): - if self.profile=='gobject': - if not'GTHREAD'in self.use: + valatask.vala_dir_node = self.path.get_bld() + addflags('--directory=%s' % valatask.vala_dir_node.abspath()) + + if hasattr(self, 'thread'): + if self.profile == 'gobject': + if not 'GTHREAD' in self.use: self.use.append('GTHREAD') else: - Logs.warn('Profile %s means no threading support',self.profile) - self.thread=False + #Vala doesn't have threading support for dova nor posix + Logs.warn('Profile %s means no threading support', self.profile) + self.thread = False + if self.thread: addflags('--thread') - self.is_lib='cprogram'not in self.features + + self.is_lib = 'cprogram' not in self.features if self.is_lib: - addflags('--library=%s'%self.target) - h_node=valatask.vala_dir_node.find_or_declare('%s.h'%self.target) + addflags('--library=%s' % self.target) + + h_node = valatask.vala_dir_node.find_or_declare('%s.h' % self.target) valatask.outputs.append(h_node) - addflags('--header=%s'%h_node.name) - valatask.outputs.append(valatask.vala_dir_node.find_or_declare('%s.vapi'%self.target)) - if getattr(self,'gir',None): - gir_node=valatask.vala_dir_node.find_or_declare('%s.gir'%self.gir) - addflags('--gir=%s'%gir_node.name) + addflags('--header=%s' % h_node.name) + + valatask.outputs.append(valatask.vala_dir_node.find_or_declare('%s.vapi' % self.target)) + + if getattr(self, 'gir', None): + gir_node = valatask.vala_dir_node.find_or_declare('%s.gir' % self.gir) + addflags('--gir=%s' % gir_node.name) valatask.outputs.append(gir_node) - self.vala_target_glib=getattr(self,'vala_target_glib',getattr(Options.options,'vala_target_glib',None)) + + self.vala_target_glib = getattr(self, 'vala_target_glib', getattr(Options.options, 'vala_target_glib', None)) if self.vala_target_glib: - addflags('--target-glib=%s'%self.vala_target_glib) - addflags(['--define=%s'%x for x in Utils.to_list(getattr(self,'vala_defines',[]))]) - packages_private=Utils.to_list(getattr(self,'packages_private',[])) - addflags(['--pkg=%s'%x for x in packages_private]) + addflags('--target-glib=%s' % self.vala_target_glib) + + addflags(['--define=%s' % x for x in Utils.to_list(getattr(self, 'vala_defines', []))]) + + packages_private = Utils.to_list(getattr(self, 'packages_private', [])) + addflags(['--pkg=%s' % x for x in packages_private]) + def _get_api_version(): - api_version='1.0' - if hasattr(Context.g_module,'API_VERSION'): - version=Context.g_module.API_VERSION.split(".") - if version[0]=="0": - api_version="0."+version[1] + api_version = '1.0' + if hasattr(Context.g_module, 'API_VERSION'): + version = Context.g_module.API_VERSION.split(".") + if version[0] == "0": + api_version = "0." 
+ version[1] else: - api_version=version[0]+".0" + api_version = version[0] + ".0" return api_version - self.includes=Utils.to_list(getattr(self,'includes',[])) - valatask.install_path=getattr(self,'install_path','') - valatask.vapi_path=getattr(self,'vapi_path','${DATAROOTDIR}/vala/vapi') - valatask.pkg_name=getattr(self,'pkg_name',self.env.PACKAGE) - valatask.header_path=getattr(self,'header_path','${INCLUDEDIR}/%s-%s'%(valatask.pkg_name,_get_api_version())) - valatask.install_binding=getattr(self,'install_binding',True) - self.vapi_dirs=vapi_dirs=Utils.to_list(getattr(self,'vapi_dirs',[])) - if hasattr(self,'use'): - local_packages=Utils.to_list(self.use)[:] - seen=[] - while len(local_packages)>0: - package=local_packages.pop() + + self.includes = Utils.to_list(getattr(self, 'includes', [])) + valatask.install_path = getattr(self, 'install_path', '') + + valatask.vapi_path = getattr(self, 'vapi_path', '${DATAROOTDIR}/vala/vapi') + valatask.pkg_name = getattr(self, 'pkg_name', self.env.PACKAGE) + valatask.header_path = getattr(self, 'header_path', '${INCLUDEDIR}/%s-%s' % (valatask.pkg_name, _get_api_version())) + valatask.install_binding = getattr(self, 'install_binding', True) + + self.vapi_dirs = vapi_dirs = Utils.to_list(getattr(self, 'vapi_dirs', [])) + #includes = [] + + if hasattr(self, 'use'): + local_packages = Utils.to_list(self.use)[:] # make sure to have a copy + seen = [] + while len(local_packages) > 0: + package = local_packages.pop() if package in seen: continue seen.append(package) + + # check if the package exists try: - package_obj=self.bld.get_tgen_by_name(package) + package_obj = self.bld.get_tgen_by_name(package) except Errors.WafError: continue + + # in practice the other task is already processed + # but this makes it explicit package_obj.post() - package_name=package_obj.target - task=getattr(package_obj,'valatask',None) + package_name = package_obj.target + task = getattr(package_obj, 'valatask', None) if task: for output in task.outputs: - if output.name==package_name+".vapi": + if output.name == package_name + ".vapi": valatask.set_run_after(task) if package_name not in packages: packages.append(package_name) @@ -113,106 +161,195 @@ vapi_dirs.append(output.parent) if output.parent not in self.includes: self.includes.append(output.parent) - if hasattr(package_obj,'use'): - lst=self.to_list(package_obj.use) + + if hasattr(package_obj, 'use'): + lst = self.to_list(package_obj.use) lst.reverse() - local_packages=[pkg for pkg in lst if pkg not in seen]+local_packages - addflags(['--pkg=%s'%p for p in packages]) + local_packages = [pkg for pkg in lst if pkg not in seen] + local_packages + + addflags(['--pkg=%s' % p for p in packages]) + for vapi_dir in vapi_dirs: - if isinstance(vapi_dir,Node.Node): - v_node=vapi_dir + if isinstance(vapi_dir, Node.Node): + v_node = vapi_dir else: - v_node=self.path.find_dir(vapi_dir) + v_node = self.path.find_dir(vapi_dir) if not v_node: - Logs.warn('Unable to locate Vala API directory: %r',vapi_dir) + Logs.warn('Unable to locate Vala API directory: %r', vapi_dir) else: - addflags('--vapidir=%s'%v_node.abspath()) - self.dump_deps_node=None + addflags('--vapidir=%s' % v_node.abspath()) + + self.dump_deps_node = None if self.is_lib and self.packages: - self.dump_deps_node=valatask.vala_dir_node.find_or_declare('%s.deps'%self.target) + self.dump_deps_node = valatask.vala_dir_node.find_or_declare('%s.deps' % self.target) valatask.outputs.append(self.dump_deps_node) + if self.is_lib and valatask.install_binding: - headers_list=[o for o 
in valatask.outputs if o.suffix()==".h"] + headers_list = [o for o in valatask.outputs if o.suffix() == ".h"] if headers_list: - self.install_vheader=self.add_install_files(install_to=valatask.header_path,install_from=headers_list) - vapi_list=[o for o in valatask.outputs if(o.suffix()in(".vapi",".deps"))] + self.install_vheader = self.add_install_files(install_to=valatask.header_path, install_from=headers_list) + + vapi_list = [o for o in valatask.outputs if (o.suffix() in (".vapi", ".deps"))] if vapi_list: - self.install_vapi=self.add_install_files(install_to=valatask.vapi_path,install_from=vapi_list) - gir_list=[o for o in valatask.outputs if o.suffix()=='.gir'] + self.install_vapi = self.add_install_files(install_to=valatask.vapi_path, install_from=vapi_list) + + gir_list = [o for o in valatask.outputs if o.suffix() == '.gir'] if gir_list: - self.install_gir=self.add_install_files(install_to=getattr(self,'gir_path','${DATAROOTDIR}/gir-1.0'),install_from=gir_list) - if hasattr(self,'vala_resources'): - nodes=self.to_nodes(self.vala_resources) - valatask.vala_exclude=getattr(valatask,'vala_exclude',[])+nodes + self.install_gir = self.add_install_files( + install_to=getattr(self, 'gir_path', '${DATAROOTDIR}/gir-1.0'), install_from=gir_list) + + if hasattr(self, 'vala_resources'): + nodes = self.to_nodes(self.vala_resources) + valatask.vala_exclude = getattr(valatask, 'vala_exclude', []) + nodes valatask.inputs.extend(nodes) for x in nodes: - addflags(['--gresources',x.abspath()]) -@extension('.vala','.gs') -def vala_file(self,node): + addflags(['--gresources', x.abspath()]) + +@extension('.vala', '.gs') +def vala_file(self, node): + """ + Compile a vala file and bind the task to *self.valatask*. If an existing vala task is already set, add the node + to its inputs. 
The typical example is:: + + def build(bld): + bld.program( + packages = 'gtk+-2.0', + target = 'vala-gtk-example', + use = 'GTK GLIB', + source = 'vala-gtk-example.vala foo.vala', + vala_defines = ['DEBUG'] # adds --define= values to the command-line + + # the following arguments are for libraries + #gir = 'hello-1.0', + #gir_path = '/tmp', + #vapi_path = '/tmp', + #pkg_name = 'hello' + # disable installing of gir, vapi and header + #install_binding = False + + # profile = 'xyz' # adds --profile= to enable profiling + # thread = True, # adds --thread, unless the profile is not 'gobject' + # vala_target_glib = 'xyz' # adds --target-glib=, can be given through the command-line option --vala-target-glib= + ) + + + :param node: vala file + :type node: :py:class:`waflib.Node.Node` + """ + try: - valatask=self.valatask + valatask = self.valatask except AttributeError: - valatask=self.valatask=self.create_task('valac') + valatask = self.valatask = self.create_task('valac') self.init_vala_task() + valatask.inputs.append(node) - name=node.name[:node.name.rfind('.')]+'.c' - c_node=valatask.vala_dir_node.find_or_declare(name) + name = node.name[:node.name.rfind('.')] + '.c' + c_node = valatask.vala_dir_node.find_or_declare(name) valatask.outputs.append(c_node) self.source.append(c_node) + @extension('.vapi') -def vapi_file(self,node): +def vapi_file(self, node): try: - valatask=self.valatask + valatask = self.valatask except AttributeError: - valatask=self.valatask=self.create_task('valac') + valatask = self.valatask = self.create_task('valac') self.init_vala_task() valatask.inputs.append(node) + @conf -def find_valac(self,valac_name,min_version): - valac=self.find_program(valac_name,var='VALAC') +def find_valac(self, valac_name, min_version): + """ + Find the valac program, and execute it to store the version + number in *conf.env.VALAC_VERSION* + + :param valac_name: program name + :type valac_name: string or list of string + :param min_version: minimum version acceptable + :type min_version: tuple of int + """ + valac = self.find_program(valac_name, var='VALAC') try: - output=self.cmd_and_log(valac+['--version']) + output = self.cmd_and_log(valac + ['--version']) except Errors.WafError: - valac_version=None + valac_version = None else: - ver=re.search(r'\d+.\d+.\d+',output).group().split('.') - valac_version=tuple([int(x)for x in ver]) - self.msg('Checking for %s version >= %r'%(valac_name,min_version),valac_version,valac_version and valac_version>=min_version) - if valac and valac_version<min_version: - self.fatal("%s version %r is too old, need >= %r"%(valac_name,valac_version,min_version)) - self.env.VALAC_VERSION=valac_version + ver = re.search(r'\d+.\d+.\d+', output).group().split('.') + valac_version = tuple([int(x) for x in ver]) + + self.msg('Checking for %s version >= %r' % (valac_name, min_version), + valac_version, valac_version and valac_version >= min_version) + if valac and valac_version < min_version: + self.fatal("%s version %r is too old, need >= %r" % (valac_name, valac_version, min_version)) + + self.env.VALAC_VERSION = valac_version return valac + @conf -def check_vala(self,min_version=(0,8,0),branch=None): +def check_vala(self, min_version=(0,8,0), branch=None): + """ + Check if a vala compiler from a given branch exists and is at least a given + version.
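
As a side note, the version test in find_valac above reduces to regex extraction plus Python tuple comparison. A minimal standalone sketch (the helper name parse_version is hypothetical, and the dots are escaped here, unlike in the regex above)::

    import re

    def parse_version(output):
        # e.g. "Vala 0.40.19" -> (0, 40, 19)
        ver = re.search(r'\d+\.\d+\.\d+', output).group().split('.')
        return tuple(int(x) for x in ver)

    # tuples compare element by element, so a tuple min_version works directly
    assert parse_version('Vala 0.40.19') >= (0, 8, 0)
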
+ + :param min_version: minimum version acceptable (0.8.0) + :type min_version: tuple + :param branch: first part of the version number, in case a snapshot is used (0, 8) + :type branch: tuple of int + """ if self.env.VALA_MINVER: - min_version=self.env.VALA_MINVER + min_version = self.env.VALA_MINVER if self.env.VALA_MINVER_BRANCH: - branch=self.env.VALA_MINVER_BRANCH + branch = self.env.VALA_MINVER_BRANCH if not branch: - branch=min_version[:2] + branch = min_version[:2] try: - find_valac(self,'valac-%d.%d'%(branch[0],branch[1]),min_version) + find_valac(self, 'valac-%d.%d' % (branch[0], branch[1]), min_version) except self.errors.ConfigurationError: - find_valac(self,'valac',min_version) + find_valac(self, 'valac', min_version) + @conf def check_vala_deps(self): + """ + Load the gobject and gthread packages if they are missing. + """ if not self.env.HAVE_GOBJECT: - pkg_args={'package':'gobject-2.0','uselib_store':'GOBJECT','args':'--cflags --libs'} - if getattr(Options.options,'vala_target_glib',None): - pkg_args['atleast_version']=Options.options.vala_target_glib + pkg_args = {'package': 'gobject-2.0', + 'uselib_store': 'GOBJECT', + 'args': '--cflags --libs'} + if getattr(Options.options, 'vala_target_glib', None): + pkg_args['atleast_version'] = Options.options.vala_target_glib self.check_cfg(**pkg_args) + if not self.env.HAVE_GTHREAD: - pkg_args={'package':'gthread-2.0','uselib_store':'GTHREAD','args':'--cflags --libs'} - if getattr(Options.options,'vala_target_glib',None): - pkg_args['atleast_version']=Options.options.vala_target_glib + pkg_args = {'package': 'gthread-2.0', + 'uselib_store': 'GTHREAD', + 'args': '--cflags --libs'} + if getattr(Options.options, 'vala_target_glib', None): + pkg_args['atleast_version'] = Options.options.vala_target_glib self.check_cfg(**pkg_args) + def configure(self): + """ + Use the following to enforce minimum vala version:: + + def configure(conf): + conf.env.VALA_MINVER = (0, 10, 0) + conf.load('vala') + """ self.load('gnu_dirs') self.check_vala_deps() self.check_vala() self.add_os_flags('VALAFLAGS') - self.env.append_unique('VALAFLAGS',['-C']) + self.env.append_unique('VALAFLAGS', ['-C']) + def options(opt): + """ + Load the :py:mod:`waflib.Tools.gnu_dirs` tool and add the ``--vala-target-glib`` command-line option + """ opt.load('gnu_dirs') - valaopts=opt.add_option_group('Vala Compiler Options') - valaopts.add_option('--vala-target-glib',default=None,dest='vala_target_glib',metavar='MAJOR.MINOR',help='Target version of glib for Vala GObject code generation') + valaopts = opt.add_option_group('Vala Compiler Options') + valaopts.add_option('--vala-target-glib', default=None, + dest='vala_target_glib', metavar='MAJOR.MINOR', + help='Target version of glib for Vala GObject code generation') + diff -Nru lilv-0.24.4~dfsg0/waflib/Tools/waf_unit_test.py lilv-0.24.6/waflib/Tools/waf_unit_test.py --- lilv-0.24.4~dfsg0/waflib/Tools/waf_unit_test.py 2018-06-27 05:22:05.000000000 +0000 +++ lilv-0.24.6/waflib/Tools/waf_unit_test.py 2019-10-19 17:59:11.000000000 +0000 @@ -1,13 +1,48 @@ -#! /usr/bin/env python +#!/usr/bin/env python # encoding: utf-8 -# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file +# Carlos Rafael Giani, 2006 +# Thomas Nagy, 2010-2018 (ita) -import os,shlex,sys -from waflib.TaskGen import feature,after_method,taskgen_method -from waflib import Utils,Task,Logs,Options +""" +Unit testing system for C/C++/D and interpreted languages providing test execution: + +* in parallel, by using ``waf -j`` +* partial (only the tests that have changed) or full (by using ``waf --alltests``) + +The tests are declared by adding the **test** feature to programs:: + + def options(opt): + opt.load('compiler_cxx waf_unit_test') + def configure(conf): + conf.load('compiler_cxx waf_unit_test') + def build(bld): + bld(features='cxx cxxprogram test', source='main.cpp', target='app') + # or + bld.program(features='test', source='main2.cpp', target='app2') + +When the build is executed, the program 'test' will be built and executed without arguments. +The success/failure is detected by looking at the return code. The status and the standard output/error +are stored on the build context. + +The results can be displayed by registering a callback function. Here is how to call +the predefined callback:: + + def build(bld): + bld(features='cxx cxxprogram test', source='main.c', target='app') + from waflib.Tools import waf_unit_test + bld.add_post_fun(waf_unit_test.summary) + +By passing --dump-test-scripts the build outputs corresponding python files +(with extension _run.py) that are useful for debugging purposes. +""" + +import os, shlex, sys +from waflib.TaskGen import feature, after_method, taskgen_method +from waflib import Utils, Task, Logs, Options from waflib.Tools import ccroot -testlock=Utils.threading.Lock() -SCRIPT_TEMPLATE="""#! %(python)s +testlock = Utils.threading.Lock() + +SCRIPT_TEMPLATE = """#! %(python)s import subprocess, sys cmd = %(cmd)r # if you want to debug with gdb: @@ -16,157 +51,246 @@ status = subprocess.call(cmd, env=env, cwd=%(cwd)r, shell=isinstance(cmd, str)) sys.exit(status) """ + @taskgen_method -def handle_ut_cwd(self,key): - cwd=getattr(self,key,None) +def handle_ut_cwd(self, key): + """ + Task generator method, used internally to limit code duplication. + This method may disappear anytime. 
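
For reference, SCRIPT_TEMPLATE above expands into a small standalone runner when --dump-test-scripts is used. With hypothetical values substituted, a generated app_run.py would look roughly like this (paths and environment are illustrative only)::

    #! /usr/bin/python3
    import subprocess, sys
    cmd = ['/home/user/proj/build/tests/app']  # hypothetical test binary
    env = {'LD_LIBRARY_PATH': '/home/user/proj/build'}  # captured test environment
    status = subprocess.call(cmd, env=env, cwd='/home/user/proj/build/tests', shell=isinstance(cmd, str))
    sys.exit(status)
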
+ """ + cwd = getattr(self, key, None) if cwd: - if isinstance(cwd,str): + if isinstance(cwd, str): + # we want a Node instance if os.path.isabs(cwd): - self.ut_cwd=self.bld.root.make_node(cwd) + self.ut_cwd = self.bld.root.make_node(cwd) else: - self.ut_cwd=self.path.make_node(cwd) + self.ut_cwd = self.path.make_node(cwd) + @feature('test_scripts') def make_interpreted_test(self): - for x in['test_scripts_source','test_scripts_template']: - if not hasattr(self,x): - Logs.warn('a test_scripts taskgen i missing %s'%x) + """Create interpreted unit tests.""" + for x in ['test_scripts_source', 'test_scripts_template']: + if not hasattr(self, x): + Logs.warn('a test_scripts taskgen i missing %s' % x) return - self.ut_run,lst=Task.compile_fun(self.test_scripts_template,shell=getattr(self,'test_scripts_shell',False)) - script_nodes=self.to_nodes(self.test_scripts_source) + + self.ut_run, lst = Task.compile_fun(self.test_scripts_template, shell=getattr(self, 'test_scripts_shell', False)) + + script_nodes = self.to_nodes(self.test_scripts_source) for script_node in script_nodes: - tsk=self.create_task('utest',[script_node]) - tsk.vars=lst+tsk.vars - tsk.env['SCRIPT']=script_node.path_from(tsk.get_cwd()) + tsk = self.create_task('utest', [script_node]) + tsk.vars = lst + tsk.vars + tsk.env['SCRIPT'] = script_node.path_from(tsk.get_cwd()) + self.handle_ut_cwd('test_scripts_cwd') - env=getattr(self,'test_scripts_env',None) + + env = getattr(self, 'test_scripts_env', None) if env: - self.ut_env=env + self.ut_env = env else: - self.ut_env=dict(os.environ) - paths=getattr(self,'test_scripts_paths',{}) - for(k,v)in paths.items(): - p=self.ut_env.get(k,'').split(os.pathsep) - if isinstance(v,str): - v=v.split(os.pathsep) - self.ut_env[k]=os.pathsep.join(p+v) + self.ut_env = dict(os.environ) + + paths = getattr(self, 'test_scripts_paths', {}) + for (k,v) in paths.items(): + p = self.ut_env.get(k, '').split(os.pathsep) + if isinstance(v, str): + v = v.split(os.pathsep) + self.ut_env[k] = os.pathsep.join(p + v) + @feature('test') -@after_method('apply_link','process_use') +@after_method('apply_link', 'process_use') def make_test(self): - if not getattr(self,'link_task',None): + """Create the unit test task. 
There can be only one unit test task per task generator.""" + if not getattr(self, 'link_task', None): return - tsk=self.create_task('utest',self.link_task.outputs) - if getattr(self,'ut_str',None): - self.ut_run,lst=Task.compile_fun(self.ut_str,shell=getattr(self,'ut_shell',False)) - tsk.vars=lst+tsk.vars + + tsk = self.create_task('utest', self.link_task.outputs) + if getattr(self, 'ut_str', None): + self.ut_run, lst = Task.compile_fun(self.ut_str, shell=getattr(self, 'ut_shell', False)) + tsk.vars = lst + tsk.vars + self.handle_ut_cwd('ut_cwd') - if not hasattr(self,'ut_paths'): - paths=[] + + if not hasattr(self, 'ut_paths'): + paths = [] for x in self.tmp_use_sorted: try: - y=self.bld.get_tgen_by_name(x).link_task + y = self.bld.get_tgen_by_name(x).link_task except AttributeError: pass else: - if not isinstance(y,ccroot.stlink_task): + if not isinstance(y, ccroot.stlink_task): paths.append(y.outputs[0].parent.abspath()) - self.ut_paths=os.pathsep.join(paths)+os.pathsep - if not hasattr(self,'ut_env'): - self.ut_env=dct=dict(os.environ) + self.ut_paths = os.pathsep.join(paths) + os.pathsep + + if not hasattr(self, 'ut_env'): + self.ut_env = dct = dict(os.environ) def add_path(var): - dct[var]=self.ut_paths+dct.get(var,'') + dct[var] = self.ut_paths + dct.get(var,'') if Utils.is_win32: add_path('PATH') - elif Utils.unversioned_sys_platform()=='darwin': + elif Utils.unversioned_sys_platform() == 'darwin': add_path('DYLD_LIBRARY_PATH') add_path('LD_LIBRARY_PATH') else: add_path('LD_LIBRARY_PATH') - if not hasattr(self,'ut_cmd'): - self.ut_cmd=getattr(Options.options,'testcmd',False) + + if not hasattr(self, 'ut_cmd'): + self.ut_cmd = getattr(Options.options, 'testcmd', False) + @taskgen_method -def add_test_results(self,tup): - Logs.debug("ut: %r",tup) +def add_test_results(self, tup): + """Override and return tup[1] to interrupt the build immediately if a test does not run""" + Logs.debug("ut: %r", tup) try: self.utest_results.append(tup) except AttributeError: - self.utest_results=[tup] + self.utest_results = [tup] try: self.bld.utest_results.append(tup) except AttributeError: - self.bld.utest_results=[tup] + self.bld.utest_results = [tup] + @Task.deep_inputs class utest(Task.Task): - color='PINK' - after=['vnum','inst'] - vars=[] + """ + Execute a unit test + """ + color = 'PINK' + after = ['vnum', 'inst'] + vars = [] + def runnable_status(self): - if getattr(Options.options,'no_tests',False): + """ + Always execute the task if ``waf --alltests`` was used; skip all + tests if ``waf --notests`` was used + """ + if getattr(Options.options, 'no_tests', False): return Task.SKIP_ME - ret=super(utest,self).runnable_status() - if ret==Task.SKIP_ME: - if getattr(Options.options,'all_tests',False): + + ret = super(utest, self).runnable_status() + if ret == Task.SKIP_ME: + if getattr(Options.options, 'all_tests', False): return Task.RUN_ME return ret + def get_test_env(self): + """ + In general, tests may require any library built anywhere in the project. + Override this method if fewer paths are needed + """ return self.generator.ut_env + def post_run(self): - super(utest,self).post_run() - if getattr(Options.options,'clear_failed_tests',False)and self.waf_unit_test_results[1]: - self.generator.bld.task_sigs[self.uid()]=None + super(utest, self).post_run() + if getattr(Options.options, 'clear_failed_tests', False) and self.waf_unit_test_results[1]: + self.generator.bld.task_sigs[self.uid()] = None + def run(self): - if hasattr(self.generator,'ut_run'): + """ + Execute the test.
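
As an illustration of the ut_str attribute handled by make_test above (the flag is hypothetical; ${SRC} refers to the task inputs, here the linked binary)::

    def build(bld):
        bld.program(features='cxx cxxprogram test',
                    source='main.cpp', target='app',
                    ut_str='${SRC[0].abspath()} --verbose')
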
The execution is always successful, and the results + are stored on ``self.generator.bld.utest_results`` for postprocessing. + + Override ``add_test_results`` to interrupt the build + """ + if hasattr(self.generator, 'ut_run'): return self.generator.ut_run(self) - self.ut_exec=getattr(self.generator,'ut_exec',[self.inputs[0].abspath()]) - ut_cmd=getattr(self.generator,'ut_cmd',False) + + self.ut_exec = getattr(self.generator, 'ut_exec', [self.inputs[0].abspath()]) + ut_cmd = getattr(self.generator, 'ut_cmd', False) if ut_cmd: - self.ut_exec=shlex.split(ut_cmd%' '.join(self.ut_exec)) + self.ut_exec = shlex.split(ut_cmd % ' '.join(self.ut_exec)) + return self.exec_command(self.ut_exec) - def exec_command(self,cmd,**kw): - Logs.debug('runner: %r',cmd) - if getattr(Options.options,'dump_test_scripts',False): - script_code=SCRIPT_TEMPLATE%{'python':sys.executable,'env':self.get_test_env(),'cwd':self.get_cwd().abspath(),'cmd':cmd} - script_file=self.inputs[0].abspath()+'_run.py' - Utils.writef(script_file,script_code) - os.chmod(script_file,Utils.O755) - if Logs.verbose>1: - Logs.info('Test debug file written as %r'%script_file) - proc=Utils.subprocess.Popen(cmd,cwd=self.get_cwd().abspath(),env=self.get_test_env(),stderr=Utils.subprocess.PIPE,stdout=Utils.subprocess.PIPE,shell=isinstance(cmd,str)) - (stdout,stderr)=proc.communicate() - self.waf_unit_test_results=tup=(self.inputs[0].abspath(),proc.returncode,stdout,stderr) + + def exec_command(self, cmd, **kw): + self.generator.bld.log_command(cmd, kw) + if getattr(Options.options, 'dump_test_scripts', False): + script_code = SCRIPT_TEMPLATE % { + 'python': sys.executable, + 'env': self.get_test_env(), + 'cwd': self.get_cwd().abspath(), + 'cmd': cmd + } + script_file = self.inputs[0].abspath() + '_run.py' + Utils.writef(script_file, script_code, encoding='utf-8') + os.chmod(script_file, Utils.O755) + if Logs.verbose > 1: + Logs.info('Test debug file written as %r' % script_file) + + proc = Utils.subprocess.Popen(cmd, cwd=self.get_cwd().abspath(), env=self.get_test_env(), + stderr=Utils.subprocess.PIPE, stdout=Utils.subprocess.PIPE, shell=isinstance(cmd,str)) + (stdout, stderr) = proc.communicate() + self.waf_unit_test_results = tup = (self.inputs[0].abspath(), proc.returncode, stdout, stderr) testlock.acquire() try: return self.generator.add_test_results(tup) finally: testlock.release() + def get_cwd(self): - return getattr(self.generator,'ut_cwd',self.inputs[0].parent) + return getattr(self.generator, 'ut_cwd', self.inputs[0].parent) + def summary(bld): - lst=getattr(bld,'utest_results',[]) + """ + Display an execution summary:: + + def build(bld): + bld(features='cxx cxxprogram test', source='main.c', target='app') + from waflib.Tools import waf_unit_test + bld.add_post_fun(waf_unit_test.summary) + """ + lst = getattr(bld, 'utest_results', []) if lst: - Logs.pprint('CYAN','execution summary') - total=len(lst) - tfail=len([x for x in lst if x[1]]) - Logs.pprint('GREEN',' tests that pass %d/%d'%(total-tfail,total)) - for(f,code,out,err)in lst: + Logs.pprint('CYAN', 'execution summary') + + total = len(lst) + tfail = len([x for x in lst if x[1]]) + + Logs.pprint('GREEN', ' tests that pass %d/%d' % (total-tfail, total)) + for (f, code, out, err) in lst: if not code: - Logs.pprint('GREEN',' %s'%f) - Logs.pprint('GREEN'if tfail==0 else'RED',' tests that fail %d/%d'%(tfail,total)) - for(f,code,out,err)in lst: + Logs.pprint('GREEN', ' %s' % f) + + Logs.pprint('GREEN' if tfail == 0 else 'RED', ' tests that fail %d/%d' % (tfail, total)) + for (f, code, 
out, err) in lst: if code: - Logs.pprint('RED',' %s'%f) + Logs.pprint('RED', ' %s' % f) + def set_exit_code(bld): - lst=getattr(bld,'utest_results',[]) - for(f,code,out,err)in lst: + """ + If any of the tests fail, waf will exit with that exit code. + This is useful if you have an automated build system which needs + to report errors from the tests. + You may use it like this:: + + def build(bld): + bld(features='cxx cxxprogram test', source='main.c', target='app') + from waflib.Tools import waf_unit_test + bld.add_post_fun(waf_unit_test.set_exit_code) + """ + lst = getattr(bld, 'utest_results', []) + for (f, code, out, err) in lst: if code: - msg=[] + msg = [] if out: - msg.append('stdout:%s%s'%(os.linesep,out.decode('utf-8'))) + msg.append('stdout:%s%s' % (os.linesep, out.decode('utf-8'))) if err: - msg.append('stderr:%s%s'%(os.linesep,err.decode('utf-8'))) + msg.append('stderr:%s%s' % (os.linesep, err.decode('utf-8'))) bld.fatal(os.linesep.join(msg)) + + def options(opt): - opt.add_option('--notests',action='store_true',default=False,help='Exec no unit tests',dest='no_tests') - opt.add_option('--alltests',action='store_true',default=False,help='Exec all unit tests',dest='all_tests') - opt.add_option('--clear-failed',action='store_true',default=False,help='Force failed unit tests to run again next time',dest='clear_failed_tests') - opt.add_option('--testcmd',action='store',default=False,dest='testcmd',help='Run the unit tests using the test-cmd string example "--testcmd="valgrind --error-exitcode=1 %s" to run under valgrind') - opt.add_option('--dump-test-scripts',action='store_true',default=False,help='Create python scripts to help debug tests',dest='dump_test_scripts') + """ + Provide the ``--alltests``, ``--notests`` and ``--testcmd`` command-line options. + """ + opt.add_option('--notests', action='store_true', default=False, help='Exec no unit tests', dest='no_tests') + opt.add_option('--alltests', action='store_true', default=False, help='Exec all unit tests', dest='all_tests') + opt.add_option('--clear-failed', action='store_true', default=False, + help='Force failed unit tests to run again next time', dest='clear_failed_tests') + opt.add_option('--testcmd', action='store', default=False, dest='testcmd', + help='Run the unit tests using the test-cmd string, e.g. --testcmd="valgrind --error-exitcode=1 %s" to run under valgrind') + opt.add_option('--dump-test-scripts', action='store_true', default=False, + help='Create python scripts to help debug tests', dest='dump_test_scripts') + diff -Nru lilv-0.24.4~dfsg0/waflib/Tools/winres.py lilv-0.24.6/waflib/Tools/winres.py --- lilv-0.24.4~dfsg0/waflib/Tools/winres.py 2018-06-22 09:25:51.000000000 +0000 +++ lilv-0.24.6/waflib/Tools/winres.py 2019-06-06 20:19:08.000000000 +0000 @@ -1,52 +1,78 @@ -#! /usr/bin/env python +#!/usr/bin/env python # encoding: utf-8 -# WARNING! Do not edit!
https://waf.io/book/index.html#_obtaining_the_waf_file +# Brant Young, 2007 + +"Process *.rc* files for C/C++: X{.rc -> [.res|.rc.o]}" import re from waflib import Task from waflib.TaskGen import extension from waflib.Tools import c_preproc + @extension('.rc') -def rc_file(self,node): - obj_ext='.rc.o' - if self.env.WINRC_TGT_F=='/fo': - obj_ext='.res' - rctask=self.create_task('winrc',node,node.change_ext(obj_ext)) +def rc_file(self, node): + """ + Binds the .rc extension to a winrc task + """ + obj_ext = '.rc.o' + if self.env.WINRC_TGT_F == '/fo': + obj_ext = '.res' + rctask = self.create_task('winrc', node, node.change_ext(obj_ext)) try: self.compiled_tasks.append(rctask) except AttributeError: - self.compiled_tasks=[rctask] -re_lines=re.compile('(?:^[ \t]*(#|%:)[ \t]*(ifdef|ifndef|if|else|elif|endif|include|import|define|undef|pragma)[ \t]*(.*?)\s*$)|''(?:^\w+[ \t]*(ICON|BITMAP|CURSOR|HTML|FONT|MESSAGETABLE|TYPELIB|REGISTRY|D3DFX)[ \t]*(.*?)\s*$)',re.IGNORECASE|re.MULTILINE) + self.compiled_tasks = [rctask] + +re_lines = re.compile( + r'(?:^[ \t]*(#|%:)[ \t]*(ifdef|ifndef|if|else|elif|endif|include|import|define|undef|pragma)[ \t]*(.*?)\s*$)|'\ + r'(?:^\w+[ \t]*(ICON|BITMAP|CURSOR|HTML|FONT|MESSAGETABLE|TYPELIB|REGISTRY|D3DFX)[ \t]*(.*?)\s*$)', + re.IGNORECASE | re.MULTILINE) + class rc_parser(c_preproc.c_parser): - def filter_comments(self,node): - code=node.read() + """ + Calculates dependencies in .rc files + """ + def filter_comments(self, node): + """ + Overrides :py:meth:`waflib.Tools.c_preproc.c_parser.filter_comments` + """ + code = node.read() if c_preproc.use_trigraphs: - for(a,b)in c_preproc.trig_def: - code=code.split(a).join(b) - code=c_preproc.re_nl.sub('',code) - code=c_preproc.re_cpp.sub(c_preproc.repl,code) - ret=[] - for m in re.finditer(re_lines,code): + for (a, b) in c_preproc.trig_def: + code = code.split(a).join(b) + code = c_preproc.re_nl.sub('', code) + code = c_preproc.re_cpp.sub(c_preproc.repl, code) + ret = [] + for m in re.finditer(re_lines, code): if m.group(2): - ret.append((m.group(2),m.group(3))) + ret.append((m.group(2), m.group(3))) else: - ret.append(('include',m.group(5))) + ret.append(('include', m.group(5))) return ret + class winrc(Task.Task): - run_str='${WINRC} ${WINRCFLAGS} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${WINRC_TGT_F} ${TGT} ${WINRC_SRC_F} ${SRC}' - color='BLUE' + """ + Compiles resource files + """ + run_str = '${WINRC} ${WINRCFLAGS} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${WINRC_TGT_F} ${TGT} ${WINRC_SRC_F} ${SRC}' + color = 'BLUE' def scan(self): - tmp=rc_parser(self.generator.includes_nodes) - tmp.start(self.inputs[0],self.env) - return(tmp.nodes,tmp.names) + tmp = rc_parser(self.generator.includes_nodes) + tmp.start(self.inputs[0], self.env) + return (tmp.nodes, tmp.names) + def configure(conf): - v=conf.env + """ + Detects the programs RC or windres, depending on the C/C++ compiler in use + """ + v = conf.env if not v.WINRC: - if v.CC_NAME=='msvc': - conf.find_program('RC',var='WINRC',path_list=v.PATH) - v.WINRC_TGT_F='/fo' - v.WINRC_SRC_F='' + if v.CC_NAME == 'msvc': + conf.find_program('RC', var='WINRC', path_list=v.PATH) + v.WINRC_TGT_F = '/fo' + v.WINRC_SRC_F = '' else: - conf.find_program('windres',var='WINRC',path_list=v.PATH) - v.WINRC_TGT_F='-o' - v.WINRC_SRC_F='-i' + conf.find_program('windres', var='WINRC', path_list=v.PATH) + v.WINRC_TGT_F = '-o' + v.WINRC_SRC_F = '-i' + diff -Nru lilv-0.24.4~dfsg0/waflib/Tools/xlc.py lilv-0.24.6/waflib/Tools/xlc.py --- lilv-0.24.4~dfsg0/waflib/Tools/xlc.py 2018-06-27 
05:22:05.000000000 +0000 +++ lilv-0.24.6/waflib/Tools/xlc.py 2019-06-06 20:19:08.000000000 +0000 @@ -1,40 +1,60 @@ -#! /usr/bin/env python +#!/usr/bin/env python # encoding: utf-8 -# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file +# Thomas Nagy, 2006-2018 (ita) +# Ralf Habacker, 2006 (rh) +# Yinon Ehrlich, 2009 +# Michael Kuhn, 2009 -from waflib.Tools import ccroot,ar +from waflib.Tools import ccroot, ar from waflib.Configure import conf + @conf def find_xlc(conf): - cc=conf.find_program(['xlc_r','xlc'],var='CC') + """ + Detects the Aix C compiler + """ + cc = conf.find_program(['xlc_r', 'xlc'], var='CC') conf.get_xlc_version(cc) - conf.env.CC_NAME='xlc' + conf.env.CC_NAME = 'xlc' + @conf def xlc_common_flags(conf): - v=conf.env - v.CC_SRC_F=[] - v.CC_TGT_F=['-c','-o'] + """ + Flags required for executing the Aix C compiler + """ + v = conf.env + + v.CC_SRC_F = [] + v.CC_TGT_F = ['-c', '-o'] + if not v.LINK_CC: - v.LINK_CC=v.CC - v.CCLNK_SRC_F=[] - v.CCLNK_TGT_F=['-o'] - v.CPPPATH_ST='-I%s' - v.DEFINES_ST='-D%s' - v.LIB_ST='-l%s' - v.LIBPATH_ST='-L%s' - v.STLIB_ST='-l%s' - v.STLIBPATH_ST='-L%s' - v.RPATH_ST='-Wl,-rpath,%s' - v.SONAME_ST=[] - v.SHLIB_MARKER=[] - v.STLIB_MARKER=[] - v.LINKFLAGS_cprogram=['-Wl,-brtl'] - v.cprogram_PATTERN='%s' - v.CFLAGS_cshlib=['-fPIC'] - v.LINKFLAGS_cshlib=['-G','-Wl,-brtl,-bexpfull'] - v.cshlib_PATTERN='lib%s.so' - v.LINKFLAGS_cstlib=[] - v.cstlib_PATTERN='lib%s.a' + v.LINK_CC = v.CC + + v.CCLNK_SRC_F = [] + v.CCLNK_TGT_F = ['-o'] + v.CPPPATH_ST = '-I%s' + v.DEFINES_ST = '-D%s' + + v.LIB_ST = '-l%s' # template for adding libs + v.LIBPATH_ST = '-L%s' # template for adding libpaths + v.STLIB_ST = '-l%s' + v.STLIBPATH_ST = '-L%s' + v.RPATH_ST = '-Wl,-rpath,%s' + + v.SONAME_ST = [] + v.SHLIB_MARKER = [] + v.STLIB_MARKER = [] + + v.LINKFLAGS_cprogram = ['-Wl,-brtl'] + v.cprogram_PATTERN = '%s' + + v.CFLAGS_cshlib = ['-fPIC'] + v.LINKFLAGS_cshlib = ['-G', '-Wl,-brtl,-bexpfull'] + v.cshlib_PATTERN = 'lib%s.so' + + v.LINKFLAGS_cstlib = [] + v.cstlib_PATTERN = 'lib%s.a' + def configure(conf): conf.find_xlc() conf.find_ar() @@ -42,3 +62,4 @@ conf.cc_load_tools() conf.cc_add_flags() conf.link_add_flags() + diff -Nru lilv-0.24.4~dfsg0/waflib/Tools/xlcxx.py lilv-0.24.6/waflib/Tools/xlcxx.py --- lilv-0.24.4~dfsg0/waflib/Tools/xlcxx.py 2018-06-27 05:22:05.000000000 +0000 +++ lilv-0.24.6/waflib/Tools/xlcxx.py 2019-06-06 20:19:08.000000000 +0000 @@ -1,40 +1,60 @@ -#! /usr/bin/env python +#!/usr/bin/env python # encoding: utf-8 -# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file +# Thomas Nagy, 2006-2018 (ita) +# Ralf Habacker, 2006 (rh) +# Yinon Ehrlich, 2009 +# Michael Kuhn, 2009 -from waflib.Tools import ccroot,ar +from waflib.Tools import ccroot, ar from waflib.Configure import conf + @conf def find_xlcxx(conf): - cxx=conf.find_program(['xlc++_r','xlc++'],var='CXX') + """ + Detects the Aix C++ compiler + """ + cxx = conf.find_program(['xlc++_r', 'xlc++'], var='CXX') conf.get_xlc_version(cxx) - conf.env.CXX_NAME='xlc++' + conf.env.CXX_NAME = 'xlc++' + @conf def xlcxx_common_flags(conf): - v=conf.env - v.CXX_SRC_F=[] - v.CXX_TGT_F=['-c','-o'] + """ + Flags required for executing the Aix C++ compiler + """ + v = conf.env + + v.CXX_SRC_F = [] + v.CXX_TGT_F = ['-c', '-o'] + if not v.LINK_CXX: - v.LINK_CXX=v.CXX - v.CXXLNK_SRC_F=[] - v.CXXLNK_TGT_F=['-o'] - v.CPPPATH_ST='-I%s' - v.DEFINES_ST='-D%s' - v.LIB_ST='-l%s' - v.LIBPATH_ST='-L%s' - v.STLIB_ST='-l%s' - v.STLIBPATH_ST='-L%s' - v.RPATH_ST='-Wl,-rpath,%s' - v.SONAME_ST=[] - v.SHLIB_MARKER=[] - v.STLIB_MARKER=[] - v.LINKFLAGS_cxxprogram=['-Wl,-brtl'] - v.cxxprogram_PATTERN='%s' - v.CXXFLAGS_cxxshlib=['-fPIC'] - v.LINKFLAGS_cxxshlib=['-G','-Wl,-brtl,-bexpfull'] - v.cxxshlib_PATTERN='lib%s.so' - v.LINKFLAGS_cxxstlib=[] - v.cxxstlib_PATTERN='lib%s.a' + v.LINK_CXX = v.CXX + + v.CXXLNK_SRC_F = [] + v.CXXLNK_TGT_F = ['-o'] + v.CPPPATH_ST = '-I%s' + v.DEFINES_ST = '-D%s' + + v.LIB_ST = '-l%s' # template for adding libs + v.LIBPATH_ST = '-L%s' # template for adding libpaths + v.STLIB_ST = '-l%s' + v.STLIBPATH_ST = '-L%s' + v.RPATH_ST = '-Wl,-rpath,%s' + + v.SONAME_ST = [] + v.SHLIB_MARKER = [] + v.STLIB_MARKER = [] + + v.LINKFLAGS_cxxprogram= ['-Wl,-brtl'] + v.cxxprogram_PATTERN = '%s' + + v.CXXFLAGS_cxxshlib = ['-fPIC'] + v.LINKFLAGS_cxxshlib = ['-G', '-Wl,-brtl,-bexpfull'] + v.cxxshlib_PATTERN = 'lib%s.so' + + v.LINKFLAGS_cxxstlib = [] + v.cxxstlib_PATTERN = 'lib%s.a' + def configure(conf): conf.find_xlcxx() conf.find_ar() @@ -42,3 +62,4 @@ conf.cxx_load_tools() conf.cxx_add_flags() conf.link_add_flags() + diff -Nru lilv-0.24.4~dfsg0/waflib/Utils.py lilv-0.24.6/waflib/Utils.py --- lilv-0.24.4~dfsg0/waflib/Utils.py 2018-06-27 05:22:05.000000000 +0000 +++ lilv-0.24.6/waflib/Utils.py 2019-10-19 17:59:11.000000000 +0000 @@ -1,73 +1,116 @@ -#! /usr/bin/env python +#!/usr/bin/env python # encoding: utf-8 -# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file +# Thomas Nagy, 2005-2018 (ita) + +""" +Utilities and platform-specific fixes + +The portability fixes try to provide a consistent behavior of the Waf API +through Python versions 2.5 to 3.X and across different platforms (win32, linux, etc) +""" from __future__ import with_statement -import atexit,os,sys,errno,inspect,re,datetime,platform,base64,signal,functools,time + +import atexit, os, sys, errno, inspect, re, datetime, platform, base64, signal, functools, time + try: import cPickle except ImportError: import pickle as cPickle -if os.name=='posix'and sys.version_info[0]<3: + +# leave this +if os.name == 'posix' and sys.version_info[0] < 3: try: import subprocess32 as subprocess except ImportError: import subprocess else: import subprocess + try: - TimeoutExpired=subprocess.TimeoutExpired + TimeoutExpired = subprocess.TimeoutExpired except AttributeError: class TimeoutExpired(Exception): pass -from collections import deque,defaultdict + +from collections import deque, defaultdict + try: import _winreg as winreg except ImportError: try: import winreg except ImportError: - winreg=None + winreg = None + from waflib import Errors + try: from hashlib import md5 except ImportError: try: - from md5 import md5 + from hashlib import sha1 as md5 except ImportError: + # never fail to enable potential fixes from another module pass +else: + try: + md5().digest() + except ValueError: + # Fips? #2213 + from hashlib import sha1 as md5 + try: import threading except ImportError: - if not'JOBS'in os.environ: - os.environ['JOBS']='1' + if not 'JOBS' in os.environ: + # no threading :-( + os.environ['JOBS'] = '1' + class threading(object): + """ + A fake threading class for platforms lacking the threading module. + Use ``waf -j1`` on those platforms + """ pass class Lock(object): + """Fake Lock class""" def acquire(self): pass def release(self): pass - threading.Lock=threading.Thread=Lock -SIG_NIL='SIG_NIL_SIG_NIL_'.encode() -O644=420 -O755=493 -rot_chr=['\\','|','/','-'] -rot_idx=0 + threading.Lock = threading.Thread = Lock + +SIG_NIL = 'SIG_NIL_SIG_NIL_'.encode() +"""Arbitrary null value for hashes. 
Modify this value according to the hash function in use""" + +O644 = 420 +"""Constant representing the permissions for regular files (0644 raises a syntax error on python 3)""" + +O755 = 493 +"""Constant representing the permissions for executable files (0755 raises a syntax error on python 3)""" + +rot_chr = ['\\', '|', '/', '-'] +"List of characters to use when displaying the throbber (progress bar)" + +rot_idx = 0 +"Index of the current throbber character (progress bar)" + class ordered_iter_dict(dict): - def __init__(self,*k,**kw): - self.lst=deque() - dict.__init__(self,*k,**kw) + """Ordered dictionary that provides iteration from the most recently inserted keys first""" + def __init__(self, *k, **kw): + self.lst = deque() + dict.__init__(self, *k, **kw) def clear(self): dict.clear(self) - self.lst=deque() - def __setitem__(self,key,value): + self.lst = deque() + def __setitem__(self, key, value): if key in dict.keys(self): self.lst.remove(key) - dict.__setitem__(self,key,value) + dict.__setitem__(self, key, value) self.lst.append(key) - def __delitem__(self,key): - dict.__delitem__(self,key) + def __delitem__(self, key): + dict.__delitem__(self, key) try: self.lst.remove(key) except ValueError: @@ -76,201 +119,327 @@ return reversed(self.lst) def keys(self): return reversed(self.lst) + class lru_node(object): - __slots__=('next','prev','key','val') + """ + Used by :py:class:`waflib.Utils.lru_cache` + """ + __slots__ = ('next', 'prev', 'key', 'val') def __init__(self): - self.next=self - self.prev=self - self.key=None - self.val=None + self.next = self + self.prev = self + self.key = None + self.val = None + class lru_cache(object): - __slots__=('maxlen','table','head') - def __init__(self,maxlen=100): - self.maxlen=maxlen - self.table={} - self.head=lru_node() - self.head.next=self.head - self.head.prev=self.head - def __getitem__(self,key): - node=self.table[key] + """ + A simple least-recently used cache with lazy allocation + """ + __slots__ = ('maxlen', 'table', 'head') + def __init__(self, maxlen=100): + self.maxlen = maxlen + """ + Maximum amount of elements in the cache + """ + self.table = {} + """ + Mapping key-value + """ + self.head = lru_node() + self.head.next = self.head + self.head.prev = self.head + + def __getitem__(self, key): + node = self.table[key] + # assert(key==node.key) if node is self.head: return node.val - node.prev.next=node.next - node.next.prev=node.prev - node.next=self.head.next - node.prev=self.head - self.head=node.next.prev=node.prev.next=node + + # detach the node found + node.prev.next = node.next + node.next.prev = node.prev + + # replace the head + node.next = self.head.next + node.prev = self.head + self.head = node.next.prev = node.prev.next = node + return node.val - def __setitem__(self,key,val): + + def __setitem__(self, key, val): if key in self.table: - node=self.table[key] - node.val=val + # update the value for an existing key + node = self.table[key] + node.val = val self.__getitem__(key) else: - if len(self.table)<self.maxlen: - node=lru_node() - node.prev=self.head - node.next=self.head.next - node.prev.next=node.next.prev=node - else: - node=self.head=self.head.next - try: - del self.table[node.key] - except KeyError: - pass - node.key=key - node.val=val - self.table[key]=node -class lazy_generator(object): - def __init__(self,fun,params): - self.fun=fun - self.params=params - def __iter__(self): - return self - def __next__(self): - try: - it=self.it - except AttributeError: - it=self.it=self.fun(*self.params) - return next(it) - next=__next__ -is_win32=os.sep=='\\'or sys.platform=='win32' -def readf(fname,m='r',encoding='latin-1'): - if sys.hexversion>0x3000000 and not'b'in m: - m+='b' - with open(fname,m)as f: - txt=f.read() + if len(self.table) < self.maxlen: + node = lru_node() + node.prev = self.head + node.next = self.head.next + node.prev.next = node.next.prev = node + else: + node = self.head = self.head.next + try: + # that node may be owned by another key + del self.table[node.key] + except KeyError: + pass + node.key = key + node.val = val + self.table[key] = node + +class lazy_generator(object): + def __init__(self, fun, params): + self.fun = fun + self.params = params + + def __iter__(self): + return self + + def __next__(self): + try: + it = self.it + except AttributeError: + it = self.it = self.fun(*self.params) + return next(it) + + next = __next__ + +is_win32 = os.sep == '\\' or sys.platform == 'win32' or os.name == 'nt' # msys2 +""" +Whether this system is a Windows series +""" + +def readf(fname, m='r', encoding='latin-1'): + """ + Reads an entire file into a string.
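
A short usage sketch for the ordered_iter_dict above, with the behaviour inferred from the code (keys() yields the most recently inserted key first)::

    from waflib.Utils import ordered_iter_dict

    d = ordered_iter_dict()
    d['a'] = 1
    d['b'] = 2
    d['a'] = 3  # re-inserting moves 'a' to the front
    assert list(d.keys()) == ['a', 'b']
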
See also :py:meth:`waflib.Node.Node.readf`:: + + def build(ctx): + from waflib import Utils + txt = Utils.readf(self.path.find_node('wscript').abspath()) + txt = ctx.path.find_node('wscript').read() + + :type fname: string + :param fname: Path to file + :type m: string + :param m: Open mode + :type encoding: string + :param encoding: encoding value, only used for python 3 + :rtype: string + :return: Content of the file + """ + + if sys.hexversion > 0x3000000 and not 'b' in m: + m += 'b' + with open(fname, m) as f: + txt = f.read() if encoding: - txt=txt.decode(encoding) + txt = txt.decode(encoding) else: - txt=txt.decode() + txt = txt.decode() else: - with open(fname,m)as f: - txt=f.read() + with open(fname, m) as f: + txt = f.read() return txt -def writef(fname,data,m='w',encoding='latin-1'): - if sys.hexversion>0x3000000 and not'b'in m: - data=data.encode(encoding) - m+='b' - with open(fname,m)as f: + +def writef(fname, data, m='w', encoding='latin-1'): + """ + Writes an entire file from a string. + See also :py:meth:`waflib.Node.Node.writef`:: + + def build(ctx): + from waflib import Utils + txt = Utils.writef(self.path.make_node('i_like_kittens').abspath(), 'some data') + self.path.make_node('i_like_kittens').write('some data') + + :type fname: string + :param fname: Path to file + :type data: string + :param data: The contents to write to the file + :type m: string + :param m: Open mode + :type encoding: string + :param encoding: encoding value, only used for python 3 + """ + if sys.hexversion > 0x3000000 and not 'b' in m: + data = data.encode(encoding) + m += 'b' + with open(fname, m) as f: f.write(data) + def h_file(fname): - m=md5() - with open(fname,'rb')as f: + """ + Computes a hash value for a file by using md5. Use the md5_tstamp + extension to get faster build hashes if necessary. 
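
A round trip using the readf/writef helpers above (the temporary path is illustrative)::

    from waflib import Utils

    Utils.writef('/tmp/waf_demo.txt', 'héllo', encoding='utf-8')
    assert Utils.readf('/tmp/waf_demo.txt', encoding='utf-8') == 'héllo'
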
+ + :type fname: string + :param fname: path to the file to hash + :return: hash of the file contents + :rtype: string or bytes + """ + m = md5() + with open(fname, 'rb') as f: while fname: - fname=f.read(200000) + fname = f.read(200000) m.update(fname) return m.digest() -def readf_win32(f,m='r',encoding='latin-1'): - flags=os.O_NOINHERIT|os.O_RDONLY - if'b'in m: - flags|=os.O_BINARY - if'+'in m: - flags|=os.O_RDWR + +def readf_win32(f, m='r', encoding='latin-1'): + flags = os.O_NOINHERIT | os.O_RDONLY + if 'b' in m: + flags |= os.O_BINARY + if '+' in m: + flags |= os.O_RDWR try: - fd=os.open(f,flags) + fd = os.open(f, flags) except OSError: - raise IOError('Cannot read from %r'%f) - if sys.hexversion>0x3000000 and not'b'in m: - m+='b' - with os.fdopen(fd,m)as f: - txt=f.read() + raise IOError('Cannot read from %r' % f) + + if sys.hexversion > 0x3000000 and not 'b' in m: + m += 'b' + with os.fdopen(fd, m) as f: + txt = f.read() if encoding: - txt=txt.decode(encoding) + txt = txt.decode(encoding) else: - txt=txt.decode() + txt = txt.decode() else: - with os.fdopen(fd,m)as f: - txt=f.read() + with os.fdopen(fd, m) as f: + txt = f.read() return txt -def writef_win32(f,data,m='w',encoding='latin-1'): - if sys.hexversion>0x3000000 and not'b'in m: - data=data.encode(encoding) - m+='b' - flags=os.O_CREAT|os.O_TRUNC|os.O_WRONLY|os.O_NOINHERIT - if'b'in m: - flags|=os.O_BINARY - if'+'in m: - flags|=os.O_RDWR + +def writef_win32(f, data, m='w', encoding='latin-1'): + if sys.hexversion > 0x3000000 and not 'b' in m: + data = data.encode(encoding) + m += 'b' + flags = os.O_CREAT | os.O_TRUNC | os.O_WRONLY | os.O_NOINHERIT + if 'b' in m: + flags |= os.O_BINARY + if '+' in m: + flags |= os.O_RDWR try: - fd=os.open(f,flags) + fd = os.open(f, flags) except OSError: - raise OSError('Cannot write to %r'%f) - with os.fdopen(fd,m)as f: + raise OSError('Cannot write to %r' % f) + with os.fdopen(fd, m) as f: f.write(data) + def h_file_win32(fname): try: - fd=os.open(fname,os.O_BINARY|os.O_RDONLY|os.O_NOINHERIT) + fd = os.open(fname, os.O_BINARY | os.O_RDONLY | os.O_NOINHERIT) except OSError: - raise OSError('Cannot read from %r'%fname) - m=md5() - with os.fdopen(fd,'rb')as f: + raise OSError('Cannot read from %r' % fname) + m = md5() + with os.fdopen(fd, 'rb') as f: while fname: - fname=f.read(200000) + fname = f.read(200000) m.update(fname) return m.digest() -readf_unix=readf -writef_unix=writef -h_file_unix=h_file -if hasattr(os,'O_NOINHERIT')and sys.hexversion<0x3040000: - readf=readf_win32 - writef=writef_win32 - h_file=h_file_win32 + +# always save these +readf_unix = readf +writef_unix = writef +h_file_unix = h_file +if hasattr(os, 'O_NOINHERIT') and sys.hexversion < 0x3040000: + # replace the default functions + readf = readf_win32 + writef = writef_win32 + h_file = h_file_win32 + try: - x=''.encode('hex') + x = ''.encode('hex') except LookupError: import binascii def to_hex(s): - ret=binascii.hexlify(s) - if not isinstance(ret,str): - ret=ret.decode('utf-8') + ret = binascii.hexlify(s) + if not isinstance(ret, str): + ret = ret.decode('utf-8') return ret else: def to_hex(s): return s.encode('hex') -to_hex.__doc__=""" + +to_hex.__doc__ = """ Return the hexadecimal representation of a string :param s: string to convert :type s: string """ + def listdir_win32(s): + """ + Lists the contents of a folder in a portable manner. + On Win32, returns the list of drive letters: ['C:', 'X:', 'Z:'] when an empty string is given. 
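
For instance, to_hex above maps raw digest bytes to a printable form::

    from waflib.Utils import to_hex, h_file

    assert to_hex(b'\x00\xff') == '00ff'
    # e.g. a printable build signature (path illustrative): to_hex(h_file('wscript'))
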
+ + :type s: string + :param s: a string, which can be empty on Windows + """ if not s: try: import ctypes except ImportError: - return[x+':\\'for x in'ABCDEFGHIJKLMNOPQRSTUVWXYZ'] + # there is nothing much we can do + return [x + ':\\' for x in 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'] else: - dlen=4 - maxdrives=26 - buf=ctypes.create_string_buffer(maxdrives*dlen) - ndrives=ctypes.windll.kernel32.GetLogicalDriveStringsA(maxdrives*dlen,ctypes.byref(buf)) - return[str(buf.raw[4*i:4*i+2].decode('ascii'))for i in range(int(ndrives/dlen))] - if len(s)==2 and s[1]==":": - s+=os.sep + dlen = 4 # length of "?:\\x00" + maxdrives = 26 + buf = ctypes.create_string_buffer(maxdrives * dlen) + ndrives = ctypes.windll.kernel32.GetLogicalDriveStringsA(maxdrives*dlen, ctypes.byref(buf)) + return [ str(buf.raw[4*i:4*i+2].decode('ascii')) for i in range(int(ndrives/dlen)) ] + + if len(s) == 2 and s[1] == ":": + s += os.sep + if not os.path.isdir(s): - e=OSError('%s is not a directory'%s) - e.errno=errno.ENOENT + e = OSError('%s is not a directory' % s) + e.errno = errno.ENOENT raise e return os.listdir(s) -listdir=os.listdir + +listdir = os.listdir if is_win32: - listdir=listdir_win32 + listdir = listdir_win32 + def num2ver(ver): - if isinstance(ver,str): - ver=tuple(ver.split('.')) - if isinstance(ver,tuple): - ret=0 + """ + Converts a string, tuple or version number into an integer. The number is supposed to have at most 4 digits:: + + from waflib.Utils import num2ver + num2ver('1.3.2') == num2ver((1,3,2)) == num2ver((1,3,2,0)) + + :type ver: string or tuple of numbers + :param ver: a version number + """ + if isinstance(ver, str): + ver = tuple(ver.split('.')) + if isinstance(ver, tuple): + ret = 0 for i in range(4): - if i<len(ver): - ret+=256**(3-i)*int(ver[i]) + if i < len(ver): + ret += 256 ** (3 - i) * int(ver[i]) return ret return ver def shell_escape(cmd): + """ + Escapes a command: + ['ls', '-l', 'arg space'] -> ls -l 'arg space' + """ + if isinstance(cmd, str): return cmd - return' '.join(repr(x)if re_sh.search(x)else x for x in cmd) + return ' '.join(repr(x) if re_sh.search(x) else x for x in cmd) + def h_list(lst): + """ + Hashes lists of ordered data. + + Using hash(tup) for tuples would be much more efficient, + but Python now enforces hash randomization + + :param lst: list to hash + :type lst: list of strings + :return: hash of the list + """ return md5(repr(lst).encode()).digest() + +if sys.hexversion < 0x3000000: + def h_list_python2(lst): + return md5(repr(lst)).digest() + h_list_python2.__doc__ = h_list.__doc__ + h_list = h_list_python2 + def h_fun(fun): + """ + Hash functions + + :param fun: function to hash + :type fun: function + :return: hash of the function + :rtype: string or bytes + """ try: return fun.code except AttributeError: - if isinstance(fun,functools.partial): - code=list(fun.args) + if isinstance(fun, functools.partial): + code = list(fun.args) + # The method items() provides a sequence of tuples where the first element + # represents an optional argument of the partial function application + # + # The sorting outcome will be consistent because: + # 1. tuples are compared in order of their elements + # 2.
optional argument names are unique code.extend(sorted(fun.keywords.items())) code.append(h_fun(fun.func)) - fun.code=h_list(code) + fun.code = h_list(code) return fun.code try: - h=inspect.getsource(fun) + h = inspect.getsource(fun) except EnvironmentError: - h='nocode' + h = 'nocode' try: - fun.code=h + fun.code = h except AttributeError: pass return h + def h_cmd(ins): - if isinstance(ins,str): - ret=ins - elif isinstance(ins,list)or isinstance(ins,tuple): - ret=str([h_cmd(x)for x in ins]) - else: - ret=str(h_fun(ins)) - if sys.hexversion>0x3000000: - ret=ret.encode('latin-1','xmlcharrefreplace') + """ + Hashes objects recursively + + :param ins: input object + :type ins: string or list or tuple or function + :rtype: string or bytes + """ + # this function is not meant to be particularly fast + if isinstance(ins, str): + # a command is either a string + ret = ins + elif isinstance(ins, list) or isinstance(ins, tuple): + # or a list of functions/strings + ret = str([h_cmd(x) for x in ins]) + else: + # or just a python function + ret = str(h_fun(ins)) + if sys.hexversion > 0x3000000: + ret = ret.encode('latin-1', 'xmlcharrefreplace') return ret -reg_subst=re.compile(r"(\\\\)|(\$\$)|\$\{([^}]+)\}") -def subst_vars(expr,params): + +reg_subst = re.compile(r"(\\\\)|(\$\$)|\$\{([^}]+)\}") +def subst_vars(expr, params): + """ + Replaces ${VAR} with the value of VAR taken from a dict or a config set:: + + from waflib import Utils + s = Utils.subst_vars('${PREFIX}/bin', env) + + :type expr: string + :param expr: String to perform substitution on + :param params: Dictionary or config set to look up variable values. + """ def repl_var(m): if m.group(1): - return'\\' + return '\\' if m.group(2): - return'$' + return '$' try: + # ConfigSet instances may contain lists return params.get_flat(m.group(3)) except AttributeError: return params[m.group(3)] - return reg_subst.sub(repl_var,expr) + # if you get a TypeError, it means that 'expr' is not a string... + # Utils.subst_vars(None, env) will not work + return reg_subst.sub(repl_var, expr) + def destos_to_binfmt(key): - if key=='darwin': - return'mac-o' - elif key in('win32','cygwin','uwin','msys'): - return'pe' - return'elf' + """ + Returns the binary format based on the unversioned platform name, + and defaults to ``elf`` if nothing is found. + + :param key: platform name + :type key: string + :return: string representing the binary format + """ + if key == 'darwin': + return 'mac-o' + elif key in ('win32', 'cygwin', 'uwin', 'msys'): + return 'pe' + return 'elf' + def unversioned_sys_platform(): - s=sys.platform + """ + Returns the unversioned platform name. + Some Python platform names contain versions that depend on + the build environment, e.g. linux2, freebsd6, etc. + This returns the name without the version number. Exceptions are + os2 and win32, which are returned verbatim. + + :rtype: string + :return: Unversioned platform name + """ + s = sys.platform if s.startswith('java'): + # The real OS is hidden under the JVM.
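
The substitution rules implemented by reg_subst and repl_var above, in a nutshell (a plain dict is used for brevity; a ConfigSet works as well)::

    from waflib.Utils import subst_vars

    env = {'PREFIX': '/usr/local'}
    assert subst_vars('${PREFIX}/bin', env) == '/usr/local/bin'
    assert subst_vars('$$HOME', env) == '$HOME'  # $$ yields a literal $
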
from java.lang import System - s=System.getProperty('os.name') - if s=='Mac OS X': - return'darwin' + s = System.getProperty('os.name') + # see http://lopica.sourceforge.net/os.html for a list of possible values + if s == 'Mac OS X': + return 'darwin' elif s.startswith('Windows '): - return'win32' - elif s=='OS/2': - return'os2' - elif s=='HP-UX': - return'hp-ux' - elif s in('SunOS','Solaris'): - return'sunos' - else:s=s.lower() - if s=='powerpc': - return'darwin' - if s=='win32'or s=='os2': + return 'win32' + elif s == 'OS/2': + return 'os2' + elif s == 'HP-UX': + return 'hp-ux' + elif s in ('SunOS', 'Solaris'): + return 'sunos' + else: s = s.lower() + + # powerpc == darwin for our purposes + if s == 'powerpc': + return 'darwin' + if s == 'win32' or s == 'os2': return s - if s=='cli'and os.name=='nt': - return'win32' - return re.split('\d+$',s)[0] -def nada(*k,**kw): + if s == 'cli' and os.name == 'nt': + # ironpython is only on windows as far as we know + return 'win32' + return re.split(r'\d+$', s)[0] + +def nada(*k, **kw): + """ + Does nothing + + :return: None + """ pass + class Timer(object): + """ + Simple object for timing the execution of commands. + Its string representation is the duration:: + + from waflib.Utils import Timer + timer = Timer() + a_few_operations() + s = str(timer) + """ def __init__(self): - self.start_time=self.now() + self.start_time = self.now() + def __str__(self): - delta=self.now()-self.start_time - if not isinstance(delta,datetime.timedelta): - delta=datetime.timedelta(seconds=delta) - days=delta.days - hours,rem=divmod(delta.seconds,3600) - minutes,seconds=divmod(rem,60) - seconds+=delta.microseconds*1e-6 - result='' + delta = self.now() - self.start_time + if not isinstance(delta, datetime.timedelta): + delta = datetime.timedelta(seconds=delta) + days = delta.days + hours, rem = divmod(delta.seconds, 3600) + minutes, seconds = divmod(rem, 60) + seconds += delta.microseconds * 1e-6 + result = '' if days: - result+='%dd'%days + result += '%dd' % days if days or hours: - result+='%dh'%hours + result += '%dh' % hours if days or hours or minutes: - result+='%dm'%minutes - return'%s%.3fs'%(result,seconds) + result += '%dm' % minutes + return '%s%.3fs' % (result, seconds) + def now(self): return datetime.datetime.utcnow() - if hasattr(time,'perf_counter'): + + if hasattr(time, 'perf_counter'): def now(self): return time.perf_counter() + def read_la_file(path): - sp=re.compile(r'^([^=]+)=\'(.*)\'$') - dc={} + """ + Reads property files, used by msvc.py + + :param path: file to read + :type path: string + """ + sp = re.compile(r'^([^=]+)=\'(.*)\'$') + dc = {} for line in readf(path).splitlines(): try: - _,left,right,_=sp.split(line.strip()) - dc[left]=right + _, left, right, _ = sp.split(line.strip()) + dc[left] = right except ValueError: pass return dc + def run_once(fun): - cache={} + """ + Decorator: let a function cache its results, use like this:: + + @run_once + def foo(k): + return 345*2343 + + .. 
note:: in practice this can cause memory leaks, prefer a :py:class:`waflib.Utils.lru_cache` + + :param fun: function to execute + :type fun: function + :return: the return value of the function executed + """ + cache = {} def wrap(*k): try: return cache[k] except KeyError: - ret=fun(*k) - cache[k]=ret + ret = fun(*k) + cache[k] = ret return ret - wrap.__cache__=cache - wrap.__name__=fun.__name__ + wrap.__cache__ = cache + wrap.__name__ = fun.__name__ return wrap -def get_registry_app_path(key,filename): + +def get_registry_app_path(key, filename): + """ + Returns the value of a registry key for an executable + + :type key: string + :type filename: list of string + """ if not winreg: return None try: - result=winreg.QueryValue(key,"Software\\Microsoft\\Windows\\CurrentVersion\\App Paths\\%s.exe"%filename[0]) + result = winreg.QueryValue(key, "Software\\Microsoft\\Windows\\CurrentVersion\\App Paths\\%s.exe" % filename[0]) except OSError: pass else: if os.path.isfile(result): return result + def lib64(): - if os.sep=='/': - if platform.architecture()[0]=='64bit': - if os.path.exists('/usr/lib64')and not os.path.exists('/usr/lib32'): - return'64' - return'' + """ + Guess the default ``/usr/lib`` extension for 64-bit applications + + :return: '64' or '' + :rtype: string + """ + # default settings for /usr/lib + if os.sep == '/': + if platform.architecture()[0] == '64bit': + if os.path.exists('/usr/lib64') and not os.path.exists('/usr/lib32'): + return '64' + return '' + def sane_path(p): + # private function for the time being! return os.path.abspath(os.path.expanduser(p)) -process_pool=[] + +process_pool = [] +""" +List of processes started to execute sub-process commands +""" + def get_process(): + """ + Returns a process object that can execute commands as sub-processes + + :rtype: subprocess.Popen + """ try: return process_pool.pop() except IndexError: - filepath=os.path.dirname(os.path.abspath(__file__))+os.sep+'processor.py' - cmd=[sys.executable,'-c',readf(filepath)] - return subprocess.Popen(cmd,stdout=subprocess.PIPE,stdin=subprocess.PIPE,bufsize=0) -def run_prefork_process(cmd,kwargs,cargs): - if not'env'in kwargs: - kwargs['env']=dict(os.environ) - try: - obj=base64.b64encode(cPickle.dumps([cmd,kwargs,cargs])) - except(TypeError,AttributeError): - return run_regular_process(cmd,kwargs,cargs) - proc=get_process() + filepath = os.path.dirname(os.path.abspath(__file__)) + os.sep + 'processor.py' + cmd = [sys.executable, '-c', readf(filepath)] + return subprocess.Popen(cmd, stdout=subprocess.PIPE, stdin=subprocess.PIPE, bufsize=0, close_fds=not is_win32) + +def run_prefork_process(cmd, kwargs, cargs): + """ + Delegates process execution to a pre-forked process instance. 
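
A minimal sketch of the run_once decorator defined above::

    from waflib.Utils import run_once

    @run_once
    def answer(n):
        print('computing %d' % n)  # runs only on the first call per distinct n
        return n * n

    answer(6)
    answer(6)  # served from wrap.__cache__, nothing printed
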
+ """ + if not 'env' in kwargs: + kwargs['env'] = dict(os.environ) + try: + obj = base64.b64encode(cPickle.dumps([cmd, kwargs, cargs])) + except (TypeError, AttributeError): + return run_regular_process(cmd, kwargs, cargs) + + proc = get_process() if not proc: - return run_regular_process(cmd,kwargs,cargs) + return run_regular_process(cmd, kwargs, cargs) + proc.stdin.write(obj) proc.stdin.write('\n'.encode()) proc.stdin.flush() - obj=proc.stdout.readline() + obj = proc.stdout.readline() if not obj: - raise OSError('Preforked sub-process %r died'%proc.pid) + raise OSError('Preforked sub-process %r died' % proc.pid) + process_pool.append(proc) - lst=cPickle.loads(base64.b64decode(obj)) - assert len(lst)==5 - ret,out,err,ex,trace=lst + lst = cPickle.loads(base64.b64decode(obj)) + # Jython wrapper failures (bash/execvp) + assert len(lst) == 5 + ret, out, err, ex, trace = lst if ex: - if ex=='OSError': + if ex == 'OSError': raise OSError(trace) - elif ex=='ValueError': + elif ex == 'ValueError': raise ValueError(trace) - elif ex=='TimeoutExpired': - exc=TimeoutExpired(cmd,timeout=cargs['timeout'],output=out) - exc.stderr=err + elif ex == 'TimeoutExpired': + exc = TimeoutExpired(cmd, timeout=cargs['timeout'], output=out) + exc.stderr = err raise exc else: raise Exception(trace) - return ret,out,err -def lchown(path,user=-1,group=-1): - if isinstance(user,str): + return ret, out, err + +def lchown(path, user=-1, group=-1): + """ + Change the owner/group of a path, raises an OSError if the + ownership change fails. + + :param user: user to change + :type user: int or str + :param group: group to change + :type group: int or str + """ + if isinstance(user, str): import pwd - entry=pwd.getpwnam(user) + entry = pwd.getpwnam(user) if not entry: - raise OSError('Unknown user %r'%user) - user=entry[2] - if isinstance(group,str): + raise OSError('Unknown user %r' % user) + user = entry[2] + if isinstance(group, str): import grp - entry=grp.getgrnam(group) + entry = grp.getgrnam(group) if not entry: - raise OSError('Unknown group %r'%group) - group=entry[2] - return os.lchown(path,user,group) -def run_regular_process(cmd,kwargs,cargs={}): - proc=subprocess.Popen(cmd,**kwargs) - if kwargs.get('stdout')or kwargs.get('stderr'): + raise OSError('Unknown group %r' % group) + group = entry[2] + return os.lchown(path, user, group) + +def run_regular_process(cmd, kwargs, cargs={}): + """ + Executes a subprocess command by using subprocess.Popen + """ + proc = subprocess.Popen(cmd, **kwargs) + if kwargs.get('stdout') or kwargs.get('stderr'): try: - out,err=proc.communicate(**cargs) + out, err = proc.communicate(**cargs) except TimeoutExpired: - if kwargs.get('start_new_session')and hasattr(os,'killpg'): - os.killpg(proc.pid,signal.SIGKILL) + if kwargs.get('start_new_session') and hasattr(os, 'killpg'): + os.killpg(proc.pid, signal.SIGKILL) else: proc.kill() - out,err=proc.communicate() - exc=TimeoutExpired(proc.args,timeout=cargs['timeout'],output=out) - exc.stderr=err + out, err = proc.communicate() + exc = TimeoutExpired(proc.args, timeout=cargs['timeout'], output=out) + exc.stderr = err raise exc - status=proc.returncode + status = proc.returncode else: - out,err=(None,None) + out, err = (None, None) try: - status=proc.wait(**cargs) + status = proc.wait(**cargs) except TimeoutExpired as e: - if kwargs.get('start_new_session')and hasattr(os,'killpg'): - os.killpg(proc.pid,signal.SIGKILL) + if kwargs.get('start_new_session') and hasattr(os, 'killpg'): + os.killpg(proc.pid, signal.SIGKILL) else: proc.kill() 
 			proc.wait()
 			raise e
-	return status,out,err
-def run_process(cmd,kwargs,cargs={}):
-	if kwargs.get('stdout')and kwargs.get('stderr'):
-		return run_prefork_process(cmd,kwargs,cargs)
-	else:
-		return run_regular_process(cmd,kwargs,cargs)
-def alloc_process_pool(n,force=False):
-	global run_process,get_process,alloc_process_pool
+	return status, out, err
+
+def run_process(cmd, kwargs, cargs={}):
+	"""
+	Executes a subprocess by using a pre-forked process when possible
+	or falling back to subprocess.Popen. See :py:func:`waflib.Utils.run_prefork_process`
+	and :py:func:`waflib.Utils.run_regular_process`
+	"""
+	if kwargs.get('stdout') and kwargs.get('stderr'):
+		return run_prefork_process(cmd, kwargs, cargs)
+	else:
+		return run_regular_process(cmd, kwargs, cargs)
+
+def alloc_process_pool(n, force=False):
+	"""
+	Allocates an amount of processes to the default pool so its size is at least *n*.
+	It is useful to call this function early so that the pre-forked
+	processes use as little memory as possible.
+
+	:param n: pool size
+	:type n: integer
+	:param force: if True then *n* more processes are added to the existing pool
+	:type force: bool
+	"""
+	# mandatory on python2, unnecessary on python >= 3.2
+	global run_process, get_process, alloc_process_pool
 	if not force:
-		n=max(n-len(process_pool),0)
+		n = max(n - len(process_pool), 0)
 	try:
-		lst=[get_process()for x in range(n)]
+		lst = [get_process() for x in range(n)]
 	except OSError:
-		run_process=run_regular_process
-		get_process=alloc_process_pool=nada
+		run_process = run_regular_process
+		get_process = alloc_process_pool = nada
 	else:
 		for x in lst:
 			process_pool.append(x)
+
 def atexit_pool():
 	for k in process_pool:
 		try:
-			os.kill(k.pid,9)
+			os.kill(k.pid, 9)
 		except OSError:
 			pass
 		else:
 			k.wait()
-if(sys.hexversion<0x207000f and not is_win32)or sys.hexversion>=0x306000f:
+# see #1889
+if (sys.hexversion<0x207000f and not is_win32) or sys.hexversion>=0x306000f:
 	atexit.register(atexit_pool)
-if os.environ.get('WAF_NO_PREFORK')or sys.platform=='cli'or not sys.executable:
-	run_process=run_regular_process
-	get_process=alloc_process_pool=nada
+
+if os.environ.get('WAF_NO_PREFORK') or sys.platform == 'cli' or not sys.executable:
+	run_process = run_regular_process
+	get_process = alloc_process_pool = nada
+
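The prefork machinery above reduces to a one-message-per-line pipe protocol, which can be sketched without waf. Roughly (standalone stdlib Python; the command and argument values are made up for illustration, and the real processor.py additionally executes the command and answers with the five-element list that the assert above expects)::

    import base64
    import pickle

    # parent side: one request = one base64 line on the child's stdin
    cmd, kwargs, cargs = ['ls', '-l'], {'shell': False}, {'timeout': 10}
    request = base64.b64encode(pickle.dumps([cmd, kwargs, cargs])) + b'\n'

    # child side: decode the line back into its three parts
    got_cmd, got_kwargs, got_cargs = pickle.loads(base64.b64decode(request))
    assert got_cmd == cmd
    # the reply travels back the same way: a pickled
    # [ret, out, err, exception_name, traceback] list

Setting WAF_NO_PREFORK in the environment (last hunk above) skips this path entirely and always falls back to plain subprocess.Popen.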
" + "Try 'git submodule update --init --recursive'\n") + + sys.exit(1) + + +def main(): + script_path = os.path.abspath(inspect.getfile(inspect.getmodule(main))) + project_path = os.path.dirname(script_path) + Scripting.waf_entry_point(os.getcwd(), Context.WAFVERSION, project_path) + + +if __name__ == '__main__': + main() diff -Nru lilv-0.24.4~dfsg0/wscript lilv-0.24.6/wscript --- lilv-0.24.4~dfsg0/wscript 2018-07-22 18:44:48.000000000 +0000 +++ lilv-0.24.6/wscript 2019-11-10 21:39:53.000000000 +0000 @@ -1,18 +1,18 @@ #!/usr/bin/env python + import os import shutil import subprocess import sys -import waflib.Options as Options -import waflib.extras.autowaf as autowaf -import waflib.Build as Build -import waflib.Logs as Logs + +from waflib import Options, Logs +from waflib.extras import autowaf # Library and package version (UNIX style major, minor, micro) # major increment <=> incompatible changes # minor increment <=> compatible changes (additions) # micro increment <=> no interface changes -LILV_VERSION = '0.24.4' +LILV_VERSION = '0.24.6' LILV_MAJOR_VERSION = '0' # Mandatory waf variables @@ -21,6 +21,11 @@ top = '.' # Source directory out = 'build' # Build directory +# Release variables +uri = 'http://drobilla.net/sw/lilv' +dist_pattern = 'http://download.drobilla.net/lilv-%d.%d.%d.tar.bz2' +post_tags = ['Hacking', 'LAD', 'LV2', 'Lilv'] + test_plugins = [ 'bad_syntax', 'failed_instantiation', @@ -39,40 +44,34 @@ ctx.load('compiler_c') ctx.load('compiler_cxx') ctx.load('python') - autowaf.set_options(ctx, test=True) - opt = ctx.get_option_group('Configuration options') - opt.add_option('--no-utils', action='store_true', dest='no_utils', - help='do not build command line utilities') - opt.add_option('--bindings', action='store_true', dest='bindings', - help='build python bindings') - opt.add_option('--dyn-manifest', action='store_true', dest='dyn_manifest', - help='build support for dynamic manifests') - opt.add_option('--no-bash-completion', action='store_true', - dest='no_bash_completion', - help='do not install bash completion script in CONFIGDIR') - opt.add_option('--static', action='store_true', dest='static', - help='build static library') - opt.add_option('--no-shared', action='store_true', dest='no_shared', - help='do not build shared library') - opt.add_option('--static-progs', action='store_true', dest='static_progs', - help='build programs as static binaries') + opt = ctx.configuration_options() + ctx.add_flags( + opt, + {'no-utils': 'do not build command line utilities', + 'no-bindings': 'do not build python bindings', + 'dyn-manifest': 'build support for dynamic manifests', + 'no-bash-completion': 'do not install bash completion script', + 'static': 'build static library', + 'no-shared': 'do not build shared library', + 'static-progs': 'build programs as static binaries'}) + opt.add_option('--default-lv2-path', type='string', default='', dest='default_lv2_path', help='default LV2 path to use if LV2_PATH is unset') def configure(conf): - autowaf.display_header('Lilv Configuration') conf.load('compiler_c', cache=True) try: conf.load('compiler_cxx', cache=True) + conf.define('LILV_CXX', True) except: pass - if Options.options.bindings: + if not Options.options.no_bindings: try: conf.load('python', cache=True) - conf.check_python_headers() - autowaf.define(conf, 'LILV_PYTHON', 1); + conf.check_python_version((2,6,0)) + conf.env.LILV_PYTHON = 1 except: Logs.warn('Failed to configure Python (%s)\n' % sys.exc_info()[1]) @@ -89,16 +88,11 @@ if not conf.env.BUILD_SHARED and not 
diff -Nru lilv-0.24.4~dfsg0/wscript lilv-0.24.6/wscript
--- lilv-0.24.4~dfsg0/wscript	2018-07-22 18:44:48.000000000 +0000
+++ lilv-0.24.6/wscript	2019-11-10 21:39:53.000000000 +0000
@@ -1,18 +1,18 @@
 #!/usr/bin/env python
+
 import os
 import shutil
 import subprocess
 import sys
-import waflib.Options as Options
-import waflib.extras.autowaf as autowaf
-import waflib.Build as Build
-import waflib.Logs as Logs
+
+from waflib import Options, Logs
+from waflib.extras import autowaf
 
 # Library and package version (UNIX style major, minor, micro)
 # major increment <=> incompatible changes
 # minor increment <=> compatible changes (additions)
 # micro increment <=> no interface changes
-LILV_VERSION = '0.24.4'
+LILV_VERSION = '0.24.6'
 LILV_MAJOR_VERSION = '0'
 
 # Mandatory waf variables
@@ -21,6 +21,11 @@
 top = '.'     # Source directory
 out = 'build' # Build directory
 
+# Release variables
+uri = 'http://drobilla.net/sw/lilv'
+dist_pattern = 'http://download.drobilla.net/lilv-%d.%d.%d.tar.bz2'
+post_tags = ['Hacking', 'LAD', 'LV2', 'Lilv']
+
 test_plugins = [
     'bad_syntax',
     'failed_instantiation',
@@ -39,40 +44,34 @@
     ctx.load('compiler_c')
     ctx.load('compiler_cxx')
     ctx.load('python')
-    autowaf.set_options(ctx, test=True)
-    opt = ctx.get_option_group('Configuration options')
-    opt.add_option('--no-utils', action='store_true', dest='no_utils',
-                   help='do not build command line utilities')
-    opt.add_option('--bindings', action='store_true', dest='bindings',
-                   help='build python bindings')
-    opt.add_option('--dyn-manifest', action='store_true', dest='dyn_manifest',
-                   help='build support for dynamic manifests')
-    opt.add_option('--no-bash-completion', action='store_true',
-                   dest='no_bash_completion',
-                   help='do not install bash completion script in CONFIGDIR')
-    opt.add_option('--static', action='store_true', dest='static',
-                   help='build static library')
-    opt.add_option('--no-shared', action='store_true', dest='no_shared',
-                   help='do not build shared library')
-    opt.add_option('--static-progs', action='store_true', dest='static_progs',
-                   help='build programs as static binaries')
+    opt = ctx.configuration_options()
+    ctx.add_flags(
+        opt,
+        {'no-utils': 'do not build command line utilities',
+         'no-bindings': 'do not build python bindings',
+         'dyn-manifest': 'build support for dynamic manifests',
+         'no-bash-completion': 'do not install bash completion script',
+         'static': 'build static library',
+         'no-shared': 'do not build shared library',
+         'static-progs': 'build programs as static binaries'})
+
     opt.add_option('--default-lv2-path', type='string', default='',
                    dest='default_lv2_path',
                    help='default LV2 path to use if LV2_PATH is unset')
 
 def configure(conf):
-    autowaf.display_header('Lilv Configuration')
     conf.load('compiler_c', cache=True)
     try:
         conf.load('compiler_cxx', cache=True)
+        conf.define('LILV_CXX', True)
     except:
         pass
 
-    if Options.options.bindings:
+    if not Options.options.no_bindings:
         try:
             conf.load('python', cache=True)
-            conf.check_python_headers()
-            autowaf.define(conf, 'LILV_PYTHON', 1);
+            conf.check_python_version((2,6,0))
+            conf.env.LILV_PYTHON = 1
         except:
             Logs.warn('Failed to configure Python (%s)\n' % sys.exc_info()[1])
@@ -89,16 +88,11 @@
     if not conf.env.BUILD_SHARED and not conf.env.BUILD_STATIC:
         conf.fatal('Neither a shared nor a static build requested')
 
-    autowaf.check_pkg(conf, 'lv2', uselib_store='LV2',
-                      atleast_version='1.14.0', mandatory=True)
-    autowaf.check_pkg(conf, 'serd-0', uselib_store='SERD',
-                      atleast_version='0.18.0', mandatory=True)
-    autowaf.check_pkg(conf, 'sord-0', uselib_store='SORD',
-                      atleast_version='0.14.0', mandatory=True)
-    autowaf.check_pkg(conf, 'sratom-0', uselib_store='SRATOM',
-                      atleast_version='0.4.0', mandatory=True)
-    autowaf.check_pkg(conf, 'sndfile', uselib_store='SNDFILE',
-                      atleast_version='1.0.0', mandatory=False)
+    conf.check_pkg('lv2 >= 1.16.0', uselib_store='LV2')
+    conf.check_pkg('serd-0 >= 0.30.0', uselib_store='SERD')
+    conf.check_pkg('sord-0 >= 0.14.0', uselib_store='SORD')
+    conf.check_pkg('sratom-0 >= 0.4.0', uselib_store='SRATOM')
+    conf.check_pkg('sndfile >= 1.0.0', uselib_store='SNDFILE', mandatory=False)
 
     defines = ['_POSIX_C_SOURCE=200809L', '_BSD_SOURCE', '_DEFAULT_SOURCE']
     if conf.env.DEST_OS == 'darwin':
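Note the new pkg-config style in the hunk above: conf.check_pkg('lv2 >= 1.16.0', ...) folds the old atleast_version keyword into the query string itself, and checks are apparently mandatory unless mandatory=False is passed (only sndfile keeps it). The dependency floors also rise here: lv2 1.14.0 -> 1.16.0 and serd 0.18.0 -> 0.30.0.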
@@ -108,39 +102,38 @@
     if conf.env.DEST_OS == 'darwin' or conf.env.DEST_OS == 'win32':
         rt_lib = []
 
-    autowaf.check_function(conf, 'c', 'lstat',
-                           header_name = ['sys/stat.h'],
-                           defines     = defines,
-                           define_name = 'HAVE_LSTAT',
-                           mandatory   = False)
-
-    autowaf.check_function(conf, 'c', 'flock',
-                           header_name = 'sys/file.h',
-                           defines     = defines,
-                           define_name = 'HAVE_FLOCK',
-                           mandatory   = False)
-
-    autowaf.check_function(conf, 'c', 'fileno',
-                           header_name = 'stdio.h',
-                           defines     = defines,
-                           define_name = 'HAVE_FILENO',
-                           mandatory   = False)
-
-    autowaf.check_function(conf, 'c', 'clock_gettime',
-                           header_name  = ['sys/time.h','time.h'],
-                           defines      = ['_POSIX_C_SOURCE=200809L'],
-                           define_name  = 'HAVE_CLOCK_GETTIME',
-                           uselib_store = 'CLOCK_GETTIME',
-                           lib          = rt_lib,
-                           mandatory    = False)
+    conf.check_function('c', 'lstat',
+                        header_name = ['sys/stat.h'],
+                        defines     = defines,
+                        define_name = 'HAVE_LSTAT',
+                        mandatory   = False)
+
+    conf.check_function('c', 'flock',
+                        header_name = 'sys/file.h',
+                        defines     = defines,
+                        define_name = 'HAVE_FLOCK',
+                        mandatory   = False)
+
+    conf.check_function('c', 'fileno',
+                        header_name = 'stdio.h',
+                        defines     = defines,
+                        define_name = 'HAVE_FILENO',
+                        mandatory   = False)
+
+    conf.check_function('c', 'clock_gettime',
+                        header_name  = ['sys/time.h','time.h'],
+                        defines      = ['_POSIX_C_SOURCE=200809L'],
+                        define_name  = 'HAVE_CLOCK_GETTIME',
+                        uselib_store = 'CLOCK_GETTIME',
+                        lib          = rt_lib,
+                        mandatory    = False)
 
     conf.check_cc(define_name = 'HAVE_LIBDL', lib = 'dl', mandatory = False)
 
-    autowaf.define(conf, 'LILV_VERSION', LILV_VERSION)
     if Options.options.dyn_manifest:
-        autowaf.define(conf, 'LILV_DYN_MANIFEST', 1)
+        conf.define('LILV_DYN_MANIFEST', 1)
 
     lilv_path_sep = ':'
     lilv_dir_sep  = '/'
@@ -148,8 +141,8 @@
         lilv_path_sep = ';'
         lilv_dir_sep  = '\\\\'
 
-    autowaf.define(conf, 'LILV_PATH_SEP', lilv_path_sep)
-    autowaf.define(conf, 'LILV_DIR_SEP', lilv_dir_sep)
+    conf.define('LILV_PATH_SEP', lilv_path_sep)
+    conf.define('LILV_DIR_SEP', lilv_dir_sep)
 
     # Set default LV2 path
     lv2_path = Options.options.default_lv2_path
@@ -171,35 +164,30 @@
             lv2_path = lilv_path_sep.join(['~/.lv2',
                                            '/usr/%s/lv2' % libdirname,
                                            '/usr/local/%s/lv2' % libdirname])
-    autowaf.define(conf, 'LILV_DEFAULT_LV2_PATH', lv2_path)
+    conf.define('LILV_DEFAULT_LV2_PATH', lv2_path)
 
     autowaf.set_lib_env(conf, 'lilv', LILV_VERSION)
     conf.write_config_header('lilv_config.h', remove=False)
 
-    autowaf.display_summary(conf)
-    autowaf.display_msg(conf, 'Default LV2_PATH',
-                        conf.env.LILV_DEFAULT_LV2_PATH)
-    autowaf.display_msg(conf, 'Utilities',
-                        bool(conf.env.BUILD_UTILS))
-    autowaf.display_msg(conf, 'Unit tests',
-                        bool(conf.env.BUILD_TESTS))
-    autowaf.display_msg(conf, 'Dynamic manifest support',
-                        bool(conf.env.LILV_DYN_MANIFEST))
-    autowaf.display_msg(conf, 'Python bindings',
-                        conf.is_defined('LILV_PYTHON'))
-    conf.undefine('LILV_DEFAULT_LV2_PATH') # Cmd line errors with VC++
-    print('')
+
+    autowaf.display_summary(
+        conf,
+        {'Default LV2_PATH': lv2_path,
+         'Utilities': bool(conf.env.BUILD_UTILS),
+         'Unit tests': bool(conf.env.BUILD_TESTS),
+         'Dynamic manifest support': conf.is_defined('LILV_DYN_MANIFEST'),
+         'Python bindings': bool(conf.env.LILV_PYTHON)})
 
 def build_util(bld, name, defines, libs=''):
     obj = bld(features     = 'c cprogram',
               source       = name + '.c',
              includes     = ['.', './src', './utils'],
               use          = 'liblilv',
+              uselib       = 'SERD SORD SRATOM LV2 ' + libs,
               target       = name,
               defines      = defines,
               install_path = '${BINDIR}')
-    autowaf.use_lib(bld, obj, 'SERD SORD SRATOM LV2 ' + libs)
     if not bld.env.BUILD_SHARED or bld.env.STATIC_PROGS:
         obj.use = 'liblilv_static'
     if bld.env.STATIC_PROGS:
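A recurring change in the build_util hunk above and the hunks that follow: the separate autowaf.use_lib(bld, obj, ...) call is gone, and the pkg-config results recorded by check_pkg under each uselib_store name are attached declaratively through a uselib keyword, along the lines of (hypothetical minimal task generator for illustration)::

    bld(features = 'c cprogram',
        source   = 'foo.c',
        target   = 'foo',
        uselib   = 'SERD')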
@@ -260,8 +248,8 @@
               install_path = '${LIBDIR}',
               defines      = ['LILV_SHARED', 'LILV_INTERNAL'],
               cflags       = libflags,
-              lib          = lib)
-    autowaf.use_lib(bld, obj, 'SERD SORD SRATOM LV2')
+              lib          = lib,
+              uselib       = 'SERD SORD SRATOM LV2')
 
     # Static library
     if bld.env.BUILD_STATIC:
@@ -273,11 +261,11 @@
               target       = 'lilv-%s' % LILV_MAJOR_VERSION,
               vnum         = LILV_VERSION,
               install_path = '${LIBDIR}',
-              defines      = defines + ['LILV_INTERNAL'])
-    autowaf.use_lib(bld, obj, 'SERD SORD SRATOM LV2')
+              defines      = defines + ['LILV_INTERNAL'],
+              uselib       = 'SERD SORD SRATOM LV2')
 
     # Python bindings
-    if bld.is_defined('LILV_PYTHON'):
+    if bld.env.LILV_PYTHON:
         bld(features     = 'subst',
             is_copy      = True,
             source       = 'bindings/python/lilv.py',
@@ -294,6 +282,14 @@
         test_cflags    += ['--coverage']
         test_linkflags += ['--coverage']
 
+    # Copy skeleton LV2 bundle for tests
+    for name in ('manifest.ttl', 'lv2core.ttl'):
+        bld(features     = 'subst',
+            is_copy      = True,
+            source       = 'test/core.lv2/' + name,
+            target       = 'test/test_lv2_path/core.lv2/' + name,
+            install_path = None)
+
     # Make a pattern for shared objects without the 'lib' prefix
     module_pattern = re.sub('^lib', '', bld.env.cshlib_PATTERN)
     shlib_ext = module_pattern[module_pattern.rfind('.'):]
@@ -325,8 +321,7 @@
             cflags       = test_cflags,
             linkflags    = test_linkflags,
             lib          = test_libs,
-            uselib       = 'LV2')
-    autowaf.use_lib(bld, obj, 'SERD SORD SRATOM LV2')
+            uselib       = 'SERD SORD SRATOM LV2')
 
     # Test plugin data files
     for p in ['test'] + test_plugins:
@@ -348,11 +343,11 @@
             defines      = defines + ['LILV_INTERNAL'],
             cflags       = test_cflags,
             linkflags    = test_linkflags,
-            lib          = test_libs)
-    autowaf.use_lib(bld, obj, 'SERD SORD SRATOM LV2')
+            lib          = test_libs,
+            uselib       = 'SERD SORD SRATOM LV2')
 
     # Unit test program
-    testdir = os.path.abspath(autowaf.build_dir(APPNAME, 'test'))
+    testdir = bld.path.get_bld().make_node('test').abspath()
     bpath   = os.path.join(testdir, 'test.lv2')
     bpath   = bpath.replace('\\', '/')
     testdir = testdir.replace('\\', '/')
@@ -361,13 +356,13 @@
             includes     = ['.', './src'],
             use          = 'liblilv_profiled',
             lib          = test_libs,
+            uselib       = 'SERD SORD SRATOM LV2',
             target       = 'test/lilv_test',
             install_path = None,
             defines      = (defines + ['LILV_TEST_BUNDLE=\"%s/\"' % bpath] +
                             ['LILV_TEST_DIR=\"%s/\"' % testdir]),
             cflags       = test_cflags,
             linkflags    = test_linkflags)
-    autowaf.use_lib(bld, obj, 'SERD SORD SRATOM LV2')
 
     # C++ API test
     if 'COMPILER_CXX' in bld.env:
@@ -376,13 +371,13 @@
             includes     = ['.', './src'],
             use          = 'liblilv_profiled',
             lib          = test_libs,
+            uselib       = 'SERD SORD SRATOM LV2',
             target       = 'test/lilv_cxx_test',
             install_path = None,
             cxxflags     = test_cflags,
             linkflags    = test_linkflags)
-    autowaf.use_lib(bld, obj, 'SERD SORD SRATOM LV2')
 
-    if bld.is_defined('LILV_PYTHON'):
+    if bld.env.LILV_PYTHON:
         # Copy Python unittest files
         for i in [ 'test_api.py' ]:
             bld(features     = 'subst',
@@ -423,14 +418,16 @@
         for i in utils.split():
             build_util(bld, i, defines)
 
-    if bld.env.HAVE_SNDFILE:
-        obj = build_util(bld, 'utils/lv2apply', defines, 'SNDFILE')
+        if bld.env.HAVE_SNDFILE:
+            obj = build_util(bld, 'utils/lv2apply', defines, 'SNDFILE')
 
-    # lv2bench (less portable than other utilities)
-    if bld.is_defined('HAVE_CLOCK_GETTIME') and not bld.env.STATIC_PROGS:
-        obj = build_util(bld, 'utils/lv2bench', defines)
-        if not bld.env.MSVC_COMPILER and not bld.env.DEST_OS == 'darwin':
-            obj.lib = ['rt']
+        # lv2bench (less portable than other utilities)
+        if (bld.env.DEST_OS != 'win32' and
+            bld.is_defined('HAVE_CLOCK_GETTIME') and
+            not bld.env.STATIC_PROGS):
+            obj = build_util(bld, 'utils/lv2bench', defines)
+            if bld.env.DEST_OS != 'darwin':
+                obj.lib = ['rt']
 
     # Documentation
     autowaf.build_dox(bld, 'LILV', LILV_VERSION, top, out)
@@ -444,38 +441,23 @@
                  '${SYSCONFDIR}/bash_completion.d/lilv',
                  'utils/lilv.bash_completion')
 
     bld.add_post_fun(autowaf.run_ldconfig)
-    if bld.env.DOCS:
-        bld.add_post_fun(fix_docs)
 
-def fix_docs(ctx):
-    if ctx.cmd == 'build':
-        autowaf.make_simple_dox(APPNAME)
-
-def upload_docs(ctx):
-    import glob
-    os.system('rsync -ravz --delete -e ssh build/doc/html/ drobilla@drobilla.net:~/drobilla.net/docs/lilv/')
-    for page in glob.glob('doc/*.[1-8]'):
-        os.system('soelim %s | pre-grohtml troff -man -wall -Thtml | post-grohtml > build/%s.html' % (page, page))
-        os.system('rsync -avz --delete -e ssh build/%s.html drobilla@drobilla.net:~/drobilla.net/man/' % page)
-
-def test(ctx):
-    assert ctx.env.BUILD_TESTS, "You have run waf configure without the --test flag. No tests were run."
-    autowaf.pre_test(ctx, APPNAME)
-    if ctx.is_defined('LILV_PYTHON'):
-        os.environ['LD_LIBRARY_PATH'] = os.getcwd()
-        autowaf.run_tests(ctx, APPNAME, ['python -m unittest discover bindings/'])
-    os.environ['PATH'] = 'test' + os.pathsep + os.getenv('PATH')
-
-    Logs.pprint('GREEN', '')
-    autowaf.run_test(ctx, APPNAME, 'lilv_test', dirs=['./src','./test'], name='lilv_test')
-
-    for p in test_plugins:
-        test_prog = 'test_' + p + ' ' + ('test/%s.lv2/' % p)
-        if os.path.exists('test/test_' + p):
-            autowaf.run_test(ctx, APPNAME, test_prog, 0,
-                             dirs=['./src','./test','./test/%s.lv2' % p])
+def test(tst):
+    with tst.group('unit') as check:
+        check(['./test/lilv_test'])
+        if tst.is_defined('LILV_CXX'):
+            check(['./test/lilv_cxx_test'])
+
+    if tst.env.LILV_PYTHON:
+        with tst.group('python') as check:
+            check(['python', '-m', 'unittest', 'discover', 'bindings/'])
+
+    with tst.group('plugin') as check:
+        for p in test_plugins:
+            prog_name = tst.env.cprogram_PATTERN % ('test_' + p)
+            if os.path.exists(os.path.join('test', prog_name)):
+                check(['./test/test_' + p, 'test/%s.lv2/' % p])
 
-    autowaf.post_test(ctx, APPNAME)
     try:
         shutil.rmtree('state')
     except:
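The rewritten test() relies on the newer autowaf test context: each tst.group(...) block collects check(...) invocations under a named group, and the whole suite runs as an ordinary waf command, i.e. './waf test' after configure and build (assuming the group/check API of the bundled autowaf).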
-name '*.c')") subprocess.call(cmd, cwd='build', shell=True) - -def posts(ctx): - path = str(ctx.path.abspath()) - autowaf.news_to_posts( - os.path.join(path, 'NEWS'), - {'title' : 'Lilv', - 'description' : autowaf.get_blurb(os.path.join(path, 'README')), - 'dist_pattern' : 'http://download.drobilla.net/lilv-%s.tar.bz2'}, - { 'Author' : 'drobilla', - 'Tags' : 'Hacking, LAD, LV2, Lilv' }, - os.path.join(out, 'posts'))