diff -Nru serd-0.30.2/debian/changelog serd-0.30.4/debian/changelog --- serd-0.30.2/debian/changelog 2019-12-08 20:11:24.000000000 +0000 +++ serd-0.30.4/debian/changelog 2020-06-13 00:29:30.000000000 +0000 @@ -1,3 +1,22 @@ +serd (0.30.4-1) unstable; urgency=medium + + * New upstream version 0.30.4 + * Bump S-V to 4.5.0 + * Bump dh-compat to 13 + * Add me as uploader + * Fix bad-whatis-entry in serd.3 + * Mark libserd-doc as Multi-Arch: foreign + * Make libserd-doc depend on libjs-jquery + * Link the jquery.js from libjs-jquery + * Add libserd-dev as B-D to the symbols file + * Update d/copyright year + * d/watch: Use https protocol + * Remove obsolete DEB_LDFLAGS_MAINT_APPEND export + * Add d/upstream/signing-key.asc + * Add d/upstream/metadata + + -- Dennis Braun Sat, 13 Jun 2020 02:29:30 +0200 + serd (0.30.2-1) unstable; urgency=medium * Team upload diff -Nru serd-0.30.2/debian/control serd-0.30.4/debian/control --- serd-0.30.2/debian/control 2019-12-08 20:10:45.000000000 +0000 +++ serd-0.30.4/debian/control 2020-05-08 20:58:45.000000000 +0000 @@ -4,15 +4,16 @@ Maintainer: Debian Multimedia Maintainers Uploaders: Alessio Treglia , - Jaromír Mikeš + Jaromír Mikeš , + Dennis Braun Build-Depends: - debhelper-compat (= 12), + debhelper-compat (= 13), pkg-config, python3:any Build-Depends-Indep: doxygen, graphviz -Standards-Version: 4.4.1 +Standards-Version: 4.5.0 Homepage: https://drobilla.net/software/serd/ Vcs-Git: https://salsa.debian.org/multimedia-team/serd.git Vcs-Browser: https://salsa.debian.org/multimedia-team/serd @@ -86,9 +87,11 @@ Package: libserd-doc Section: doc Architecture: all +Multi-Arch: foreign Enhances: libserd-dev Depends: + libjs-jquery, ${misc:Depends} Description: lightweight RDF syntax library - documentation Serd is a lightweight C library for RDF syntax which supports reading diff -Nru serd-0.30.2/debian/copyright serd-0.30.4/debian/copyright --- serd-0.30.2/debian/copyright 2019-12-08 19:58:21.000000000 +0000 +++ serd-0.30.4/debian/copyright 2020-05-08 20:58:45.000000000 +0000 @@ -2,11 +2,10 @@ Upstream-Name: Serd Upstream-Contact: David Robillard Source: https://download.drobilla.net/ -Copyright: 2011-2012 David Robillard License: ISC Files: * -Copyright: 2011-2012 David Robillard +Copyright: 2011-2020 David Robillard License: ISC Files: waf @@ -57,4 +56,4 @@ HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE - POSSIBILITY OF SUCH DAMAGE. \ No newline at end of file + POSSIBILITY OF SUCH DAMAGE. 
diff -Nru serd-0.30.2/debian/gbp.conf serd-0.30.4/debian/gbp.conf --- serd-0.30.2/debian/gbp.conf 2019-12-08 19:58:21.000000000 +0000 +++ serd-0.30.4/debian/gbp.conf 2020-05-08 20:59:20.000000000 +0000 @@ -1,4 +1,3 @@ [DEFAULT] pristine-tar = True -compression = xz sign-tags = True diff -Nru serd-0.30.2/debian/libserd-0-0.symbols serd-0.30.4/debian/libserd-0-0.symbols --- serd-0.30.2/debian/libserd-0-0.symbols 2019-12-08 20:08:58.000000000 +0000 +++ serd-0.30.4/debian/libserd-0-0.symbols 2020-05-08 20:58:45.000000000 +0000 @@ -1,4 +1,5 @@ libserd-0.so.0 libserd-0-0 #MINVER# +* Build-Depends-Package: libserd-dev serd_base64_decode@Base 0.14.0~dfsg0 serd_chunk_sink@Base 0.14.0~dfsg0 serd_chunk_sink_finish@Base 0.14.0~dfsg0 diff -Nru serd-0.30.2/debian/libserd-doc.install serd-0.30.4/debian/libserd-doc.install --- serd-0.30.2/debian/libserd-doc.install 2019-12-08 19:58:21.000000000 +0000 +++ serd-0.30.4/debian/libserd-doc.install 2020-06-13 00:29:30.000000000 +0000 @@ -1,2 +1,2 @@ usr/share/doc/serd-*/html/* usr/share/doc/libserd-dev/ -usr/share/man/man3 +debian/serd.3 usr/share/man/man3 diff -Nru serd-0.30.2/debian/libserd-doc.links serd-0.30.4/debian/libserd-doc.links --- serd-0.30.2/debian/libserd-doc.links 1970-01-01 00:00:00.000000000 +0000 +++ serd-0.30.4/debian/libserd-doc.links 2020-05-08 20:58:45.000000000 +0000 @@ -0,0 +1 @@ +usr/share/javascript/jquery/jquery.js usr/share/doc/libserd-dev/jquery.js diff -Nru serd-0.30.2/debian/not-installed serd-0.30.4/debian/not-installed --- serd-0.30.2/debian/not-installed 1970-01-01 00:00:00.000000000 +0000 +++ serd-0.30.4/debian/not-installed 2020-06-13 00:29:30.000000000 +0000 @@ -0,0 +1 @@ +usr/share/man/man3/serd.3 diff -Nru serd-0.30.2/debian/rules serd-0.30.4/debian/rules --- serd-0.30.2/debian/rules 2019-12-08 20:04:03.000000000 +0000 +++ serd-0.30.4/debian/rules 2020-06-13 00:29:30.000000000 +0000 @@ -1,7 +1,6 @@ #!/usr/bin/make -f export DEB_BUILD_MAINT_OPTIONS = hardening=+bindnow -export DEB_LDFLAGS_MAINT_APPEND = -Wl,--as-needed export LINKFLAGS += $(LDFLAGS) include /usr/share/dpkg/architecture.mk diff -Nru serd-0.30.2/debian/serd.3 serd-0.30.4/debian/serd.3 --- serd-0.30.2/debian/serd.3 1970-01-01 00:00:00.000000000 +0000 +++ serd-0.30.4/debian/serd.3 2020-06-13 00:29:30.000000000 +0000 @@ -0,0 +1,1075 @@ +.TH SERD 3 '2020-04-28' +.SH NAME +serd \- A lightweight RDF syntax library +.SH SYNOPSIS + + +.SS "Data Structures" + +.in +1c +.ti -1c +.RI "struct \fBSerdNode\fP" +.br +.ti -1c +.RI "struct \fBSerdChunk\fP" +.br +.ti -1c +.RI "struct \fBSerdError\fP" +.br +.ti -1c +.RI "struct \fBSerdURI\fP" +.br +.in -1c +.SS "Typedefs" + +.in +1c +.ti -1c +.RI "typedef struct SerdEnvImpl \fBSerdEnv\fP" +.br +.ti -1c +.RI "typedef struct SerdReaderImpl \fBSerdReader\fP" +.br +.ti -1c +.RI "typedef struct SerdWriterImpl \fBSerdWriter\fP" +.br +.ti -1c +.RI "typedef uint32_t \fBSerdStatementFlags\fP" +.br +.ti -1c +.RI "typedef uint32_t \fBSerdNodeFlags\fP" +.br +.in -1c +.SS "Enumerations" + +.in +1c +.ti -1c +.RI "enum \fBSerdStatus\fP " +.br +.ti -1c +.RI "enum \fBSerdSyntax\fP " +.br +.ti -1c +.RI "enum \fBSerdStatementFlag\fP " +.br +.ti -1c +.RI "enum \fBSerdType\fP " +.br +.ti -1c +.RI "enum \fBSerdNodeFlag\fP " +.br +.ti -1c +.RI "enum \fBSerdStyle\fP " +.br +.in -1c +.SS "Functions" + +.in +1c +.ti -1c +.RI "void \fBserd_free\fP (void *ptr)" +.br +.in -1c +.SS "String Utilities" + +.in +1c +.ti -1c +.RI "const uint8_t * \fBserd_strerror\fP (\fBSerdStatus\fP status)" +.br +.ti -1c +.RI "size_t \fBserd_strlen\fP (const uint8_t *str, 
size_t *n_bytes, \fBSerdNodeFlags\fP *flags)" +.br +.ti -1c +.RI "double \fBserd_strtod\fP (const char *str, char **endptr)" +.br +.ti -1c +.RI "void * \fBserd_base64_decode\fP (const uint8_t *str, size_t len, size_t *size)" +.br +.in -1c +.SS "Byte Streams" + +.in +1c +.ti -1c +.RI "typedef int(* \fBSerdStreamErrorFunc\fP) (void *stream)" +.br +.ti -1c +.RI "typedef size_t(* \fBSerdSource\fP) (void *buf, size_t size, size_t nmemb, void *stream)" +.br +.ti -1c +.RI "typedef size_t(* \fBSerdSink\fP) (const void *buf, size_t len, void *stream)" +.br +.in -1c +.SS "URI" + +.in +1c +.ti -1c +.RI "static const \fBSerdURI\fP \fBSERD_URI_NULL\fP" +.br +.ti -1c +.RI "const uint8_t * \fBserd_uri_to_path\fP (const uint8_t *uri)" +.br +.ti -1c +.RI "uint8_t * \fBserd_file_uri_parse\fP (const uint8_t *uri, uint8_t **hostname)" +.br +.ti -1c +.RI "bool \fBserd_uri_string_has_scheme\fP (const uint8_t *utf8)" +.br +.ti -1c +.RI "\fBSerdStatus\fP \fBserd_uri_parse\fP (const uint8_t *utf8, \fBSerdURI\fP *out)" +.br +.ti -1c +.RI "void \fBserd_uri_resolve\fP (const \fBSerdURI\fP *r, const \fBSerdURI\fP *base, \fBSerdURI\fP *t)" +.br +.ti -1c +.RI "size_t \fBserd_uri_serialise\fP (const \fBSerdURI\fP *uri, \fBSerdSink\fP sink, void *stream)" +.br +.ti -1c +.RI "size_t \fBserd_uri_serialise_relative\fP (const \fBSerdURI\fP *uri, const \fBSerdURI\fP *base, const \fBSerdURI\fP *root, \fBSerdSink\fP sink, void *stream)" +.br +.in -1c +.SS "Node" + +.in +1c +.ti -1c +.RI "static const \fBSerdNode\fP \fBSERD_NODE_NULL\fP = { NULL, 0, 0, 0, \fBSERD_NOTHING\fP }" +.br +.ti -1c +.RI "\fBSerdNode\fP \fBserd_node_from_string\fP (\fBSerdType\fP type, const uint8_t *str)" +.br +.ti -1c +.RI "\fBSerdNode\fP \fBserd_node_from_substring\fP (\fBSerdType\fP type, const uint8_t *str, size_t len)" +.br +.ti -1c +.RI "\fBSerdNode\fP \fBserd_node_copy\fP (const \fBSerdNode\fP *node)" +.br +.ti -1c +.RI "bool \fBserd_node_equals\fP (const \fBSerdNode\fP *a, const \fBSerdNode\fP *b)" +.br +.ti -1c +.RI "\fBSerdNode\fP \fBserd_node_new_uri_from_node\fP (const \fBSerdNode\fP *uri_node, const \fBSerdURI\fP *base, \fBSerdURI\fP *out)" +.br +.ti -1c +.RI "\fBSerdNode\fP \fBserd_node_new_uri_from_string\fP (const uint8_t *str, const \fBSerdURI\fP *base, \fBSerdURI\fP *out)" +.br +.ti -1c +.RI "\fBSerdNode\fP \fBserd_node_new_file_uri\fP (const uint8_t *path, const uint8_t *hostname, \fBSerdURI\fP *out, bool escape)" +.br +.ti -1c +.RI "\fBSerdNode\fP \fBserd_node_new_uri\fP (const \fBSerdURI\fP *uri, const \fBSerdURI\fP *base, \fBSerdURI\fP *out)" +.br +.ti -1c +.RI "\fBSerdNode\fP \fBserd_node_new_relative_uri\fP (const \fBSerdURI\fP *uri, const \fBSerdURI\fP *base, const \fBSerdURI\fP *root, \fBSerdURI\fP *out)" +.br +.ti -1c +.RI "\fBSerdNode\fP \fBserd_node_new_decimal\fP (double d, unsigned frac_digits)" +.br +.ti -1c +.RI "\fBSerdNode\fP \fBserd_node_new_integer\fP (int64_t i)" +.br +.ti -1c +.RI "\fBSerdNode\fP \fBserd_node_new_blob\fP (const void *buf, size_t size, bool wrap_lines)" +.br +.ti -1c +.RI "void \fBserd_node_free\fP (\fBSerdNode\fP *node)" +.br +.in -1c +.SS "Event Handlers" + +.in +1c +.ti -1c +.RI "typedef \fBSerdStatus\fP(* \fBSerdErrorSink\fP) (void *handle, const \fBSerdError\fP *error)" +.br +.ti -1c +.RI "typedef \fBSerdStatus\fP(* \fBSerdBaseSink\fP) (void *handle, const \fBSerdNode\fP *uri)" +.br +.ti -1c +.RI "typedef \fBSerdStatus\fP(* \fBSerdPrefixSink\fP) (void *handle, const \fBSerdNode\fP *name, const \fBSerdNode\fP *uri)" +.br +.ti -1c +.RI "typedef \fBSerdStatus\fP(* \fBSerdStatementSink\fP) (void 
*handle, \fBSerdStatementFlags\fP flags, const \fBSerdNode\fP *graph, const \fBSerdNode\fP *subject, const \fBSerdNode\fP *predicate, const \fBSerdNode\fP *object, const \fBSerdNode\fP *object_datatype, const \fBSerdNode\fP *object_lang)" +.br +.ti -1c +.RI "typedef \fBSerdStatus\fP(* \fBSerdEndSink\fP) (void *handle, const \fBSerdNode\fP *node)" +.br +.in -1c +.SS "Environment" + +.in +1c +.ti -1c +.RI "\fBSerdEnv\fP * \fBserd_env_new\fP (const \fBSerdNode\fP *base_uri)" +.br +.ti -1c +.RI "void \fBserd_env_free\fP (\fBSerdEnv\fP *env)" +.br +.ti -1c +.RI "const \fBSerdNode\fP * \fBserd_env_get_base_uri\fP (const \fBSerdEnv\fP *env, \fBSerdURI\fP *out)" +.br +.ti -1c +.RI "\fBSerdStatus\fP \fBserd_env_set_base_uri\fP (\fBSerdEnv\fP *env, const \fBSerdNode\fP *uri)" +.br +.ti -1c +.RI "\fBSerdStatus\fP \fBserd_env_set_prefix\fP (\fBSerdEnv\fP *env, const \fBSerdNode\fP *name, const \fBSerdNode\fP *uri)" +.br +.ti -1c +.RI "\fBSerdStatus\fP \fBserd_env_set_prefix_from_strings\fP (\fBSerdEnv\fP *env, const uint8_t *name, const uint8_t *uri)" +.br +.ti -1c +.RI "bool \fBserd_env_qualify\fP (const \fBSerdEnv\fP *env, const \fBSerdNode\fP *uri, \fBSerdNode\fP *prefix, \fBSerdChunk\fP *suffix)" +.br +.ti -1c +.RI "\fBSerdStatus\fP \fBserd_env_expand\fP (const \fBSerdEnv\fP *env, const \fBSerdNode\fP *curie, \fBSerdChunk\fP *uri_prefix, \fBSerdChunk\fP *uri_suffix)" +.br +.ti -1c +.RI "\fBSerdNode\fP \fBserd_env_expand_node\fP (const \fBSerdEnv\fP *env, const \fBSerdNode\fP *node)" +.br +.ti -1c +.RI "void \fBserd_env_foreach\fP (const \fBSerdEnv\fP *env, \fBSerdPrefixSink\fP func, void *handle)" +.br +.in -1c +.SS "Reader" + +.in +1c +.ti -1c +.RI "\fBSerdReader\fP * \fBserd_reader_new\fP (\fBSerdSyntax\fP syntax, void *handle, void(*free_handle)(void *), \fBSerdBaseSink\fP base_sink, \fBSerdPrefixSink\fP prefix_sink, \fBSerdStatementSink\fP statement_sink, \fBSerdEndSink\fP end_sink)" +.br +.ti -1c +.RI "void \fBserd_reader_set_strict\fP (\fBSerdReader\fP *reader, bool strict)" +.br +.ti -1c +.RI "void \fBserd_reader_set_error_sink\fP (\fBSerdReader\fP *reader, \fBSerdErrorSink\fP error_sink, void *error_handle)" +.br +.ti -1c +.RI "void * \fBserd_reader_get_handle\fP (const \fBSerdReader\fP *reader)" +.br +.ti -1c +.RI "void \fBserd_reader_add_blank_prefix\fP (\fBSerdReader\fP *reader, const uint8_t *prefix)" +.br +.ti -1c +.RI "void \fBserd_reader_set_default_graph\fP (\fBSerdReader\fP *reader, const \fBSerdNode\fP *graph)" +.br +.ti -1c +.RI "\fBSerdStatus\fP \fBserd_reader_read_file\fP (\fBSerdReader\fP *reader, const uint8_t *uri)" +.br +.ti -1c +.RI "\fBSerdStatus\fP \fBserd_reader_start_stream\fP (\fBSerdReader\fP *reader, FILE *file, const uint8_t *name, bool bulk)" +.br +.ti -1c +.RI "\fBSerdStatus\fP \fBserd_reader_start_source_stream\fP (\fBSerdReader\fP *reader, \fBSerdSource\fP read_func, \fBSerdStreamErrorFunc\fP error_func, void *stream, const uint8_t *name, size_t page_size)" +.br +.ti -1c +.RI "\fBSerdStatus\fP \fBserd_reader_read_chunk\fP (\fBSerdReader\fP *reader)" +.br +.ti -1c +.RI "\fBSerdStatus\fP \fBserd_reader_end_stream\fP (\fBSerdReader\fP *reader)" +.br +.ti -1c +.RI "\fBSerdStatus\fP \fBserd_reader_read_file_handle\fP (\fBSerdReader\fP *reader, FILE *file, const uint8_t *name)" +.br +.ti -1c +.RI "\fBSerdStatus\fP \fBserd_reader_read_source\fP (\fBSerdReader\fP *reader, \fBSerdSource\fP source, \fBSerdStreamErrorFunc\fP error, void *stream, const uint8_t *name, size_t page_size)" +.br +.ti -1c +.RI "\fBSerdStatus\fP \fBserd_reader_read_string\fP (\fBSerdReader\fP 
*reader, const uint8_t *utf8)" +.br +.ti -1c +.RI "void \fBserd_reader_free\fP (\fBSerdReader\fP *reader)" +.br +.in -1c +.SS "Writer" + +.in +1c +.ti -1c +.RI "\fBSerdWriter\fP * \fBserd_writer_new\fP (\fBSerdSyntax\fP syntax, \fBSerdStyle\fP style, \fBSerdEnv\fP *env, const \fBSerdURI\fP *base_uri, \fBSerdSink\fP ssink, void *stream)" +.br +.ti -1c +.RI "void \fBserd_writer_free\fP (\fBSerdWriter\fP *writer)" +.br +.ti -1c +.RI "\fBSerdEnv\fP * \fBserd_writer_get_env\fP (\fBSerdWriter\fP *writer)" +.br +.ti -1c +.RI "size_t \fBserd_file_sink\fP (const void *buf, size_t len, void *stream)" +.br +.ti -1c +.RI "size_t \fBserd_chunk_sink\fP (const void *buf, size_t len, void *stream)" +.br +.ti -1c +.RI "uint8_t * \fBserd_chunk_sink_finish\fP (\fBSerdChunk\fP *stream)" +.br +.ti -1c +.RI "void \fBserd_writer_set_error_sink\fP (\fBSerdWriter\fP *writer, \fBSerdErrorSink\fP error_sink, void *error_handle)" +.br +.ti -1c +.RI "void \fBserd_writer_chop_blank_prefix\fP (\fBSerdWriter\fP *writer, const uint8_t *prefix)" +.br +.ti -1c +.RI "\fBSerdStatus\fP \fBserd_writer_set_base_uri\fP (\fBSerdWriter\fP *writer, const \fBSerdNode\fP *uri)" +.br +.ti -1c +.RI "\fBSerdStatus\fP \fBserd_writer_set_root_uri\fP (\fBSerdWriter\fP *writer, const \fBSerdNode\fP *uri)" +.br +.ti -1c +.RI "\fBSerdStatus\fP \fBserd_writer_set_prefix\fP (\fBSerdWriter\fP *writer, const \fBSerdNode\fP *name, const \fBSerdNode\fP *uri)" +.br +.ti -1c +.RI "\fBSerdStatus\fP \fBserd_writer_write_statement\fP (\fBSerdWriter\fP *writer, \fBSerdStatementFlags\fP flags, const \fBSerdNode\fP *graph, const \fBSerdNode\fP *subject, const \fBSerdNode\fP *predicate, const \fBSerdNode\fP *object, const \fBSerdNode\fP *datatype, const \fBSerdNode\fP *lang)" +.br +.ti -1c +.RI "\fBSerdStatus\fP \fBserd_writer_end_anon\fP (\fBSerdWriter\fP *writer, const \fBSerdNode\fP *node)" +.br +.ti -1c +.RI "\fBSerdStatus\fP \fBserd_writer_finish\fP (\fBSerdWriter\fP *writer)" +.br +.in -1c +.SH "Data Structure Documentation" +.PP +.SH "struct SerdNode" +.PP +A syntactic RDF node\&. +.PP +\fBData Fields:\fP +.RS 4 +const uint8_t * \fIbuf\fP Value string\&. +.br +.PP +size_t \fIn_bytes\fP Size in bytes (not including null) +.br +.PP +size_t \fIn_chars\fP Length in characters (not including null) +.br +.PP +\fBSerdNodeFlags\fP \fIflags\fP Node flags (e\&.g\&. string properties) +.br +.PP +\fBSerdType\fP \fItype\fP Node type\&. +.br +.PP +.RE +.PP +.SH "struct SerdChunk" +.PP +An unterminated string fragment\&. +.PP +\fBData Fields:\fP +.RS 4 +const uint8_t * \fIbuf\fP Start of chunk\&. +.br +.PP +size_t \fIlen\fP Length of chunk in bytes\&. +.br +.PP +.RE +.PP +.SH "struct SerdError" +.PP +An error description\&. +.PP +\fBData Fields:\fP +.RS 4 +\fBSerdStatus\fP \fIstatus\fP Error code\&. +.br +.PP +const uint8_t * \fIfilename\fP File where error was encountered, or NULL\&. +.br +.PP +unsigned \fIline\fP Line where error was encountered, or 0\&. +.br +.PP +unsigned \fIcol\fP Column where error was encountered\&. +.br +.PP +const char * \fIfmt\fP Message format string (printf style) +.br +.PP +va_list * \fIargs\fP Arguments for fmt\&. +.br +.PP +.RE +.PP +.SH "struct SerdURI" +.PP +A parsed URI\&. + +This struct directly refers to chunks in other strings, it does not own any memory itself\&. Thus, URIs can be parsed and/or resolved against a base URI in-place without allocating memory\&. +.PP +\fBData Fields:\fP +.RS 4 +\fBSerdChunk\fP \fIscheme\fP Scheme\&. +.br +.PP +\fBSerdChunk\fP \fIauthority\fP Authority\&. 
+.br +.PP +\fBSerdChunk\fP \fIpath_base\fP Path prefix if relative\&. +.br +.PP +\fBSerdChunk\fP \fIpath\fP Path suffix\&. +.br +.PP +\fBSerdChunk\fP \fIquery\fP Query\&. +.br +.PP +\fBSerdChunk\fP \fIfragment\fP Fragment\&. +.br +.PP +.RE +.PP +.SH "Typedef Documentation" +.PP +.SS "typedef struct SerdEnvImpl \fBSerdEnv\fP" + +.PP +Environment\&. Represents the state required to resolve a CURIE or relative URI, e\&.g\&. the base URI and set of namespace prefixes at a particular point\&. +.SS "typedef struct SerdReaderImpl \fBSerdReader\fP" + +.PP +RDF reader\&. Parses RDF by calling user-provided sink functions as input is consumed (much like an XML SAX parser)\&. +.SS "typedef struct SerdWriterImpl \fBSerdWriter\fP" + +.PP +RDF writer\&. Provides a number of functions to allow writing RDF syntax out to some stream\&. These functions are deliberately compatible with the sink functions used by SerdReader, so a reader can be directly connected to a writer to re-serialise a document with minimal overhead\&. +.SS "typedef uint32_t \fBSerdStatementFlags\fP" + +.PP +Bitwise OR of SerdStatementFlag values\&. +.SS "typedef uint32_t \fBSerdNodeFlags\fP" + +.PP +Bitwise OR of SerdNodeFlag values\&. +.SS "typedef int(* SerdStreamErrorFunc) (void *stream)" + +.PP +Function to detect I/O stream errors\&. Identical semantics to \fCferror\fP\&. +.PP +\fBReturns\fP +.RS 4 +Non-zero if \fCstream\fP has encountered an error\&. +.RE +.PP + +.SS "typedef size_t(* SerdSource) (void *buf, size_t size, size_t nmemb, void *stream)" + +.PP +Source function for raw string input\&. Identical semantics to \fCfread\fP, but may set errno for more informative error reporting than supported by SerdStreamErrorFunc\&. +.PP +\fBParameters\fP +.RS 4 +\fIbuf\fP Output buffer\&. +.br +\fIsize\fP Size of a single element of data in bytes (always 1)\&. +.br +\fInmemb\fP Number of elements to read\&. +.br +\fIstream\fP Stream to read from (FILE* for fread)\&. +.RE +.PP +\fBReturns\fP +.RS 4 +Number of elements (bytes) read\&. +.RE +.PP + +.SS "typedef size_t(* SerdSink) (const void *buf, size_t len, void *stream)" + +.PP +Sink function for raw string output\&. +.SS "typedef \fBSerdStatus\fP(* SerdErrorSink) (void *handle, const \fBSerdError\fP *error)" + +.PP +Sink (callback) for errors\&. +.PP +\fBParameters\fP +.RS 4 +\fIhandle\fP Handle for user data\&. +.br +\fIerror\fP Error description\&. +.RE +.PP + +.SS "typedef \fBSerdStatus\fP(* SerdBaseSink) (void *handle, const \fBSerdNode\fP *uri)" + +.PP +Sink (callback) for base URI changes\&. Called whenever the base URI of the serialisation changes\&. +.SS "typedef \fBSerdStatus\fP(* SerdPrefixSink) (void *handle, const \fBSerdNode\fP *name, const \fBSerdNode\fP *uri)" + +.PP +Sink (callback) for namespace definitions\&. Called whenever a prefix is defined in the serialisation\&. +.SS "typedef \fBSerdStatus\fP(* SerdStatementSink) (void *handle, \fBSerdStatementFlags\fP flags, const \fBSerdNode\fP *graph, const \fBSerdNode\fP *subject, const \fBSerdNode\fP *predicate, const \fBSerdNode\fP *object, const \fBSerdNode\fP *object_datatype, const \fBSerdNode\fP *object_lang)" + +.PP +Sink (callback) for statements\&. Called for every RDF statement in the serialisation\&. +.SS "typedef \fBSerdStatus\fP(* SerdEndSink) (void *handle, const \fBSerdNode\fP *node)" + +.PP +Sink (callback) for anonymous node end markers\&. This is called to indicate that the anonymous node with the given \fCvalue\fP will no longer be referred to by any future statements (i\&.e\&. 
the anonymous serialisation of the node is finished)\&. +.SH "Enumeration Type Documentation" +.PP +.SS "enum \fBSerdStatus\fP" + +.PP +Return status code\&. +.PP +\fBEnumerator\fP +.in +1c +.TP +\fB\fISERD_SUCCESS \fP\fP +No error\&. +.TP +\fB\fISERD_FAILURE \fP\fP +Non-fatal failure\&. +.TP +\fB\fISERD_ERR_UNKNOWN \fP\fP +Unknown error\&. +.TP +\fB\fISERD_ERR_BAD_SYNTAX \fP\fP +Invalid syntax\&. +.TP +\fB\fISERD_ERR_BAD_ARG \fP\fP +Invalid argument\&. +.TP +\fB\fISERD_ERR_NOT_FOUND \fP\fP +Not found\&. +.TP +\fB\fISERD_ERR_ID_CLASH \fP\fP +Encountered clashing blank node IDs\&. +.TP +\fB\fISERD_ERR_BAD_CURIE \fP\fP +Invalid CURIE (e\&.g\&. prefix does not exist) +.TP +\fB\fISERD_ERR_INTERNAL \fP\fP +Unexpected internal error (should not happen) +.SS "enum \fBSerdSyntax\fP" + +.PP +RDF syntax type\&. +.PP +\fBEnumerator\fP +.in +1c +.TP +\fB\fISERD_TURTLE \fP\fP +Turtle - Terse RDF Triple Language (UTF-8)\&. +.PP +\fBSee also\fP +.RS 4 +\fCTurtle\fP +.RE +.PP + +.TP +\fB\fISERD_NTRIPLES \fP\fP +NTriples - Line-based RDF triples (ASCII)\&. +.PP +\fBSee also\fP +.RS 4 +\fCNTriples\fP +.RE +.PP + +.TP +\fB\fISERD_NQUADS \fP\fP +NQuads - Line-based RDF quads (UTF-8)\&. +.PP +\fBSee also\fP +.RS 4 +\fCNQuads\fP +.RE +.PP + +.TP +\fB\fISERD_TRIG \fP\fP +TriG - Terse RDF quads (UTF-8)\&. +.PP +\fBSee also\fP +.RS 4 +\fCTrig\fP +.RE +.PP + +.SS "enum \fBSerdStatementFlag\fP" + +.PP +Flags indicating inline abbreviation information for a statement\&. +.PP +\fBEnumerator\fP +.in +1c +.TP +\fB\fISERD_EMPTY_S \fP\fP +Empty blank node subject\&. +.TP +\fB\fISERD_EMPTY_O \fP\fP +Empty blank node object\&. +.TP +\fB\fISERD_ANON_S_BEGIN \fP\fP +Start of anonymous subject\&. +.TP +\fB\fISERD_ANON_O_BEGIN \fP\fP +Start of anonymous object\&. +.TP +\fB\fISERD_ANON_CONT \fP\fP +Continuation of anonymous node\&. +.TP +\fB\fISERD_LIST_S_BEGIN \fP\fP +Start of list subject\&. +.TP +\fB\fISERD_LIST_O_BEGIN \fP\fP +Start of list object\&. +.TP +\fB\fISERD_LIST_CONT \fP\fP +Continuation of list\&. +.SS "enum \fBSerdType\fP" + +.PP +Type of a syntactic RDF node\&. This is more precise than the type of an abstract RDF node\&. An abstract node is either a resource, literal, or blank\&. In syntax there are two ways to refer to a resource (by URI or CURIE) and two ways to refer to a blank (by ID or anonymously)\&. Anonymous (inline) blank nodes are expressed using SerdStatementFlags rather than this type\&. +.PP +\fBEnumerator\fP +.in +1c +.TP +\fB\fISERD_NOTHING \fP\fP +The type of a nonexistent node\&. This type is useful as a sentinel, but is never emitted by the reader\&. +.TP +\fB\fISERD_LITERAL \fP\fP +Literal value\&. A literal optionally has either a language, or a datatype (not both)\&. +.TP +\fB\fISERD_URI \fP\fP +URI (absolute or relative)\&. Value is an unquoted URI string, which is either a relative reference with respect to the current base URI (e\&.g\&. 'foo/bar'), or an absolute URI (e\&.g\&. 'http://example\&.org/foo')\&. +.PP +\fBSee also\fP +.RS 4 +\fCRFC3986\fP\&. +.RE +.PP + +.TP +\fB\fISERD_CURIE \fP\fP +CURIE, a shortened URI\&. Value is an unquoted CURIE string relative to the current environment, e\&.g\&. 'rdf:type'\&. +.PP +\fBSee also\fP +.RS 4 +\fCCURIE Syntax 1\&.0\fP +.RE +.PP + +.TP +\fB\fISERD_BLANK \fP\fP +A blank node\&. Value is a blank node ID, e\&.g\&. 'id3', which is meaningful only within this serialisation\&. 
+.PP +\fBSee also\fP +.RS 4 +\fCTurtle \fCnodeID\fP\fP +.RE +.PP + +.SS "enum \fBSerdNodeFlag\fP" + +.PP +Flags indicating certain string properties relevant to serialisation\&. +.PP +\fBEnumerator\fP +.in +1c +.TP +\fB\fISERD_HAS_NEWLINE \fP\fP +Contains line breaks ('\\n' or '\\r') +.TP +\fB\fISERD_HAS_QUOTE \fP\fP +Contains quotes (''') +.SS "enum \fBSerdStyle\fP" + +.PP +Syntax style options\&. The style of the writer output can be controlled by ORing together values from this enumeration\&. Note that some options are only supported for some syntaxes (e\&.g\&. NTriples does not support abbreviation and is always ASCII)\&. +.PP +\fBEnumerator\fP +.in +1c +.TP +\fB\fISERD_STYLE_ABBREVIATED \fP\fP +Abbreviate triples when possible\&. +.TP +\fB\fISERD_STYLE_ASCII \fP\fP +Escape all non-ASCII characters\&. +.TP +\fB\fISERD_STYLE_RESOLVED \fP\fP +Resolve URIs against base URI\&. +.TP +\fB\fISERD_STYLE_CURIED \fP\fP +Shorten URIs into CURIEs\&. +.TP +\fB\fISERD_STYLE_BULK \fP\fP +Write output in pages\&. +.SH "Function Documentation" +.PP +.SS "void serd_free (void * ptr)" + +.PP +Free memory allocated by Serd\&. This function exists because some systems require memory allocated by a library to be freed by code in the same library\&. It is otherwise equivalent to the standard C free() function\&. +.SS "const uint8_t* serd_strerror (\fBSerdStatus\fP status)" + +.PP +Return a string describing a status code\&. +.SS "size_t serd_strlen (const uint8_t * str, size_t * n_bytes, \fBSerdNodeFlags\fP * flags)" + +.PP +Measure a UTF-8 string\&. +.PP +\fBReturns\fP +.RS 4 +Length of \fCstr\fP in characters (except NULL)\&. +.RE +.PP +\fBParameters\fP +.RS 4 +\fIstr\fP A null-terminated UTF-8 string\&. +.br +\fIn_bytes\fP (Output) Set to the size of \fCstr\fP in bytes (except NULL)\&. +.br +\fIflags\fP (Output) Set to the applicable flags\&. +.RE +.PP + +.SS "double serd_strtod (const char * str, char ** endptr)" + +.PP +Parse a string to a double\&. The API of this function is identical to the standard C strtod function, except this function is locale-independent and always matches the lexical format used in the Turtle grammar (the decimal point is always '\&.')\&. +.SS "void* serd_base64_decode (const uint8_t * str, size_t len, size_t * size)" + +.PP +Decode a base64 string\&. This function can be used to deserialise a blob node created with \fBserd_node_new_blob()\fP\&. +.PP +\fBParameters\fP +.RS 4 +\fIstr\fP Base64 string to decode\&. +.br +\fIlen\fP The length of \fCstr\fP\&. +.br +\fIsize\fP Set to the size of the returned blob in bytes\&. +.RE +.PP +\fBReturns\fP +.RS 4 +A newly allocated blob which must be freed with \fBserd_free()\fP\&. +.RE +.PP + +.SS "const uint8_t* serd_uri_to_path (const uint8_t * uri)" + +.PP +Return the local path for \fCuri\fP, or NULL if \fCuri\fP is not a file URI\&. Note this (inappropriately named) function only removes the file scheme if necessary, and returns \fCuri\fP unmodified if it is an absolute path\&. Percent encoding and other issues are not handled, to properly convert a file URI to a path, use \fBserd_file_uri_parse()\fP\&. +.SS "uint8_t* serd_file_uri_parse (const uint8_t * uri, uint8_t ** hostname)" + +.PP +Get the unescaped path and hostname from a file URI\&. +.PP +\fBParameters\fP +.RS 4 +\fIuri\fP A file URI\&. +.br +\fIhostname\fP If non-NULL, set to the hostname, if present\&. +.RE +.PP +\fBReturns\fP +.RS 4 +The path component of the URI\&. +.RE +.PP +The returned path and \fC*hostname\fP must be freed with \fBserd_free()\fP\&. 
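A minimal usage sketch of the file-URI helpers documented above (serd_file_uri_parse() and serd_free()); the URI literal, the output, and the serd-0 pkg-config include path are illustrative assumptions, not part of the package:

    #include <serd/serd.h>  /* from libserd-dev; build with `pkg-config --cflags --libs serd-0` */
    #include <stdio.h>

    int main(void)
    {
        /* Hypothetical file URI; the hostname, if any, is returned separately. */
        const uint8_t* uri      = (const uint8_t*)"file://host/tmp/example.ttl";
        uint8_t*       hostname = NULL;
        uint8_t*       path     = serd_file_uri_parse(uri, &hostname);

        if (path) {
            printf("path: %s\n", (const char*)path);  /* "/tmp/example.ttl" */
            printf("host: %s\n", hostname ? (const char*)hostname : "(none)");
        }

        /* Both returned strings are allocated by serd and freed with serd_free(). */
        serd_free(hostname);
        serd_free(path);
        return 0;
    }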
+.SS "bool serd_uri_string_has_scheme (const uint8_t * utf8)" + +.PP +Return true iff \fCutf8\fP starts with a valid URI scheme\&. +.SS "\fBSerdStatus\fP serd_uri_parse (const uint8_t * utf8, \fBSerdURI\fP * out)" + +.PP +Parse \fCutf8\fP, writing result to \fCout\fP\&. +.SS "void serd_uri_resolve (const \fBSerdURI\fP * r, const \fBSerdURI\fP * base, \fBSerdURI\fP * t)" + +.PP +Set target \fCt\fP to reference \fCr\fP resolved against \fCbase\fP\&. +.PP +\fBSee also\fP +.RS 4 +http://tools.ietf.org/html/rfc3986#section-5.2.2 +.RE +.PP + +.SS "size_t serd_uri_serialise (const \fBSerdURI\fP * uri, \fBSerdSink\fP sink, void * stream)" + +.PP +Serialise \fCuri\fP with a series of calls to \fCsink\fP\&. +.SS "size_t serd_uri_serialise_relative (const \fBSerdURI\fP * uri, const \fBSerdURI\fP * base, const \fBSerdURI\fP * root, \fBSerdSink\fP sink, void * stream)" + +.PP +Serialise \fCuri\fP relative to \fCbase\fP with a series of calls to \fCsink\fP\&. The \fCuri\fP is written as a relative URI iff if it a child of \fCbase\fP and \fCroot\fP\&. The optional \fCroot\fP parameter must be a prefix of \fCbase\fP and can be used keep up-references ('\&.\&./') within a certain namespace\&. +.SS "\fBSerdNode\fP serd_node_from_string (\fBSerdType\fP type, const uint8_t * str)" + +.PP +Make a (shallow) node from \fCstr\fP\&. This measures, but does not copy, \fCstr\fP\&. No memory is allocated\&. +.SS "\fBSerdNode\fP serd_node_from_substring (\fBSerdType\fP type, const uint8_t * str, size_t len)" + +.PP +Make a (shallow) node from a prefix of \fCstr\fP\&. This measures, but does not copy, \fCstr\fP\&. No memory is allocated\&. Note that the returned node may not be null terminated\&. +.SS "\fBSerdNode\fP serd_node_copy (const \fBSerdNode\fP * node)" + +.PP +Make a deep copy of \fCnode\fP\&. +.PP +\fBReturns\fP +.RS 4 +a node that the caller must free with \fBserd_node_free()\fP\&. +.RE +.PP + +.SS "bool serd_node_equals (const \fBSerdNode\fP * a, const \fBSerdNode\fP * b)" + +.PP +Return true iff \fCa\fP is equal to \fCb\fP\&. +.SS "\fBSerdNode\fP serd_node_new_uri_from_node (const \fBSerdNode\fP * uri_node, const \fBSerdURI\fP * base, \fBSerdURI\fP * out)" + +.PP +Simple wrapper for \fBserd_node_new_uri()\fP to resolve a URI node\&. +.SS "\fBSerdNode\fP serd_node_new_uri_from_string (const uint8_t * str, const \fBSerdURI\fP * base, \fBSerdURI\fP * out)" + +.PP +Simple wrapper for \fBserd_node_new_uri()\fP to resolve a URI string\&. +.SS "\fBSerdNode\fP serd_node_new_file_uri (const uint8_t * path, const uint8_t * hostname, \fBSerdURI\fP * out, bool escape)" + +.PP +Create a new file URI node from a file system path and optional hostname\&. Backslashes in Windows paths will be converted and '' will always be percent encoded\&. If \fCescape\fP is true, all other invalid characters will be percent encoded as well\&. +.PP +If \fCpath\fP is relative, \fChostname\fP is ignored\&. If \fCout\fP is not NULL, it will be set to the parsed URI\&. +.SS "\fBSerdNode\fP serd_node_new_uri (const \fBSerdURI\fP * uri, const \fBSerdURI\fP * base, \fBSerdURI\fP * out)" + +.PP +Create a new node by serialising \fCuri\fP into a new string\&. +.PP +\fBParameters\fP +.RS 4 +\fIuri\fP The URI to serialise\&. +.br +\fIbase\fP Base URI to resolve \fCuri\fP against (or NULL for no resolution)\&. +.br +\fIout\fP Set to the parsing of the new URI (i\&.e\&. points only to memory owned by the new returned node)\&. 
+.RE +.PP + +.SS "\fBSerdNode\fP serd_node_new_relative_uri (const \fBSerdURI\fP * uri, const \fBSerdURI\fP * base, const \fBSerdURI\fP * root, \fBSerdURI\fP * out)" + +.PP +Create a new node by serialising \fCuri\fP into a new relative URI\&. +.PP +\fBParameters\fP +.RS 4 +\fIuri\fP The URI to serialise\&. +.br +\fIbase\fP Base URI to make \fCuri\fP relative to, if possible\&. +.br +\fIroot\fP Root URI for resolution (see \fBserd_uri_serialise_relative()\fP)\&. +.br +\fIout\fP Set to the parsing of the new URI (i\&.e\&. points only to memory owned by the new returned node)\&. +.RE +.PP + +.SS "\fBSerdNode\fP serd_node_new_decimal (double d, unsigned frac_digits)" + +.PP +Create a new node by serialising \fCd\fP into an xsd:decimal string\&. The resulting node will always contain a `\&.', start with a digit, and end with a digit (i\&.e\&. will have a leading and/or trailing `0' if necessary)\&. It will never be in scientific notation\&. A maximum of \fCfrac_digits\fP digits will be written after the decimal point, but trailing zeros will automatically be omitted (except one if \fCd\fP is a round integer)\&. +.PP +Note that about 16 and 8 fractional digits are required to precisely represent a double and float, respectively\&. +.PP +\fBParameters\fP +.RS 4 +\fId\fP The value for the new node\&. +.br +\fIfrac_digits\fP The maximum number of digits after the decimal place\&. +.RE +.PP + +.SS "\fBSerdNode\fP serd_node_new_integer (int64_t i)" + +.PP +Create a new node by serialising \fCi\fP into an xsd:integer string\&. +.SS "\fBSerdNode\fP serd_node_new_blob (const void * buf, size_t size, bool wrap_lines)" + +.PP +Create a node by serialising \fCbuf\fP into an xsd:base64Binary string\&. This function can be used to make a serialisable node out of arbitrary binary data, which can be decoded using \fBserd_base64_decode()\fP\&. +.PP +\fBParameters\fP +.RS 4 +\fIbuf\fP Raw binary input data\&. +.br +\fIsize\fP Size of \fCbuf\fP\&. +.br +\fIwrap_lines\fP Wrap lines at 76 characters to conform to RFC 2045\&. +.RE +.PP + +.SS "void serd_node_free (\fBSerdNode\fP * node)" + +.PP +Free any data owned by \fCnode\fP\&. Note that if \fCnode\fP is itself dynamically allocated (which is not the case for nodes created internally by serd), it will not be freed\&. +.SS "\fBSerdEnv\fP* serd_env_new (const \fBSerdNode\fP * base_uri)" + +.PP +Create a new environment\&. +.SS "void serd_env_free (\fBSerdEnv\fP * env)" + +.PP +Free \fCns\fP\&. +.SS "const \fBSerdNode\fP* serd_env_get_base_uri (const \fBSerdEnv\fP * env, \fBSerdURI\fP * out)" + +.PP +Get the current base URI\&. +.SS "\fBSerdStatus\fP serd_env_set_base_uri (\fBSerdEnv\fP * env, const \fBSerdNode\fP * uri)" + +.PP +Set the current base URI\&. +.SS "\fBSerdStatus\fP serd_env_set_prefix (\fBSerdEnv\fP * env, const \fBSerdNode\fP * name, const \fBSerdNode\fP * uri)" + +.PP +Set a namespace prefix\&. +.SS "\fBSerdStatus\fP serd_env_set_prefix_from_strings (\fBSerdEnv\fP * env, const uint8_t * name, const uint8_t * uri)" + +.PP +Set a namespace prefix\&. +.SS "bool serd_env_qualify (const \fBSerdEnv\fP * env, const \fBSerdNode\fP * uri, \fBSerdNode\fP * prefix, \fBSerdChunk\fP * suffix)" + +.PP +Qualify \fCuri\fP into a CURIE if possible\&. +.SS "\fBSerdStatus\fP serd_env_expand (const \fBSerdEnv\fP * env, const \fBSerdNode\fP * curie, \fBSerdChunk\fP * uri_prefix, \fBSerdChunk\fP * uri_suffix)" + +.PP +Expand \fCcurie\fP\&. Errors: SERD_ERR_BAD_ARG if \fCcurie\fP is not valid, or SERD_ERR_BAD_CURIE if prefix is not defined in \fCenv\fP\&. 
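The environment functions documented above (serd_env_new(), serd_env_set_prefix_from_strings(), serd_env_expand_node()) are enough to expand a CURIE by hand. A short sketch, with the rdf prefix and the CURIE chosen purely as examples:

    #include <serd/serd.h>
    #include <stdio.h>

    int main(void)
    {
        SerdEnv* env = serd_env_new(NULL);  /* no base URI needed for prefix expansion */

        serd_env_set_prefix_from_strings(
            env,
            (const uint8_t*)"rdf",
            (const uint8_t*)"http://www.w3.org/1999/02/22-rdf-syntax-ns#");

        SerdNode curie    = serd_node_from_string(SERD_CURIE, (const uint8_t*)"rdf:type");
        SerdNode expanded = serd_env_expand_node(env, &curie);

        if (expanded.buf) {  /* SERD_NODE_NULL (buf == NULL) if expansion failed */
            printf("%s\n", (const char*)expanded.buf);
        }

        serd_node_free(&expanded);  /* the expanded node owns a newly allocated string */
        serd_env_free(env);         /* the shallow CURIE node needs no free */
        return 0;
    }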
+.SS "\fBSerdNode\fP serd_env_expand_node (const \fBSerdEnv\fP * env, const \fBSerdNode\fP * node)" + +.PP +Expand \fCnode\fP, which must be a CURIE or URI, to a full URI\&. Returns null if \fCnode\fP can not be expanded\&. +.SS "void serd_env_foreach (const \fBSerdEnv\fP * env, \fBSerdPrefixSink\fP func, void * handle)" + +.PP +Call \fCfunc\fP for each prefix defined in \fCenv\fP\&. +.SS "\fBSerdReader\fP* serd_reader_new (\fBSerdSyntax\fP syntax, void * handle, void(*)(void *) free_handle, \fBSerdBaseSink\fP base_sink, \fBSerdPrefixSink\fP prefix_sink, \fBSerdStatementSink\fP statement_sink, \fBSerdEndSink\fP end_sink)" + +.PP +Create a new RDF reader\&. +.SS "void serd_reader_set_strict (\fBSerdReader\fP * reader, bool strict)" + +.PP +Enable or disable strict parsing\&. The reader is non-strict (lax) by default, which will tolerate URIs with invalid characters\&. Setting strict will fail when parsing such files\&. An error is printed for invalid input in either case\&. +.SS "void serd_reader_set_error_sink (\fBSerdReader\fP * reader, \fBSerdErrorSink\fP error_sink, void * error_handle)" + +.PP +Set a function to be called when errors occur during reading\&. The \fCerror_sink\fP will be called with \fChandle\fP as its first argument\&. If no error function is set, errors are printed to stderr in GCC style\&. +.SS "void* serd_reader_get_handle (const \fBSerdReader\fP * reader)" + +.PP +Return the \fChandle\fP passed to \fBserd_reader_new()\fP\&. +.SS "void serd_reader_add_blank_prefix (\fBSerdReader\fP * reader, const uint8_t * prefix)" + +.PP +Set a prefix to be added to all blank node identifiers\&. This is useful when multiple files are to be parsed into the same output (e\&.g\&. a store, or other files)\&. Since Serd preserves blank node IDs, this could cause conflicts where two non-equivalent blank nodes are merged, resulting in corrupt data\&. By setting a unique blank node prefix for each parsed file, this can be avoided, while preserving blank node names\&. +.SS "void serd_reader_set_default_graph (\fBSerdReader\fP * reader, const \fBSerdNode\fP * graph)" + +.PP +Set the URI of the default graph\&. If this is set, the reader will emit quads with the graph set to the given node for any statements that are not in a named graph (which is currently all of them since Serd currently does not support any graph syntaxes)\&. +.SS "\fBSerdStatus\fP serd_reader_read_file (\fBSerdReader\fP * reader, const uint8_t * uri)" + +.PP +Read a file at a given \fCuri\fP\&. +.SS "\fBSerdStatus\fP serd_reader_start_stream (\fBSerdReader\fP * reader, FILE * file, const uint8_t * name, bool bulk)" + +.PP +Start an incremental read from a file handle\&. Iff \fCbulk\fP is true, \fCfile\fP will be read a page at a time\&. This is more efficient, but uses a page of memory and means that an entire page of input must be ready before any callbacks will fire\&. To react as soon as input arrives, set \fCbulk\fP to false\&. +.SS "\fBSerdStatus\fP serd_reader_start_source_stream (\fBSerdReader\fP * reader, \fBSerdSource\fP read_func, \fBSerdStreamErrorFunc\fP error_func, void * stream, const uint8_t * name, size_t page_size)" + +.PP +Start an incremental read from a user-specified source\&. The \fCread_func\fP is guaranteed to only be called for \fCpage_size\fP elements with size 1 (i\&.e\&. \fCpage_size\fP bytes)\&. +.SS "\fBSerdStatus\fP serd_reader_read_chunk (\fBSerdReader\fP * reader)" + +.PP +Read a single 'chunk' of data during an incremental read\&. 
This function will read a single top level description, and return\&. This may be a directive, statement, or several statements; essentially it reads until a '\&.' is encountered\&. This is particularly useful for reading directly from a pipe or socket\&. +.SS "\fBSerdStatus\fP serd_reader_end_stream (\fBSerdReader\fP * reader)" + +.PP +Finish an incremental read from a file handle\&. +.SS "\fBSerdStatus\fP serd_reader_read_file_handle (\fBSerdReader\fP * reader, FILE * file, const uint8_t * name)" + +.PP +Read \fCfile\fP\&. +.SS "\fBSerdStatus\fP serd_reader_read_source (\fBSerdReader\fP * reader, \fBSerdSource\fP source, \fBSerdStreamErrorFunc\fP error, void * stream, const uint8_t * name, size_t page_size)" + +.PP +Read a user-specified byte source\&. +.SS "\fBSerdStatus\fP serd_reader_read_string (\fBSerdReader\fP * reader, const uint8_t * utf8)" + +.PP +Read \fCutf8\fP\&. +.SS "void serd_reader_free (\fBSerdReader\fP * reader)" + +.PP +Free \fCreader\fP\&. +.SS "\fBSerdWriter\fP* serd_writer_new (\fBSerdSyntax\fP syntax, \fBSerdStyle\fP style, \fBSerdEnv\fP * env, const \fBSerdURI\fP * base_uri, \fBSerdSink\fP ssink, void * stream)" + +.PP +Create a new RDF writer\&. +.SS "void serd_writer_free (\fBSerdWriter\fP * writer)" + +.PP +Free \fCwriter\fP\&. +.SS "\fBSerdEnv\fP* serd_writer_get_env (\fBSerdWriter\fP * writer)" + +.PP +Return the env used by \fCwriter\fP\&. +.SS "size_t serd_file_sink (const void * buf, size_t len, void * stream)" + +.PP +A convenience sink function for writing to a FILE*\&. This function can be used as a SerdSink when writing to a FILE*\&. The \fCstream\fP parameter must be a FILE* opened for writing\&. +.SS "size_t serd_chunk_sink (const void * buf, size_t len, void * stream)" + +.PP +A convenience sink function for writing to a string\&. This function can be used as a SerdSink to write to a \fBSerdChunk\fP which is resized as necessary with realloc()\&. The \fCstream\fP parameter must point to an initialized \fBSerdChunk\fP\&. When the write is finished, the string should be retrieved with \fBserd_chunk_sink_finish()\fP\&. +.SS "uint8_t* serd_chunk_sink_finish (\fBSerdChunk\fP * stream)" + +.PP +Finish a serialisation to a chunk with \fBserd_chunk_sink()\fP\&. The returned string is the result of the serialisation, which is NULL terminated (by this function) and owned by the caller\&. +.SS "void serd_writer_set_error_sink (\fBSerdWriter\fP * writer, \fBSerdErrorSink\fP error_sink, void * error_handle)" + +.PP +Set a function to be called when errors occur during writing\&. The \fCerror_sink\fP will be called with \fChandle\fP as its first argument\&. If no error function is set, errors are printed to stderr\&. +.SS "void serd_writer_chop_blank_prefix (\fBSerdWriter\fP * writer, const uint8_t * prefix)" + +.PP +Set a prefix to be removed from matching blank node identifiers\&. +.SS "\fBSerdStatus\fP serd_writer_set_base_uri (\fBSerdWriter\fP * writer, const \fBSerdNode\fP * uri)" + +.PP +Set the current output base URI (and emit directive if applicable)\&. Note this function can be safely casted to SerdBaseSink\&. +.SS "\fBSerdStatus\fP serd_writer_set_root_uri (\fBSerdWriter\fP * writer, const \fBSerdNode\fP * uri)" + +.PP +Set the current root URI\&. The root URI should be a prefix of the base URI\&. The path of the root URI is the highest path any relative up-reference can refer to\&. 
For example, with root file:///foo/root and base file:///foo/root/base, file:///foo/root will be written as <\&.\&./>, but file:///foo will be written non-relatively as file:///foo\&. If the root is not explicitly set, it defaults to the base URI, so no up-references will be created at all\&. +.SS "\fBSerdStatus\fP serd_writer_set_prefix (\fBSerdWriter\fP * writer, const \fBSerdNode\fP * name, const \fBSerdNode\fP * uri)" + +.PP +Set a namespace prefix (and emit directive if applicable)\&. Note this function can be safely casted to SerdPrefixSink\&. +.SS "\fBSerdStatus\fP serd_writer_write_statement (\fBSerdWriter\fP * writer, \fBSerdStatementFlags\fP flags, const \fBSerdNode\fP * graph, const \fBSerdNode\fP * subject, const \fBSerdNode\fP * predicate, const \fBSerdNode\fP * object, const \fBSerdNode\fP * datatype, const \fBSerdNode\fP * lang)" + +.PP +Write a statement\&. Note this function can be safely casted to SerdStatementSink\&. +.SS "\fBSerdStatus\fP serd_writer_end_anon (\fBSerdWriter\fP * writer, const \fBSerdNode\fP * node)" + +.PP +Mark the end of an anonymous node's description\&. Note this function can be safely casted to SerdEndSink\&. +.SS "\fBSerdStatus\fP serd_writer_finish (\fBSerdWriter\fP * writer)" + +.PP +Finish a write\&. +.SH "Variable Documentation" +.PP +.SS "const \fBSerdURI\fP SERD_URI_NULL\fC [static]\fP" +\fBInitial value:\fP +.PP +.nf += { + {NULL, 0}, {NULL, 0}, {NULL, 0}, {NULL, 0}, {NULL, 0}, {NULL, 0} +} +.fi +.SS "const \fBSerdNode\fP SERD_NODE_NULL = { NULL, 0, 0, 0, \fBSERD_NOTHING\fP }\fC [static]\fP" + +.SH "Author" +.PP +Generated by Doxygen for Serd from the source code and modified by Dennis Braun \&. diff -Nru serd-0.30.2/debian/upstream/metadata serd-0.30.4/debian/upstream/metadata --- serd-0.30.2/debian/upstream/metadata 1970-01-01 00:00:00.000000000 +0000 +++ serd-0.30.4/debian/upstream/metadata 2020-05-08 20:58:45.000000000 +0000 @@ -0,0 +1,5 @@ +Archive: github +Bug-Database: https://github.com/drobilla/serd/issues +Bug-Submit: https://github.com/drobilla/serd/issues/new +Repository: https://github.com/drobilla/serd.git +Repository-Browse: https://github.com/drobilla/serd/ diff -Nru serd-0.30.2/debian/upstream/signing-key.asc serd-0.30.4/debian/upstream/signing-key.asc --- serd-0.30.2/debian/upstream/signing-key.asc 1970-01-01 00:00:00.000000000 +0000 +++ serd-0.30.4/debian/upstream/signing-key.asc 2020-05-08 20:58:45.000000000 +0000 @@ -0,0 +1,121 @@ +-----BEGIN PGP PUBLIC KEY BLOCK----- + +mQGiBEXaIU8RBAC7vZmKrMkUyYMKomqHn9bpgFlT01fSQZyB5vHCTb5uW467HQGv +FMu6CCh2UbTyMDc/0py+EDgmkiqstUQ6hII2BbjoAlRgh4Kw43/6G1IDQiMAHXFx +jgs4Kx/xEsamMWXcGLYgBQExnN0EjjGy8ukLFHi5d4RAgNVY3tUlT+31wwCgoetH +x893hs3OQCNV21UCUV/Ndy0D/1RqBTZGXjTQ2eBCbZI7YCGOfPPdmNoDbSaDMubk +UNdbc78+FvG4SOnXxOdwe6W7Lc5qHwYXcga21ajEXT7Fpok+bj9/6a2WCiB4gzkg +Pi8Lwa0XTs7Hjyh9DFtxGbJHNxtsUV97pVBzrxdAiKasY0/CVWuiJBbZuLsyxWwe +rgwjA/9FJXx1tqOBclX4IGZnVzCGzNhTMtj2P248gZ8B6fOTkbt5bUGHBs2XtM0j +irsYeLWeWsfMa0fFMksfrwekbA4u2uMv9dA8VyjXmYGmKfNOtuyPm/NOS4CjpRQO +e3uB+ttbTKwK9Hx6j5WpfLlUBSlAKlxL1wt4cV03QXI5Sh5+QLQiRGF2ZSBSb2Jp +bGxhcmQgPGRhdmVAZHJvYmlsbGEubmV0PoheBBMRAgAeBQJF2iFPAhsDBgsJCAcD +AgMVAgMDFgIBAh4BAheAAAoJEMb2DmUpcnBgGoQAoIHtHRacGREDktZoKv+hMqW5 +SolkAJ9Xaolpgqa0yuO0+U0cHLqYMdN4mbkCDQRF2iFWEAgA+TUcUVyDVXmiBsbM +V6MOW4ZClnS8Ayz+jOkRbPgIaZOgaWekTVXFHvIYb8zQIZHlYNRj3cESkECKzFPH +uQbYcWLtq2AhI5I32027uoierPzM3tkAIttbqxI+ZNvyLM+rOdO/tR7N3QQy4dxB +goNN33kMYoe9M+AoAVJVhj5i+lv79lkQOiOGyIrZRe8tK2vARwl4jpxn5ZyGtY46 +1KMuoOq1H0gBxUGnHG/29gMtfM0WR+mdkB0N4Vmd5DwCBF1PZW+bz/jwUtKTYKlU 
+4oVLToPbbr1ZxIQ/GeaiX0QbFC6qkYAz1mbXuwIhT7NZnF1Bb5NUVaNDD6me0P/z +mys3pwADBQgAmjvG8d8Ou84Ry7KFlI5aVttIRDvVvODI14XgrRsaEamBurtqH2d1 +GiTuQKatTBcP2Vh4EBggUKvUBo6OqHl5HSJnMCssot9sbjd2INcVNhA37psZA/z0 +SiHvsU5MYJZAhIRy2OSq6f2rTJnN3tpH9uP22G0hnHwWsvaPif2rJJKa4FsLfiSJ +83nNZycmL0swG/3r2CFaWKdgI8Qxh4a9hzhQ/xp677rp+wXoR15Tiz3doVIks9gU +x/ttSOlIe1qikvIm2sK4YjGskyk3ThDnbKADBA0LPxmUw0LRwfMUpjB9w/KPB6K1 +garaVufX87EiQjMqtcatifrlt86BQG6UqIhJBBgRAgAJBQJF2iFWAhsMAAoJEMb2 +DmUpcnBgUWgAnig09zgkm9I8MYfmjNdeVicZ/TslAJ9gXHch/j3J0oVLZn7iLl8L +enSb2JkCDQROyvsgARAAymdAvdxjXiijDSh09Je1GZlWZz8DBVBSp+Sy8Er3f6aa +NjpdUagO4EBLYXTXOaCmpg+iwqmH9F9kDniyPj1JYkaLvttFhXlUaLY4bVAf74RG +Wbxkrq2Geh5WfK78SbAHuLdp9bx7mCq3HahHLB/DGkElRCgvhFwGRoju7bvkHl/Y +MJJsLpUN+Tpdle5VeVuUAH8l48D3WCwp2kUBzA6DXF/SqOHtNV3tbnuKKdB2Q4ks +JI51KwqrSa3vTrB+8TmVpocjqUK1RD+7rBJKEh4ARHhlEz6C2W3nZm0lLxsFCkgs +ccqCdLV0ZP6vYhAOPWN1kvBjkkibd0szH9a4AUWO9kUT8B0HHzcquJl6LyV2NtVj +PkPNc4zBGsb+otuPRHDU2EeW248/42royn2TgDioJ3keTe/ZCD22CJ8kNBSZOPOU +9DkZiBv/1heourSWsQAQnWTz0uE4/yVk2/Z6L4nFMk2YZYeYoiYjtz2FdMn+/9tj +eJDr+LH1q6EcBPf3qjT90gOSo3cLlswpVPOOndlrXExVi17dQSrQGi8mBxBjeMb6 +pNbF9EXcbd3Cm9oWxJy6gVEPkY0a5tRmH2Vf8uO8SdHWqh1mMgMzzFLrlvef4CmB +TS0Rp72OYq8R+9lFcRGtbeOkUdaPIL7zxCsf+H0pJTjIH4nEYkZmv9wtBW+SgfcA +EQEAAbQgRGF2aWQgUm9iaWxsYXJkIDxkQGRyb2JpbGxhLm5ldD6JAjoEEwEIACQC +GwMFCwkIBwMFFQoJCAsFFgIDAQACHgECF4AFAk7K/HYCGQEACgkQ/GzPL3NkwkAb +RA/9Fq7wDadgPMTz47skAWqDyzc1kv4Auft+qpJDpSo9PwZWwQicAJZYNscinZVR +ZKhf+BD0v/eMoy+bzu+wf3Q+Eqq3xYCVUAHZzlQGa0MZ/8nQWfqqZe6Hq43KTgf0 +liSTWOYLHh50JuNPb0Vu2bjrgYxcLEEDIEIhulLiB+iiyuXWJ0ssA32Y9Oh+jxb2 +h62G9rWsYsvoAqvPyxhrbD1WLCMLi9KBXRpUTVaGcMtRicqpYvjZrqEkXINS6OBQ +mBuHiLoef7NGJR+22ljz2XPbQMji8m02ozOk8DDNlBMyubasIknulOEGKGgfwr2c +ZbU+1uUD4BbmWYAALGRXe2pl6AbGPU8kjgHQux2Pd7PH8qJxEvuU4O9Zi99jZgP2 +CMh4I4x3fv9RfDM4z77vMkaV8yoITz4vGdlY+UvSK5BzAMfQxuSCxPXtaqQEjS2g +r6KpUmadK7fLUmvFhXuPKwwA/BxbW6YcQKjhUZqnI5q4Hjek8iEnUiiZLnh1dSl3 +lp2us8Dxq3+TTX09qraOY25Kwf/Xjyd/l6/74JxXXFaeQkb9LHyqk3Jlk2THf3aW +TzH8h9lvTwruYhME0ib8mnPqDSfs1LQILmln8rs7Ma9HCKoUFJeMjqz3+sDMP3HC +SqqrdwxkqnufG/0S3dYjd+z910J/Qj1J/yhNAt1cA6Dwx3a5Ag0ETsr7IAEQALBn +W/tm8zo9y8G1yOO0S0PKXxf3yPcM8J3swZupmuRmQyhUF3xoxiTtZH4XbMnUw2Dd +zpt7XRFC8BTmI+5E32uxxR7EMgqMS1/XMlIp+7qEiMF25DAZ6agOBFEe244MFlDt ++WIt7XeJPViByKxbgi5rS14MljUazfQrmgzAVq45RpDc3QIhgE65Q+9R4FDillWU +wv5AkieRb3QdMHXrvSgsQ21bnvjdRggp8Xw4GG4k0e5WnpU7FvDM2unDywvcU/Ln +RDxsZazzPNJDi1kq7CYmB94xvIdxvDedQV8SFJ1YZkkx8MTule60t40b4pr6l2zJ +zR2SaR0GRsOaKeUPP08ye+20arJV/PbER7holpB8N68F3MYW712dosCLBVD2srps +juWLdKA4X/SVb8i0bu/T7dwMJeDSOSmdWXLEIMBsCQxuttk/hLkJQBVvWu/guMft +8Qn2Lb9uPFe8Si8rkjiWTSEq2Q2PXcUuX+0w9kbmuDULdovAyi+sLObkyx/dVz6L +JbM1Ea6XWLhi4QVwKL5/VOey/6UctW2DKg0SNvLA5jiTx4L8u3Kd4TtvV8qmWOMT +mLWXnezwE2Ln4gH65ZkbImgdZswt5r8GDZ4fxLZsxjS9WPWAndH5z4yFtaUUHgf9 +A985baj2MVA3dhKXjoiZxLTThxxO40UKwamCRY21ABEBAAGJAh8EGAEIAAkFAk7K ++yACGwwACgkQ/GzPL3NkwkDnGA/7BVKA/8hEHmmtrq7LzRLu6HYALdf/B7yfcpnw +HjFjZchExWaQXuG+AqfgP4bm+OBnpN1eOX7dD1x79AHJb/Mp2SX3NlzGvujzwMDr +R9Hp4XmeeBXfxvSQXiRqcHH6Jr/rv564vYxgp0zdCmCRP1y+sSOEOyh46cTP9LD9 +w+1LzWW96dTo5FQ3q193rzrFBUJStbuMIsp3puQ08ntmXXfYk+KuXB/hMKbJU9gb +MBV7cQdGX4AZ4DcSsacs1TPdiqGZ4fJ3XjGegory9dVaTbwkK1ULEENGsPc8LIYT +IlNHJ4E1ZSMAkTTn7ThphBvHnXOPG9fgNimAhXYKpE4a+DwQJY1YY06KFJtqeGYG +oFiTqfhn4R/Mq2kArFNoCmmbH1gDThswc7idTmyI4DHAhx8kHK0mrnkaA2J+Ah82 +Kimsu+sMKUxMwbYZ32yK18HQI3y8iXEuGsGQk8X2gKO8YfOOvGFf2sPl1IN+ZZ8Z +I7bZi/yzh5K04Pxyb6LTYpG+YDAZS6H5NkWQxPM0TCDFLDlLl5SESl7BxgqryqCY +4okRGVq0WLXa3MpFCjoYOAdtkQOm6ZOE9tjKogx0ZN9cflde2D9MSi9ADCZ8gK4t 
+Q581Ea43owT2iMJVceGcqJE3ZVnUq2PXDoVGVgIxT1stR69am4hgSHpShTRVU5fi +o+jiuHKZAg0ET0BWRQEQAOru18ePCKArnY43QCcDiVjDCTrPx0lswgkaGPWRwL1j +OHiLnwMaafsb/SMjvgwJ3P40Tzo1wB22STmQ4/r5JL3nVQ7cRmeCDSMbbva9vuOA +C/zOGH6N6Pd/Vyq4bJp3eWhL/bNiBF7R4ft9E5RD1WIM7kDM0LUd2HgqyvwgKngi +JFfZNCEXFuXhHNc4nuRsrLnEb5T+6PTlnoQRQyqd0rhShYTBvjL9DUhhFtgqNmjY +l1hCurFnyE8G5zkxnIuJ+wWlgBiPSIIfZZY0IGLXH7DHDHaV+N4MKduCiOhPwLha +NHxNekBFaFNllLgVGMUE/Bp4GvHcfAgXtAQbztqag5folJxNYNWX1qLmYh9hluJA +0MRq+nFNpYWmMTcQQYOPpBuOVRf8u5qNp/aQwH5DdoDa6Mdwfbrq+RcMBogwCjZG +ROe296YuBBIUfWRxfYQaIwbtrTajSZW2DWUze3tONLWjPJKJFDD6w42UQSp+fnDr +rdZhasDU8bmVE5LUyA8B01BJWglQIgfLZ5PzDsxSZtWulxsOoz+VwS2sbslNkVWF +PWbcMoKB/fAtN+mmMzrjmHLbF+hLta4ZZBJcCS4Nk0Lw4+9Msf7jWWNEUbKyqvKn +SK774mIDktp+o2fPXmi0KLcwxrda2SJSbPeDbYsPzhwTR66+ZoQ430MifR4RIwan +ABEBAAG0IERhdmlkIFJvYmlsbGFyZCA8ZEBkcm9iaWxsYS5uZXQ+iQI6BBMBCAAk +AhsDBQsJCAcDBRUKCQgLBRYCAwEAAh4BAheABQJPQFfZAhkBAAoJEDZyeCqb82jz +x9YQALzxj/8b+JD9px1lGb7ZbN5GTBdhJ6CgLkObSkwnU88vWxOhZDKd6dTyGYtm +Hg3rridM/0OMVv/hUXxTcRWKHo8S44OArFSNhjuarfh7IQRDpe16J9vWaStuK8ui +yOmHl8IGmtDoVtlDo7yd0/b8lwCJd9R34d81ruYLHguHxo6ahMyB+SjPXoqEj8dU +cY4OR35wRI04H+HSq7s4kpqX4G5uhrtW5dEk7GuqELyg9forQ4xDD/cSOXUtBX/S +ak9KRKZLxNyp5h4xvSHi5wl3DjB4Of5JP7e437J5PXQtw1mNHCwbMyp6R9cqETTw +jRj11gbqFy1PjiKDgT6/iPvA+Vg4GcJHk1Rhzq2PMLegZIqJ6+F3G2oRl0XF1J9j +3XvSnXSTMlqEQd6VNFmAd1PGJMfok3kzbrPBIBt8/ltaSn71ekanzxpAVC4fHO6J +zPszyqpdkRriTL7nYZ872J5+BWCC3eOrQVvDNu+FZznHRuI8TqhdWr76w4oZMx56 +S/oV8bo9wVQx0urxjB851IcPWU8GyBu4Bqb9kpw8IzUY08AENKzal9KstCkCoG5a +80B0sS7Et7a23TNZF2rBKOzza7yte+5dPDeDg0WSexzJr35kArjUz7sgKODFfAlv +TTgRPTTRIdBeQV69aUc3XvaJQFXwPobzyvH/ie166GqJIRvOuQINBE9AVkUBEACr +6qQSWRlg+AifZqLYSY64VQ4Bvj3SfVXlMLlMWDeKFAnW49sg/DMu9t20m3m/L49U +W51zyjnRlIN8j6NqmVtRKAs2C+FRpSTKU6NSdsBweUOkQP6FGJRlb9o3cTxePBvQ +L/weulB/rzRhBqL4W3U4L3jUxYE0vCYsD5Dq0/177BtazrOIBuRADABLQ7m9976j +Ifz9zNoix8j7CNtX0g+JB4E5kObVQ41NLyZ8ei+t/q1MP1KxwD6e+icESlLNrO2r +hXBssc6KScbdrPmCfR5bumFitEfxeIJIs5XR/FKCgmQH4SRQJQ4MY/+B5OIRDH4z +jbs8EP2kD+85hbKx8sjrQeafA1VYw+TNFBJhCNqMkzl6WyZ2GX7ZP0xw19BS/Rio +OLVq3I3WSRpJGsguzE87xXDF91caaxQnCL1LM2zqNstYDNYIAmCThVixeONFbFiP +N7OsTG2lsSh79mX8+/2YAxj/9ACCxDcFxXeWbByVdpuV2n24F3lLQBY1/Gpy8ysk +JLCOFEjGYVIHsEaD+FxR2x9WusWb+aeIzHmOA8cwcLazJcneMvOTIrlgAz0yZphY ++c6kx/opem0N+nKX+aEFbolnlsPXhGNCAD5xffJOIUK+gGrPstf2WdqYfmWegd7a +k1FG4j1WqHwHplOwgStPTO33IhhWXHLjyRsf8AyumQARAQABiQIfBBgBCAAJBQJP +QFZFAhsMAAoJEDZyeCqb82jzTUkP/3jvhkMK0IGcuCVkfB3uIxsjLKl+lI2FDq/z +UOo6Ko491q/8Ks2E4fGYmVrcxymnAThB4STL0QaLJdIaRlJo0cMkcEsF0RKxu1aa +LRRWk08hrdjI3aRLwzAdWxHAE3ESz75Tl26ZB1MvgWBSzyLtYJXYBz738ldIfvs5 +hzhDWMJTcbhf+Hnaoxt3fcDu8k0EdTIBCRziOO7uq9npDxwMOTyPQvEMr4v8kIvn +/Npu3ZQtadzkeSr+/ENCGNz1KatTV3IylH6X8ANP8eiq4ODOrayjyKs0ZDtL3sn+ +jJhoz/AF/qBpSTnEtDUpPT3U0Noo4HHkYQYiK8SI0OcxH9tSkgaeRcnFvlbJw2ac +kRpHuXNuGZ66zt2yDj7cZG6ssg9Yrraxx3y+27MJXYnowOnRjCdCQZ5hKeOny73l +yFZYDisCvqha138PRJtSwQAgnKEu0Bh/sSI0DtPZmsXC9iPg9AxBDqVfdxtsWqfA +31JmR+MsN58cT1Ej4Li+cH9sPOFVOpSfgylCgHUC2Lact8v5xrArHyrCBfmavDnc +lir84A5TuwGMLhm2Ui9yKn5fGgiF4P4UU1zeTPb45Mf9NU5pKJXd5H0MsOU58Dja +M5Af3dpH6c8wsyDkNeVDvUzLXghsUH80HQMSpfZtNLZ/57KoSi7YYYotWZX/mch2 +i4mqVEEp +=TTId +-----END PGP PUBLIC KEY BLOCK----- diff -Nru serd-0.30.2/debian/watch serd-0.30.4/debian/watch --- serd-0.30.2/debian/watch 2019-12-08 19:58:21.000000000 +0000 +++ serd-0.30.4/debian/watch 2020-05-08 20:58:45.000000000 +0000 @@ -1,3 +1,3 @@ -version=3 -opts="uversionmangle=s/-/./,dversionmangle=s/~dfsg.*//" \ -http://download.drobilla.net/serd-(.+)\.(?:zip|tgz|tbz|txz|(?:tar\.(?:gz|bz2|xz))) +version=4 
+opts="pgpsigurlmangle=s/$/.sig/,uversionmangle=s/-/./,dversionmangle=s/~dfsg.*//" \ +https://download.drobilla.net/serd-(.+)\.(?:zip|tgz|tbz|txz|(?:tar\.(?:gz|bz2|xz))) diff -Nru serd-0.30.2/doc/layout.xml serd-0.30.4/doc/layout.xml --- serd-0.30.2/doc/layout.xml 2019-10-19 18:00:00.000000000 +0000 +++ serd-0.30.4/doc/layout.xml 2020-04-11 10:26:21.626008700 +0000 @@ -25,6 +25,7 @@ + @@ -83,6 +84,7 @@ + @@ -93,7 +95,6 @@ - diff -Nru serd-0.30.2/doc/reference.doxygen.in serd-0.30.4/doc/reference.doxygen.in --- serd-0.30.2/doc/reference.doxygen.in 2019-10-19 22:19:43.000000000 +0000 +++ serd-0.30.4/doc/reference.doxygen.in 2020-04-25 14:46:28.787513700 +0000 @@ -1451,7 +1451,7 @@ # Minimum value: 0, maximum value: 20, default value: 4. # This tag requires that the tag GENERATE_HTML is set to YES. -ENUM_VALUES_PER_LINE = 1 +ENUM_VALUES_PER_LINE = 0 # If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be used # to set the initial width (in pixels) of the frame in which the tree is shown. @@ -2157,12 +2157,6 @@ EXTERNAL_PAGES = YES -# The PERL_PATH should be the absolute path and name of the perl script -# interpreter (i.e. the result of 'which perl'). -# The default file (with absolute path) is: /usr/bin/perl. - -PERL_PATH = /usr/bin/perl - #--------------------------------------------------------------------------- # Configuration options related to the dot tool #--------------------------------------------------------------------------- @@ -2176,15 +2170,6 @@ CLASS_DIAGRAMS = NO -# You can define message sequence charts within doxygen comments using the \msc -# command. Doxygen will then run the mscgen tool (see: -# http://www.mcternan.me.uk/mscgen/)) to produce the chart and insert it in the -# documentation. The MSCGEN_PATH tag allows you to specify the directory where -# the mscgen tool resides. If left empty the tool is assumed to be found in the -# default search path. - -MSCGEN_PATH = - # You can include diagrams made with dia in doxygen documentation. Doxygen will # then run dia to produce the diagram and insert it in the documentation. The # DIA_PATH tag allows you to specify the directory where the dia binary resides. 
diff -Nru serd-0.30.2/doc/style.css serd-0.30.4/doc/style.css --- serd-0.30.2/doc/style.css 2019-10-19 18:00:00.000000000 +0000 +++ serd-0.30.4/doc/style.css 2020-04-11 10:26:21.629342000 +0000 @@ -1,744 +1,808 @@ body { - background: #FFF; - color: #222; - font-style: normal; - line-height: 1.6em; - margin-left: auto; - margin-right: auto; - padding: 1em; - max-width: 60em; - font-family: "DejaVu Serif",Palatino,serif; - text-rendering: optimizeLegibility; -} - -h1, .title, #projectname, h2, h3, h4, h5, h6 { - line-height: 1.0125em; - color: #444; - font-family: "DejaVu Sans",Helvetica,Arial,sans-serif; - margin: 1em 0 0.5em 0; -} - -h1, .titlearea .header .titlebox, #projectname { - font-size: 300%; - font-weight: 400; - margin-bottom: 0.25em; - margin-top: 0; + background: #FFF; + color: #222; + font-style: normal; + line-height: 1.6em; + margin-left: auto; + margin-right: auto; + padding: 1em; + max-width: 60em; + font-family: "SF Pro Text", Verdana, "DejaVu Sans", sans-serif; + text-rendering: optimizeLegibility; +} + +h1 { + font-size: 1.68em; + font-weight: 500; + font-family: Helvetica, Arial, "DejaVu Sans Condensed", Verdana, sans-serif; + line-height: 2em; + margin: 0 0 0.25em 0; } -.header .headertitle .title { - font-size: 180%; - font-weight: 400; - margin: 0.75em 0.25em 0.5em 0; +h2 { + line-height: 1.68em; + font-size: 1.41em; + font-weight: 600; + font-family: Helvetica, Arial, "DejaVu Sans Condensed", Verdana, sans-serif; + margin: 1.25em 0 0.5em 0; } -.ingroups { - display: inline; +h3 { + line-height: 1.41em; + font-size: 1.18em; + font-weight: 600; + font-family: Helvetica, Arial, "DejaVu Sans Condensed", Verdana, sans-serif; + margin: 1.25em 0 0.5em 0; } -.title .ingroups a { - font-size: small; - margin-left: 1em; + +h4 { + line-height: 1.18em; + font-size: 1em; + font-weight: 600; + font-family: Helvetica, Arial, "DejaVu Sans Condensed", Verdana, sans-serif; + margin: 1.25em 0 0.5em 0; } -#titlebox, #metabox { - display: inline-block; +h5, h6 { + font-size: 0.7em; + font-weight: 600; + font-family: Helvetica, Arial, "DejaVu Sans Condensed", Verdana, sans-serif; + margin: 1.25em 0 0.5em 0; } -#titlebox{ - display: inline-block; - width: 75%; - left: 0; - top: 0; + +a { + color: #546E00; + text-decoration: none; } -#title { - margin-bottom: 0.25em; +h1 a, h2 a, h3 a, h4 a, h5 a, h6 a { + color: #444; } -#shortdesc { - margin: 0; - color: #666; - display: inline-block; - font-style: italic; - padding: 0; +a:hover { + text-decoration: underline; } -#titlearea { - margin: 0.25em auto 0.25em auto; - padding: 0; - position: relative; - clear: both; - line-height: 1.0em; +h1 a:link, h2 a:link, h3 a:link, h4 a:link, h5 a:link, h6 a:link { + color: #444; } -h2 { - font-size: 160%; - font-weight: 400; +h1 a:visited, h2 a:visited, h3 a:visited, h4 a:visited, h5 a:visited, h6 a:visited { + color: #444; } -h3 { - font-size: 140%; - font-weight: 400; +p { + margin: 0.5em 0 0.5em 0; } -h4 { - font-size: 120%; - font-weight: 500; +dt { + font-weight: 600; } -h5, h6 { - font-size: 110%; - font-weight: 600; +dd { + margin-left: 2em; } -h1 a, h1 a:link, h1 a:visited , -h2 a, h2 a:link, h2 a:visited , -h3 a, h3 a:link, h3 a:visited , -h4 a, h4 a:link, h4 a:visited , -h5 a, h5 a:link, h5 a:visited , -h6 a, h6 a:link, h6 a:visited { - color: #444; +caption { + font-weight: 700; } -p { - margin: 0.5em 0 0.5em 0; +.title, #projectname { + line-height: 1.0125em; + margin: 0.75em 0 0 0; } -dt { - font-weight: 700; +.titlearea .header .titlebox, #projectname { + font-size: 1.68em; + 
font-weight: 400; + margin-bottom: 0.25em; + margin-top: 0; } -dd { - margin-left: 2em; +#header { + padding: 0 0 0.5em 0; + border-bottom: 1px solid #EEE; } -caption { - font-weight: 700; +.header .headertitle .title { + line-height: 1.68em; + font-size: 1.68em; + font-weight: 600; + font-family: Helvetica, Arial, "DejaVu Sans Condensed", Verdana, sans-serif; } -span.legend { - font-size: small; - text-align: center; +.ingroups { + display: none; } -h3.version { - font-size: small; - text-align: center; +.title .ingroups a { + font-size: small; + margin-left: 1em; } -div.qindex,div.navtab { - background-color: #EBEFF6; - border: 1px solid #A3B4D7; - text-align: center; - margin: 2px; - padding: 2px; +#titlebox, #metabox { + display: inline-block; } -div.navtab { - margin-right: 15px; +#titlebox { + display: inline-block; + width: 75%; + left: 0; + top: 0; } -/* @group Link Styling */ -a { - color: #546E00; - text-decoration: none; +#title { + margin-bottom: 0.25em; + line-height: 1.25em; + font-size: 2.5em; + color: #333; + font-weight: 600; } -.contents a:visited { - color: #344E00; +.PageDoc { + margin-top: 1.5em; } -a:hover { - text-decoration: underline; +.PageDoc .header .headertitle .title { + display: none; +} + +#shortdesc { + margin: 0; + color: #666; + display: inline-block; + font-style: italic; + font-family: Helvetica, Arial, "DejaVu Sans Condensed", Verdana, sans-serif; + padding: 0; +} + +#titlearea { + margin: 0.25em auto 0 auto; + padding: 0; + position: relative; + clear: both; + line-height: 1em; +} + +.legend { + font-size: small; + text-align: center; +} + +.version { + font-size: small; + text-align: center; +} + +div.qindex,div.navtab { + background-color: #EBEFF6; + border: 1px solid #A3B4D7; + text-align: center; + margin: 2px; + padding: 2px; +} + +div.navtab { + margin-right: 15px; +} + +.contents a:visited { + color: #344E00; } a.qindexHL { - background-color: #9CAFD4; - color: #FFF; - border: 1px double #869DCA; + background-color: #9CAFD4; + color: #FFF; + border: 1px double #869DCA; } code { - color: #444; + color: #444; + font-family: "SF Mono", Menlo, Consolas, "DejaVu Sans Mono", monospace, fixed; } -/* @end */ dl.el { - margin-left: -1cm; + margin-left: -1cm; } .fragment { - font-family: "DejaVu Sans Mono",monospace,fixed; + font-family: "SF Mono", Menlo, Consolas, "DejaVu Sans Mono", monospace, fixed; } pre.fragment { - border: 1px solid #C4C4C4; - background-color: #F9F9F9; - padding: 0.5em; - overflow: auto; + border: 1px solid #C4C4C4; + background-color: #F9F9F9; + padding: 0.5em; + overflow: auto; } div.ah { - background-color: #000; - font-weight: 700; - color: #FFF; - margin-bottom: 3px; - margin-top: 3px; - padding: .2em; - border: thin solid #333; + background-color: #000; + font-weight: 700; + color: #FFF; + margin-bottom: 3px; + margin-top: 3px; + padding: 0.2em; + border: thin solid #333; } div.groupHeader { - margin-left: 16px; - margin-top: 12px; - margin-bottom: 6px; - font-weight: 700; + margin-left: 16px; + margin-top: 12px; + margin-bottom: 6px; + font-weight: 700; } a + h2.groupheader { - display: none; + display: none; } div.groupText { - margin-left: 16px; - font-style: italic; + margin-left: 16px; + font-style: italic; } div.contents, #content { - padding: 0 0.25em 0 0.25em; - max-width: 60em; - margin-left: auto; - margin-right: auto; + max-width: 60em; + margin-left: auto; + margin-right: auto; +} + +.groupheader + p { + font-style: italic; + color: #666; + margin: 0 0 1em 0; } td.indexkey { - background-color: #EBEFF6; - 
font-weight: 700; - border: 1px solid #C4CFE5; - margin: 2px 0; - padding: 2px 10px; + background-color: #EBEFF6; + font-weight: 700; + border: 1px solid #C4CFE5; + margin: 2px 0; + padding: 2px 10px; } td.indexvalue { - background-color: #EBEFF6; - border: 1px solid #C4CFE5; - padding: 2px 10px; - margin: 2px 0; + background-color: #EBEFF6; + border: 1px solid #C4CFE5; + padding: 2px 10px; + margin: 2px 0; } table.memname { - font-family: "DejaVu Sans Mono",monospace,fixed; + font-family: "SF Mono", Menlo, Consolas, "DejaVu Sans Mono", monospace, fixed; + border-spacing: 0; +} + +table.memname tbody tr:last-child { + display: none; +} + +table.memname tbody tr:only-child { + display: table-cell; +} + +table.memname tbody tr:nth-last-child(2)::after { + content: ")"; } tr.memlist { - background-color: #EEF1F7; + background-color: #EEF1F7; } p.formulaDsp { - text-align: center; + text-align: center; } img.formulaInl { - vertical-align: middle; + vertical-align: middle; } div.center { - text-align: center; - margin-top: 0; - margin-bottom: 0; - padding: 0; + text-align: center; + margin-top: 0; + margin-bottom: 0; + padding: 0; } div.center img { - border: 0; + border: 0; } address.footer { - text-align: right; + text-align: right; } img.footer { - border: 0; - vertical-align: middle; + border: 0; + vertical-align: middle; } -/* @group Code Colorization */ span.keyword { - color: #586E75; + color: #586E75; } span.keywordtype { - color: #546E00; + color: #546E00; } span.keywordflow { - color: #586E75; + color: #586E75; } span.comment { - color: #6C71C4; + color: #6C71C4; } span.preprocessor { - color: #D33682; + color: #D33682; } span.stringliteral { - color: #CB4B16; + color: #CB4B16; } span.charliteral { - color: #CB4B16; + color: #CB4B16; } -/* @end */ td.tiny { - font-size: x-small; + font-size: x-small; } .dirtab { - padding: 4px; - border-collapse: collapse; - border: 1px solid #A3B4D7; + padding: 4px; + border-collapse: collapse; + border: 1px solid #A3B4D7; } th.dirtab { - background: #EBEFF6; - font-weight: 700; + background: #EBEFF6; + font-weight: 700; } hr { - height: 0; - border: none; - border-top: 1px solid #DDD; - margin: 2em 0 1em; + height: 0; + border: none; + border-top: 1px solid #DDD; + margin: 2em 0; } #footer { - bottom: 0; - clear: both; - font-size: x-small; - margin: 2em 0 0; - padding: 0 1em 1em 1em; - vertical-align: top; - color: #888; + bottom: 0; + clear: both; + font-size: x-small; + margin: 2em 0 0; + padding: 0 1em 1em 1em; + vertical-align: top; + color: #888; +} + +td.ititle { + padding-bottom: 0.75em; } -/* @group Member Descriptions */ table.memberdecls { - border-spacing: 0.125em; - line-height: 1.3em; + border-spacing: 0.125em; + line-height: 1.3em; } .mdescLeft,.mdescRight,.memItemLeft,.memItemRight,.memTemplItemLeft,.memTemplItemRight,.memTemplParams { - margin: 0; - padding: 0; + margin: 0; + padding: 0; } .mdescLeft,.mdescRight { - color: #555; + color: #555; } .memItemLeft,.memItemRight,.memTemplParams { - border: 0; - font-family: "DejaVu Sans Mono",monospace,fixed; + border: 0; + font-family: "SF Mono", Menlo, Consolas, "DejaVu Sans Mono", monospace, fixed; } .memItemLeft,.memTemplItemLeft { - white-space: nowrap; - padding-left: 2em; - padding-right: 1em; + white-space: nowrap; + padding-left: 2em; } .memItemLeft a.el { - font-weight: bold; + font-weight: bold; } .memTemplParams { - color: #464646; - white-space: nowrap; + color: #464646; + white-space: nowrap; } td.memSeparator { - display: none; + display: none; } td.mlabels-right { - 
vertical-align: top; - padding-top: 4px; - color: #B4C342; + color: #B4C342; + font-weight: normal; + margin-left: 1em; } .memtitle { - display: none; + display: none; } -/* @end */ -/* @group Member Details */ -/* Styles for detailed member documentation */ .memtemplate { - color: #888; - font-style: italic; - font-family: "DejaVu Sans Mono",monospace,fixed; - font-size: small; + color: #888; + font-style: italic; + font-family: "SF Mono", Menlo, Consolas, "DejaVu Sans Mono", monospace, fixed; + font-size: small; } .memnav { - background-color: #EEE; - border: 1px solid #B4C342; - text-align: center; - margin: 2px; - margin-right: 15px; - padding: 2px; + background-color: #EEE; + border: 1px solid #B4C342; + text-align: center; + margin: 2px; + margin-right: 15px; + padding: 2px; } .memitem { - padding: 0.25em 0.5em 0.25em 0.5em; - margin: 0 0 1em 0; - border-radius: 6px; - border: 1px solid #DDD; + padding: 0.5em 0.5em 0.25em 0.5em; + margin: 1em 0 2em 0; } .memproto { - font-size: 110%; - font-weight: 400; - line-height: 1em; - color: #000; + border-bottom: 1px solid #EEE; + font-family: "SF Mono", Menlo, Consolas, "DejaVu Sans Mono", monospace, fixed; + font-size: 1.09em; + font-weight: 600; + line-height: 1.41em; + margin-bottom: 0.25em; + padding-bottom: 0.125em; } .memproto .paramname { - font-style: normal; + font-style: normal; + padding-right: 0.25em; } .memdoc { - padding: 0 0.25em 0 0.25em; + padding: 0; +} + +.memdoc > p:first-child, .memdoc .textblock > p:first-child { + font-style: italic; + color: #444; + margin-bottom: 0.75em; } .paramkey { - text-align: right; + text-align: right; } .paramtype { - color: #666; - padding-right: 0.5em; - white-space: nowrap; + color: #666; + padding: 0 0.25em 0 0.25em; + white-space: nowrap; } -.paramname { - color: #111; - white-space: nowrap; - font-family: "DejaVu Sans Mono",monospace,fixed; - font-style: italic; - padding-right: 0.5em; +.params .paramname { + color: #111; + white-space: nowrap; + font-family: "SF Mono", Menlo, Consolas, "DejaVu Sans Mono", monospace, fixed; + font-style: italic; + padding-right: 0.5em; + vertical-align: top; } .fieldname { - color: #000; + color: #000; } .fieldtable { - padding-top: 0.25em; - border-top: 1px dashed #DDD; + margin-top: 1.0em; + border-collapse: collapse; } .fieldtable tbody tr:first-child { - display: none; + display: none; } td.fieldname { - padding: 0 0.5em 0 0.25em; - vertical-align: top; - font-family: "DejaVu Sans Mono",monospace,fixed; + vertical-align: top; + font-family: "SF Mono", Menlo, Consolas, "DejaVu Sans Mono", monospace, fixed; +} + +td.fielddoc { + padding: 0.125em 0.5em 0 0.25em; + vertical-align: top; +} + +.fieldtable tbody tr td { + border-top: 1px dashed #DDD; + border-bottom: 1px dashed #DDD; } td.fieldtype { - color: #666; - padding: 0 0.5em 0 0; - vertical-align: top; - font-family: "DejaVu Sans Mono",monospace,fixed; + color: #666; + padding: 0 0.5em 0 0; + vertical-align: top; + font-family: "SF Mono", Menlo, Consolas, "DejaVu Sans Mono", monospace, fixed; } td.fielddoc p { - margin: 0; - vertical-align: top; - padding: 0 0.5em 0 0; + margin: 0; + padding: 0 0.5em 0 0; } p.reference { - font-size: x-small; - font-style: italic; + font-size: x-small; + font-style: italic; } -/* @end */ -/* @group Directory (tree) */ -/* for the tree view */ .ftvtree { - font-family: sans-serif; - margin: 0; + font-family: "DejaVu Sans", Verdana, Helvetica, Arial, sans-serif; + margin: 0; } -/* these are for tree view when used as main index */ .directory { - font-size: small; - 
margin: 0.5em; + margin: 0.5em; } .directory h3 { - margin: 0; - margin-top: 1em; - font-size: 11pt; + margin: 0; + margin-top: 1em; + font-size: 11pt; } .directory > h3 { - margin-top: 0; + margin-top: 0; } .directory p { - margin: 0; - white-space: nowrap; + margin: 0; + white-space: nowrap; } .directory div { - display: none; - margin: 0; + display: none; + margin: 0; } .directory img { - vertical-align: -30%; + vertical-align: -30%; } td.entry { - font-family: "DejaVu Sans",Helvetica,Arial,sans-serif; - font-weight: 400; - padding-right: 1em; + font-family: "DejaVu Sans", Verdana, Helvetica, Arial, sans-serif; + font-weight: 400; + padding-right: 1em; } -td.entry .arrow { - display: none; +.arrow { + color: #CCC; + user-select: none; + font-size: 80%; + display: inline-block; + width: 16px; + height: 22px; + vertical-align: top; } td.entry b { - font-family: "DejaVu Sans",Helvetica,Arial,sans-serif; - font-weight: 400; - font-size: 130%; + font-family: "DejaVu Sans", Verdana, Helvetica, Arial, sans-serif; + font-weight: 400; + font-size: 130%; } -/* these are for tree view when not used as main index */ .directory-alt { - font-size: 100%; - font-weight: bold; + font-size: 100%; + font-weight: bold; } .directory-alt h3 { - margin: 0; - margin-top: 1em; - font-size: 11pt; + margin: 0; + margin-top: 1em; + font-size: 11pt; } .directory-alt > h3 { - margin-top: 0; + margin-top: 0; } .directory-alt p { - margin: 0; - white-space: nowrap; + margin: 0; + white-space: nowrap; } .directory-alt div { - display: none; - margin: 0; + display: none; + margin: 0; } .directory-alt img { - vertical-align: -30%; + vertical-align: -30%; } -/* @end */ div.dynheader { - margin-top: 8px; + margin-top: 8px; } address { - font-style: normal; - color: #444; + font-style: normal; + color: #444; } table.doxtable { - border-collapse: collapse; - margin: 0.5em; + border-collapse: collapse; + margin: 0.5em; } table.doxtable td,table.doxtable th { - border: 1px solid #DDD; - padding: 3px 7px 2px; + border: 1px solid #DDD; + padding: 3px 7px 2px; } table.doxtable th { - background-color: #F3F3F3; - color: #000; - padding-bottom: 4px; - padding-top: 5px; - text-align: left; - font-weight: bold; + background-color: #F3F3F3; + color: #000; + padding-bottom: 4px; + padding-top: 5px; + text-align: left; + font-weight: bold; } .tabsearch { - top: 0; - left: 10px; - height: 36px; - z-index: 101; - overflow: hidden; - font-size: 13px; + top: 0; + left: 10px; + height: 36px; + z-index: 101; + overflow: hidden; + font-size: 13px; } div.navpath { - color: #DDD; + color: #DDD; } .navpath ul { - overflow: hidden; - margin: 0; - padding: 0; + overflow: hidden; + margin: 0; + padding: 0; } .navpath li { - float: left; - padding-left: 0; - margin-left: 0.5em; - padding-right: 1em; + float: left; + padding-left: 0; + margin-left: 0.5em; + padding-right: 1em; } .navpath a { - display: block; - text-decoration: none; - outline: none; + display: block; + text-decoration: none; + outline: none; } div.summary { - font-size: small; - font-family: "DejaVu Sans",Helvetica,Arial,sans-serif; - margin: 0; - color: #FFF; /* Hide separator bars */ - border-bottom: 1px solid #DDD; - padding: 0.25em 0; + font-size: small; + font-family: "DejaVu Sans", Verdana, Helvetica, Arial, sans-serif; + margin: 0; + padding: 0.25em 0; + display: none; } div.summary a { - white-space: nowrap; + white-space: nowrap; } -/* Metadata box (right aligned next to title) */ - #metabox { - display: inline-block; - font-size: x-small; - margin: 0 0 0.25em 0; - position: 
absolute; - right: 0; - top: 0; - color: #666; - font-style: italic; - padding: 0 1em; + display: inline-block; + font-size: x-small; + font-family: Helvetica, Arial, "DejaVu Sans Condensed", Verdana, sans-serif; + position: absolute; + right: 0; + bottom: 0.25em; + color: #666; + font-style: italic; } #meta { - border-style: hidden; - margin-right: 0.25em; + border-style: hidden; + margin-right: 0.25em; } #meta tr, #meta th, #meta td { - background-color: transparent; - border: 0; - margin: 0; - font-weight: normal; + background-color: transparent; + border: 0; + margin: 0; + font-weight: normal; } #meta th { - text-align: right; + text-align: right; } -#meta th:after { - content: ":"; +#meta th::after { + content: ":"; } div.line { - font-family: "DejaVu Sans Mono",monospace,fixed; - line-height: 1.4em; - white-space: pre-wrap; + font-family: "SF Mono", Menlo, Consolas, "DejaVu Sans Mono", monospace, fixed; + line-height: 1.4em; + white-space: pre-wrap; } .glow { - background-color: #2AA198; - box-shadow: 0 0 10px #2AA198; + background-color: #2AA198; + box-shadow: 0 0 10px #2AA198; } span.lineno { - padding-right: 4px; - text-align: right; - border-right: 2px solid #546E00; - background-color: #E8E8E8; - white-space: pre; + padding-right: 4px; + text-align: right; + border-right: 2px solid #546E00; + background-color: #E8E8E8; + white-space: pre; } + span.lineno a { - background-color: #D8D8D8; + background-color: #D8D8D8; } span.lineno a:hover { - background-color: #C8C8C8; + background-color: #C8C8C8; } .tabs, .tabs2, .navpath { - padding: 0.25em 0; - border-bottom: 1px solid #DDD; - font-size: small; - font-family: "DejaVu Sans",Helvetica,Arial,sans-serif; - margin: 0; + padding: 0.25em 0; + font-size: small; + font-family: Helvetica, Arial, "DejaVu Sans Condensed", Verdana, sans-serif; + margin: 0; } th { - text-align: left; - font-size: 110%; - font-weight: 500; + text-align: left; + font-size: 110%; + font-weight: 500; } .mlabel { - padding: 0.125em; + padding: 0.125em; } -#navrow1 { - /* Disable menu from Doxygen 1.8.15, it is faked in the template */ - display: none; +#navrow1, #navrow2 { + /* Disable menu from Doxygen 1.8.15, it is faked in the template */ + display: none; } -/* tabs*/ - .tablist { - margin: 0; - padding: 0; - display: table; + margin: 0; + padding: 0; + display: table; } .tablist li { - display: table-cell; - line-height: 2em; - list-style: none; - border-bottom: 0; + display: table-cell; + line-height: 2em; + list-style: none; + border-bottom: 0; } .tablist a { - display: block; - padding: 0 1em 0 0; - font-family: "DejaVu Sans",Helvetica,Arial,sans-serif; - text-decoration: none; - outline: none; + display: block; + padding: 0 1em 0 0; + text-decoration: none; + outline: none; } .tabs3 .tablist a { - padding: 0 10px; + padding: 0 10px; } .tablist li.current a { - color: #222; + color: #222; } span.icon { - display: none; + display: none; } diff -Nru serd-0.30.2/.gitattributes serd-0.30.4/.gitattributes --- serd-0.30.2/.gitattributes 1970-01-01 00:00:00.000000000 +0000 +++ serd-0.30.4/.gitattributes 2020-04-11 10:26:21.626008700 +0000 @@ -0,0 +1,4 @@ +*.nq -text +*.nt -text +*.trig -text +*.ttl -text diff -Nru serd-0.30.2/.gitlab-ci.yml serd-0.30.4/.gitlab-ci.yml --- serd-0.30.2/.gitlab-ci.yml 2019-10-19 22:20:18.000000000 +0000 +++ serd-0.30.4/.gitlab-ci.yml 2020-04-25 14:46:28.787513700 +0000 @@ -176,26 +176,25 @@ win_dbg: <<: *build_definition script: python ./waf configure build -dT --no-coverage - tags: [windows] + tags: [windows,msvc,python] 
test:win_dbg: <<: *test_definition script: python ./waf test needs: ["win_dbg"] - tags: [windows] + tags: [windows,msvc,python] win_rel: <<: *build_definition script: python ./waf configure build -T --no-coverage - tags: - - windows + tags: [windows,msvc,python] test:win_rel: <<: *test_definition script: python ./waf test needs: ["win_rel"] - tags: [windows] + tags: [windows,msvc,python] pages: diff -Nru serd-0.30.2/NEWS serd-0.30.4/NEWS --- serd-0.30.2/NEWS 2019-10-20 23:19:20.000000000 +0000 +++ serd-0.30.4/NEWS 2020-04-26 16:36:05.808137700 +0000 @@ -1,3 +1,11 @@ +serd (0.30.4) stable; + + * Fix EOF handling while reading in bulk or from strings + * Fix lax handling of string errors + * Fix reading from a null-delimited socket + + -- David Robillard Sun, 26 Apr 2020 16:04:05 +0000 + serd (0.30.2) stable; * Fix GCC 4 build diff -Nru serd-0.30.2/serd.ttl serd-0.30.4/serd.ttl --- serd-0.30.2/serd.ttl 2019-06-06 20:18:53.000000000 +0000 +++ serd-0.30.4/serd.ttl 2019-12-12 11:32:57.447873000 +0000 @@ -22,11 +22,11 @@ , , ; - doap:bug-database ; + doap:bug-database ; doap:blog ; doap:developer ; doap:maintainer ; doap:repository [ a doap:GitBranch ; - doap:location + doap:location ] . diff -Nru serd-0.30.2/src/byte_source.c serd-0.30.4/src/byte_source.c --- serd-0.30.2/src/byte_source.c 2019-10-19 22:19:43.000000000 +0000 +++ serd-0.30.4/src/byte_source.c 2020-04-25 14:46:28.787513700 +0000 @@ -29,6 +29,7 @@ ? SERD_ERR_UNKNOWN : SERD_FAILURE); } else if (n_read < source->page_size) { source->file_buf[n_read] = '\0'; + source->buf_size = n_read; } return SERD_SUCCESS; } @@ -47,6 +48,7 @@ source->stream = stream; source->from_stream = true; source->page_size = page_size; + source->buf_size = page_size; source->cur = cur; source->error_func = error_func; source->read_func = read_func; diff -Nru serd-0.30.2/src/env.c serd-0.30.4/src/env.c --- serd-0.30.2/src/env.c 2019-10-19 22:19:43.000000000 +0000 +++ serd-0.30.4/src/env.c 2020-04-25 14:46:28.790847000 +0000 @@ -188,13 +188,13 @@ return SERD_ERR_BAD_ARG; } - const size_t name_len = colon - curie->buf; + const size_t name_len = (size_t)(colon - curie->buf); const SerdPrefix* const prefix = serd_env_find(env, curie->buf, name_len); if (prefix) { uri_prefix->buf = prefix->uri.buf; uri_prefix->len = prefix->uri.n_bytes; uri_suffix->buf = colon + 1; - uri_suffix->len = curie->n_bytes - (colon - curie->buf) - 1; + uri_suffix->len = curie->n_bytes - name_len - 1; return SERD_SUCCESS; } return SERD_ERR_BAD_CURIE; diff -Nru serd-0.30.2/src/n3.c serd-0.30.4/src/n3.c --- serd-0.30.2/src/n3.c 2019-10-19 22:19:43.000000000 +0000 +++ serd-0.30.4/src/n3.c 2020-04-25 14:46:28.790847000 +0000 @@ -44,20 +44,21 @@ static inline uint8_t read_HEX(SerdReader* reader) { - const uint8_t c = peek_byte(reader); + const int c = peek_byte(reader); if (is_xdigit(c)) { - return eat_byte_safe(reader, c); + return (uint8_t)eat_byte_safe(reader, c); } - return r_err(reader, SERD_ERR_BAD_SYNTAX, - "invalid hexadecimal digit `%c'\n", c); + + return (uint8_t)r_err(reader, SERD_ERR_BAD_SYNTAX, + "invalid hexadecimal digit `%c'\n", c); } // Read UCHAR escape, initial \ is already eaten by caller static inline bool read_UCHAR(SerdReader* reader, Ref dest, uint32_t* char_code) { - const uint8_t b = peek_byte(reader); - unsigned length = 0; + const int b = peek_byte(reader); + unsigned length = 0; switch (b) { case 'U': length = 8; @@ -78,7 +79,7 @@ } char* endptr = NULL; - const uint32_t code = strtoul((const char*)buf, &endptr, 16); + const uint32_t code = (uint32_t)strtoul((const 
char*)buf, &endptr, 16); assert(endptr == (char*)buf + length); unsigned size = 0; @@ -103,17 +104,17 @@ uint32_t c = code; switch (size) { case 4: - buf[3] = 0x80 | (uint8_t)(c & 0x3F); + buf[3] = (uint8_t)(0x80u | (c & 0x3Fu)); c >>= 6; c |= (16 << 12); // set bit 4 // fallthru case 3: - buf[2] = 0x80 | (uint8_t)(c & 0x3F); + buf[2] = (uint8_t)(0x80u | (c & 0x3Fu)); c >>= 6; c |= (32 << 6); // set bit 5 // fallthru case 2: - buf[1] = 0x80 | (uint8_t)(c & 0x3F); + buf[1] = (uint8_t)(0x80u | (c & 0x3Fu)); c >>= 6; c |= 0xC0; // set bits 6 and 7 // fallthru @@ -130,7 +131,7 @@ static inline bool read_ECHAR(SerdReader* reader, Ref dest, SerdNodeFlags* flags) { - const uint8_t c = peek_byte(reader); + const int c = peek_byte(reader); switch (c) { case 't': eat_byte_safe(reader, 't'); @@ -166,7 +167,7 @@ bad_char(SerdReader* reader, const char* fmt, uint8_t c) { // Skip bytes until the next start byte - for (uint8_t b = peek_byte(reader); (b & 0x80);) { + for (int b = peek_byte(reader); b != EOF && ((uint8_t)b & 0x80);) { eat_byte_safe(reader, b); b = peek_byte(reader); } @@ -185,11 +186,14 @@ bytes[0] = c; for (unsigned i = 1; i < *size; ++i) { - if (((bytes[i] = peek_byte(reader)) & 0x80) == 0) { + const int b = peek_byte(reader); + if (b == EOF || ((uint8_t)b & 0x80) == 0) { return bad_char(reader, "invalid UTF-8 continuation 0x%X\n", - bytes[i]); + (uint8_t)b); } - eat_byte_safe(reader, bytes[i]); + + eat_byte_safe(reader, b); + bytes[i] = (uint8_t)b; } return SERD_SUCCESS; @@ -250,8 +254,8 @@ read_comment(SerdReader* reader) { eat_byte_safe(reader, '#'); - uint8_t c; - while (((c = peek_byte(reader)) != 0xA) && (c != 0xD) && c) { + int c; + while (((c = peek_byte(reader)) != 0xA) && c != 0xD && c != EOF && c) { eat_byte_safe(reader, c); } } @@ -260,7 +264,7 @@ static inline bool read_ws(SerdReader* reader) { - const uint8_t c = peek_byte(reader); + const int c = peek_byte(reader); switch (c) { case 0x9: case 0xA: case 0xD: case 0x20: eat_byte_safe(reader, c); @@ -302,9 +306,10 @@ static Ref read_STRING_LITERAL_LONG(SerdReader* reader, SerdNodeFlags* flags, uint8_t q) { - Ref ref = push_node(reader, SERD_LITERAL, "", 0); - while (!reader->status) { - const uint8_t c = peek_byte(reader); + Ref ref = push_node(reader, SERD_LITERAL, "", 0); + SerdStatus st = SERD_SUCCESS; + while (!reader->status && !(st && reader->strict)) { + const int c = peek_byte(reader); if (c == '\\') { eat_byte_safe(reader, c); uint32_t code; @@ -316,17 +321,21 @@ } } else if (c == q) { eat_byte_safe(reader, q); - const uint8_t q2 = eat_byte_safe(reader, peek_byte(reader)); - const uint8_t q3 = peek_byte(reader); + const int q2 = eat_byte_safe(reader, peek_byte(reader)); + const int q3 = peek_byte(reader); if (q2 == q && q3 == q) { // End of string eat_byte_safe(reader, q3); - return ref; + break; } *flags |= SERD_HAS_QUOTE; push_byte(reader, ref, c); - read_character(reader, ref, flags, q2); + read_character(reader, ref, flags, (uint8_t)q2); + } else if (c == EOF) { + r_err(reader, SERD_ERR_BAD_SYNTAX, "end of file in long string\n"); + return pop_node(reader, ref); } else { - read_character(reader, ref, flags, eat_byte_safe(reader, c)); + st = read_character( + reader, ref, flags, (uint8_t)eat_byte_safe(reader, c)); } } return ref; @@ -337,11 +346,15 @@ static Ref read_STRING_LITERAL(SerdReader* reader, SerdNodeFlags* flags, uint8_t q) { - Ref ref = push_node(reader, SERD_LITERAL, "", 0); - while (!reader->status) { - const uint8_t c = peek_byte(reader); - uint32_t code = 0; + Ref ref = push_node(reader, 
SERD_LITERAL, "", 0); + SerdStatus st = SERD_SUCCESS; + while (!reader->status && !(st && reader->strict)) { + const int c = peek_byte(reader); + uint32_t code = 0; switch (c) { + case EOF: + r_err(reader, SERD_ERR_BAD_SYNTAX, "end of file in short string\n"); + return pop_node(reader, ref); case '\n': case '\r': r_err(reader, SERD_ERR_BAD_SYNTAX, "line end in short string\n"); return pop_node(reader, ref); @@ -359,7 +372,8 @@ eat_byte_check(reader, q); return ref; } else { - read_character(reader, ref, flags, eat_byte_safe(reader, c)); + st = read_character( + reader, ref, flags, (uint8_t)eat_byte_safe(reader, c)); } } } @@ -370,17 +384,21 @@ static Ref read_String(SerdReader* reader, SerdNodeFlags* flags) { - const uint8_t q1 = peek_byte(reader); + const int q1 = peek_byte(reader); eat_byte_safe(reader, q1); - const uint8_t q2 = peek_byte(reader); - if (q2 != q1) { // Short string (not triple quoted) - return read_STRING_LITERAL(reader, flags, q1); + const int q2 = peek_byte(reader); + if (q2 == EOF) { + return r_err(reader, SERD_ERR_BAD_SYNTAX, "unexpected end of file\n"); + } else if (q2 != q1) { // Short string (not triple quoted) + return read_STRING_LITERAL(reader, flags, (uint8_t)q1); } eat_byte_safe(reader, q2); - const uint8_t q3 = peek_byte(reader); - if (q3 != q1) { // Empty short string ("" or '') + const int q3 = peek_byte(reader); + if (q3 == EOF) { + return r_err(reader, SERD_ERR_BAD_SYNTAX, "unexpected end of file\n"); + } else if (q3 != q1) { // Empty short string ("" or '') return push_node(reader, SERD_LITERAL, "", 0); } @@ -390,7 +408,7 @@ } eat_byte_safe(reader, q3); - return read_STRING_LITERAL_LONG(reader, flags, q1); + return read_STRING_LITERAL_LONG(reader, flags, (uint8_t)q1); } static inline bool @@ -407,15 +425,15 @@ static SerdStatus read_PN_CHARS_BASE(SerdReader* reader, Ref dest) { - uint32_t code; - const uint8_t c = peek_byte(reader); - SerdStatus st = SERD_SUCCESS; + uint32_t code; + const int c = peek_byte(reader); + SerdStatus st = SERD_SUCCESS; if (is_alpha(c)) { push_byte(reader, dest, eat_byte_safe(reader, c)); - } else if (!(c & 0x80)) { + } else if (c == EOF || !(c & 0x80)) { return SERD_FAILURE; } else if ((st = read_utf8_code(reader, dest, &code, - eat_byte_safe(reader, c)))) { + (uint8_t)eat_byte_safe(reader, c)))) { return st; } else if (!is_PN_CHARS_BASE(code)) { r_err(reader, SERD_ERR_BAD_SYNTAX, @@ -437,15 +455,15 @@ static SerdStatus read_PN_CHARS(SerdReader* reader, Ref dest) { - uint32_t code; - const uint8_t c = peek_byte(reader); - SerdStatus st = SERD_SUCCESS; + uint32_t code; + const int c = peek_byte(reader); + SerdStatus st = SERD_SUCCESS; if (is_alpha(c) || is_digit(c) || c == '_' || c == '-') { push_byte(reader, dest, eat_byte_safe(reader, c)); - } else if (!(c & 0x80)) { + } else if (c == EOF || !(c & 0x80)) { return SERD_FAILURE; } else if ((st = read_utf8_code(reader, dest, &code, - eat_byte_safe(reader, c)))) { + (uint8_t)eat_byte_safe(reader, c)))) { return st; } else if (!is_PN_CHARS(code)) { r_err(reader, (st = SERD_ERR_BAD_SYNTAX), @@ -471,7 +489,7 @@ static SerdStatus read_PLX(SerdReader* reader, Ref dest) { - uint8_t c = peek_byte(reader); + int c = peek_byte(reader); switch (c) { case '%': if (!read_PERCENT(reader, dest)) { @@ -495,7 +513,7 @@ static SerdStatus read_PN_LOCAL(SerdReader* reader, Ref dest, bool* ate_dot) { - uint8_t c = peek_byte(reader); + int c = peek_byte(reader); SerdStatus st = SERD_SUCCESS; bool trailing_unescaped_dot = false; switch (c) { @@ -537,7 +555,7 @@ static SerdStatus 
read_PN_PREFIX_tail(SerdReader* reader, Ref dest) { - uint8_t c; + int c; while ((c = peek_byte(reader))) { // Middle: (PN_CHARS | '.')* if (c == '.') { push_byte(reader, dest, eat_byte_safe(reader, c)); @@ -567,10 +585,11 @@ static Ref read_LANGTAG(SerdReader* reader) { - uint8_t c = peek_byte(reader); + int c = peek_byte(reader); if (!is_alpha(c)) { return r_err(reader, SERD_ERR_BAD_SYNTAX, "unexpected `%c'\n", c); } + Ref ref = push_node(reader, SERD_LITERAL, "", 0); push_byte(reader, ref, eat_byte_safe(reader, c)); while ((c = peek_byte(reader)) && is_alpha(c)) { @@ -588,13 +607,13 @@ static bool read_IRIREF_scheme(SerdReader* reader, Ref dest) { - uint8_t c = peek_byte(reader); + int c = peek_byte(reader); if (!isalpha(c)) { return r_err(reader, SERD_ERR_BAD_SYNTAX, "bad IRI scheme start `%c'\n", c); } - while ((c = peek_byte(reader))) { + while ((c = peek_byte(reader)) != EOF) { if (c == '>') { return r_err(reader, SERD_ERR_BAD_SYNTAX, "missing IRI scheme\n"); } else if (!is_uri_scheme_char(c)) { @@ -608,21 +627,22 @@ } } - return false; + return r_err(reader, SERD_ERR_BAD_SYNTAX, "unexpected end of file\n"); } static Ref read_IRIREF(SerdReader* reader) { TRY_RET(eat_byte_check(reader, '<')); - Ref ref = push_node(reader, SERD_URI, "", 0); + Ref ref = push_node(reader, SERD_URI, "", 0); + SerdStatus st = SERD_SUCCESS; if (!fancy_syntax(reader) && !read_IRIREF_scheme(reader, ref)) { return pop_node(reader, ref); } uint32_t code = 0; - while (!reader->status) { - const uint8_t c = eat_byte_safe(reader, peek_byte(reader)); + while (!reader->status && !(st && reader->strict)) { + const int c = eat_byte_safe(reader, peek_byte(reader)); switch (c) { case '"': case '<': case '^': case '`': case '{': case '|': case '}': r_err(reader, SERD_ERR_BAD_SYNTAX, @@ -660,11 +680,11 @@ push_byte(reader, ref, c); } else if (!(c & 0x80)) { push_byte(reader, ref, c); - } else if (read_utf8_character(reader, ref, c)) { + } else if ((st = read_utf8_character(reader, ref, (uint8_t)c))) { if (reader->strict) { + reader->status = SERD_FAILURE; return pop_node(reader, ref); } - reader->status = SERD_FAILURE; } } } @@ -688,7 +708,7 @@ read_0_9(SerdReader* reader, Ref str, bool at_least_one) { unsigned count = 0; - for (uint8_t c; is_digit((c = peek_byte(reader))); ++count) { + for (int c; is_digit((c = peek_byte(reader))); ++count) { push_byte(reader, str, eat_byte_safe(reader, c)); } if (at_least_one && count == 0) { @@ -703,9 +723,10 @@ #define XSD_DECIMAL NS_XSD "decimal" #define XSD_DOUBLE NS_XSD "double" #define XSD_INTEGER NS_XSD "integer" - Ref ref = push_node(reader, SERD_LITERAL, "", 0); - uint8_t c = peek_byte(reader); - bool has_decimal = false; + + Ref ref = push_node(reader, SERD_LITERAL, "", 0); + int c = peek_byte(reader); + bool has_decimal = false; if (c == '-' || c == '+') { push_byte(reader, ref, eat_byte_safe(reader, c)); } @@ -817,9 +838,9 @@ const SerdStatus st = read_PN_PREFIX(reader, *dest); bool ate_dot = false; SerdNode* node = deref(reader, *dest); - const uint8_t next = peek_byte(reader); + const int next = peek_byte(reader); if (!st && node->n_bytes == 1 && node->buf[0] == 'a' && - next != ':' && !is_PN_CHARS_BASE(next)) { + next != ':' && !is_PN_CHARS_BASE((uint32_t)next)) { pop_node(reader, *dest); return (*dest = push_node(reader, SERD_URI, NS_RDF "type", 47)); } else if (st > SERD_FAILURE || @@ -841,7 +862,7 @@ reader->bprefix ? 
(char*)reader->bprefix : "", reader->bprefix_len); - uint8_t c = peek_byte(reader); // First: (PN_CHARS | '_' | [0-9]) + int c = peek_byte(reader); // First: (PN_CHARS | '_' | [0-9]) if (is_digit(c) || c == '_') { push_byte(reader, ref, eat_byte_safe(reader, c)); } else if (read_PN_CHARS(reader, ref)) { @@ -923,7 +944,7 @@ ctx.subject = *dest; if (!empty) { - *ctx.flags &= ~(SERD_LIST_CONT); + *ctx.flags &= ~(unsigned)SERD_LIST_CONT; if (!subject) { *ctx.flags |= SERD_ANON_CONT; } @@ -954,14 +975,14 @@ const size_t orig_stack_size = reader->stack.size; #endif - bool ret = false; - bool simple = (ctx->subject != 0); - SerdNode* node = NULL; - Ref o = 0; - Ref datatype = 0; - Ref lang = 0; - uint32_t flags = 0; - const uint8_t c = peek_byte(reader); + bool ret = false; + bool simple = (ctx->subject != 0); + SerdNode* node = NULL; + Ref o = 0; + Ref datatype = 0; + Ref lang = 0; + uint32_t flags = 0; + const int c = peek_byte(reader); if (!fancy_syntax(reader)) { switch (c) { case '"': case ':': case '<': case '_': break; @@ -970,7 +991,7 @@ } } switch (c) { - case '\0': case ')': + case EOF: case '\0': case ')': return r_err(reader, SERD_ERR_BAD_SYNTAX, "expected object\n"); case '[': simple = false; @@ -1065,12 +1086,12 @@ return true; } - bool ate_semi = false; - uint8_t c; + bool ate_semi = false; + int c; do { read_ws_star(reader); switch (c = peek_byte(reader)) { - case 0: + case EOF: case '\0': return r_err(reader, SERD_ERR_BAD_SYNTAX, "unexpected end of file\n"); case '.': case ']': case '}': @@ -1094,7 +1115,7 @@ { pop_node(reader, n2); pop_node(reader, n1); - *ctx.flags &= ~SERD_LIST_CONT; + *ctx.flags &= ~(unsigned)SERD_LIST_CONT; return ret && (eat_byte_safe(reader, ')') == ')'); } @@ -1158,7 +1179,7 @@ } static Ref -read_subject(SerdReader* reader, ReadContext ctx, Ref* dest, char* s_type) +read_subject(SerdReader* reader, ReadContext ctx, Ref* dest, int* s_type) { bool ate_dot = false; switch ((*s_type = peek_byte(reader))) { @@ -1309,7 +1330,7 @@ read_ws_star(reader); while (peek_byte(reader) != '}') { bool ate_dot = false; - char s_type = 0; + int s_type = 0; ctx->subject = 0; Ref subj = read_subject(reader, *ctx, &ctx->subject, &s_type); if (!subj && ctx->subject) { @@ -1347,12 +1368,11 @@ ReadContext ctx = { 0, 0, 0, 0, 0, 0, &flags }; Ref subj = 0; bool ate_dot = false; - char s_type = 0; + int s_type = 0; bool ret = true; read_ws_star(reader); switch (peek_byte(reader)) { - case '\0': - reader->source.eof = true; + case EOF: case '\0': return reader->status <= SERD_FAILURE; case '@': if (!fancy_syntax(reader)) { @@ -1415,7 +1435,7 @@ static void skip_until(SerdReader* reader, uint8_t byte) { - for (uint8_t c = 0; (c = peek_byte(reader)) && c != byte;) { + for (int c = 0; (c = peek_byte(reader)) && c != byte;) { eat_byte_safe(reader, c); } } @@ -1442,10 +1462,9 @@ SerdStatementFlags flags = 0; ReadContext ctx = { 0, 0, 0, 0, 0, 0, &flags }; bool ate_dot = false; - char s_type = false; + int s_type = 0; read_ws_star(reader); - if (peek_byte(reader) == '\0') { - reader->source.eof = true; + if (peek_byte(reader) == EOF) { break; } else if (peek_byte(reader) == '@') { return r_err(reader, SERD_ERR_BAD_SYNTAX, diff -Nru serd-0.30.2/src/node.c serd-0.30.4/src/node.c --- serd-0.30.2/src/node.c 2019-10-19 22:19:43.000000000 +0000 +++ serd-0.30.4/src/node.c 2020-04-25 14:46:28.790847000 +0000 @@ -279,7 +279,7 @@ char* t = s - 1; uint64_t dec = (uint64_t)int_part; do { - *t-- = '0' + (dec % 10); + *t-- = (char)('0' + dec % 10); } while ((dec /= 10) > 0); *s++ = '.'; @@ -288,20 
+288,20 @@ double frac_part = fabs(d - int_part); if (frac_part < DBL_EPSILON) { *s++ = '0'; - node.n_bytes = node.n_chars = (s - buf); + node.n_bytes = node.n_chars = (size_t)(s - buf); } else { - uint64_t frac = llround(frac_part * pow(10.0, (int)frac_digits)); + uint64_t frac = (uint64_t)llround(frac_part * pow(10.0, (int)frac_digits)); s += frac_digits - 1; unsigned i = 0; // Skip trailing zeros for (; i < frac_digits - 1 && !(frac % 10); ++i, --s, frac /= 10) {} - node.n_bytes = node.n_chars = (s - buf) + 1; + node.n_bytes = node.n_chars = (size_t)(s - buf) + 1u; // Write digits from last trailing zero to decimal point for (; i < frac_digits; ++i) { - *s-- = '0' + (frac % 10); + *s-- = (char)('0' + (frac % 10)); frac /= 10; } } @@ -313,7 +313,7 @@ serd_node_new_integer(int64_t i) { int64_t abs_i = (i < 0) ? -i : i; - const unsigned digits = serd_digits(abs_i); + const unsigned digits = serd_digits((double)abs_i); char* buf = (char*)calloc(digits + 2, 1); SerdNode node = { (const uint8_t*)buf, 0, 0, 0, SERD_LITERAL }; @@ -324,11 +324,11 @@ ++s; } - node.n_bytes = node.n_chars = (s - buf) + 1; + node.n_bytes = node.n_chars = (size_t)(s - buf) + 1u; // Write integer part (right to left) do { - *s-- = '0' + (abs_i % 10); + *s-- = (char)('0' + (abs_i % 10)); } while ((abs_i /= 10) > 0); return node; diff -Nru serd-0.30.2/src/reader.c serd-0.30.4/src/reader.c --- serd-0.30.2/src/reader.c 2019-10-19 22:19:43.000000000 +0000 +++ serd-0.30.4/src/reader.c 2020-04-25 14:46:28.790847000 +0000 @@ -14,6 +14,7 @@ OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. */ +#include "reader.h" #include "serd_internal.h" #include @@ -41,7 +42,7 @@ { SerdNode* node = deref(reader, ref); const char* prefix = reader->bprefix ? (const char*)reader->bprefix : ""; - node->n_bytes = node->n_chars = snprintf( + node->n_bytes = node->n_chars = (size_t)snprintf( (char*)node->buf, buf_size, "%sb%u", prefix, reader->next_id++); } @@ -96,7 +97,7 @@ reader->allocs, sizeof(reader->allocs) * (++reader->n_allocs)); reader->allocs[reader->n_allocs - 1] = ((uint8_t*)mem - reader->stack.buf); #endif - return (uint8_t*)node - reader->stack.buf; + return (Ref)((uint8_t*)node - reader->stack.buf); } Ref @@ -127,7 +128,7 @@ #endif SerdNode* const node = deref(reader, ref); uint8_t* const top = reader->stack.buf + reader->stack.size; - serd_stack_pop_aligned(&reader->stack, top - (uint8_t*)node); + serd_stack_pop_aligned(&reader->stack, (size_t)(top - (uint8_t*)node)); } return 0; } @@ -159,10 +160,8 @@ static bool read_doc(SerdReader* reader) { - switch (reader->syntax) { - case SERD_NQUADS: return read_nquadsDoc(reader); - default: return read_turtleTrigDoc(reader); - } + return ((reader->syntax == SERD_NQUADS) ? read_nquadsDoc(reader) + : read_turtleTrigDoc(reader)); } SerdReader* @@ -345,6 +344,11 @@ st = serd_byte_source_advance(&reader->source); } + if (peek_byte(reader) == 0) { + // Skip leading null byte, for reading from a null-delimited socket + eat_byte_safe(reader, 0); + } + return st ? st : read_statement(reader) ? SERD_SUCCESS : SERD_FAILURE; } diff -Nru serd-0.30.2/src/reader.h serd-0.30.4/src/reader.h --- serd-0.30.2/src/reader.h 2019-10-19 22:19:43.000000000 +0000 +++ serd-0.30.4/src/reader.h 2020-04-25 14:46:28.790847000 +0000 @@ -16,16 +16,21 @@ #include "serd_internal.h" -static inline uint8_t +#include +#include + +static inline int peek_byte(SerdReader* reader) { - return serd_byte_source_peek(&reader->source); + SerdByteSource* source = &reader->source; + + return source->eof ? 
EOF : (int)source->read_buf[source->read_head]; } -static inline uint8_t +static inline int eat_byte(SerdReader* reader) { - const uint8_t c = peek_byte(reader); + const int c = peek_byte(reader); const SerdStatus st = serd_byte_source_advance(&reader->source); if (st) { reader->status = st; @@ -33,20 +38,20 @@ return c; } -static inline uint8_t -eat_byte_safe(SerdReader* reader, const uint8_t byte) +static inline int +eat_byte_safe(SerdReader* reader, const int byte) { (void)byte; - const uint8_t c = eat_byte(reader); + const int c = eat_byte(reader); assert(c == byte); return c; } -static inline uint8_t -eat_byte_check(SerdReader* reader, const uint8_t byte) +static inline int +eat_byte_check(SerdReader* reader, const int byte) { - const uint8_t c = peek_byte(reader); + const int c = peek_byte(reader); if (c != byte) { return r_err(reader, SERD_ERR_BAD_SYNTAX, "expected `%c', not `%c'\n", byte, c); @@ -65,16 +70,18 @@ } static inline SerdStatus -push_byte(SerdReader* reader, Ref ref, const uint8_t c) +push_byte(SerdReader* reader, Ref ref, const int c) { + assert(c != EOF); SERD_STACK_ASSERT_TOP(reader, ref); + uint8_t* const s = serd_stack_push(&reader->stack, 1); SerdNode* const node = (SerdNode*)(reader->stack.buf + ref); ++node->n_bytes; if (!(c & 0x80)) { // Starts with 0 bit, start of new character ++node->n_chars; } - *(s - 1) = c; + *(s - 1) = (uint8_t)c; *s = '\0'; return SERD_SUCCESS; } diff -Nru serd-0.30.2/src/serd_internal.h serd-0.30.4/src/serd_internal.h --- serd-0.30.2/src/serd_internal.h 2019-10-19 22:19:43.000000000 +0000 +++ serd-0.30.4/src/serd_internal.h 2020-04-25 14:46:28.790847000 +0000 @@ -92,6 +92,7 @@ SerdStreamErrorFunc error_func; ///< Error function (e.g. ferror) void* stream; ///< Stream (e.g. FILE) size_t page_size; ///< Number of bytes to read at a time + size_t buf_size; ///< Number of bytes in file_buf Cursor cur; ///< Cursor for error reporting uint8_t* file_buf; ///< Buffer iff reading pages from a file const uint8_t* read_buf; ///< Pointer to file_buf or read_byte @@ -140,28 +141,34 @@ SerdStatus st = SERD_SUCCESS; switch (serd_byte_source_peek(source)) { - case '\0': break; case '\n': ++source->cur.line; source->cur.col = 0; break; default: ++source->cur.col; } + const bool was_eof = source->eof; if (source->from_stream) { source->eof = false; if (source->page_size > 1) { if (++source->read_head == source->page_size) { st = serd_byte_source_page(source); + } else if (source->read_head == source->buf_size) { + source->eof = true; } } else { if (!source->read_func(&source->read_byte, 1, 1, source->stream)) { + source->eof = true; st = source->error_func(source->stream) ? SERD_ERR_UNKNOWN : SERD_FAILURE; } } } else if (!source->eof) { ++source->read_head; // Move to next character in string + if (source->read_buf[source->read_head] == '\0') { + source->eof = true; + } } - return source->eof ? SERD_FAILURE : st; + return (was_eof && source->eof) ? 
SERD_FAILURE : st; } /* Stack */ @@ -228,13 +235,14 @@ serd_stack_push(stack, 1); // Push padding if necessary - const uint8_t pad = align - stack->size % align; + const size_t pad = align - stack->size % align; if (pad > 0) { serd_stack_push(stack, pad); } // Set top of stack to pad count so we can properly pop later - stack->buf[stack->size - 1] = pad; + assert(pad < UINT8_MAX); + stack->buf[stack->size - 1] = (uint8_t)pad; // Push requested space at aligned location return serd_stack_push(stack, n_bytes); @@ -250,7 +258,7 @@ const uint8_t pad = stack->buf[stack->size - 1]; // Pop padding and pad count - serd_stack_pop(stack, pad + 1); + serd_stack_pop(stack, pad + 1u); } /* Byte Sink */ @@ -327,35 +335,35 @@ /** Return true if `c` lies within [`min`...`max`] (inclusive) */ static inline bool -in_range(const uint8_t c, const uint8_t min, const uint8_t max) +in_range(const int c, const int min, const int max) { return (c >= min && c <= max); } /** RFC2234: ALPHA ::= %x41-5A / %x61-7A ; A-Z / a-z */ static inline bool -is_alpha(const uint8_t c) +is_alpha(const int c) { return in_range(c, 'A', 'Z') || in_range(c, 'a', 'z'); } /** RFC2234: DIGIT ::= %x30-39 ; 0-9 */ static inline bool -is_digit(const uint8_t c) +is_digit(const int c) { return in_range(c, '0', '9'); } /* RFC2234: HEXDIG ::= DIGIT / "A" / "B" / "C" / "D" / "E" / "F" */ static inline bool -is_hexdig(const uint8_t c) +is_hexdig(const int c) { return is_digit(c) || in_range(c, 'A', 'F'); } /* Turtle / JSON / C: XDIGIT ::= DIGIT / A-F / a-f */ static inline bool -is_xdigit(const uint8_t c) +is_xdigit(const int c) { return is_hexdig(c) || in_range(c, 'a', 'f'); } @@ -422,7 +430,7 @@ static inline uint32_t parse_counted_utf8_char(const uint8_t* utf8, size_t size) { - uint32_t c = utf8[0] & ((1 << (8 - size)) - 1); + uint32_t c = utf8[0] & ((1u << (8 - size)) - 1); for (size_t i = 1; i < size; ++i) { const uint8_t in = utf8[i] & 0x3F; c = (c << 6) | in; @@ -438,7 +446,8 @@ case 1: case 2: case 3: case 4: return parse_counted_utf8_char(utf8, *size); default: - return *size = 0; + *size = 0; + return 0; } } @@ -516,7 +525,7 @@ } static inline bool -is_uri_scheme_char(const uint8_t c) +is_uri_scheme_char(const int c) { switch (c) { case ':': case '+': case '-': case '.': diff -Nru serd-0.30.2/src/string.c serd-0.30.4/src/string.c --- serd-0.30.2/src/string.c 2019-10-19 22:19:43.000000000 +0000 +++ serd-0.30.4/src/string.c 2020-04-25 14:46:28.790847000 +0000 @@ -170,7 +170,11 @@ "$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$" "$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$"; -static inline uint8_t unmap(const uint8_t in) { return b64_unmap[in] - 47; } +static inline uint8_t +unmap(const uint8_t in) +{ + return (uint8_t)(b64_unmap[in] - 47); +} /** Decode 4 base64 characters to 3 raw bytes. 
@@ -181,7 +185,7 @@ out[0] = (uint8_t)(((unmap(in[0]) << 2)) | unmap(in[1]) >> 4); out[1] = (uint8_t)(((unmap(in[1]) << 4) & 0xF0) | unmap(in[2]) >> 2); out[2] = (uint8_t)(((unmap(in[2]) << 6) & 0xC0) | unmap(in[3])); - return 1 + (in[2] != '=') + ((in[2] != '=') && (in[3] != '=')); + return 1u + (in[2] != '=') + ((in[2] != '=') && (in[3] != '=')); } void* diff -Nru serd-0.30.2/src/uri.c serd-0.30.4/src/uri.c --- serd-0.30.2/src/uri.c 2019-10-19 22:19:43.000000000 +0000 +++ serd-0.30.4/src/uri.c 2020-04-25 14:46:28.790847000 +0000 @@ -58,8 +58,8 @@ return NULL; } if (hostname) { - *hostname = (uint8_t*)calloc(path - auth + 1, 1); - memcpy(*hostname, auth, path - auth); + *hostname = (uint8_t*)calloc((size_t)(path - auth + 1), 1); + memcpy(*hostname, auth, (size_t)(path - auth)); } } } @@ -128,7 +128,7 @@ goto path; // Relative URI (starts with path by definition) case ':': out->scheme.buf = utf8; - out->scheme.len = (ptr++) - utf8; + out->scheme.len = (size_t)((ptr++) - utf8); goto maybe_authority; // URI with scheme case '+': case '-': case '.': continue; @@ -297,12 +297,12 @@ } while (up > 0 && (--base_last > base->buf)); // Set path prefix - base->len = base_last - base->buf + 1; + base->len = (size_t)(base_last - base->buf + 1); } // Set path suffix path->buf = begin; - path->len = end - begin; + path->len = (size_t)(end - begin); } /// See http://tools.ietf.org/html/rfc3986#section-5.2.2 diff -Nru serd-0.30.2/src/writer.c serd-0.30.4/src/writer.c --- serd-0.30.2/src/writer.c 2019-10-19 22:19:43.000000000 +0000 +++ serd-0.30.4/src/writer.c 2020-04-26 16:36:05.808137700 +0000 @@ -129,7 +129,7 @@ va_list args; va_start(args, fmt); - const SerdError e = { st, NULL, 0, 0, fmt, &args }; + const SerdError e = { st, (const uint8_t*)"", 0, 0, fmt, &args }; serd_error(writer->error_sink, writer->error_handle, &e); va_end(args); } diff -Nru serd-0.30.2/tests/bad/bad-eof-after-quotes.ttl serd-0.30.4/tests/bad/bad-eof-after-quotes.ttl --- serd-0.30.2/tests/bad/bad-eof-after-quotes.ttl 1970-01-01 00:00:00.000000000 +0000 +++ serd-0.30.4/tests/bad/bad-eof-after-quotes.ttl 2019-12-12 11:32:57.454540000 +0000 @@ -0,0 +1,3 @@ +@prefix eg: . + +<> eg:comment "" \ No newline at end of file diff -Nru serd-0.30.2/tests/bad/bad-eof-at-string-start.ttl serd-0.30.4/tests/bad/bad-eof-at-string-start.ttl --- serd-0.30.2/tests/bad/bad-eof-at-string-start.ttl 1970-01-01 00:00:00.000000000 +0000 +++ serd-0.30.4/tests/bad/bad-eof-at-string-start.ttl 2019-12-12 11:32:57.454540000 +0000 @@ -0,0 +1,3 @@ +@prefix eg: . + +<> eg:comment " \ No newline at end of file diff -Nru serd-0.30.2/tests/bad/bad-eof-in-blank.ttl serd-0.30.4/tests/bad/bad-eof-in-blank.ttl --- serd-0.30.2/tests/bad/bad-eof-in-blank.ttl 2019-06-06 20:15:58.000000000 +0000 +++ serd-0.30.4/tests/bad/bad-eof-in-blank.ttl 2019-12-12 11:32:57.454540000 +0000 @@ -1,3 +1,3 @@ -@prefix eg: . +@prefix eg: . <> eg:thing [ eg:comment "Thing" \ No newline at end of file diff -Nru serd-0.30.2/tests/bad/bad-eof-in-escape.ttl serd-0.30.4/tests/bad/bad-eof-in-escape.ttl --- serd-0.30.2/tests/bad/bad-eof-in-escape.ttl 2019-06-06 20:15:58.000000000 +0000 +++ serd-0.30.4/tests/bad/bad-eof-in-escape.ttl 2019-12-12 11:32:57.454540000 +0000 @@ -1,3 +1,3 @@ -@prefix eg: . +@prefix eg: . 
<> eg:comment """\uA \ No newline at end of file diff -Nru serd-0.30.2/tests/bad/bad-eof-in-lang-suffix.ttl serd-0.30.4/tests/bad/bad-eof-in-lang-suffix.ttl --- serd-0.30.2/tests/bad/bad-eof-in-lang-suffix.ttl 2019-06-06 20:15:58.000000000 +0000 +++ serd-0.30.4/tests/bad/bad-eof-in-lang-suffix.ttl 2019-12-12 11:32:57.454540000 +0000 @@ -1,3 +1,3 @@ -@prefix eg: . +@prefix eg: . <> eg:comment "That ain't no language"@en-x \ No newline at end of file diff -Nru serd-0.30.2/tests/bad/bad-eof-in-lang.ttl serd-0.30.4/tests/bad/bad-eof-in-lang.ttl --- serd-0.30.2/tests/bad/bad-eof-in-lang.ttl 2019-06-06 20:15:58.000000000 +0000 +++ serd-0.30.4/tests/bad/bad-eof-in-lang.ttl 2019-12-12 11:32:57.454540000 +0000 @@ -1,3 +1,3 @@ -@prefix eg: . +@prefix eg: . <> eg:comment "That ain't no language"@a \ No newline at end of file diff -Nru serd-0.30.2/tests/bad/bad-eof-in-list.ttl serd-0.30.4/tests/bad/bad-eof-in-list.ttl --- serd-0.30.2/tests/bad/bad-eof-in-list.ttl 2019-06-06 20:15:58.000000000 +0000 +++ serd-0.30.4/tests/bad/bad-eof-in-list.ttl 2019-12-12 11:32:57.454540000 +0000 @@ -1,3 +1,3 @@ -@prefix eg: . +@prefix eg: . <> eg:thing ( eg:car eg:cdr \ No newline at end of file diff -Nru serd-0.30.2/tests/bad/bad-eof-in-long-string.ttl serd-0.30.4/tests/bad/bad-eof-in-long-string.ttl --- serd-0.30.2/tests/bad/bad-eof-in-long-string.ttl 1970-01-01 00:00:00.000000000 +0000 +++ serd-0.30.4/tests/bad/bad-eof-in-long-string.ttl 2019-12-12 11:32:57.454540000 +0000 @@ -0,0 +1,3 @@ +@prefix eg: . + +<> eg:comment """This is the string that never ends \ No newline at end of file diff -Nru serd-0.30.2/tests/bad/bad-eof-in-string.ttl serd-0.30.4/tests/bad/bad-eof-in-string.ttl --- serd-0.30.2/tests/bad/bad-eof-in-string.ttl 2019-06-06 20:15:58.000000000 +0000 +++ serd-0.30.4/tests/bad/bad-eof-in-string.ttl 2019-12-12 11:32:57.457873600 +0000 @@ -1,3 +1,3 @@ -@prefix eg: . +@prefix eg: . <> eg:comment "This is the string that never ends \ No newline at end of file diff -Nru serd-0.30.2/tests/bad/bad-eof-in-triple-quote.ttl serd-0.30.4/tests/bad/bad-eof-in-triple-quote.ttl --- serd-0.30.2/tests/bad/bad-eof-in-triple-quote.ttl 2019-06-06 20:15:58.000000000 +0000 +++ serd-0.30.4/tests/bad/bad-eof-in-triple-quote.ttl 2019-12-12 11:32:57.457873600 +0000 @@ -1,3 +1,3 @@ -@prefix eg: . +@prefix eg: . <> eg:comment """Hello"" \ No newline at end of file diff -Nru serd-0.30.2/tests/bad/bad-eof-in-uri-scheme.nt serd-0.30.4/tests/bad/bad-eof-in-uri-scheme.nt --- serd-0.30.2/tests/bad/bad-eof-in-uri-scheme.nt 1970-01-01 00:00:00.000000000 +0000 +++ serd-0.30.4/tests/bad/bad-eof-in-uri-scheme.nt 2019-12-12 11:32:57.457873600 +0000 @@ -0,0 +1 @@ + . +@prefix eg: . <> eg:uri . +@prefix eg: . <> eg:thing ( . \ No newline at end of file diff -Nru serd-0.30.2/tests/bad/manifest.ttl serd-0.30.4/tests/bad/manifest.ttl --- serd-0.30.2/tests/bad/manifest.ttl 2019-10-19 22:19:43.000000000 +0000 +++ serd-0.30.4/tests/bad/manifest.ttl 2020-04-25 14:46:28.790847000 +0000 @@ -30,6 +30,8 @@ <#bad-char-in-uri> <#bad-datatype> <#bad-dot-after-subject> + <#bad-eof-after-quotes> + <#bad-eof-at-string-start> <#bad-eof-in-blank> <#bad-eof-in-escape> <#bad-eof-in-lang-suffix> @@ -38,9 +40,11 @@ <#bad-eof-in-object-list2> <#bad-eof-in-object-list> <#bad-eof-in-predicate-list> + <#bad-eof-in-long-string> <#bad-eof-in-string> <#bad-eof-in-triple-quote> <#bad-eof-in-uri> + <#bad-eof-in-uri-scheme> <#bad-escape> <#bad-ext-namedblank-op> <#bad-hex-digit> @@ -186,6 +190,16 @@ mf:name "bad-dot-after-subject" ; mf:action . 
+<#bad-eof-after-quotes> + rdf:type rdft:TestTurtleNegativeSyntax ; + mf:name "bad-eof-after-quotes" ; + mf:action . + +<#bad-eof-at-string-start> + rdf:type rdft:TestTurtleNegativeSyntax ; + mf:name "bad-eof-at-string-start" ; + mf:action . + <#bad-eof-in-blank> rdf:type rdft:TestTurtleNegativeSyntax ; mf:name "bad-eof-in-blank" ; @@ -226,6 +240,11 @@ mf:name "bad-eof-in-predicate-list" ; mf:action . +<#bad-eof-in-long-string> + rdf:type rdft:TestTurtleNegativeSyntax ; + mf:name "bad-eof-in-long-string" ; + mf:action . + <#bad-eof-in-string> rdf:type rdft:TestTurtleNegativeSyntax ; mf:name "bad-eof-in-string" ; @@ -241,6 +260,11 @@ mf:name "bad-eof-in-uri" ; mf:action . +<#bad-eof-in-uri-scheme> + rdf:type rdft:TestNTriplesNegativeSyntax ; + mf:name "bad-eof-in-uri-scheme" ; + mf:action . + <#bad-escape> rdf:type rdft:TestTurtleNegativeSyntax ; mf:name "bad-escape" ; diff -Nru serd-0.30.2/tests/good/manifest.ttl serd-0.30.4/tests/good/manifest.ttl --- serd-0.30.2/tests/good/manifest.ttl 2019-10-19 22:19:43.000000000 +0000 +++ serd-0.30.4/tests/good/manifest.ttl 2019-12-12 11:32:57.457873600 +0000 @@ -18,7 +18,7 @@ <#test-30> <#test-a-without-whitespace> <#test-backspace> - <#test-bad-utf8> + <#test-base-nopath> <#test-base-query> <#test-blank-cont> <#test-blank-in-list> @@ -109,11 +109,11 @@ mf:action ; mf:result . -<#test-bad-utf8> +<#test-base-nopath> rdf:type rdft:TestTurtleEval ; - mf:name "test-bad-utf8" ; - mf:action ; - mf:result . + mf:name "test-base-nopath" ; + mf:action ; + mf:result . <#test-base-query> rdf:type rdft:TestTurtleEval ; diff -Nru serd-0.30.2/tests/good/test-bad-utf8.nt serd-0.30.4/tests/good/test-bad-utf8.nt --- serd-0.30.2/tests/good/test-bad-utf8.nt 2019-10-19 22:19:43.000000000 +0000 +++ serd-0.30.4/tests/good/test-bad-utf8.nt 1970-01-01 00:00:00.000000000 +0000 @@ -1,3 +0,0 @@ - "Impossible bytes: \uFFFD \uFFFD" . - "2 continuation bytes: \uFFFD" . - "Missing continuation: \uFFFD" . diff -Nru serd-0.30.2/tests/good/test-bad-utf8.ttl serd-0.30.4/tests/good/test-bad-utf8.ttl --- serd-0.30.2/tests/good/test-bad-utf8.ttl 2019-10-19 22:19:43.000000000 +0000 +++ serd-0.30.4/tests/good/test-bad-utf8.ttl 1970-01-01 00:00:00.000000000 +0000 @@ -1,3 +0,0 @@ - "Impossible bytes: þ ÿ" . - "2 continuation bytes: €¿" . - "Missing continuation: À" . diff -Nru serd-0.30.2/tests/good/test-base-nopath.nt serd-0.30.4/tests/good/test-base-nopath.nt --- serd-0.30.2/tests/good/test-base-nopath.nt 1970-01-01 00:00:00.000000000 +0000 +++ serd-0.30.4/tests/good/test-base-nopath.nt 2019-12-12 11:32:57.457873600 +0000 @@ -0,0 +1 @@ + . diff -Nru serd-0.30.2/tests/good/test-base-nopath.ttl serd-0.30.4/tests/good/test-base-nopath.ttl --- serd-0.30.2/tests/good/test-base-nopath.ttl 1970-01-01 00:00:00.000000000 +0000 +++ serd-0.30.4/tests/good/test-base-nopath.ttl 2019-12-12 11:32:57.457873600 +0000 @@ -0,0 +1,3 @@ +@base . + +

. diff -Nru serd-0.30.2/tests/good/test-empty-path-base.ttl serd-0.30.4/tests/good/test-empty-path-base.ttl --- serd-0.30.2/tests/good/test-empty-path-base.ttl 2019-06-06 20:18:53.000000000 +0000 +++ serd-0.30.4/tests/good/test-empty-path-base.ttl 2019-12-12 11:32:57.457873600 +0000 @@ -1,3 +1,3 @@ -@base . +@base . a . diff -Nru serd-0.30.2/tests/lax/manifest.ttl serd-0.30.4/tests/lax/manifest.ttl --- serd-0.30.2/tests/lax/manifest.ttl 1970-01-01 00:00:00.000000000 +0000 +++ serd-0.30.4/tests/lax/manifest.ttl 2019-12-12 11:32:57.457873600 +0000 @@ -0,0 +1,25 @@ +@prefix mf: . +@prefix rdf: . +@prefix rdfs: . +@prefix rdft: . + +<> + rdf:type mf:Manifest ; + rdfs:comment "Serd lax syntax test cases" ; + mf:entries ( + <#test-bad-uri> + <#test-bad-utf8> + ) . + +<#test-bad-uri> + rdf:type rdft:TestTurtleNegativeSyntax ; + mf:name "test-bad-uri" ; + mf:action ; + mf:result . + +<#test-bad-utf8> + rdf:type rdft:TestTurtleNegativeSyntax ; + mf:name "test-bad-utf8" ; + mf:action ; + mf:result . + diff -Nru serd-0.30.2/tests/lax/test-bad-uri.nt serd-0.30.4/tests/lax/test-bad-uri.nt --- serd-0.30.2/tests/lax/test-bad-uri.nt 1970-01-01 00:00:00.000000000 +0000 +++ serd-0.30.4/tests/lax/test-bad-uri.nt 2019-12-12 11:32:57.457873600 +0000 @@ -0,0 +1,3 @@ + . + . + . diff -Nru serd-0.30.2/tests/lax/test-bad-uri.ttl serd-0.30.4/tests/lax/test-bad-uri.ttl --- serd-0.30.2/tests/lax/test-bad-uri.ttl 1970-01-01 00:00:00.000000000 +0000 +++ serd-0.30.4/tests/lax/test-bad-uri.ttl 2019-12-12 11:32:57.457873600 +0000 @@ -0,0 +1,4 @@ + . + . + . + . diff -Nru serd-0.30.2/tests/lax/test-bad-utf8.nt serd-0.30.4/tests/lax/test-bad-utf8.nt --- serd-0.30.2/tests/lax/test-bad-utf8.nt 1970-01-01 00:00:00.000000000 +0000 +++ serd-0.30.4/tests/lax/test-bad-utf8.nt 2019-12-12 11:32:57.457873600 +0000 @@ -0,0 +1,6 @@ + "Impossible bytes: \uFFFD \uFFFD" . + "2 continuation bytes: \uFFFD" . + "Missing continuation: \uFFFD" . + "Impossible bytes: \uFFFD \uFFFD" . + "2 continuation bytes: \uFFFD" . + "Missing continuation: \uFFFD" . diff -Nru serd-0.30.2/tests/lax/test-bad-utf8.ttl serd-0.30.4/tests/lax/test-bad-utf8.ttl --- serd-0.30.2/tests/lax/test-bad-utf8.ttl 1970-01-01 00:00:00.000000000 +0000 +++ serd-0.30.4/tests/lax/test-bad-utf8.ttl 2019-12-12 11:32:57.457873600 +0000 @@ -0,0 +1,6 @@ + "Impossible bytes: þ ÿ" . + "2 continuation bytes: €¿" . + "Missing continuation: À" . + """Impossible bytes: þ ÿ""" . + """2 continuation bytes: €¿""" . + """Missing continuation: À""" . 
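The new tests/lax/ cases above pin down the reader's strict/lax split for broken UTF-8 and URIs: a strict parse treats such input as a syntax error, while a lax parse substitutes U+FFFD and keeps going, which is what the \uFFFD escapes in the .nt result files encode. The following is a minimal sketch of that behaviour against the public serd 0.30.x C API (serd_reader_new, serd_reader_set_strict, serd_reader_read_string); the triple and the stray 0xC0 byte are illustrative and not taken from the test suite, and the sink only counts statements:

    /* lax_vs_strict.c: illustrative sketch, not part of this patch.
       Build (assuming libserd-dev): cc lax_vs_strict.c $(pkg-config --cflags --libs serd-0) */
    #include <serd/serd.h>

    #include <assert.h>
    #include <stdbool.h>
    #include <stddef.h>

    typedef struct { unsigned n_statements; } Count;

    /* Statement sink that only counts the statements the reader emits. */
    static SerdStatus
    on_statement(void* handle, SerdStatementFlags flags,
                 const SerdNode* graph, const SerdNode* subject,
                 const SerdNode* predicate, const SerdNode* object,
                 const SerdNode* datatype, const SerdNode* lang)
    {
        (void)flags; (void)graph; (void)subject; (void)predicate;
        (void)object; (void)datatype; (void)lang;
        ++((Count*)handle)->n_statements;
        return SERD_SUCCESS;
    }

    int
    main(void)
    {
        /* One triple whose literal contains 0xC0, a UTF-8 start byte with no
           continuation, the same kind of input as tests/lax/test-bad-utf8.ttl. */
        static const uint8_t doc[] =
            "<http://example.org/s> <http://example.org/p> \"bad: \xC0\" .\n";

        /* Strict mode: the reader reports a syntax error (non-zero status). */
        Count       strict_count = { 0 };
        SerdReader* strict       = serd_reader_new(SERD_TURTLE, &strict_count, NULL,
                                                   NULL, NULL, on_statement, NULL);
        serd_reader_set_strict(strict, true);
        assert(serd_reader_read_string(strict, doc) != SERD_SUCCESS);
        serd_reader_free(strict);

        /* Lax mode: the bad byte is replaced with U+FFFD and the statement is
           still delivered, matching the \uFFFD escapes in the .nt result files. */
        Count       lax_count = { 0 };
        SerdReader* lax       = serd_reader_new(SERD_TURTLE, &lax_count, NULL,
                                                NULL, NULL, on_statement, NULL);
        serd_reader_set_strict(lax, false);
        serd_reader_read_string(lax, doc);
        assert(lax_count.n_statements == 1);
        serd_reader_free(lax);

        return 0;
    }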
diff -Nru serd-0.30.2/tests/serd_test.c serd-0.30.4/tests/serd_test.c --- serd-0.30.2/tests/serd_test.c 2019-10-19 22:19:43.000000000 +0000 +++ serd-0.30.4/tests/serd_test.c 2020-04-25 14:46:28.790847000 +0000 @@ -111,8 +111,66 @@ serd_node_free(&node); } -int -main(void) +static void +test_read_chunks(void) +{ + ReaderTest* const rt = (ReaderTest*)calloc(1, sizeof(ReaderTest)); + FILE* const f = tmpfile(); + static const char null = 0; + SerdReader* const reader = + serd_reader_new(SERD_TURTLE, rt, free, NULL, NULL, test_sink, NULL); + + assert(reader); + assert(serd_reader_get_handle(reader) == rt); + assert(f); + + SerdStatus st = serd_reader_start_stream(reader, f, NULL, false); + assert(st == SERD_SUCCESS); + + // Write two statement separated by null characters + fprintf(f, "@prefix eg: .\n"); + fprintf(f, "eg:s eg:p eg:o1 .\n"); + fwrite(&null, sizeof(null), 1, f); + fprintf(f, "eg:s eg:p eg:o2 .\n"); + fwrite(&null, sizeof(null), 1, f); + fseek(f, 0, SEEK_SET); + + // Read prefix + st = serd_reader_read_chunk(reader); + assert(st == SERD_SUCCESS); + assert(rt->n_statements == 0); + + // Read first statement + st = serd_reader_read_chunk(reader); + assert(st == SERD_SUCCESS); + assert(rt->n_statements == 1); + + // Read terminator + st = serd_reader_read_chunk(reader); + assert(st == SERD_SUCCESS); // FIXME: return SERD_FAILURE? + assert(rt->n_statements == 1); + + // Read second statement (after null terminator) + st = serd_reader_read_chunk(reader); + assert(st == SERD_SUCCESS); + assert(rt->n_statements == 2); + + // Read terminator + st = serd_reader_read_chunk(reader); + assert(st == SERD_SUCCESS); // FIXME: return SERD_FAILURE? + assert(rt->n_statements == 2); + + // EOF + st = serd_reader_read_chunk(reader); + assert(st == SERD_SUCCESS); // FIXME: return SERD_FAILURE? 
+ assert(rt->n_statements == 2); + + serd_reader_free(reader); + fclose(f); +} + +static void +test_string_to_double(void) { #define MAX 1000000 #define NUM_TESTS 1000 @@ -136,9 +194,11 @@ const double delta = fabs(num - expt_test_nums[i]); assert(delta <= DBL_EPSILON); } +} - // Test serd_node_new_decimal - +static void +test_double_to_node(void) +{ const double dbl_test_nums[] = { 0.0, 9.0, 10.0, .01, 2.05, -16.00001, 5.000000005, 0.0000000001, NAN, INFINITY }; @@ -157,9 +217,11 @@ assert(node.n_bytes == len && node.n_chars == len); serd_node_free(&node); } +} - // Test serd_node_new_integer - +static void +test_integer_to_node(void) +{ const long int_test_nums[] = { 0, -0, -23, 23, -12340, 1000, -1000 }; @@ -175,8 +237,11 @@ assert(node.n_bytes == len && node.n_chars == len); serd_node_free(&node); } +} - // Test serd_node_new_blob +static void +test_blob_to_node(void) +{ for (size_t size = 0; size < 256; ++size) { uint8_t* data = (uint8_t*)malloc(size); for (size_t i = 0; i < size; ++i) { @@ -201,9 +266,11 @@ serd_free(out); free(data); } +} - // Test serd_strlen - +static void +test_strlen(void) +{ const uint8_t str[] = { '"', '5', 0xE2, 0x82, 0xAC, '"', '\n', 0 }; size_t n_bytes; @@ -215,9 +282,11 @@ assert(len == 5); assert(serd_strlen(str, &n_bytes, NULL) == 5); +} - // Test serd_strerror - +static void +test_strerror(void) +{ const uint8_t* msg = NULL; assert(!strcmp((const char*)(msg = serd_strerror(SERD_SUCCESS)), "Success")); for (int i = SERD_FAILURE; i <= SERD_ERR_INTERNAL; ++i) { @@ -225,9 +294,11 @@ assert(strcmp((const char*)msg, "Success")); } msg = serd_strerror((SerdStatus)-1); +} - // Test serd_uri_to_path - +static void +test_uri_to_path(void) +{ const uint8_t* uri = (const uint8_t*)"file:///home/user/foo.ttl"; assert(!strcmp((const char*)serd_uri_to_path(uri), "/home/user/foo.ttl")); @@ -251,9 +322,11 @@ uri = (const uint8_t*)"C|/Windows/Sucks"; assert(!strcmp((const char*)serd_uri_to_path(uri), "C|/Windows/Sucks")); +} - // Test file URI escaping and parsing - +static void +test_uri_parsing(void) +{ test_file_uri(NULL, "C:/My 100%", true, "file:///C:/My%20100%%", NULL); test_file_uri("ahost", "C:\\Pointless Space", true, @@ -272,9 +345,11 @@ uint8_t* out_path = serd_file_uri_parse(USTR("file:///foo/%0Xbar"), NULL); assert(!strcmp((const char*)out_path, "/foo/bar")); serd_free(out_path); +} - // Test serd_node_equals - +static void +test_node_equals(void) +{ const uint8_t replacement_char_str[] = { 0xEF, 0xBF, 0xBD, 0 }; SerdNode lhs = serd_node_from_string(SERD_LITERAL, replacement_char_str); SerdNode rhs = serd_node_from_string(SERD_LITERAL, USTR("123")); @@ -286,9 +361,11 @@ SerdNode null_copy = serd_node_copy(&SERD_NODE_NULL); assert(serd_node_equals(&SERD_NODE_NULL, &null_copy)); +} - // Test serd_node_from_string - +static void +test_node_from_string(void) +{ SerdNode node = serd_node_from_string(SERD_LITERAL, (const uint8_t*)"hello\""); assert(node.n_bytes == 6 && node.n_chars == 6 && node.flags == SERD_HAS_QUOTE && @@ -296,9 +373,11 @@ node = serd_node_from_string(SERD_URI, NULL); assert(serd_node_equals(&node, &SERD_NODE_NULL)); +} - // Test serd_node_from_substring - +static void +test_node_from_substring(void) +{ SerdNode empty = serd_node_from_substring(SERD_LITERAL, NULL, 32); assert(!empty.buf && !empty.n_bytes && !empty.n_chars && !empty.flags && !empty.type); @@ -312,9 +391,11 @@ assert(a_b.n_bytes == 4 && a_b.n_chars == 4 && a_b.flags == SERD_HAS_QUOTE && !strncmp((const char*)a_b.buf, "a\"bc", 4)); +} - // Test serd_node_new_uri_from_string - 
+static void +test_uri_from_string(void) +{ SerdNode nonsense = serd_node_new_uri_from_string(NULL, NULL, NULL); assert(nonsense.type == SERD_NOTHING); @@ -330,7 +411,16 @@ serd_node_free(&nil); serd_node_free(&nil2); - // Test serd_node_new_relative_uri + serd_node_free(&base); +} + +static void +test_relative_uri(void) +{ + SerdURI base_uri; + SerdNode base = serd_node_new_uri_from_string(USTR("http://example.org/"), + NULL, &base_uri); + SerdNode abs = serd_node_from_string(SERD_URI, USTR("http://example.org/foo/bar")); SerdURI abs_uri; serd_uri_parse(abs.buf, &abs_uri); @@ -357,9 +447,11 @@ serd_node_free(&up); serd_node_free(&rel); serd_node_free(&base); +} - // Test SerdEnv - +static void +test_env(void) +{ SerdNode u = serd_node_from_string(SERD_URI, USTR("http://example.org/foo")); SerdNode b = serd_node_from_string(SERD_CURIE, USTR("invalid")); SerdNode c = serd_node_from_string(SERD_CURIE, USTR("eg.2:b")); @@ -367,13 +459,13 @@ serd_env_set_prefix_from_strings(env, USTR("eg.2"), USTR("http://example.org/")); assert(serd_env_set_base_uri(env, NULL)); - assert(serd_env_set_base_uri(env, &node)); - assert(serd_node_equals(serd_env_get_base_uri(env, NULL), &node)); + assert(serd_env_set_base_uri(env, &SERD_NODE_NULL)); + assert(serd_node_equals(serd_env_get_base_uri(env, NULL), &SERD_NODE_NULL)); SerdChunk prefix, suffix; assert(serd_env_expand(env, &b, &prefix, &suffix)); - SerdNode xnode = serd_env_expand_node(env, &node); + SerdNode xnode = serd_env_expand_node(env, &SERD_NODE_NULL); assert(serd_node_equals(&xnode, &SERD_NODE_NULL)); SerdNode xu = serd_env_expand_node(env, &u); @@ -402,10 +494,14 @@ SerdNode prefix_name; assert(!serd_env_qualify(env, &shorter_uri, &prefix_name, &suffix)); - // Test SerdReader and SerdWriter + serd_env_free(env); +} - const char* path = "serd_test.ttl"; +static void +test_writer(const char* const path) +{ FILE* fd = fopen(path, "wb"); + SerdEnv* env = serd_env_new(NULL); assert(fd); SerdWriter* writer = serd_writer_new( @@ -415,6 +511,8 @@ serd_writer_chop_blank_prefix(writer, USTR("tmp")); serd_writer_chop_blank_prefix(writer, NULL); + const SerdNode lit = serd_node_from_string(SERD_LITERAL, USTR("hello")); + assert(serd_writer_set_base_uri(writer, &lit)); assert(serd_writer_set_prefix(writer, &lit, &lit)); assert(serd_writer_end_anon(writer, NULL)); @@ -489,10 +587,13 @@ assert(!strcmp((const char*)out, "@base .\n")); serd_free(out); + serd_env_free(env); + fclose(fd); +} - // Rewind and test reader - fseek(fd, 0, SEEK_SET); - +static void +test_reader(const char* path) +{ ReaderTest* rt = (ReaderTest*)calloc(1, sizeof(ReaderTest)); SerdReader* reader = serd_reader_new( SERD_TURTLE, rt, free, @@ -518,9 +619,30 @@ assert(serd_reader_read_string(reader, USTR("This isn't Turtle at all."))); serd_reader_free(reader); - fclose(fd); +} - serd_env_free(env); +int +main(void) +{ + test_string_to_double(); + test_double_to_node(); + test_integer_to_node(); + test_blob_to_node(); + test_strlen(); + test_strerror(); + test_uri_to_path(); + test_uri_parsing(); + test_node_equals(); + test_node_from_string(); + test_node_from_substring(); + test_uri_from_string(); + test_relative_uri(); + test_env(); + test_read_chunks(); + + const char* const path = "serd_test.ttl"; + test_writer(path); + test_reader(path); printf("Success\n"); return 0; diff -Nru serd-0.30.2/waflib/Context.py serd-0.30.4/waflib/Context.py --- serd-0.30.2/waflib/Context.py 2019-10-19 17:59:11.000000000 +0000 +++ serd-0.30.4/waflib/Context.py 2020-04-26 16:04:17.523404600 +0000 @@ -6,20 +6,30 
@@ Classes and functions enabling the command system """ -import os, re, imp, sys +import os, re, sys from waflib import Utils, Errors, Logs import waflib.Node +if sys.hexversion > 0x3040000: + import types + class imp(object): + new_module = lambda x: types.ModuleType(x) +else: + import imp + # the following 3 constants are updated on each new release (do not touch) -HEXVERSION=0x2001200 +HEXVERSION=0x2001300 """Constant updated on new releases""" -WAFVERSION="2.0.18" +WAFVERSION="2.0.19" """Constant updated on new releases""" -WAFREVISION="314689b8994259a84f0de0aaef74d7ce91f541ad" +WAFREVISION="e83405712e95b47c040763fdfa468c04dfe72e4b" """Git revision when the waf version is updated""" +WAFNAME="waf" +"""Application name displayed on --help""" + ABI = 20 """Version of the build data cache file format (used in :py:const:`waflib.Context.DBFILE`)""" diff -Nru serd-0.30.2/waflib/extras/autoship.py serd-0.30.4/waflib/extras/autoship.py --- serd-0.30.2/waflib/extras/autoship.py 2019-10-20 23:42:45.000000000 +0000 +++ serd-0.30.4/waflib/extras/autoship.py 2020-04-26 16:04:17.526738200 +0000 @@ -12,14 +12,9 @@ sys.stderr.write("warning: %s\n" % msg) -def error_exit(msg): - sys.stderr.write("error: %s\n" % msg) - sys.exit(1) - - def ensure(condition, message): if not condition: - error_exit(message) + raise Exception(message) def get_project_info(top=None): @@ -34,16 +29,24 @@ loader = importlib.machinery.SourceFileLoader("wscript", wscript_path) spec = importlib.util.spec_from_loader("wscript", loader) wscript = importlib.util.module_from_spec(spec) - spec.loader.exec_module(wscript) - return { - "name": wscript.APPNAME, - "version": wscript.VERSION, - "uri": getattr(wscript, "uri", None), - "title": getattr(wscript, "title", wscript.APPNAME.title()), - "dist_pattern": wscript.dist_pattern, - "post_tags": wscript.post_tags, - } + try: + spec.loader.exec_module(wscript) + + info = {"name": wscript.APPNAME, "version": wscript.VERSION} + + for key in ["uri", "title", "dist_pattern", "post_tags"]: + value = getattr(wscript, key, None) + if value is not None: + info[key] = value + + if "title" not in info: + info["title"] = wscript.APPNAME.title() + + return info + + except Exception: + return {} def parse_version(revision): @@ -80,13 +83,13 @@ return "".join([indent + "* %s\n" % item for item in items]) -def get_release_json(entry): +def get_release_json(title, entry): """Return a release description in Gitlab JSON format""" import json version = entry["revision"] desc = { - "name": "Serd %s" % version, + "name": "%s %s" % (title, version), "tag_name": "v%s" % version, "description": get_items_markdown(entry["items"]), "released_at": entry["date"].isoformat(), @@ -107,7 +110,7 @@ while True: # Read header line head = f.readline() - matches = re.match(r"([^ ]*) \((.*)\) ([a-zA-z]*)", head) + matches = re.match(r"([^(]*) \(([0-9.]*)\) ([a-zA-z]*)", head) if matches is None: break @@ -189,10 +192,9 @@ news.write("\n\n -- %s %s\n" % (author, date)) -def read_ttl_news(name, in_files, top_entries=None, dist_pattern=None): +def read_ttl_news(name, in_files, dist_pattern=None): """Read news entries from Turtle""" - import datetime import rdflib doap = rdflib.Namespace("http://usefulinc.com/ns/doap#") @@ -211,6 +213,14 @@ if f[2].endswith(".ttl"): g.parse(f[2], format="turtle") + def parse_datetime(date): + import datetime + + try: + return datetime.datetime.strptime(date, "%Y-%m-%dT%H:%M:%S%z") + except Exception: + return datetime.datetime.strptime(date, "%Y-%m-%d") + entries = {} for r in g.triples([proj, 
doap.release, None]): release = r[2] @@ -229,7 +239,8 @@ if revision and date and blamee and changeset: status = "stable" if is_release_version(revision) else "unstable" - iso_date = datetime.datetime.strptime(date, "%Y-%m-%dT%H:%M:%S%z") + iso_date = parse_datetime(date) + e = { "name": name, "revision": str(revision), @@ -244,10 +255,6 @@ for i in g.triples([changeset, dcs.item, None]): item = str(g.value(i[2], rdfs.label, None)) e["items"] += [item] - if dist and top_entries is not None: - if dist not in top_entries: - top_entries[dist] = {"items": []} - top_entries[dist]["items"] += ["%s: %s" % (name, item)] e["blamee_name"] = str(g.value(blamee, foaf.name, None)) e["blamee_mbox"] = str(g.value(blamee, foaf.mbox, None)) @@ -286,15 +293,25 @@ subject = rdflib.URIRef(subject_uri) g.add((subject, rdf.type, doap.Project)) else: - # Find project URI to use as subject, and optionally the maintainer + # Find project URI to use as subject subject = g.value(None, rdf.type, doap.Project) ensure(subject is not None, "Unable to find project URI for subject") + # Get doap:name from first NEWS entry if it is not already present + if g.value(subject, doap.name, None) is None: + first_entry = next(iter(entries.values())) + g.add((subject, doap.name, rdflib.Literal(first_entry["name"]))) + + # Get maintainer maintainer = g.value(subject, doap.maintainer, None) + if not maintainer: + maintainer = g.value(subject, doap.developer, None) - for r, e in entries.items(): + revisions = sorted(entries.keys(), reverse=True) + for r in revisions: + e = entries[r] semver = parse_version(e["revision"]) - ver_string = "%03d%03d%03d" % semver + ver_string = ("%03d" * len(semver)) % semver release = rdflib.BNode("r%s" % ver_string) g.add((subject, doap.release, release)) @@ -371,7 +388,7 @@ args = ap.parse_args(sys.argv[2:]) entries = read_news( - args.in_path, args.in_format, args.unsorted, args.timezones + args.in_path, args.in_format, args.unsorted, not args.timezones ) with open(args.out_path, "w") as news: @@ -385,13 +402,14 @@ ap.add_argument("--template") ap.add_argument("--unsorted", action="store_true", help="don't sort items") ap.add_argument("--in-format", default="NEWS", choices=["NEWS", "turtle"]) + ap.add_argument("--uri", help="project URI") args = ap.parse_args(sys.argv[2:]) - info = get_project_info() - entries = read_news(args.in_path, args.in_format, info["dist_pattern"]) + entries = read_news(args.in_path, args.in_format, args.unsorted, True) + uri = args.uri if args.uri else get_project_info()["uri"] write_ttl_news( - entries, args.out_path, template=args.template, subject_uri=info["uri"] + entries, args.out_path, template=args.template, subject_uri=uri ) @@ -404,8 +422,9 @@ info = get_project_info() description = get_blurb("README.md") title = info["title"] - meta["Tags"] = ", ".join(info["post_tags"]) meta["Author"] = meta.get("Author", os.getenv("USER")) + if info["post_tags"]: + meta["Tags"] = ", ".join(info["post_tags"]) try: os.mkdir(out_dir) @@ -452,13 +471,75 @@ ap.add_argument("--title", help="Title for posts") args = ap.parse_args(sys.argv[2:]) - info = get_project_info() - entries = read_news(args.in_path, args.in_format, info["dist_pattern"]) + entries = read_news(args.in_path, args.in_format) meta = {"Author": args.author} if args.author else {} write_posts(entries, args.out_dir, meta) +def json_command(): + ap = argparse.ArgumentParser(description="Get release description in JSON") + ap.add_argument("version", help="Version number") + ap.add_argument("--in-path", default="NEWS", 
help="input file") + ap.add_argument("--in-format", default="NEWS", choices=["NEWS", "turtle"]) + + args = ap.parse_args(sys.argv[2:]) + info = get_project_info() + semver = parse_version(args.version) + entries = read_news(args.in_path, args.in_format) + + print(get_release_json(info["title"], entries[semver])) + + +def post_lab_release(version, lab, group, token, dry_run=False): + import shlex + import subprocess + + def run_cmd(cmd): + if dry_run: + print(" ".join([shlex.quote(i) for i in cmd])) + else: + subprocess.check_call(cmd) + + info = get_project_info() + name = info["name"] + title = info["title"] + semver = parse_version(version) + entries = read_news() + url = "https://%s/api/v4/projects/%s%%2F%s" % (lab, group, name) + dry_run = dry_run + + # Check that this is a release version + ensure(is_release_version(semver), "%s is an unstable version" % version) + + # Post Gitlab release + post_cmd = [ + "curl", + "-XPOST", + "-HContent-Type: application/json", + "-HPRIVATE-TOKEN: " + token, + "-d" + get_release_json(title, entries[semver]), + "%s/releases" % url, + ] + run_cmd(post_cmd) + + report("Posted Gitlab release %s %s" % (name, version)) + + +def post_lab_release_command(): + ap = argparse.ArgumentParser(description="Post Gitlab release") + ap.add_argument("version", help="Version number") + ap.add_argument("group", help="Gitlab user or group for project") + ap.add_argument("token", help="Gitlab access token") + ap.add_argument("--lab", default="gitlab.com", help="Gitlab instance") + ap.add_argument("--dry-run", action="store_true", help="do nothing") + args = ap.parse_args(sys.argv[2:]) + + post_lab_release( + args.version, args.lab, args.group, args.token, args.dry_run + ) + + def release(args, posts_dir=None, remote_dist_dir=None, dist_name=None): import json import os @@ -468,6 +549,8 @@ def run_cmd(cmd): if args.dry_run: print(" ".join([shlex.quote(i) for i in cmd])) + else: + subprocess.check_call(cmd) info = get_project_info() name = info["name"] @@ -500,8 +583,8 @@ # Check that working copy is clean branch_cmd = ["git", "rev-parse", "--abbrev-ref", "HEAD"] - branch = subprocess.check_output(branch_cmd).decode('ascii').strip() - status_cmd = ["git", "status", "--porcelain", "-b", "--ignore-submodules"] + branch = subprocess.check_output(branch_cmd).decode("ascii").strip() + status_cmd = ["git", "status", "--porcelain", "-b"] status = subprocess.check_output(status_cmd).decode("utf-8") sys.stdout.write(status) expected_status = "## %s...origin/%s\n" % (branch, branch) @@ -540,19 +623,12 @@ run_cmd(["scp", sig, os.path.join(remote_dist_dir, sig)]) # Post Gitlab release - post_cmd = [ - "curl", - "-XPOST", - "-HContent-Type: application/json", - "-HPRIVATE-TOKEN: " + args.token, - "-d" + get_release_json(entries[semver]), - "https://gitlab.com/api/v4/projects/drobilla%2Fserd/releases", - ] - run_cmd(post_cmd) + post_lab_release(version, args.lab, args.group, args.token, dry_run) report("Released %s %s" % (name, version)) report("Remember to upload posts and push to other remotes!") + def release_command(): ap = argparse.ArgumentParser(description="Release project") ap.add_argument("group", help="Gitlab user or group for project") diff -Nru serd-0.30.2/waflib/extras/autowaf.py serd-0.30.4/waflib/extras/autowaf.py --- serd-0.30.2/waflib/extras/autowaf.py 2019-10-20 23:10:48.000000000 +0000 +++ serd-0.30.4/waflib/extras/autowaf.py 2020-04-26 16:04:17.526738200 +0000 @@ -276,7 +276,6 @@ Options.options.no_coverage = True append_cxx_flags(['/nologo', '/FS', - '/DNDEBUG', 
'/D_CRT_SECURE_NO_WARNINGS', '/experimental:external', '/external:W0', @@ -352,7 +351,7 @@ try: conf.env.BUILD_TESTS = Options.options.build_tests conf.env.NO_COVERAGE = Options.options.no_coverage - if not Options.options.no_coverage: + if conf.env.BUILD_TESTS and not Options.options.no_coverage: # Set up unit test code coverage if conf.is_defined('CLANG'): for cov in [conf.env.CC[0].replace('clang', 'llvm-cov'), @@ -471,12 +470,13 @@ version, has_objects=True, include_path=None, - lib_path=None): + lib_path=None, + lib=None): "Set up environment for local library as if found via pkg-config." NAME = name.upper() major_ver = version.split('.')[0] pkg_var_name = 'PKG_' + name.replace('-', '_') + '_' + major_ver - lib_name = '%s-%s' % (name, major_ver) + lib_name = '%s-%s' % (lib if lib is not None else name, major_ver) if lib_path is None: lib_path = str(conf.path.get_bld()) @@ -915,6 +915,8 @@ stdout=None, stderr=None, verbosity=1): + import tempfile + def stream(s): return open(s, 'wb') if type(s) == str else s @@ -928,11 +930,25 @@ output = TestOutput(expected) with open(os.devnull, 'wb') as null: out = null if verbosity < 3 and not stdout else stdout - err = null if verbosity < 2 and not stderr else stderr + tmp_err = None + if stderr or verbosity >= 2: + err = stderr + else: + tmp_err = tempfile.TemporaryFile() + err = tmp_err + proc = subprocess.Popen(test, stdin=stdin, stdout=out, stderr=err) output.stdout, output.stderr = proc.communicate() output.result = proc.returncode + if tmp_err is not None: + if output.result != expected: + tmp_err.seek(0) + for line in tmp_err: + sys.stderr.write(line.decode('utf-8')) + + tmp_err.close() + if output and verbosity > 0: self.tst.log_good(' OK', name) @@ -980,17 +996,18 @@ self.max_depth = max(self.max_depth, len(self.stack) - 1) bld_dir = node.get_bld().parent - if bld_dir != self.path.get_bld(): - Logs.info('') - self.original_dir = os.getcwd() - Logs.info("Waf: Entering directory `%s'\n", bld_dir) - os.chdir(str(bld_dir)) + if hasattr(wscript_module, 'test'): + self.original_dir = os.getcwd() + Logs.info("Waf: Entering directory `%s'", bld_dir) + os.chdir(str(bld_dir)) - if not self.env.NO_COVERAGE and str(node.parent) == Context.top_dir: - self.clear_coverage() + if not self.env.NO_COVERAGE and str(node.parent) == Context.top_dir: + self.clear_coverage() + + Logs.info('') + self.log_good('=' * 10, 'Running %s tests\n', group_name) - self.log_good('=' * 10, 'Running %s tests', group_name) super(TestContext, self).pre_recurse(node) def test_result(self, success): @@ -1011,9 +1028,12 @@ duration = (bench_time() - self.start_time) * 1000.0 is_top = str(node.parent) == str(Context.top_dir) - if is_top and self.max_depth > 1: - Logs.info('') + wscript_module = Context.load_module(node.abspath()) + if not hasattr(wscript_module, 'test'): + os.chdir(self.original_dir) + return + Logs.info('') self.log_good('=' * 10, '%d tests from %s ran (%d ms total)', scope.n_total, scope.name, duration) @@ -1029,9 +1049,8 @@ Logs.pprint('GREEN', '[ PASSED ] %d tests' % successes) if scope.n_failed > 0: Logs.pprint('RED', '[ FAILED ] %d tests' % scope.n_failed) - if is_top: - Logs.info("\nWaf: Leaving directory `%s'" % os.getcwd()) + Logs.info("\nWaf: Leaving directory `%s'" % os.getcwd()) os.chdir(self.original_dir) def execute(self): diff -Nru serd-0.30.2/waflib/extras/clang_compilation_database.py serd-0.30.4/waflib/extras/clang_compilation_database.py --- serd-0.30.2/waflib/extras/clang_compilation_database.py 2019-10-17 12:25:28.000000000 +0000 +++ 
serd-0.30.4/waflib/extras/clang_compilation_database.py 2020-04-26 16:04:17.526738200 +0000 @@ -1,6 +1,7 @@ #!/usr/bin/env python # encoding: utf-8 # Christoph Koke, 2013 +# Alibek Omarov, 2019 """ Writes the c and cpp compile commands into build/compile_commands.json @@ -8,14 +9,23 @@ Usage: - def configure(conf): - conf.load('compiler_cxx') - ... - conf.load('clang_compilation_database') + Load this tool in `options` to be able to generate database + by request in command-line and before build: + + $ waf clangdb + + def options(opt): + opt.load('clang_compilation_database') + + Otherwise, load only in `configure` to generate it always before build. + + def configure(conf): + conf.load('compiler_cxx') + ... + conf.load('clang_compilation_database') """ -import sys, os, json, shlex, pipes -from waflib import Logs, TaskGen, Task +from waflib import Logs, TaskGen, Task, Build, Scripting Task.Task.keep_last_cmd = True @@ -23,63 +33,103 @@ @TaskGen.after_method('process_use') def collect_compilation_db_tasks(self): "Add a compilation database entry for compiled tasks" - try: - clang_db = self.bld.clang_compilation_database_tasks - except AttributeError: - clang_db = self.bld.clang_compilation_database_tasks = [] - self.bld.add_post_fun(write_compilation_database) + if not isinstance(self.bld, ClangDbContext): + return tup = tuple(y for y in [Task.classes.get(x) for x in ('c', 'cxx')] if y) for task in getattr(self, 'compiled_tasks', []): if isinstance(task, tup): - clang_db.append(task) + self.bld.clang_compilation_database_tasks.append(task) -def write_compilation_database(ctx): - "Write the clang compilation database as JSON" - database_file = ctx.bldnode.make_node('compile_commands.json') - Logs.info('Build commands will be stored in %s', database_file.path_from(ctx.path)) - try: - root = json.load(database_file) - except IOError: - root = [] - clang_db = dict((x['file'], x) for x in root) - for task in getattr(ctx, 'clang_compilation_database_tasks', []): +class ClangDbContext(Build.BuildContext): + '''generates compile_commands.json by request''' + cmd = 'clangdb' + clang_compilation_database_tasks = [] + + def write_compilation_database(self): + """ + Write the clang compilation database as JSON + """ + database_file = self.bldnode.make_node('compile_commands.json') + Logs.info('Build commands will be stored in %s', database_file.path_from(self.path)) try: - cmd = task.last_cmd - except AttributeError: - continue - directory = getattr(task, 'cwd', ctx.variant_dir) - f_node = task.inputs[0] - filename = os.path.relpath(f_node.abspath(), directory) - entry = { - "directory": directory, - "arguments": cmd, - "file": filename, - } - clang_db[filename] = entry - root = list(clang_db.values()) - database_file.write(json.dumps(root, indent=2)) - -# Override the runnable_status function to do a dummy/dry run when the file doesn't need to be compiled. -# This will make sure compile_commands.json is always fully up to date. -# Previously you could end up with a partial compile_commands.json if the build failed. 
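The clang_compilation_database.py rewrite above replaces the old approach (removed just below), which monkey-patched runnable_status on the c/cxx task classes so that even up-to-date tasks recorded a last_cmd, with a dedicated ClangDbContext that dry-runs all c/cxx tasks under a separate `waf clangdb` command. In both versions each compiled source becomes one entry in compile_commands.json. The sketch below is hypothetical: it only shows the entry shape using the keys from the hunk above, with invented placeholder paths and compiler command.

# Hypothetical example of what write_compilation_database() emits: one JSON
# object per compiled source. Paths and the compiler command are placeholders.
import json

entries = [{
    "directory": "/home/user/serd/build",                            # task.get_cwd().abspath()
    "arguments": ["cc", "-c", "-o", "serd.c.1.o", "../src/serd.c"],  # task.last_cmd
    "file": "../src/serd.c",                                         # input node path relative to the cwd
}]
print(json.dumps(entries, indent=2))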
-for x in ('c', 'cxx'): - if x not in Task.classes: - continue - - t = Task.classes[x] - - def runnable_status(self): - def exec_command(cmd, **kw): - pass - - run_status = self.old_runnable_status() - if run_status == Task.SKIP_ME: - setattr(self, 'old_exec_command', getattr(self, 'exec_command', None)) - setattr(self, 'exec_command', exec_command) - self.run() - setattr(self, 'exec_command', getattr(self, 'old_exec_command', None)) - return run_status + root = database_file.read_json() + except IOError: + root = [] + clang_db = dict((x['file'], x) for x in root) + for task in self.clang_compilation_database_tasks: + try: + cmd = task.last_cmd + except AttributeError: + continue + f_node = task.inputs[0] + filename = f_node.path_from(task.get_cwd()) + entry = { + "directory": task.get_cwd().abspath(), + "arguments": cmd, + "file": filename, + } + clang_db[filename] = entry + root = list(clang_db.values()) + database_file.write_json(root) + + def execute(self): + """ + Build dry run + """ + self.restore() + + if not self.all_envs: + self.load_envs() + + self.recurse([self.run_dir]) + self.pre_build() + + # we need only to generate last_cmd, so override + # exec_command temporarily + def exec_command(self, *k, **kw): + return 0 + + for g in self.groups: + for tg in g: + try: + f = tg.post + except AttributeError: + pass + else: + f() + + if isinstance(tg, Task.Task): + lst = [tg] + else: lst = tg.tasks + for tsk in lst: + tup = tuple(y for y in [Task.classes.get(x) for x in ('c', 'cxx')] if y) + if isinstance(tsk, tup): + old_exec = tsk.exec_command + tsk.exec_command = exec_command + tsk.run() + tsk.exec_command = old_exec + + self.write_compilation_database() + +EXECUTE_PATCHED = False +def patch_execute(): + global EXECUTE_PATCHED + + if EXECUTE_PATCHED: + return + + def new_execute_build(self): + """ + Invoke clangdb command before build + """ + if type(self) == Build.BuildContext: + Scripting.run_command('clangdb') + + old_execute_build(self) + + old_execute_build = getattr(Build.BuildContext, 'execute_build', None) + setattr(Build.BuildContext, 'execute_build', new_execute_build) + EXECUTE_PATCHED = True - setattr(t, 'old_runnable_status', getattr(t, 'runnable_status', None)) - setattr(t, 'runnable_status', runnable_status) +patch_execute() diff -Nru serd-0.30.2/waflib/extras/javatest.py serd-0.30.4/waflib/extras/javatest.py --- serd-0.30.2/waflib/extras/javatest.py 2019-10-17 12:25:28.000000000 +0000 +++ serd-0.30.4/waflib/extras/javatest.py 2020-04-26 16:04:17.526738200 +0000 @@ -1,6 +1,6 @@ #! /usr/bin/env python # encoding: utf-8 -# Federico Pellegrin, 2017 (fedepell) +# Federico Pellegrin, 2019 (fedepell) """ Provides Java Unit test support using :py:class:`waflib.Tools.waf_unit_test.utest` @@ -11,6 +11,10 @@ but should be easily expandable to other frameworks given the flexibility of ut_str provided by the standard waf unit test environment. +The extra takes care also of managing non-java dependencies (ie. C/C++ libraries +using JNI or Python modules via JEP) and setting up the environment needed to run +them. + Example usage: def options(opt): @@ -20,15 +24,15 @@ conf.load('java javatest') def build(bld): - + [ ... mainprog is built here ... 
] bld(features = 'javac javatest', - srcdir = 'test/', - outdir = 'test', + srcdir = 'test/', + outdir = 'test', sourcepath = ['test'], - classpath = [ 'src' ], - basedir = 'test', + classpath = [ 'src' ], + basedir = 'test', use = ['JAVATEST', 'mainprog'], # mainprog is the program being tested in src/ ut_str = 'java -cp ${CLASSPATH} ${JTRUNNER} ${SRC}', jtest_source = bld.path.ant_glob('test/*.xml'), @@ -53,10 +57,91 @@ """ import os -from waflib import Task, TaskGen, Options +from waflib import Task, TaskGen, Options, Errors, Utils, Logs +from waflib.Tools import ccroot + +def _process_use_rec(self, name): + """ + Recursively process ``use`` for task generator with name ``name``.. + Used by javatest_process_use. + """ + if name in self.javatest_use_not or name in self.javatest_use_seen: + return + try: + tg = self.bld.get_tgen_by_name(name) + except Errors.WafError: + self.javatest_use_not.add(name) + return + + self.javatest_use_seen.append(name) + tg.post() + + for n in self.to_list(getattr(tg, 'use', [])): + _process_use_rec(self, n) @TaskGen.feature('javatest') -@TaskGen.after_method('apply_java', 'use_javac_files', 'set_classpath') +@TaskGen.after_method('process_source', 'apply_link', 'use_javac_files') +def javatest_process_use(self): + """ + Process the ``use`` attribute which contains a list of task generator names and store + paths that later is used to populate the unit test runtime environment. + """ + self.javatest_use_not = set() + self.javatest_use_seen = [] + self.javatest_libpaths = [] # strings or Nodes + self.javatest_pypaths = [] # strings or Nodes + self.javatest_dep_nodes = [] + + names = self.to_list(getattr(self, 'use', [])) + for name in names: + _process_use_rec(self, name) + + def extend_unique(lst, varlst): + ext = [] + for x in varlst: + if x not in lst: + ext.append(x) + lst.extend(ext) + + # Collect type specific info needed to construct a valid runtime environment + # for the test. + for name in self.javatest_use_seen: + tg = self.bld.get_tgen_by_name(name) + + # Python-Java embedding crosstools such as JEP + if 'py' in tg.features: + # Python dependencies are added to PYTHONPATH + pypath = getattr(tg, 'install_from', tg.path) + + if 'buildcopy' in tg.features: + # Since buildcopy is used we assume that PYTHONPATH in build should be used, + # not source + extend_unique(self.javatest_pypaths, [pypath.get_bld().abspath()]) + + # Add buildcopy output nodes to dependencies + extend_unique(self.javatest_dep_nodes, [o for task in getattr(tg, 'tasks', []) for o in getattr(task, 'outputs', [])]) + else: + # If buildcopy is not used, depend on sources instead + extend_unique(self.javatest_dep_nodes, tg.source) + extend_unique(self.javatest_pypaths, [pypath.abspath()]) + + + if getattr(tg, 'link_task', None): + # For tasks with a link_task (C, C++, D et.c.) include their library paths: + if not isinstance(tg.link_task, ccroot.stlink_task): + extend_unique(self.javatest_dep_nodes, tg.link_task.outputs) + extend_unique(self.javatest_libpaths, tg.link_task.env.LIBPATH) + + if 'pyext' in tg.features: + # If the taskgen is extending Python we also want to add the interpreter libpath. 
+ extend_unique(self.javatest_libpaths, tg.link_task.env.LIBPATH_PYEXT) + else: + # Only add to libpath if the link task is not a Python extension + extend_unique(self.javatest_libpaths, [tg.link_task.outputs[0].parent.abspath()]) + + +@TaskGen.feature('javatest') +@TaskGen.after_method('apply_java', 'use_javac_files', 'set_classpath', 'javatest_process_use') def make_javatest(self): """ Creates a ``utest`` task with a populated environment for Java Unit test execution @@ -65,6 +150,9 @@ tsk = self.create_task('utest') tsk.set_run_after(self.javac_task) + # Dependencies from recursive use analysis + tsk.dep_nodes.extend(self.javatest_dep_nodes) + # Put test input files as waf_unit_test relies on that for some prints and log generation # If jtest_source is there, this is specially useful for passing XML for TestNG # that contain test specification, use that as inputs, otherwise test sources @@ -97,6 +185,21 @@ if not hasattr(self, 'ut_env'): self.ut_env = dict(os.environ) + def add_paths(var, lst): + # Add list of paths to a variable, lst can contain strings or nodes + lst = [ str(n) for n in lst ] + Logs.debug("ut: %s: Adding paths %s=%s", self, var, lst) + self.ut_env[var] = os.pathsep.join(lst) + os.pathsep + self.ut_env.get(var, '') + + add_paths('PYTHONPATH', self.javatest_pypaths) + + if Utils.is_win32: + add_paths('PATH', self.javatest_libpaths) + elif Utils.unversioned_sys_platform() == 'darwin': + add_paths('DYLD_LIBRARY_PATH', self.javatest_libpaths) + add_paths('LD_LIBRARY_PATH', self.javatest_libpaths) + else: + add_paths('LD_LIBRARY_PATH', self.javatest_libpaths) def configure(ctx): cp = ctx.env.CLASSPATH or '.' diff -Nru serd-0.30.2/waflib/extras/lv2.py serd-0.30.4/waflib/extras/lv2.py --- serd-0.30.2/waflib/extras/lv2.py 2019-10-17 12:25:28.000000000 +0000 +++ serd-0.30.4/waflib/extras/lv2.py 2019-12-12 18:53:02.838619500 +0000 @@ -1,8 +1,10 @@ import os +import re import sys from waflib import Logs from waflib import Options +from waflib import TaskGen def options(opt): conf_opts = opt.get_option_group('Configuration options') @@ -37,7 +39,7 @@ return ['~/.lv2', '/usr/%s/lv2' % libdirname, '/usr/local/%s/lv2' % libdirname] - + def configure(conf): def env_path(parent_dir_var, name): parent = os.getenv(parent_dir_var) @@ -70,6 +72,20 @@ else: conf.env['LV2DIR'] = os.path.join(conf.env['LIBDIR'], 'lv2') - # Add default LV2_PATH to runtime environment for tests that use plugins - if 'LV2_PATH' not in os.environ: - conf.run_env['LV2_PATH'] = default_lv2_path(conf) + # Define dynamically loadable module pattern and extension + lib_pat = None + if 'cshlib_PATTERN' in conf.env: + lib_pat = re.sub('^lib', '', conf.env.cshlib_PATTERN) + elif 'cxxshlib_PATTERN' in conf.env: + lib_pat = re.sub('^lib', '', conf.env.cxxshlib_PATTERN) + + if lib_pat is not None: + conf.env['LV2_LIB_PATTERN'] = lib_pat + conf.env['LV2_LIB_EXT'] = lib_pat[lib_pat.rfind('.'):] + +@TaskGen.feature('lv2lib') +@TaskGen.before_method('apply_link', 'propagate_uselib_vars') +def build_lv2_lib(self): + """Change library pattern to build a module without the "lib" prefix""" + self.env.cshlib_PATTERN = self.env.LV2_LIB_PATTERN + self.env.cxxshlib_PATTERN = self.env.LV2_LIB_PATTERN diff -Nru serd-0.30.2/waflib/extras/msvc_pdb.py serd-0.30.4/waflib/extras/msvc_pdb.py --- serd-0.30.2/waflib/extras/msvc_pdb.py 1970-01-01 00:00:00.000000000 +0000 +++ serd-0.30.4/waflib/extras/msvc_pdb.py 2020-04-26 16:04:17.526738200 +0000 @@ -0,0 +1,46 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Rafaël Kooi 2019 + +from waflib import 
TaskGen + +@TaskGen.feature('c', 'cxx', 'fc') +@TaskGen.after_method('propagate_uselib_vars') +def add_pdb_per_object(self): + """For msvc/fortran, specify a unique compile pdb per object, to work + around LNK4099. Flags are updated with a unique /Fd flag based on the + task output name. This is separate from the link pdb. + """ + if not hasattr(self, 'compiled_tasks'): + return + + link_task = getattr(self, 'link_task', None) + + for task in self.compiled_tasks: + if task.inputs and task.inputs[0].name.lower().endswith('.rc'): + continue + + add_pdb = False + for flagname in ('CFLAGS', 'CXXFLAGS', 'FCFLAGS'): + # several languages may be used at once + for flag in task.env[flagname]: + if flag[1:].lower() == 'zi': + add_pdb = True + break + + if add_pdb: + node = task.outputs[0].change_ext('.pdb') + pdb_flag = '/Fd:' + node.abspath() + + for flagname in ('CFLAGS', 'CXXFLAGS', 'FCFLAGS'): + buf = [pdb_flag] + for flag in task.env[flagname]: + if flag[1:3] == 'Fd' or flag[1:].lower() == 'fs' or flag[1:].lower() == 'mp': + continue + buf.append(flag) + task.env[flagname] = buf + + if link_task and not node in link_task.dep_nodes: + link_task.dep_nodes.append(node) + if not node in task.outputs: + task.outputs.append(node) diff -Nru serd-0.30.2/waflib/extras/pytest.py serd-0.30.4/waflib/extras/pytest.py --- serd-0.30.2/waflib/extras/pytest.py 2019-10-17 12:25:28.000000000 +0000 +++ serd-0.30.4/waflib/extras/pytest.py 2020-04-26 16:04:17.526738200 +0000 @@ -40,6 +40,8 @@ - `pytest_libpath` attribute is used to manually specify additional linker paths. +3. Java class search path (CLASSPATH) of any Java/Javalike dependency + Note: `pytest` cannot automatically determine the correct `PYTHONPATH` for `pyext` taskgens because the extension might be part of a Python package or used standalone: @@ -119,6 +121,7 @@ self.pytest_use_seen = [] self.pytest_paths = [] # strings or Nodes self.pytest_libpaths = [] # strings or Nodes + self.pytest_javapaths = [] # strings or Nodes self.pytest_dep_nodes = [] names = self.to_list(getattr(self, 'use', [])) @@ -157,6 +160,17 @@ extend_unique(self.pytest_dep_nodes, tg.source) extend_unique(self.pytest_paths, [pypath.abspath()]) + if 'javac' in tg.features: + # If a JAR is generated point to that, otherwise to directory + if getattr(tg, 'jar_task', None): + extend_unique(self.pytest_javapaths, [tg.jar_task.outputs[0].abspath()]) + else: + extend_unique(self.pytest_javapaths, [tg.path.get_bld()]) + + # And add respective dependencies if present + if tg.use_lst: + extend_unique(self.pytest_javapaths, tg.use_lst) + if getattr(tg, 'link_task', None): # For tasks with a link_task (C, C++, D et.c.) include their library paths: if not isinstance(tg.link_task, ccroot.stlink_task): @@ -212,8 +226,9 @@ Logs.debug("ut: %s: Adding paths %s=%s", self, var, lst) self.ut_env[var] = os.pathsep.join(lst) + os.pathsep + self.ut_env.get(var, '') - # Prepend dependency paths to PYTHONPATH and LD_LIBRARY_PATH + # Prepend dependency paths to PYTHONPATH, CLASSPATH and LD_LIBRARY_PATH add_paths('PYTHONPATH', self.pytest_paths) + add_paths('CLASSPATH', self.pytest_javapaths) if Utils.is_win32: add_paths('PATH', self.pytest_libpaths) diff -Nru serd-0.30.2/waflib/extras/wafcache.py serd-0.30.4/waflib/extras/wafcache.py --- serd-0.30.2/waflib/extras/wafcache.py 1970-01-01 00:00:00.000000000 +0000 +++ serd-0.30.4/waflib/extras/wafcache.py 2020-04-26 16:04:17.526738200 +0000 @@ -0,0 +1,524 @@ +#! 
/usr/bin/env python +# encoding: utf-8 +# Thomas Nagy, 2019 (ita) + +""" +Filesystem-based cache system to share and re-use build artifacts + +Cache access operations (copy to and from) are delegated to +independent pre-forked worker subprocesses. + +The following environment variables may be set: +* WAFCACHE: several possibilities: + - File cache: + absolute path of the waf cache (~/.cache/wafcache_user, + where `user` represents the currently logged-in user) + - URL to a cache server, for example: + export WAFCACHE=http://localhost:8080/files/ + in that case, GET/POST requests are made to urls of the form + http://localhost:8080/files/000000000/0 (cache management is then up to the server) + - GCS or S3 bucket + gs://my-bucket/ + s3://my-bucket/ +* WAFCACHE_NO_PUSH: if set, disables pushing to the cache +* WAFCACHE_VERBOSITY: if set, displays more detailed cache operations + +File cache specific options: + Files are copied using hard links by default; if the cache is located + onto another partition, the system switches to file copies instead. +* WAFCACHE_TRIM_MAX_FOLDER: maximum amount of tasks to cache (1M) +* WAFCACHE_EVICT_MAX_BYTES: maximum amount of cache size in bytes (10GB) +* WAFCACHE_EVICT_INTERVAL_MINUTES: minimum time interval to try + and trim the cache (3 minutess) +Usage:: + + def build(bld): + bld.load('wafcache') + ... + +To troubleshoot:: + + waf clean build --zones=wafcache +""" + +import atexit, base64, errno, fcntl, getpass, os, shutil, sys, time, traceback, urllib3 +try: + import subprocess32 as subprocess +except ImportError: + import subprocess + +base_cache = os.path.expanduser('~/.cache/') +if not os.path.isdir(base_cache): + base_cache = '/tmp/' +default_wafcache_dir = os.path.join(base_cache, 'wafcache_' + getpass.getuser()) + +CACHE_DIR = os.environ.get('WAFCACHE', default_wafcache_dir) +TRIM_MAX_FOLDERS = int(os.environ.get('WAFCACHE_TRIM_MAX_FOLDER', 1000000)) +EVICT_INTERVAL_MINUTES = int(os.environ.get('WAFCACHE_EVICT_INTERVAL_MINUTES', 3)) +EVICT_MAX_BYTES = int(os.environ.get('WAFCACHE_EVICT_MAX_BYTES', 10**10)) +WAFCACHE_NO_PUSH = 1 if os.environ.get('WAFCACHE_NO_PUSH') else 0 +WAFCACHE_VERBOSITY = 1 if os.environ.get('WAFCACHE_VERBOSITY') else 0 +OK = "ok" + +try: + import cPickle +except ImportError: + import pickle as cPickle + +if __name__ != '__main__': + from waflib import Task, Logs, Utils, Build + +def can_retrieve_cache(self): + """ + New method for waf Task classes + """ + if not self.outputs: + return False + + self.cached = False + + sig = self.signature() + ssig = Utils.to_hex(self.uid() + sig) + + files_to = [node.abspath() for node in self.outputs] + err = cache_command(ssig, [], files_to) + if err.startswith(OK): + if WAFCACHE_VERBOSITY: + Logs.pprint('CYAN', ' Fetched %r from cache' % files_to) + else: + Logs.debug('wafcache: fetched %r from cache', files_to) + else: + if WAFCACHE_VERBOSITY: + Logs.pprint('YELLOW', ' No cache entry %s' % files_to) + else: + Logs.debug('wafcache: No cache entry %s: %s', files_to, err) + return False + + self.cached = True + return True + +def put_files_cache(self): + """ + New method for waf Task classes + """ + if WAFCACHE_NO_PUSH or getattr(self, 'cached', None) or not self.outputs: + return + + bld = self.generator.bld + sig = self.signature() + ssig = Utils.to_hex(self.uid() + sig) + + files_from = [node.abspath() for node in self.outputs] + err = cache_command(ssig, files_from, []) + + if err.startswith(OK): + if WAFCACHE_VERBOSITY: + Logs.pprint('CYAN', ' Successfully uploaded %s to cache' % 
files_from) + else: + Logs.debug('wafcache: Successfully uploaded %r to cache', files_from) + else: + if WAFCACHE_VERBOSITY: + Logs.pprint('RED', ' Error caching step results %s: %s' % (files_from, err)) + else: + Logs.debug('wafcache: Error caching results %s: %s', files_from, err) + + bld.task_sigs[self.uid()] = self.cache_sig + +def hash_env_vars(self, env, vars_lst): + """ + Reimplement BuildContext.hash_env_vars so that the resulting hash does not depend on local paths + """ + if not env.table: + env = env.parent + if not env: + return Utils.SIG_NIL + + idx = str(id(env)) + str(vars_lst) + try: + cache = self.cache_env + except AttributeError: + cache = self.cache_env = {} + else: + try: + return self.cache_env[idx] + except KeyError: + pass + + v = str([env[a] for a in vars_lst]) + v = v.replace(self.srcnode.abspath().__repr__()[:-1], '') + m = Utils.md5() + m.update(v.encode()) + ret = m.digest() + + Logs.debug('envhash: %r %r', ret, v) + + cache[idx] = ret + + return ret + +def uid(self): + """ + Reimplement Task.uid() so that the signature does not depend on local paths + """ + try: + return self.uid_ + except AttributeError: + m = Utils.md5() + src = self.generator.bld.srcnode + up = m.update + up(self.__class__.__name__.encode()) + for x in self.inputs + self.outputs: + up(x.path_from(src).encode()) + self.uid_ = m.digest() + return self.uid_ + + +def make_cached(cls): + """ + Enable the waf cache for a given task class + """ + if getattr(cls, 'nocache', None) or getattr(cls, 'has_cache', False): + return + + m1 = getattr(cls, 'run', None) + def run(self): + if getattr(self, 'nocache', False): + return m1(self) + if self.can_retrieve_cache(): + return 0 + return m1(self) + cls.run = run + + m2 = getattr(cls, 'post_run', None) + def post_run(self): + if getattr(self, 'nocache', False): + return m2(self) + ret = m2(self) + self.put_files_cache() + if hasattr(self, 'chmod'): + for node in self.outputs: + os.chmod(node.abspath(), self.chmod) + return ret + cls.post_run = post_run + cls.has_cache = True + +process_pool = [] +def get_process(): + """ + Returns a worker process that can process waf cache commands + The worker process is assumed to be returned to the process pool when unused + """ + try: + return process_pool.pop() + except IndexError: + filepath = os.path.dirname(os.path.abspath(__file__)) + os.sep + 'wafcache.py' + cmd = [sys.executable, '-c', Utils.readf(filepath)] + return subprocess.Popen(cmd, stdout=subprocess.PIPE, stdin=subprocess.PIPE, bufsize=0) + +def atexit_pool(): + for k in process_pool: + try: + os.kill(k.pid, 9) + except OSError: + pass + else: + k.wait() +atexit.register(atexit_pool) + +def build(bld): + """ + Called during the build process to enable file caching + """ + if process_pool: + # already called once + return + + for x in range(bld.jobs): + process_pool.append(get_process()) + + Task.Task.can_retrieve_cache = can_retrieve_cache + Task.Task.put_files_cache = put_files_cache + Task.Task.uid = uid + Build.BuildContext.hash_env_vars = hash_env_vars + for x in reversed(list(Task.classes.values())): + make_cached(x) + +def cache_command(sig, files_from, files_to): + """ + Create a command for cache worker processes, returns a pickled + base64-encoded tuple containing the task signature, a list of files to + cache and a list of files files to get from cache (one of the lists + is assumed to be empty) + """ + proc = get_process() + + obj = base64.b64encode(cPickle.dumps([sig, files_from, files_to])) + proc.stdin.write(obj) + 
proc.stdin.write('\n'.encode()) + proc.stdin.flush() + obj = proc.stdout.readline() + if not obj: + raise OSError('Preforked sub-process %r died' % proc.pid) + process_pool.append(proc) + return cPickle.loads(base64.b64decode(obj)) + +try: + copyfun = os.link +except NameError: + copyfun = shutil.copy2 + +def atomic_copy(orig, dest): + """ + Copy files to the cache, the operation is atomic for a given file + """ + global copyfun + tmp = dest + '.tmp' + up = os.path.dirname(dest) + try: + os.makedirs(up) + except OSError: + pass + + try: + copyfun(orig, tmp) + except OSError as e: + if e.errno == errno.EXDEV: + copyfun = shutil.copy2 + copyfun(orig, tmp) + else: + raise + os.rename(tmp, dest) + +def lru_trim(): + """ + the cache folders take the form: + `CACHE_DIR/0b/0b180f82246d726ece37c8ccd0fb1cde2650d7bfcf122ec1f169079a3bfc0ab9` + they are listed in order of last access, and then removed + until the amount of folders is within TRIM_MAX_FOLDERS and the total space + taken by files is less than EVICT_MAX_BYTES + """ + lst = [] + for up in os.listdir(CACHE_DIR): + if len(up) == 2: + sub = os.path.join(CACHE_DIR, up) + for hval in os.listdir(sub): + path = os.path.join(sub, hval) + + size = 0 + for fname in os.listdir(path): + size += os.lstat(os.path.join(path, fname)).st_size + lst.append((os.stat(path).st_mtime, size, path)) + + lst.sort(key=lambda x: x[0]) + lst.reverse() + + tot = sum(x[1] for x in lst) + while tot > EVICT_MAX_BYTES or len(lst) > TRIM_MAX_FOLDERS: + _, tmp_size, path = lst.pop() + tot -= tmp_size + + tmp = path + '.tmp' + try: + shutil.rmtree(tmp) + except OSError: + pass + try: + os.rename(path, tmp) + except OSError: + sys.stderr.write('Could not rename %r to %r' % (path, tmp)) + else: + try: + shutil.rmtree(tmp) + except OSError: + sys.stderr.write('Could not remove %r' % tmp) + sys.stderr.write("Cache trimmed: %r bytes in %r folders left\n" % (tot, len(lst))) + + +def lru_evict(): + """ + Reduce the cache size + """ + lockfile = os.path.join(CACHE_DIR, 'all.lock') + try: + st = os.stat(lockfile) + except EnvironmentError as e: + if e.errno == errno.ENOENT: + with open(lockfile, 'w') as f: + f.write('') + return + else: + raise + + if st.st_mtime < time.time() - EVICT_INTERVAL_MINUTES * 60: + # check every EVICT_INTERVAL_MINUTES minutes if the cache is too big + # OCLOEXEC is unnecessary because no processes are spawned + fd = os.open(lockfile, os.O_RDWR | os.O_CREAT, 0o755) + try: + try: + fcntl.flock(fd, fcntl.LOCK_EX | fcntl.LOCK_NB) + except EnvironmentError: + sys.stderr.write('another process is running!\n') + pass + else: + # now dow the actual cleanup + lru_trim() + os.utime(lockfile, None) + finally: + os.close(fd) + +class netcache(object): + def __init__(self): + self.http = urllib3.PoolManager() + + def url_of(self, sig, i): + return "%s/%s/%s" % (CACHE_DIR, sig, i) + + def upload(self, file_path, sig, i): + url = self.url_of(sig, i) + with open(file_path, 'rb') as f: + file_data = f.read() + r = self.http.request('POST', url, timeout=60, + fields={ 'file': ('%s/%s' % (sig, i), file_data), }) + if r.status >= 400: + raise OSError("Invalid status %r %r" % (url, r.status)) + + def download(self, file_path, sig, i): + url = self.url_of(sig, i) + with self.http.request('GET', url, preload_content=False, timeout=60) as inf: + if inf.status >= 400: + raise OSError("Invalid status %r %r" % (url, inf.status)) + with open(file_path, 'wb') as out: + shutil.copyfileobj(inf, out) + + def copy_to_cache(self, sig, files_from, files_to): + try: + for i, x in 
enumerate(files_from): + if not os.path.islink(x): + self.upload(x, sig, i) + except Exception: + return traceback.format_exc() + return OK + + def copy_from_cache(self, sig, files_from, files_to): + try: + for i, x in enumerate(files_to): + self.download(x, sig, i) + except Exception: + return traceback.format_exc() + return OK + +class fcache(object): + def __init__(self): + if not os.path.exists(CACHE_DIR): + os.makedirs(CACHE_DIR) + if not os.path.exists(CACHE_DIR): + raise ValueError('Could not initialize the cache directory') + + def copy_to_cache(self, sig, files_from, files_to): + """ + Copy files to the cache, existing files are overwritten, + and the copy is atomic only for a given file, not for all files + that belong to a given task object + """ + try: + for i, x in enumerate(files_from): + dest = os.path.join(CACHE_DIR, sig[:2], sig, str(i)) + atomic_copy(x, dest) + except Exception: + return traceback.format_exc() + else: + # attempt trimming if caching was successful: + # we may have things to trim! + lru_evict() + return OK + + def copy_from_cache(self, sig, files_from, files_to): + """ + Copy files from the cache + """ + try: + for i, x in enumerate(files_to): + orig = os.path.join(CACHE_DIR, sig[:2], sig, str(i)) + atomic_copy(orig, x) + + # success! update the cache time + os.utime(os.path.join(CACHE_DIR, sig[:2], sig), None) + except Exception: + return traceback.format_exc() + return OK + +class bucket_cache(object): + def bucket_copy(self, source, target): + if CACHE_DIR.startswith('s3://'): + cmd = ['aws', 's3', 'cp', source, target] + else: + cmd = ['gsutil', 'cp', source, target] + proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + out, err = proc.communicate() + if proc.returncode: + raise OSError('Error copy %r to %r using: %r (exit %r):\n out:%s\n err:%s' % ( + source, target, cmd, proc.returncode, out.decode(), err.decode())) + + def copy_to_cache(self, sig, files_from, files_to): + try: + for i, x in enumerate(files_from): + dest = os.path.join(CACHE_DIR, sig[:2], sig, str(i)) + self.bucket_copy(x, dest) + except Exception: + return traceback.format_exc() + return OK + + def copy_from_cache(self, sig, files_from, files_to): + try: + for i, x in enumerate(files_to): + orig = os.path.join(CACHE_DIR, sig[:2], sig, str(i)) + self.bucket_copy(orig, x) + except EnvironmentError: + return traceback.format_exc() + return OK + +def loop(service): + """ + This function is run when this file is run as a standalone python script, + it assumes a parent process that will communicate the commands to it + as pickled-encoded tuples (one line per command) + + The commands are to copy files to the cache or copy files from the + cache to a target destination + """ + # one operation is performed at a single time by a single process + # therefore stdin never has more than one line + txt = sys.stdin.readline().strip() + if not txt: + # parent process probably ended + sys.exit(1) + ret = OK + + [sig, files_from, files_to] = cPickle.loads(base64.b64decode(txt)) + if files_from: + # TODO return early when pushing files upstream + ret = service.copy_to_cache(sig, files_from, files_to) + elif files_to: + # the build process waits for workers to (possibly) obtain files from the cache + ret = service.copy_from_cache(sig, files_from, files_to) + else: + ret = "Invalid command" + + obj = base64.b64encode(cPickle.dumps(ret)) + sys.stdout.write(obj.decode()) + sys.stdout.write('\n') + sys.stdout.flush() + +if __name__ == '__main__': + if 
CACHE_DIR.startswith('s3://') or CACHE_DIR.startswith('gs://'): + service = bucket_cache() + elif CACHE_DIR.startswith('http'): + service = netcache() + else: + service = fcache() + while 1: + try: + loop(service) + except KeyboardInterrupt: + break + diff -Nru serd-0.30.2/waflib/Options.py serd-0.30.4/waflib/Options.py --- serd-0.30.2/waflib/Options.py 2019-10-17 12:25:28.000000000 +0000 +++ serd-0.30.4/waflib/Options.py 2020-04-26 16:04:17.523404600 +0000 @@ -44,7 +44,7 @@ """ def __init__(self, ctx, allow_unknown=False): optparse.OptionParser.__init__(self, conflict_handler='resolve', add_help_option=False, - version='waf %s (%s)' % (Context.WAFVERSION, Context.WAFREVISION)) + version='%s %s (%s)' % (Context.WAFNAME, Context.WAFVERSION, Context.WAFREVISION)) self.formatter.width = Logs.get_term_cols() self.ctx = ctx self.allow_unknown = allow_unknown @@ -96,11 +96,11 @@ lst.sort() ret = '\n'.join(lst) - return '''waf [commands] [options] + return '''%s [commands] [options] -Main commands (example: ./waf build -j4) +Main commands (example: ./%s build -j4) %s -''' % ret +''' % (Context.WAFNAME, Context.WAFNAME, ret) class OptionsContext(Context.Context): @@ -282,6 +282,8 @@ elif arg != 'options': commands.append(arg) + if options.jobs < 1: + options.jobs = 1 for name in 'top out destdir prefix bindir libdir'.split(): # those paths are usually expanded from Context.launch_dir if getattr(options, name, None): diff -Nru serd-0.30.2/waflib/Tools/c_aliases.py serd-0.30.4/waflib/Tools/c_aliases.py --- serd-0.30.2/waflib/Tools/c_aliases.py 2019-10-19 17:59:11.000000000 +0000 +++ serd-0.30.4/waflib/Tools/c_aliases.py 2020-04-26 16:04:17.523404600 +0000 @@ -38,7 +38,7 @@ :return: the list of features for a task generator processing the source files :rtype: list of string """ - exts = get_extensions(kw['source']) + exts = get_extensions(kw.get('source', [])) typ = kw['typ'] feats = [] @@ -72,7 +72,7 @@ feats.append(x + typ) will_link = True if not will_link and not kw.get('features', []): - raise Errors.WafError('Cannot link from %r, try passing eg: features="c cprogram"?' % kw) + raise Errors.WafError('Unable to determine how to link %r, try adding eg: features="c cshlib"?' 
% kw) return feats def set_features(kw, typ): diff -Nru serd-0.30.2/waflib/Tools/c_config.py serd-0.30.4/waflib/Tools/c_config.py --- serd-0.30.2/waflib/Tools/c_config.py 2019-10-19 17:59:11.000000000 +0000 +++ serd-0.30.4/waflib/Tools/c_config.py 2020-04-26 16:04:17.523404600 +0000 @@ -86,6 +86,10 @@ :type uselib_store: string :param env: config set or conf.env by default :type env: :py:class:`waflib.ConfigSet.ConfigSet` + :param force_static: force usage of static libraries + :type force_static: bool default False + :param posix: usage of POSIX mode for shlex lexical analiysis library + :type posix: bool default True """ assert(isinstance(line, str)) @@ -103,6 +107,8 @@ lex.commenters = '' lst = list(lex) + so_re = re.compile(r"\.so(?:\.[0-9]+)*$") + # append_unique is not always possible # for example, apple flags may require both -arch i386 and -arch ppc uselib = uselib_store @@ -180,7 +186,7 @@ app('CFLAGS', tmp) app('CXXFLAGS', tmp) app('LINKFLAGS', tmp) - elif x.endswith(('.a', '.so', '.dylib', '.lib')): + elif x.endswith(('.a', '.dylib', '.lib')) or so_re.search(x): appu('LINKFLAGS', x) # not cool, #762 else: self.to_log('Unhandled flag %r' % x) @@ -246,6 +252,8 @@ * if modversion is given, then return the module version * else, execute the *-config* program with the *args* and *variables* given, and set the flags on the *conf.env.FLAGS_name* variable + :param path: the **-config program to use** + :type path: list of string :param atleast_pkgconfig_version: minimum pkg-config version to use (disable other tests) :type atleast_pkgconfig_version: string :param package: package name, for example *gtk+-2.0* @@ -260,6 +268,12 @@ :type variables: list of string :param define_variable: additional variables to define (also in conf.env.PKG_CONFIG_DEFINES) :type define_variable: dict(string: string) + :param pkg_config_path: paths where pkg-config should search for .pc config files (overrides env.PKG_CONFIG_PATH if exists) + :type pkg_config_path: string, list of directories separated by colon + :param force_static: force usage of static libraries + :type force_static: bool default False + :param posix: usage of POSIX mode for shlex lexical analiysis library + :type posix: bool default True """ path = Utils.to_list(kw['path']) @@ -334,6 +348,7 @@ """ Checks for configuration flags using a **-config**-like program (pkg-config, sdl-config, etc). 
This wraps internal calls to :py:func:`waflib.Tools.c_config.validate_cfg` and :py:func:`waflib.Tools.c_config.exec_cfg` + so check exec_cfg parameters descriptions for more details on kw passed A few examples:: diff -Nru serd-0.30.2/waflib/Tools/c_tests.py serd-0.30.4/waflib/Tools/c_tests.py --- serd-0.30.2/waflib/Tools/c_tests.py 2019-10-19 17:59:11.000000000 +0000 +++ serd-0.30.4/waflib/Tools/c_tests.py 2020-04-26 16:04:17.523404600 +0000 @@ -180,9 +180,15 @@ ######################################################################################## ENDIAN_FRAGMENT = ''' +#ifdef _MSC_VER +#define testshlib_EXPORT __declspec(dllexport) +#else +#define testshlib_EXPORT +#endif + short int ascii_mm[] = { 0x4249, 0x4765, 0x6E44, 0x6961, 0x6E53, 0x7953, 0 }; short int ascii_ii[] = { 0x694C, 0x5454, 0x656C, 0x6E45, 0x6944, 0x6E61, 0 }; -int use_ascii (int i) { +int testshlib_EXPORT use_ascii (int i) { return ascii_mm[i] + ascii_ii[i]; } short int ebcdic_ii[] = { 0x89D3, 0xE3E3, 0x8593, 0x95C5, 0x89C4, 0x9581, 0 }; @@ -208,12 +214,12 @@ return -1 @feature('grep_for_endianness') -@after_method('process_source') +@after_method('apply_link') def grep_for_endianness_fun(self): """ Used by the endianness configuration test """ - self.create_task('grep_for_endianness', self.compiled_tasks[0].outputs[0]) + self.create_task('grep_for_endianness', self.link_task.outputs[0]) @conf def check_endianness(self): @@ -223,7 +229,8 @@ tmp = [] def check_msg(self): return tmp[0] - self.check(fragment=ENDIAN_FRAGMENT, features='c grep_for_endianness', + + self.check(fragment=ENDIAN_FRAGMENT, features='c cshlib grep_for_endianness', msg='Checking for endianness', define='ENDIANNESS', tmp=tmp, okmsg=check_msg, confcache=None) return tmp[0] diff -Nru serd-0.30.2/waflib/Tools/fc.py serd-0.30.4/waflib/Tools/fc.py --- serd-0.30.2/waflib/Tools/fc.py 2019-10-17 12:25:28.000000000 +0000 +++ serd-0.30.4/waflib/Tools/fc.py 2020-04-26 16:04:17.523404600 +0000 @@ -13,8 +13,8 @@ from waflib.Configure import conf ccroot.USELIB_VARS['fc'] = set(['FCFLAGS', 'DEFINES', 'INCLUDES', 'FCPPFLAGS']) -ccroot.USELIB_VARS['fcprogram_test'] = ccroot.USELIB_VARS['fcprogram'] = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS']) -ccroot.USELIB_VARS['fcshlib'] = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS']) +ccroot.USELIB_VARS['fcprogram_test'] = ccroot.USELIB_VARS['fcprogram'] = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS', 'LDFLAGS']) +ccroot.USELIB_VARS['fcshlib'] = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS', 'LDFLAGS']) ccroot.USELIB_VARS['fcstlib'] = set(['ARFLAGS', 'LINKDEPS']) @extension('.f','.F','.f90','.F90','.for','.FOR','.f95','.F95','.f03','.F03','.f08','.F08') diff -Nru serd-0.30.2/waflib/Tools/msvc.py serd-0.30.4/waflib/Tools/msvc.py --- serd-0.30.2/waflib/Tools/msvc.py 2019-10-19 17:59:11.000000000 +0000 +++ serd-0.30.4/waflib/Tools/msvc.py 2020-04-26 16:04:17.523404600 +0000 @@ -723,6 +723,10 @@ _libpaths = self.env.LIBPATH static_libs=[ + 'lib%ss.a' % lib, + 'lib%s.a' % lib, + '%ss.a' % lib, + '%s.a' %lib, 'lib%ss.lib' % lib, 'lib%s.lib' % lib, '%ss.lib' % lib, @@ -922,7 +926,7 @@ v.LIB_ST = '%s.lib' v.LIBPATH_ST = '/LIBPATH:%s' - v.STLIB_ST = '%s.lib' + v.STLIB_ST = '%s.a' v.STLIBPATH_ST = '/LIBPATH:%s' if v.MSVC_MANIFEST: @@ -936,7 +940,7 @@ v.IMPLIB_ST = '/IMPLIB:%s' v.LINKFLAGS_cstlib = [] - v.cstlib_PATTERN = v.cxxstlib_PATTERN = '%s.lib' + v.cstlib_PATTERN = v.cxxstlib_PATTERN = '%s.a' 
diff -Nru serd-0.30.2/waflib/Tools/python.py serd-0.30.4/waflib/Tools/python.py
--- serd-0.30.2/waflib/Tools/python.py	2019-10-19 17:59:11.000000000 +0000
+++ serd-0.30.4/waflib/Tools/python.py	2020-04-26 16:04:17.523404600 +0000
@@ -620,7 +620,7 @@
 	v.PYO = getattr(Options.options, 'pyo', 1)
 
 	try:
-		v.PYTAG = conf.cmd_and_log(conf.env.PYTHON + ['-c', "import imp;print(imp.get_tag())"]).strip()
+		v.PYTAG = conf.cmd_and_log(conf.env.PYTHON + ['-c', "import sys\ntry:\n print(sys.implementation.cache_tag)\nexcept AttributeError:\n import imp\n print(imp.get_tag())\n"]).strip()
 	except Errors.WafError:
 		pass
 
diff -Nru serd-0.30.2/waflib/Tools/qt5.py serd-0.30.4/waflib/Tools/qt5.py
--- serd-0.30.2/waflib/Tools/qt5.py	2019-10-19 17:59:11.000000000 +0000
+++ serd-0.30.4/waflib/Tools/qt5.py	2020-04-26 16:04:17.526738200 +0000
@@ -482,8 +482,8 @@
 		self.fatal('No CXX compiler defined: did you forget to configure compiler_cxx first?')
 
 	# Qt5 may be compiled with '-reduce-relocations' which requires dependent programs to have -fPIE or -fPIC?
-	frag = '#include <QApplication>\nint main(int argc, char **argv) {return 0;}\n'
-	uses = 'QT5CORE QT5WIDGETS QT5GUI'
+	frag = '#include <QMap>\nint main(int argc, char **argv) {QMap<int,int> m;return m.keys().size();}\n'
+	uses = 'QT5CORE'
 	for flag in [[], '-fPIE', '-fPIC', '-std=c++11' , ['-std=c++11', '-fPIE'], ['-std=c++11', '-fPIC']]:
 		msg = 'See if Qt files compile '
 		if flag:
@@ -499,7 +499,7 @@
 
 	# FreeBSD does not add /usr/local/lib and the pkg-config files do not provide it either :-/
 	if Utils.unversioned_sys_platform() == 'freebsd':
-		frag = '#include <QApplication>\nint main(int argc, char **argv) { QApplication app(argc, argv); return NULL != (void*) (&app);}\n'
+		frag = '#include <QMap>\nint main(int argc, char **argv) {QMap<int,int> m;return m.keys().size();}\n'
 		try:
 			self.check(features='qt5 cxx cxxprogram', use=uses, fragment=frag, msg='Can we link Qt programs on FreeBSD directly?')
 		except self.errors.ConfigurationError:
diff -Nru serd-0.30.2/waflib/Utils.py serd-0.30.4/waflib/Utils.py
--- serd-0.30.2/waflib/Utils.py	2019-10-19 17:59:11.000000000 +0000
+++ serd-0.30.4/waflib/Utils.py	2020-04-26 16:04:17.526738200 +0000
@@ -891,7 +891,7 @@
 	"""
 	Delegates process execution to a pre-forked process instance.
 	"""
-	if not 'env' in kwargs:
+	if not kwargs.get('env'):
 		kwargs['env'] = dict(os.environ)
 	try:
 		obj = base64.b64encode(cPickle.dumps([cmd, kwargs, cargs]))
diff -Nru serd-0.30.2/waflib/waf serd-0.30.4/waflib/waf
--- serd-0.30.2/waflib/waf	2019-10-19 17:59:11.000000000 +0000
+++ serd-0.30.4/waflib/waf	2020-04-26 16:04:17.526738200 +0000
@@ -19,7 +19,7 @@
 def main():
 	script_path = os.path.abspath(inspect.getfile(inspect.getmodule(main)))
-	project_path = os.path.dirname(script_path)
+	project_path = os.path.dirname(os.path.realpath(script_path))
 	Scripting.waf_entry_point(os.getcwd(), Context.WAFVERSION, project_path)
diff -Nru serd-0.30.2/wscript serd-0.30.4/wscript
--- serd-0.30.2/wscript	2019-10-20 23:19:20.000000000 +0000
+++ serd-0.30.4/wscript	2020-04-26 16:36:05.808137700 +0000
@@ -12,7 +12,7 @@
 # major increment <=> incompatible changes
 # minor increment <=> compatible changes (additions)
 # micro increment <=> no interface changes
-SERD_VERSION = '0.30.2'
+SERD_VERSION = '0.30.4'
 SERD_MAJOR_VERSION = '0'
 
 # Mandatory waf variables
@@ -294,7 +294,8 @@
     cmd = ['./serdi_static', filename]
 
     if Options.options.test_wrapper:
-        cmd = [Options.options.test_wrapper] + cmd
+        import shlex
+        cmd = shlex.split(Options.options.test_wrapper) + cmd
 
     proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
     for line in proc.communicate()[0].splitlines():
@@ -356,7 +357,8 @@
                            expected=expected_return,
                            name=action)
 
-            if result and ((mf + 'result') in model[test]):
+            if (result and expected_return == 0 and
+                ((mf + 'result') in model[test])):
                 # Check output against test suite
                 check_uri = model[test][mf + 'result'][0]
                 check_path = ctx.src_path(file_uri_to_path(check_uri))
@@ -372,21 +374,18 @@
             if report is not None:
                 report.write(earl_assertion(test, result, asserter))
 
-        # Run lax test
-        check([command[0]] + ['-l'] + command[1:],
-              expected=None, name=action + ' lax')
-
     ns_rdftest = 'http://www.w3.org/ns/rdftest#'
     for test_class, instances in instances.items():
         if test_class.startswith(ns_rdftest):
-            expected = 1 if 'Negative' in test_class else 0
+            expected = 1 if '-l' not in options and 'Negative' in test_class else 0
             run_tests(test_class, instances, expected)
 
 def test(tst):
     import tempfile
 
     # Create test output directories
-    for i in ['bad', 'good', 'TurtleTests', 'NTriplesTests', 'NQuadsTests', 'TriGTests']:
+    for i in ['bad', 'good', 'lax',
+              'TurtleTests', 'NTriplesTests', 'NQuadsTests', 'TriGTests']:
         try:
             test_dir = os.path.join('tests', i)
             os.makedirs(test_dir)
@@ -471,6 +470,8 @@
     serd_base = 'http://drobilla.net/sw/serd/tests/'
     test_suite(tst, serd_base + 'good/', 'good', None, 'Turtle')
    test_suite(tst, serd_base + 'bad/', 'bad', None, 'Turtle')
+    test_suite(tst, serd_base + 'lax/', 'lax', None, 'Turtle', ['-l'])
+    test_suite(tst, serd_base + 'lax/', 'lax', None, 'Turtle')
 
     # Standard test suites
     with open('earl.ttl', 'w') as report: