diff -Nru cwltool-2.0.20200126090152+dfsg/cwltool/argparser.py cwltool-2.0.20200224214940+dfsg/cwltool/argparser.py --- cwltool-2.0.20200126090152+dfsg/cwltool/argparser.py 2020-01-27 14:51:43.000000000 +0000 +++ cwltool-2.0.20200224214940+dfsg/cwltool/argparser.py 2020-02-24 22:00:06.000000000 +0000 @@ -799,6 +799,7 @@ records: List[str], input_required: bool = True, ) -> argparse.ArgumentParser: + toolparser.description = tool.tool.get("doc", None) toolparser.add_argument("job_order", nargs="?", help="Job input json file") namemap["job_order"] = "job_order" diff -Nru cwltool-2.0.20200126090152+dfsg/cwltool/checker.py cwltool-2.0.20200224214940+dfsg/cwltool/checker.py --- cwltool-2.0.20200126090152+dfsg/cwltool/checker.py 2020-01-27 14:51:43.000000000 +0000 +++ cwltool-2.0.20200224214940+dfsg/cwltool/checker.py 2020-02-24 22:00:06.000000000 +0000 @@ -166,8 +166,10 @@ for parm in src_parms: src_dict[parm["id"]] = parm - step_inputs_val = check_all_types(src_dict, step_inputs, "source") - workflow_outputs_val = check_all_types(src_dict, workflow_outputs, "outputSource") + step_inputs_val = check_all_types(src_dict, step_inputs, "source", param_to_step) + workflow_outputs_val = check_all_types( + src_dict, workflow_outputs, "outputSource", param_to_step + ) warnings = step_inputs_val["warning"] + workflow_outputs_val["warning"] exceptions = step_inputs_val["exception"] + workflow_outputs_val["exception"] @@ -212,18 +214,21 @@ "%s\n%s" % (msg1, bullets([msg3, msg4, msg5], " ")) ) elif sink.get("not_connected"): - msg = SourceLine(sink, "type").makeError( - "'%s' is not an input parameter of %s, expected %s" - % ( - shortname(sink["id"]), - param_to_step[sink["id"]]["run"], - ", ".join( - shortname(s["id"]) - for s in param_to_step[sink["id"]]["inputs"] - if not s.get("not_connected") - ), + if not sink.get("used_by_step"): + msg = SourceLine(sink, "type").makeError( + "'%s' is not an input parameter of %s, expected %s" + % ( + shortname(sink["id"]), + 
param_to_step[sink["id"]]["run"], + ", ".join( + shortname(s["id"]) + for s in param_to_step[sink["id"]]["inputs"] + if not s.get("not_connected") + ), + ) ) - ) + else: + msg = "" else: msg = ( SourceLine(src, "type").makeError( @@ -241,11 +246,17 @@ " source has linkMerge method %s" % linkMerge ) - warning_msgs.append(msg) + if warning.message is not None: + msg += "\n" + SourceLine(sink).makeError(" " + warning.message) + + if msg: + warning_msgs.append(msg) + for exception in exceptions: src = exception.src sink = exception.sink linkMerge = exception.linkMerge + extra_message = exception.message msg = ( SourceLine(src, "type").makeError( "Source '%s' of type %s is incompatible" @@ -257,6 +268,9 @@ % (shortname(sink["id"]), json_dumps(sink["type"])) ) ) + if extra_message is not None: + msg += "\n" + SourceLine(sink).makeError(" " + extra_message) + if linkMerge is not None: msg += "\n" + SourceLine(sink).makeError( " source has linkMerge method %s" % linkMerge @@ -278,19 +292,19 @@ exception_msgs.append(msg) all_warning_msg = strip_dup_lineno("\n".join(warning_msgs)) - all_exception_msg = strip_dup_lineno("\n".join(exception_msgs)) + all_exception_msg = strip_dup_lineno("\n" + "\n".join(exception_msgs)) - if warnings: + if all_warning_msg: _logger.warning("Workflow checker warning:\n%s", all_warning_msg) if exceptions: raise validate.ValidationException(all_exception_msg) -SrcSink = namedtuple("SrcSink", ["src", "sink", "linkMerge"]) +SrcSink = namedtuple("SrcSink", ["src", "sink", "linkMerge", "message"]) -def check_all_types(src_dict, sinks, sourceField): - # type: (Dict[str, Any], List[Dict[str, Any]], str) -> Dict[str, List[SrcSink]] +def check_all_types(src_dict, sinks, sourceField, param_to_step): + # type: (Dict[str, Any], List[Dict[str, Any]], str, Dict[str, Dict[str, Any]]) -> Dict[str, List[SrcSink]] """ Given a list of sinks, check if their types match with the types of their sources. 
@@ -299,21 +313,93 @@ validation = {"warning": [], "exception": []} # type: Dict[str, List[SrcSink]] for sink in sinks: if sourceField in sink: + valueFrom = sink.get("valueFrom") + pickValue = sink.get("pickValue") + + extra_message = None + if pickValue is not None: + extra_message = "pickValue is: %s" % pickValue + if isinstance(sink[sourceField], MutableSequence): - srcs_of_sink = [src_dict[parm_id] for parm_id in sink[sourceField]] linkMerge = sink.get( "linkMerge", ("merge_nested" if len(sink[sourceField]) > 1 else None), ) + + if pickValue in ["first_non_null", "only_non_null"]: + linkMerge = None + + srcs_of_sink = [] # type: List[Any] + for parm_id in sink[sourceField]: + srcs_of_sink += [src_dict[parm_id]] + if ( + is_conditional_step(param_to_step, parm_id) + and pickValue is None + ): + validation["warning"].append( + SrcSink( + src_dict[parm_id], + sink, + linkMerge, + message="Source is from conditional step, but pickValue is not used", + ) + ) else: parm_id = sink[sourceField] srcs_of_sink = [src_dict[parm_id]] linkMerge = None + + if pickValue is not None: + validation["warning"].append( + SrcSink( + src_dict[parm_id], + sink, + linkMerge, + message="pickValue is used but only a single input source is declared", + ) + ) + + if is_conditional_step(param_to_step, parm_id): + src_typ = srcs_of_sink[0]["type"] + snk_typ = sink["type"] + + if not isinstance(src_typ, list): + src_typ = [src_typ] + if "null" not in src_typ: + src_typ = ["null"] + src_typ + + if ( + "null" not in snk_typ + ): # Given our type names this works even if not a list + validation["warning"].append( + SrcSink( + src_dict[parm_id], + sink, + linkMerge, + message="Source is from conditional step and may produce `null`", + ) + ) + + srcs_of_sink[0]["type"] = src_typ + for src in srcs_of_sink: check_result = check_types(src, sink, linkMerge, valueFrom) if check_result == "warning": - validation["warning"].append(SrcSink(src, sink, linkMerge)) + validation["warning"].append( + 
SrcSink(src, sink, linkMerge, message=extra_message) + ) elif check_result == "exception": - validation["exception"].append(SrcSink(src, sink, linkMerge)) + validation["exception"].append( + SrcSink(src, sink, linkMerge, message=extra_message) + ) + return validation + + +def is_conditional_step(param_to_step: Dict[str, Dict[str, Any]], parm_id: str) -> bool: + source_step = param_to_step.get(parm_id) + if source_step is not None: + if source_step.get("when") is not None: + return True + return False diff -Nru cwltool-2.0.20200126090152+dfsg/cwltool/command_line_tool.py cwltool-2.0.20200224214940+dfsg/cwltool/command_line_tool.py --- cwltool-2.0.20200126090152+dfsg/cwltool/command_line_tool.py 2020-01-27 14:51:43.000000000 +0000 +++ cwltool-2.0.20200224214940+dfsg/cwltool/command_line_tool.py 2020-02-24 22:00:06.000000000 +0000 @@ -162,6 +162,17 @@ yield job +class AbstractOperation(Process): + def job( + self, + job_order, # type: Mapping[str, str] + output_callbacks, # type: Callable[[Any, Any], Any] + runtimeContext, # type: RuntimeContext + ): + # type: (...) 
-> Generator[ExpressionTool.ExpressionJob, None, None] + raise WorkflowException("Abstract operation cannot be executed.") + + def remove_path(f): # type: (Dict[str, Any]) -> None if "path" in f: del f["path"] diff -Nru cwltool-2.0.20200126090152+dfsg/cwltool/context.py cwltool-2.0.20200224214940+dfsg/cwltool/context.py --- cwltool-2.0.20200126090152+dfsg/cwltool/context.py 2020-01-27 14:51:43.000000000 +0000 +++ cwltool-2.0.20200224214940+dfsg/cwltool/context.py 2020-02-24 22:00:06.000000000 +0000 @@ -132,7 +132,7 @@ self.eval_timeout = 20 # type: float self.postScatterEval = ( None - ) # type: Optional[Callable[[MutableMapping[str, Any]], Dict[str, Any]]] + ) # type: Optional[Callable[[MutableMapping[str, Any]], Optional[MutableMapping[str, Any]]]] self.on_error = "stop" # type: str self.strict_memory_limit = False # type: bool diff -Nru cwltool-2.0.20200126090152+dfsg/cwltool/docker.py cwltool-2.0.20200224214940+dfsg/cwltool/docker.py --- cwltool-2.0.20200126090152+dfsg/cwltool/docker.py 2020-01-27 14:51:43.000000000 +0000 +++ cwltool-2.0.20200224214940+dfsg/cwltool/docker.py 2020-02-24 22:00:06.000000000 +0000 @@ -1,6 +1,7 @@ """Enables Docker software containers via the {dx-,u,}docker runtimes.""" import datetime +import csv import os import re import shutil @@ -8,7 +9,7 @@ import tempfile import threading from distutils import spawn -from io import open # pylint: disable=redefined-builtin +from io import open, StringIO # pylint: disable=redefined-builtin from typing import Dict, List, MutableMapping, Optional, Set, Tuple import requests @@ -222,11 +223,20 @@ def append_volume(runtime, source, target, writable=False): # type: (List[str], str, str, bool) -> None """Add binding arguments to the runtime list.""" - runtime.append( - "--volume={}:{}:{}".format( - docker_windows_path_adjust(source), target, "rw" if writable else "ro" - ) - ) + options = [ + "type=bind", + "source=" + source, + "target=" + target, + ] + if not writable: + options.append("readonly") + 
output = StringIO() + csv.writer(output).writerow(options) + mount_arg = output.getvalue().strip() + runtime.append("--mount={}".format(mount_arg)) + # Unlike "--volume", "--mount" will fail if the volume doesn't already exist. + if not os.path.exists(source): + os.mkdir(source) def add_file_or_directory_volume( self, runtime: List[str], volume: MapperEnt, host_outdir_tgt: Optional[str] diff -Nru cwltool-2.0.20200126090152+dfsg/cwltool/executors.py cwltool-2.0.20200224214940+dfsg/cwltool/executors.py --- cwltool-2.0.20200126090152+dfsg/cwltool/executors.py 2020-01-27 14:51:43.000000000 +0000 +++ cwltool-2.0.20200224214940+dfsg/cwltool/executors.py 2020-02-24 22:00:06.000000000 +0000 @@ -7,11 +7,22 @@ import threading from abc import ABCMeta, abstractmethod from threading import Lock -from typing import Any, Dict, Iterable, List, Optional, Set, Tuple, Union +from typing import ( + Any, + Dict, + Iterable, + List, + Optional, + Set, + Tuple, + Union, + MutableMapping, +) import psutil from schema_salad.validate import ValidationException +from schema_salad.sourceline import SourceLine from .command_line_tool import CallbackJob from .context import RuntimeContext, getdefault @@ -68,6 +79,14 @@ if not runtime_context.basedir: raise WorkflowException("Must provide 'basedir' in runtimeContext") + def check_for_abstract_op(tool: MutableMapping[str, Any]) -> None: + if tool["class"] == "Operation": + raise SourceLine(tool, "class", WorkflowException).makeError( + "Workflow has unrunnable abstract Operation" + ) + + process.visit(check_for_abstract_op) + finaloutdir = None # Type: Optional[str] original_outdir = runtime_context.outdir if isinstance(original_outdir, str): diff -Nru cwltool-2.0.20200126090152+dfsg/cwltool/job.py cwltool-2.0.20200224214940+dfsg/cwltool/job.py --- cwltool-2.0.20200126090152+dfsg/cwltool/job.py 2020-01-27 14:51:43.000000000 +0000 +++ cwltool-2.0.20200224214940+dfsg/cwltool/job.py 2020-02-24 22:00:06.000000000 +0000 @@ -433,7 +433,9 @@ 
_logger.info("[job %s] completed %s", self.name, processStatus) if _logger.isEnabledFor(logging.DEBUG): - _logger.debug("[job %s] %s", self.name, json_dumps(outputs, indent=4)) + _logger.debug( + "[job %s] outputs %s", self.name, json_dumps(outputs, indent=4) + ) if self.generatemapper is not None and runtimeContext.secret_store is not None: # Delete any runtime-generated files containing secrets. diff -Nru cwltool-2.0.20200126090152+dfsg/cwltool/main.py cwltool-2.0.20200224214940+dfsg/cwltool/main.py --- cwltool-2.0.20200126090152+dfsg/cwltool/main.py 2020-01-27 14:51:43.000000000 +0000 +++ cwltool-2.0.20200224214940+dfsg/cwltool/main.py 2020-02-24 22:00:06.000000000 +0000 @@ -782,7 +782,10 @@ input_required: bool = True, ) -> int: if not stdout: # force UTF-8 even if the console is configured differently - if hasattr(sys.stdout, "encoding") and sys.stdout.encoding != "UTF-8": + if hasattr(sys.stdout, "encoding") and sys.stdout.encoding.upper() not in ( + "UTF-8", + "UTF8", + ): if hasattr(sys.stdout, "detach"): stdout = io.TextIOWrapper(sys.stdout.buffer, encoding="utf-8") else: diff -Nru cwltool-2.0.20200126090152+dfsg/cwltool/process.py cwltool-2.0.20200224214940+dfsg/cwltool/process.py --- cwltool-2.0.20200126090152+dfsg/cwltool/process.py 2020-01-27 14:51:43.000000000 +0000 +++ cwltool-2.0.20200224214940+dfsg/cwltool/process.py 2020-02-24 22:00:06.000000000 +0000 @@ -119,6 +119,7 @@ "CommandLineTool.yml", "CommonWorkflowLanguage.yml", "Process.yml", + "Operation.yml", "concepts.md", "contrib.md", "intro.md", diff -Nru cwltool-2.0.20200126090152+dfsg/cwltool/provenance.py cwltool-2.0.20200224214940+dfsg/cwltool/provenance.py --- cwltool-2.0.20200126090152+dfsg/cwltool/provenance.py 2020-01-27 14:51:43.000000000 +0000 +++ cwltool-2.0.20200224214940+dfsg/cwltool/provenance.py 2020-02-24 22:00:06.000000000 +0000 @@ -864,7 +864,7 @@ def generate_output_prov( self, - final_output: Union[Dict[str, Any], List[Dict[str, Any]]], + final_output: 
Union[MutableMapping[str, Any], List[Dict[str, Any]]], process_run_id: Optional[str], name: Optional[str], ) -> None: diff -Nru cwltool-2.0.20200126090152+dfsg/cwltool/schemas/v1.2.0-dev1/CommandLineTool-standalone.yml cwltool-2.0.20200224214940+dfsg/cwltool/schemas/v1.2.0-dev1/CommandLineTool-standalone.yml --- cwltool-2.0.20200126090152+dfsg/cwltool/schemas/v1.2.0-dev1/CommandLineTool-standalone.yml 1970-01-01 00:00:00.000000000 +0000 +++ cwltool-2.0.20200224214940+dfsg/cwltool/schemas/v1.2.0-dev1/CommandLineTool-standalone.yml 2020-02-24 22:00:06.000000000 +0000 @@ -0,0 +1,2 @@ +- $import: Process.yml +- $import: CommandLineTool.yml diff -Nru cwltool-2.0.20200126090152+dfsg/cwltool/schemas/v1.2.0-dev1/CommandLineTool.yml cwltool-2.0.20200224214940+dfsg/cwltool/schemas/v1.2.0-dev1/CommandLineTool.yml --- cwltool-2.0.20200126090152+dfsg/cwltool/schemas/v1.2.0-dev1/CommandLineTool.yml 1970-01-01 00:00:00.000000000 +0000 +++ cwltool-2.0.20200224214940+dfsg/cwltool/schemas/v1.2.0-dev1/CommandLineTool.yml 2020-02-24 22:00:06.000000000 +0000 @@ -0,0 +1,1175 @@ +saladVersion: v1.1 +$base: "https://w3id.org/cwl/cwl#" + +$namespaces: + cwl: "https://w3id.org/cwl/cwl#" + +$graph: + +- name: CommandLineToolDoc + type: documentation + doc: + - | + # Common Workflow Language (CWL) Command Line Tool Description, v1.2.0-dev1 + + This version: + * https://w3id.org/cwl/v1.2.0-dev1/ + + Current version: + * https://w3id.org/cwl/ + - "\n\n" + - {$include: contrib.md} + - "\n\n" + - | + # Abstract + + A Command Line Tool is a non-interactive executable program that reads + some input, performs a computation, and terminates after producing some + output. Command line programs are a flexible unit of code sharing and + reuse, unfortunately the syntax and input/output semantics among command + line programs is extremely heterogeneous. 
A common layer for describing + the syntax and semantics of programs can reduce this incidental + complexity by providing a consistent way to connect programs together. + This specification defines the Common Workflow Language (CWL) Command + Line Tool Description, a vendor-neutral standard for describing the + syntax and input/output semantics of command line programs. + + - {$include: intro.md} + + - | + ## Introduction to the CWL Command Line Tool standard v1.2.0-dev1 + + This specification represents the latest development release from the + CWL group. Since the v1.1 release, v1.2.0-dev1 introduces the + following updates to the CWL Command Line Tool standard. + Documents should use `cwlVersion: v1.2.0-dev1` to make use of new + syntax and features introduced in v1.2.0-dev1. Existing v1.1 documents + should be trivially updatable by changing `cwlVersion`, however + CWL documents that relied on previously undefined or + underspecified behavior may have slightly different behavior in + v1.2.0-dev1. + + ## Changelog + + See also the [CWL Workflow Description, v1.2.0-dev1 changelog](Workflow.html#Changelog). + + ## Purpose + + Standalone programs are a flexible and interoperable form of code reuse. + Unlike monolithic applications, applications and analysis workflows which + are composed of multiple separate programs can be written in multiple + languages and execute concurrently on multiple hosts. However, POSIX + does not dictate computer-readable grammar or semantics for program input + and output, resulting in extremely heterogeneous command line grammar and + input/output semantics among program. This is a particular problem in + distributed computing (multi-node compute clusters) and virtualized + environments (such as Docker containers) where it is often necessary to + provision resources such as input files before executing the program. 
+ + Often this gap is filled by hard coding program invocation and + implicitly assuming requirements will be met, or abstracting program + invocation with wrapper scripts or descriptor documents. Unfortunately, + where these approaches are application or platform specific it creates a + significant barrier to reproducibility and portability, as methods + developed for one platform must be manually ported to be used on new + platforms. Similarly it creates redundant work, as wrappers for popular + tools must be rewritten for each application or platform in use. + + The Common Workflow Language Command Line Tool Description is designed to + provide a common standard description of grammar and semantics for + invoking programs used in data-intensive fields such as Bioinformatics, + Chemistry, Physics, Astronomy, and Statistics. This specification + attempts to define a precise data and execution model for Command Line Tools that + can be implemented on a variety of computing platforms, ranging from a + single workstation to cluster, grid, cloud, and high performance + computing platforms. Details related to execution of these programs not + laid out in this specification are open to interpretation by the computing + platform implementing this specification. + + - {$include: concepts.md} + - {$include: invocation.md} + + +- type: record + name: EnvironmentDef + doc: | + Define an environment variable that will be set in the runtime environment + by the workflow platform when executing the command line tool. May be the + result of executing an expression, such as getting a parameter from input. 
+ fields: + - name: envName + type: string + doc: The environment variable name + - name: envValue + type: [string, Expression] + doc: The environment variable value + +- type: record + name: CommandLineBinding + extends: InputBinding + docParent: "#CommandInputParameter" + doc: | + + When listed under `inputBinding` in the input schema, the term + "value" refers to the the corresponding value in the input object. For + binding objects listed in `CommandLineTool.arguments`, the term "value" + refers to the effective value after evaluating `valueFrom`. + + The binding behavior when building the command line depends on the data + type of the value. If there is a mismatch between the type described by + the input schema and the effective value, such as resulting from an + expression evaluation, an implementation must use the data type of the + effective value. + + - **string**: Add `prefix` and the string to the command line. + + - **number**: Add `prefix` and decimal representation to command line. + + - **boolean**: If true, add `prefix` to the command line. If false, add + nothing. + + - **File**: Add `prefix` and the value of + [`File.path`](#File) to the command line. + + - **Directory**: Add `prefix` and the value of + [`Directory.path`](#Directory) to the command line. + + - **array**: If `itemSeparator` is specified, add `prefix` and the join + the array into a single string with `itemSeparator` separating the + items. Otherwise first add `prefix`, then recursively process + individual elements. + If the array is empty, it does not add anything to command line. + + - **object**: Add `prefix` only, and recursively add object fields for + which `inputBinding` is specified. + + - **null**: Add nothing. + + fields: + - name: position + type: [ "null", int, Expression ] + doc: | + The sorting key. Default position is 0. If the inputBinding is + associated with an input parameter, then the value of `self` in the + expression will be the value of the input parameter. 
Input parameter + defaults (as specified by the `InputParameter.default` field) must be + applied before evaluating the expression. Expressions must return a + single value of type int or a null. + - name: prefix + type: string? + doc: "Command line prefix to add before the value." + - name: separate + type: boolean? + default: true + doc: | + If true (default), then the prefix and value must be added as separate + command line arguments; if false, prefix and value must be concatenated + into a single command line argument. + - name: itemSeparator + type: string? + doc: | + Join the array elements into a single string with the elements + separated by by `itemSeparator`. + - name: valueFrom + type: + - "null" + - string + - Expression + jsonldPredicate: "cwl:valueFrom" + doc: | + If `valueFrom` is a constant string value, use this as the value and + apply the binding rules above. + + If `valueFrom` is an expression, evaluate the expression to yield the + actual value to use to build the command line and apply the binding + rules above. If the inputBinding is associated with an input + parameter, the value of `self` in the expression will be the value of + the input parameter. Input parameter defaults (as specified by the + `InputParameter.default` field) must be applied before evaluating the + expression. + + If the value of the associated input parameter is `null`, `valueFrom` is + not evaluated and nothing is added to the command line. + + When a binding is part of the `CommandLineTool.arguments` field, + the `valueFrom` field is required. + + - name: shellQuote + type: boolean? + default: true + doc: | + If `ShellCommandRequirement` is in the requirements for the current command, + this controls whether the value is quoted on the command line (default is true). + Use `shellQuote: false` to inject metacharacters for operations such as pipes. 
+ + If `shellQuote` is true or not provided, the implementation must not + permit interpretation of any shell metacharacters or directives. + + +- type: record + name: CommandOutputBinding + extends: LoadContents + doc: | + Describes how to generate an output parameter based on the files produced + by a CommandLineTool. + + The output parameter value is generated by applying these operations in the + following order: + + - glob + - loadContents + - outputEval + - secondaryFiles + fields: + - name: glob + type: + - "null" + - string + - Expression + - type: array + items: string + doc: | + Find files or directories relative to the output directory, using POSIX + glob(3) pathname matching. If an array is provided, find files or + directories that match any pattern in the array. If an expression is + provided, the expression must return a string or an array of strings, + which will then be evaluated as one or more glob patterns. Must only + match and return files/directories which actually exist. + + If the value of glob is a relative path pattern (does not + begin with a slash '/') then it is resolved relative to the + output directory. If the value of the glob is an absolute + path pattern (it does begin with a slash '/') then it must + refer to a path within the output directory. It is an error + if any glob resolves to a path outside the output directory. + Specifically this means globs that resolve to paths outside the output + directory are illegal. + + A glob may match a path within the output directory which is + actually a symlink to another file. In this case, the + expected behavior is for the resulting File/Directory object to take the + `basename` (and corresponding `nameroot` and `nameext`) of the + symlink. The `location` of the File/Directory is implementation + dependent, but logically the File/Directory should have the same content + as the symlink target. 
Platforms may stage output files/directories to + cloud storage that lack the concept of a symlink. In + this case file content and directories may be duplicated, or (to avoid + duplication) the File/Directory `location` may refer to the symlink + target. + + It is an error if a symlink in the output directory (or any + symlink in a chain of links) refers to any file or directory + that is not under an input or output directory. + + Implementations may shut down a container before globbing + output, so globs and expressions must not assume access to the + container filesystem except for declared input and output. + + - name: outputEval + type: Expression? + doc: | + Evaluate an expression to generate the output value. If + `glob` was specified, the value of `self` must be an array + containing file objects that were matched. If no files were + matched, `self` must be a zero length array; if a single file + was matched, the value of `self` is an array of a single + element. Additionally, if `loadContents` is `true`, the File + objects must include up to the first 64 KiB of file contents + in the `contents` field. The exit code of the process is + available in the expression as `runtime.exitCode`. + +- name: CommandLineBindable + type: record + fields: + inputBinding: + type: CommandLineBinding? + jsonldPredicate: "cwl:inputBinding" + doc: Describes how to turn this object into command line arguments. 
+ +- name: CommandInputRecordField + type: record + extends: [InputRecordField, CommandLineBindable] + specialize: + - specializeFrom: InputRecordSchema + specializeTo: CommandInputRecordSchema + - specializeFrom: InputEnumSchema + specializeTo: CommandInputEnumSchema + - specializeFrom: InputArraySchema + specializeTo: CommandInputArraySchema + - specializeFrom: InputBinding + specializeTo: CommandLineBinding + + +- name: CommandInputRecordSchema + type: record + extends: [InputRecordSchema, CommandInputSchema, CommandLineBindable] + specialize: + - specializeFrom: InputRecordField + specializeTo: CommandInputRecordField + - specializeFrom: InputBinding + specializeTo: CommandLineBinding + + +- name: CommandInputEnumSchema + type: record + extends: [InputEnumSchema, CommandInputSchema, CommandLineBindable] + specialize: + - specializeFrom: InputBinding + specializeTo: CommandLineBinding + + +- name: CommandInputArraySchema + type: record + extends: [InputArraySchema, CommandInputSchema, CommandLineBindable] + specialize: + - specializeFrom: InputRecordSchema + specializeTo: CommandInputRecordSchema + - specializeFrom: InputEnumSchema + specializeTo: CommandInputEnumSchema + - specializeFrom: InputArraySchema + specializeTo: CommandInputArraySchema + - specializeFrom: InputBinding + specializeTo: CommandLineBinding + + +- name: CommandOutputRecordField + type: record + extends: OutputRecordField + specialize: + - specializeFrom: OutputRecordSchema + specializeTo: CommandOutputRecordSchema + - specializeFrom: OutputEnumSchema + specializeTo: CommandOutputEnumSchema + - specializeFrom: OutputArraySchema + specializeTo: CommandOutputArraySchema + fields: + - name: outputBinding + type: CommandOutputBinding? 
+ jsonldPredicate: "cwl:outputBinding" + doc: | + Describes how to generate this output object based on the files + produced by a CommandLineTool + + +- name: CommandOutputRecordSchema + type: record + extends: OutputRecordSchema + specialize: + - specializeFrom: OutputRecordField + specializeTo: CommandOutputRecordField + + +- name: CommandOutputEnumSchema + type: record + extends: OutputEnumSchema + specialize: + - specializeFrom: OutputRecordSchema + specializeTo: CommandOutputRecordSchema + - specializeFrom: OutputEnumSchema + specializeTo: CommandOutputEnumSchema + - specializeFrom: OutputArraySchema + specializeTo: CommandOutputArraySchema + + +- name: CommandOutputArraySchema + type: record + extends: OutputArraySchema + specialize: + - specializeFrom: OutputRecordSchema + specializeTo: CommandOutputRecordSchema + - specializeFrom: OutputEnumSchema + specializeTo: CommandOutputEnumSchema + - specializeFrom: OutputArraySchema + specializeTo: CommandOutputArraySchema + + +- type: record + name: CommandInputParameter + extends: InputParameter + doc: An input parameter for a CommandLineTool. + fields: + - name: type + type: + - CWLType + - stdin + - CommandInputRecordSchema + - CommandInputEnumSchema + - CommandInputArraySchema + - string + - type: array + items: + - CWLType + - CommandInputRecordSchema + - CommandInputEnumSchema + - CommandInputArraySchema + - string + jsonldPredicate: + "_id": "sld:type" + "_type": "@vocab" + refScope: 2 + typeDSL: True + doc: | + Specify valid types of data that may be assigned to this parameter. + - name: inputBinding + type: CommandLineBinding? + doc: | + Describes how to turns the input parameters of a process into + command line arguments. + jsonldPredicate: "cwl:inputBinding" + +- type: record + name: CommandOutputParameter + extends: OutputParameter + doc: An output parameter for a CommandLineTool. 
+ fields: + - name: type + type: + - CWLType + - stdout + - stderr + - CommandOutputRecordSchema + - CommandOutputEnumSchema + - CommandOutputArraySchema + - string + - type: array + items: + - CWLType + - CommandOutputRecordSchema + - CommandOutputEnumSchema + - CommandOutputArraySchema + - string + jsonldPredicate: + "_id": "sld:type" + "_type": "@vocab" + refScope: 2 + typeDSL: True + doc: | + Specify valid types of data that may be assigned to this parameter. + - name: outputBinding + type: CommandOutputBinding? + jsonldPredicate: "cwl:outputBinding" + doc: Describes how to generate this output object based on the files + produced by a CommandLineTool + +- name: stdin + type: enum + symbols: [ "cwl:stdin" ] + docParent: "#CommandOutputParameter" + doc: | + Only valid as a `type` for a `CommandLineTool` input with no + `inputBinding` set. `stdin` must not be specified at the `CommandLineTool` + level. + + The following + ``` + inputs: + an_input_name: + type: stdin + ``` + is equivalent to + ``` + inputs: + an_input_name: + type: File + streamable: true + + stdin: ${inputs.an_input_name.path} + ``` + +- name: stdout + type: enum + symbols: [ "cwl:stdout" ] + docParent: "#CommandOutputParameter" + doc: | + Only valid as a `type` for a `CommandLineTool` output with no + `outputBinding` set. + + The following + ``` + outputs: + an_output_name: + type: stdout + + stdout: a_stdout_file + ``` + is equivalent to + ``` + outputs: + an_output_name: + type: File + streamable: true + outputBinding: + glob: a_stdout_file + + stdout: a_stdout_file + ``` + + If there is no `stdout` name provided, a random filename will be created. 
+ For example, the following + ``` + outputs: + an_output_name: + type: stdout + ``` + is equivalent to + ``` + outputs: + an_output_name: + type: File + streamable: true + outputBinding: + glob: random_stdout_filenameABCDEFG + + stdout: random_stdout_filenameABCDEFG + ``` + + +- name: stderr + type: enum + symbols: [ "cwl:stderr" ] + docParent: "#CommandOutputParameter" + doc: | + Only valid as a `type` for a `CommandLineTool` output with no + `outputBinding` set. + + The following + ``` + outputs: + an_output_name: + type: stderr + + stderr: a_stderr_file + ``` + is equivalent to + ``` + outputs: + an_output_name: + type: File + streamable: true + outputBinding: + glob: a_stderr_file + + stderr: a_stderr_file + ``` + + If there is no `stderr` name provided, a random filename will be created. + For example, the following + ``` + outputs: + an_output_name: + type: stderr + ``` + is equivalent to + ``` + outputs: + an_output_name: + type: File + streamable: true + outputBinding: + glob: random_stderr_filenameABCDEFG + + stderr: random_stderr_filenameABCDEFG + ``` + + +- type: record + name: CommandLineTool + extends: Process + documentRoot: true + specialize: + - specializeFrom: InputParameter + specializeTo: CommandInputParameter + - specializeFrom: OutputParameter + specializeTo: CommandOutputParameter + doc: | + This defines the schema of the CWL Command Line Tool Description document. + + fields: + - name: class + jsonldPredicate: + "_id": "@type" + "_type": "@vocab" + type: string + - name: baseCommand + doc: | + Specifies the program to execute. If an array, the first element of + the array is the command to execute, and subsequent elements are + mandatory command line arguments. The elements in `baseCommand` must + appear before any command line bindings from `inputBinding` or + `arguments`. 
+ + If `baseCommand` is not provided or is an empty array, the first + element of the command line produced after processing `inputBinding` or + `arguments` must be used as the program to execute. + + If the program includes a path separator character it must + be an absolute path, otherwise it is an error. If the program does not + include a path separator, search the `$PATH` variable in the runtime + environment of the workflow runner find the absolute path of the + executable. + type: + - string? + - string[]? + jsonldPredicate: + "_id": "cwl:baseCommand" + "_container": "@list" + - name: arguments + doc: | + Command line bindings which are not directly associated with input + parameters. If the value is a string, it is used as a string literal + argument. If it is an Expression, the result of the evaluation is used + as an argument. + type: + - "null" + - type: array + items: [string, Expression, CommandLineBinding] + jsonldPredicate: + "_id": "cwl:arguments" + "_container": "@list" + - name: stdin + type: ["null", string, Expression] + jsonldPredicate: "https://w3id.org/cwl/cwl#stdin" + doc: | + A path to a file whose contents must be piped into the command's + standard input stream. + - name: stderr + type: ["null", string, Expression] + jsonldPredicate: "https://w3id.org/cwl/cwl#stderr" + doc: | + Capture the command's standard error stream to a file written to + the designated output directory. + + If `stderr` is a string, it specifies the file name to use. + + If `stderr` is an expression, the expression is evaluated and must + return a string with the file name to use to capture stderr. If the + return value is not a string, or the resulting path contains illegal + characters (such as the path separator `/`) it is an error. + - name: stdout + type: ["null", string, Expression] + jsonldPredicate: "https://w3id.org/cwl/cwl#stdout" + doc: | + Capture the command's standard output stream to a file written to + the designated output directory. 
+ + If `stdout` is a string, it specifies the file name to use. + + If `stdout` is an expression, the expression is evaluated and must + return a string with the file name to use to capture stdout. If the + return value is not a string, or the resulting path contains illegal + characters (such as the path separator `/`) it is an error. + - name: successCodes + type: int[]? + doc: | + Exit codes that indicate the process completed successfully. + + - name: temporaryFailCodes + type: int[]? + doc: | + Exit codes that indicate the process failed due to a possibly + temporary condition, where executing the process with the same + runtime environment and inputs may produce different results. + + - name: permanentFailCodes + type: int[]? + doc: + Exit codes that indicate the process failed due to a permanent logic + error, where executing the process with the same runtime environment and + same inputs is expected to always fail. + + +- type: record + name: DockerRequirement + extends: ProcessRequirement + doc: | + Indicates that a workflow component should be run in a + [Docker](http://docker.com) or Docker-compatible (such as + [Singularity](https://www.sylabs.io/) and [udocker](https://github.com/indigo-dc/udocker)) container environment and + specifies how to fetch or build the image. + + If a CommandLineTool lists `DockerRequirement` under + `hints` (or `requirements`), it may (or must) be run in the specified Docker + container. + + The platform must first acquire or install the correct Docker image as + specified by `dockerPull`, `dockerImport`, `dockerLoad` or `dockerFile`. + + The platform must execute the tool in the container using `docker run` with + the appropriate Docker image and tool command line. + + The workflow platform may provide input files and the designated output + directory through the use of volume bind mounts. The platform should rewrite + file paths in the input object to correspond to the Docker bind mounted + locations. 
That is, the platform should rewrite values in the parameter context + such as `runtime.outdir`, `runtime.tmpdir` and others to be valid paths + within the container. The platform must ensure that `runtime.outdir` and + `runtime.tmpdir` are distinct directories. + + When running a tool contained in Docker, the workflow platform must not + assume anything about the contents of the Docker container, such as the + presence or absence of specific software, except to assume that the + generated command line represents a valid command within the runtime + environment of the container. + + A container image may specify an + [ENTRYPOINT](https://docs.docker.com/engine/reference/builder/#entrypoint) + and/or + [CMD](https://docs.docker.com/engine/reference/builder/#cmd). + Command line arguments will be appended after all elements of + ENTRYPOINT, and will override all elements specified using CMD (in + other words, CMD is only used when the CommandLineTool definition + produces an empty command line). + + Use of implicit ENTRYPOINT or CMD are discouraged due to reproducibility + concerns of the implicit hidden execution point (For further discussion, see + [https://doi.org/10.12688/f1000research.15140.1](https://doi.org/10.12688/f1000research.15140.1)). Portable + CommandLineTool wrappers in which use of a container is optional must not rely on ENTRYPOINT or CMD. + CommandLineTools which do rely on ENTRYPOINT or CMD must list `DockerRequirement` in the + `requirements` section. + + ## Interaction with other requirements + + If [EnvVarRequirement](#EnvVarRequirement) is specified alongside a + DockerRequirement, the environment variables must be provided to Docker + using `--env` or `--env-file` and interact with the container's preexisting + environment as defined by Docker. + + fields: + - name: class + type: string + doc: "Always 'DockerRequirement'" + jsonldPredicate: + "_id": "@type" + "_type": "@vocab" + - name: dockerPull + type: string? 
+ doc: | + Specify a Docker image to retrieve using `docker pull`. Can contain the + immutable digest to ensure an exact container is used: + `dockerPull: ubuntu@sha256:45b23dee08af5e43a7fea6c4cf9c25ccf269ee113168c19722f87876677c5cb2` + - name: dockerLoad + type: string? + doc: "Specify a HTTP URL from which to download a Docker image using `docker load`." + - name: dockerFile + type: string? + doc: "Supply the contents of a Dockerfile which will be built using `docker build`." + - name: dockerImport + type: string? + doc: "Provide HTTP URL to download and gunzip a Docker images using `docker import." + - name: dockerImageId + type: string? + doc: | + The image id that will be used for `docker run`. May be a + human-readable image name or the image identifier hash. May be skipped + if `dockerPull` is specified, in which case the `dockerPull` image id + must be used. + - name: dockerOutputDirectory + type: string? + doc: | + Set the designated output directory to a specific location inside the + Docker container. + + +- type: record + name: SoftwareRequirement + extends: ProcessRequirement + doc: | + A list of software packages that should be configured in the environment of + the defined process. + fields: + - name: class + type: string + doc: "Always 'SoftwareRequirement'" + jsonldPredicate: + "_id": "@type" + "_type": "@vocab" + - name: packages + type: SoftwarePackage[] + doc: "The list of software to be configured." + jsonldPredicate: + mapSubject: package + mapPredicate: specs + +- name: SoftwarePackage + type: record + fields: + - name: package + type: string + doc: | + The name of the software to be made available. If the name is + common, inconsistent, or otherwise ambiguous it should be combined with + one or more identifiers in the `specs` field. + - name: version + type: string[]? + doc: | + The (optional) versions of the software that are known to be + compatible. + - name: specs + type: string[]? 
+ jsonldPredicate: {_type: "@id", noLinkCheck: true} + doc: | + One or more [IRI](https://en.wikipedia.org/wiki/Internationalized_Resource_Identifier)s + identifying resources for installing or enabling the software named in + the `package` field. Implementations may provide resolvers which map + these software identifer IRIs to some configuration action; or they can + use only the name from the `package` field on a best effort basis. + + For example, the IRI https://packages.debian.org/bowtie could + be resolved with `apt-get install bowtie`. The IRI + https://anaconda.org/bioconda/bowtie could be resolved with `conda + install -c bioconda bowtie`. + + IRIs can also be system independent and used to map to a specific + software installation or selection mechanism. + Using [RRID](https://www.identifiers.org/rrid/) as an example: + https://identifiers.org/rrid/RRID:SCR_005476 + could be fulfilled using the above mentioned Debian or bioconda + package, a local installation managed by [Environement Modules](http://modules.sourceforge.net/), + or any other mechanism the platform chooses. IRIs can also be from + identifer sources that are discipline specific yet still system + independent. As an example, the equivalent [ELIXIR Tools and Data + Service Registry](https://bio.tools) IRI to the previous RRID example is + https://bio.tools/tool/bowtie2/version/2.2.8. + If supported by a given registry, implementations are encouraged to + query these system independent sofware identifier IRIs directly for + links to packaging systems. + + A site specific IRI can be listed as well. For example, an academic + computing cluster using Environement Modules could list the IRI + `https://hpc.example.edu/modules/bowtie-tbb/1.22` to indicate that + `module load bowtie-tbb/1.1.2` should be executed to make available + `bowtie` version 1.1.2 compiled with the TBB library prior to running + the accompanying Workflow or CommandLineTool. 
Note that the example IRI + is specific to a particular institution and computing environment as + the Environment Modules system does not have a common namespace or + standardized naming convention. + + This last example is the least portable and should only be used if + mechanisms based off of the `package` field or more generic IRIs are + unavailable or unsuitable. While harmless to other sites, site specific + software IRIs should be left out of shared CWL descriptions to avoid + clutter. + +- name: Dirent + type: record + doc: | + Define a file or subdirectory that must be placed in the designated output + directory prior to executing the command line tool. May be the result of + executing an expression, such as building a configuration file from a + template. + fields: + - name: entryname + type: ["null", string, Expression] + jsonldPredicate: + _id: cwl:entryname + doc: | + The name of the file or subdirectory to create in the output directory. + If `entry` is a File or Directory, the `entryname` field overrides the value + of `basename` of the File or Directory object. Optional. + - name: entry + type: [string, Expression] + jsonldPredicate: + _id: cwl:entry + doc: | + If the value is a string literal or an expression which evaluates to a + string, a new file must be created with the string as the file contents. + + If the value is an expression that evaluates to a `File` object, this + indicates the referenced file should be added to the designated output + directory prior to executing the tool. + + If the value is an expression that evaluates to a `Dirent` object, this + indicates that the File or Directory in `entry` should be added to the + designated output directory with the name in `entryname`. + + If `writable` is false, the file may be made available using a bind + mount or file system link to avoid unnecessary copying of the input + file. + - name: writable + type: boolean? 
+ default: false + doc: | + If true, the file or directory must be writable by the tool. Changes + to the file or directory must be isolated and not visible by any other + CommandLineTool process. This may be implemented by making a copy of + the original file or directory. Default false (files and directories + read-only by default). + + A directory marked as `writable: true` implies that all files and + subdirectories are recursively writable as well. + + +- name: InitialWorkDirRequirement + type: record + extends: ProcessRequirement + doc: + Define a list of files and subdirectories that must be created by the + workflow platform in the designated output directory prior to executing the + command line tool. + fields: + - name: class + type: string + doc: InitialWorkDirRequirement + jsonldPredicate: + "_id": "@type" + "_type": "@vocab" + - name: listing + type: + - type: array + items: + - "null" + - File + - type: array + items: + - File + - Directory + - Directory + - Dirent + - Expression + - Expression + jsonldPredicate: + _id: "cwl:listing" + doc: | + The list of files or subdirectories that must be placed in the + designated output directory prior to executing the command line tool. + + May be an expression. If so, the expression return value must validate as + `{type: array, items: ["null", File, File[], Directory, Directory[], Dirent]}`. + + Files or Directories which are listed in the input parameters and + appear in the `InitialWorkDirRequirement` listing must have their + `path` set to their staged location in the designated output directory. + If the same File or Directory appears more than once in the + `InitialWorkDirRequirement` listing, the implementation must choose + exactly one value for `path`; how this value is chosen is undefined. + + +- name: EnvVarRequirement + type: record + extends: ProcessRequirement + doc: | + Define a list of environment variables which will be set in the + execution environment of the tool. 
See `EnvironmentDef` for details. + fields: + - name: class + type: string + doc: "Always 'EnvVarRequirement'" + jsonldPredicate: + "_id": "@type" + "_type": "@vocab" + - name: envDef + type: EnvironmentDef[] + doc: The list of environment variables. + jsonldPredicate: + mapSubject: envName + mapPredicate: envValue + + +- type: record + name: ShellCommandRequirement + extends: ProcessRequirement + doc: | + Modify the behavior of CommandLineTool to generate a single string + containing a shell command line. Each item in the argument list must be + joined into a string separated by single spaces and quoted to prevent + intepretation by the shell, unless `CommandLineBinding` for that argument + contains `shellQuote: false`. If `shellQuote: false` is specified, the + argument is joined into the command string without quoting, which allows + the use of shell metacharacters such as `|` for pipes. + fields: + - name: class + type: string + doc: "Always 'ShellCommandRequirement'" + jsonldPredicate: + "_id": "@type" + "_type": "@vocab" + + +- type: record + name: ResourceRequirement + extends: ProcessRequirement + doc: | + Specify basic hardware resource requirements. + + "min" is the minimum amount of a resource that must be reserved to schedule + a job. If "min" cannot be satisfied, the job should not be run. + + "max" is the maximum amount of a resource that the job shall be permitted + to use. If a node has sufficient resources, multiple jobs may be scheduled + on a single node provided each job's "max" resource requirements are + met. If a job attempts to exceed its "max" resource allocation, an + implementation may deny additional resources, which may result in job + failure. + + If "min" is specified but "max" is not, then "max" == "min" + If "max" is specified by "min" is not, then "min" == "max". + + It is an error if max < min. + + It is an error if the value of any of these fields is negative. 
+ + If neither "min" nor "max" is specified for a resource, use the default values below. + + fields: + - name: class + type: string + doc: "Always 'ResourceRequirement'" + jsonldPredicate: + "_id": "@type" + "_type": "@vocab" + - name: coresMin + type: ["null", long, Expression] + doc: Minimum reserved number of CPU cores (default is 1) + + - name: coresMax + type: ["null", int, Expression] + doc: Maximum reserved number of CPU cores + + - name: ramMin + type: ["null", long, Expression] + doc: Minimum reserved RAM in mebibytes (2**20) (default is 256) + + - name: ramMax + type: ["null", long, Expression] + doc: Maximum reserved RAM in mebibytes (2**20) + + - name: tmpdirMin + type: ["null", long, Expression] + doc: Minimum reserved filesystem based storage for the designated temporary directory, in mebibytes (2**20) (default is 1024) + + - name: tmpdirMax + type: ["null", long, Expression] + doc: Maximum reserved filesystem based storage for the designated temporary directory, in mebibytes (2**20) + + - name: outdirMin + type: ["null", long, Expression] + doc: Minimum reserved filesystem based storage for the designated output directory, in mebibytes (2**20) (default is 1024) + + - name: outdirMax + type: ["null", long, Expression] + doc: Maximum reserved filesystem based storage for the designated output directory, in mebibytes (2**20) + + +- type: record + name: WorkReuse + extends: ProcessRequirement + doc: | + For implementations that support reusing output from past work (on + the assumption that same code and same input produce same + results), control whether to enable or disable the reuse behavior + for a particular tool or step (to accomodate situations where that + assumption is incorrect). A reused step is not executed but + instead returns the same output as the original execution. + + If `enableReuse` is not specified, correct tools should assume it + is enabled by default. 
+ fields: + - name: class + type: string + doc: "Always 'WorkReuse'" + jsonldPredicate: + "_id": "@type" + "_type": "@vocab" + - name: enableReuse + type: [boolean, Expression] + default: true + + +- type: record + name: NetworkAccess + extends: ProcessRequirement + doc: | + Indicate whether a process requires outgoing IPv4/IPv6 network + access. Choice of IPv4 or IPv6 is implementation and site + specific, correct tools must support both. + + If `networkAccess` is false or not specified, tools must not + assume network access, except for localhost (the loopback device). + + If `networkAccess` is true, the tool must be able to make outgoing + connections to network resources. Resources may be on a private + subnet or the public Internet. However, implementations and sites + may apply their own security policies to restrict what is + accessible by the tool. + + Enabling network access does not imply a publically routable IP + address or the ability to accept inbound connections. + + fields: + - name: class + type: string + doc: "Always 'NetworkAccess'" + jsonldPredicate: + "_id": "@type" + "_type": "@vocab" + - name: networkAccess + type: [boolean, Expression] + +- name: InplaceUpdateRequirement + type: record + extends: cwl:ProcessRequirement + doc: | + + If `inplaceUpdate` is true, then an implementation supporting this + feature may permit tools to directly update files with `writable: + true` in InitialWorkDirRequirement. That is, as an optimization, + files may be destructively modified in place as opposed to copied + and updated. + + An implementation must ensure that only one workflow step may + access a writable file at a time. It is an error if a file which + is writable by one workflow step file is accessed (for reading or + writing) by any other workflow step running independently. + However, a file which has been updated in a previous completed + step may be used as input to multiple steps, provided it is + read-only in every step. 
+ + Workflow steps which modify a file must produce the modified file + as output. Downstream steps which futher process the file must + use the output of previous steps, and not refer to a common input + (this is necessary for both ordering and correctness). + + Workflow authors should provide this in the `hints` section. The + intent of this feature is that workflows produce the same results + whether or not InplaceUpdateRequirement is supported by the + implementation, and this feature is primarily available as an + optimization for particular environments. + + Users and implementers should be aware that workflows that + destructively modify inputs may not be repeatable or reproducible. + In particular, enabling this feature implies that WorkReuse should + not be enabled. + + fields: + class: + type: string + doc: "Always 'InplaceUpdateRequirement'" + jsonldPredicate: + "_id": "@type" + "_type": "@vocab" + inplaceUpdate: + type: boolean + +- type: record + name: ToolTimeLimit + extends: ProcessRequirement + doc: | + Set an upper limit on the execution time of a CommandLineTool. + A CommandLineTool whose execution duration exceeds the time + limit may be preemptively terminated and considered failed. + May also be used by batch systems to make scheduling decisions. + The execution duration excludes external operations, such as + staging of files, pulling a docker image etc, and only counts + wall-time for the execution of the command line itself. + fields: + - name: class + type: string + doc: "Always 'ToolTimeLimit'" + jsonldPredicate: + "_id": "@type" + "_type": "@vocab" + - name: timelimit + type: [long, Expression] + doc: | + The time limit, in seconds. A time limit of zero means no + time limit. Negative time limits are an error. 
diff -Nru cwltool-2.0.20200126090152+dfsg/cwltool/schemas/v1.2.0-dev1/CommonWorkflowLanguage.yml cwltool-2.0.20200224214940+dfsg/cwltool/schemas/v1.2.0-dev1/CommonWorkflowLanguage.yml --- cwltool-2.0.20200126090152+dfsg/cwltool/schemas/v1.2.0-dev1/CommonWorkflowLanguage.yml 1970-01-01 00:00:00.000000000 +0000 +++ cwltool-2.0.20200224214940+dfsg/cwltool/schemas/v1.2.0-dev1/CommonWorkflowLanguage.yml 2020-02-24 22:00:06.000000000 +0000 @@ -0,0 +1,12 @@ +saladVersion: v1.1 +$base: "https://w3id.org/cwl/cwl#" + +$namespaces: + cwl: "https://w3id.org/cwl/cwl#" + sld: "https://w3id.org/cwl/salad#" + +$graph: + +- $import: Process.yml +- $import: CommandLineTool.yml +- $import: Workflow.yml diff -Nru cwltool-2.0.20200126090152+dfsg/cwltool/schemas/v1.2.0-dev1/concepts.md cwltool-2.0.20200224214940+dfsg/cwltool/schemas/v1.2.0-dev1/concepts.md --- cwltool-2.0.20200126090152+dfsg/cwltool/schemas/v1.2.0-dev1/concepts.md 1970-01-01 00:00:00.000000000 +0000 +++ cwltool-2.0.20200224214940+dfsg/cwltool/schemas/v1.2.0-dev1/concepts.md 2020-02-24 22:00:06.000000000 +0000 @@ -0,0 +1,599 @@ +## References to other specifications + +**Javascript Object Notation (JSON)**: http://json.org + +**JSON Linked Data (JSON-LD)**: http://json-ld.org + +**YAML**: http://yaml.org + +**Avro**: https://avro.apache.org/docs/1.8.1/spec.html + +**Uniform Resource Identifier (URI) Generic Syntax**: https://tools.ietf.org/html/rfc3986) + +**Internationalized Resource Identifiers (IRIs)**: +https://tools.ietf.org/html/rfc3987 + +**Portable Operating System Interface (POSIX.1-2008)**: http://pubs.opengroup.org/onlinepubs/9699919799/ + +**Resource Description Framework (RDF)**: http://www.w3.org/RDF/ + +**XDG Base Directory Specification**: https://specifications.freedesktop.org/basedir-spec/basedir-spec-0.6.html + + +## Scope + +This document describes CWL syntax, execution, and object model. 
It +is not intended to document a CWL specific implementation, however it may +serve as a reference for the behavior of conforming implementations. + +## Terminology + +The terminology used to describe CWL documents is defined in the +Concepts section of the specification. The terms defined in the +following list are used in building those definitions and in describing the +actions of a CWL implementation: + +**may**: Conforming CWL documents and CWL implementations are permitted but +not required to behave as described. + +**must**: Conforming CWL documents and CWL implementations are required to behave +as described; otherwise they are in error. + +**error**: A violation of the rules of this specification; results are +undefined. Conforming implementations may detect and report an error and may +recover from it. + +**fatal error**: A violation of the rules of this specification; results are +undefined. Conforming implementations must not continue to execute the current +process and may report an error. + +**at user option**: Conforming software may or must (depending on the modal verb in +the sentence) behave as described; if it does, it must provide users a means to +enable or disable the behavior described. + +**deprecated**: Conforming software may implement a behavior for backwards +compatibility. Portable CWL documents should not rely on deprecated behavior. +Behavior marked as deprecated may be removed entirely from future revisions of +the CWL specification. + +# Data model + +## Data concepts + +An **object** is a data structure equivalent to the "object" type in JSON, +consisting of a unordered set of name/value pairs (referred to here as +**fields**) and where the name is a string and the value is a string, number, +boolean, array, or object. + +A **document** is a file containing a serialized object, or an array of objects. + +A **process** is a basic unit of computation which accepts input data, +performs some computation, and produces output data. 
Examples include +CommandLineTools, Workflows, and ExpressionTools. + +An **input object** is an object describing the inputs to an invocation of +a process. + +An **output object** is an object describing the output resulting from an +invocation of a process. + +An **input schema** describes the valid format (required fields, data types) +for an input object. + +An **output schema** describes the valid format for an output object. + +**Metadata** is information about workflows, tools, or input items. + +## Syntax + +CWL documents must consist of an object or array of objects represented using +JSON or YAML syntax. Upon loading, a CWL implementation must apply the +preprocessing steps described in the +[Semantic Annotations for Linked Avro Data (SALAD) Specification](SchemaSalad.html). +An implementation may formally validate the structure of a CWL document using +SALAD schemas located at https://github.com/common-workflow-language/cwl-v1.1/ + +### map + +Note: This section is non-normative. +> type: array<ComplexType> | +> map<`key_field`, ComplexType> + +The above syntax in the CWL specifications means there are two or more ways to write the given value. + +Option one is a array and is the most verbose option. + +Option one generic example: +``` +some_cwl_field: + - key_field: a_complex_type1 + field2: foo + field3: bar + - key_field: a_complex_type2 + field2: foo2 + field3: bar2 + - key_field: a_complex_type3 +``` + +Option one specific example using [Workflow](Workflow.html#Workflow).[inputs](Workflow.html#WorkflowInputParameter): +> array<InputParameter> | +> map<`id`, `type` | InputParameter> + + +``` +inputs: + - id: workflow_input01 + type: string + - id: workflow_input02 + type: File + format: http://edamontology.org/format_2572 +``` + +Option two is enabled by the `map<…>` syntax. 
Instead of an array of entries we +use a mapping, where one field of the `ComplexType` (here named `key_field`) +becomes the key in the map, and its value is the rest of the `ComplexType` +without the key field. If all of the other fields of the `ComplexType` are +optional and unneeded, then we can indicate this with an empty mapping as the +value: `a_complex_type3: {}` + +Option two generic example: +``` +some_cwl_field: + a_complex_type1: # this was the "key_field" from above + field2: foo + field3: bar + a_complex_type2: + field2: foo2 + field3: bar2 + a_complex_type3: {} # we accept the defualt values for "field2" and "field3" +``` + +Option two specific example using [Workflow](Workflow.html#Workflow).[inputs](Workflow.html#WorkflowInputParameter): +> array<InputParameter> | +> map<`id`, `type` | InputParameter> + + +``` +inputs: + workflow_input01: + type: string + workflow_input02: + type: File + format: http://edamontology.org/format_2572 +``` + +Option two specific example using [SoftwareRequirement](#SoftwareRequirement).[packages](#SoftwarePackage): +> array<SoftwarePackage> | +> map<`package`, `specs` | SoftwarePackage> + + +``` +hints: + SoftwareRequirement: + packages: + sourmash: + specs: [ https://doi.org/10.21105/joss.00027 ] + screed: + version: [ "1.0" ] + python: {} +``` +` +Sometimes we have a third and even more compact option denoted like this: +> type: array<ComplexType> | +> map<`key_field`, `field2` | ComplexType> + +For this example, if we only need the `key_field` and `field2` when specifying +our `ComplexType`s (because the other fields are optional and we are fine with +their default values) then we can abbreviate. 
+ +Option three generic example: +``` +some_cwl_field: + a_complex_type1: foo # we accept the default value for field3 + a_complex_type2: foo2 # we accept the default value for field3 + a_complex_type3: {} # we accept the default values for "field2" and "field3" +``` + +Option three specific example using [Workflow](Workflow.html#Workflow).[inputs](Workflow.html#WorkflowInputParameter): +> array<InputParameter> | +> map<`id`, `type` | InputParameter> + + +``` +inputs: + workflow_input01: string + workflow_input02: File # we accept the default of no File format +``` + +Option three specific example using [SoftwareRequirement](#SoftwareRequirement).[packages](#SoftwarePackage): +> array<SoftwarePackage> | +> map<`package`, `specs` | SoftwarePackage> + + +``` +hints: + SoftwareRequirement: + packages: + sourmash: [ https://doi.org/10.21105/joss.00027 ] + python: {} +``` + + +What if some entries we want to mix the option 2 and 3? You can! + +Mixed option 2 and 3 generic example: +``` +some_cwl_field: + my_complex_type1: foo # we accept the default value for field3 + my_complex_type2: + field2: foo2 + field3: bar2 # we did not accept the default value for field3 + # so we had to use the slightly expanded syntax + my_complex_type3: {} # as before, we accept the default values for both + # "field2" and "field3" +``` + +Mixed option 2 and 3 specific example using [Workflow](Workflow.html#Workflow).[inputs](Workflow.html#WorkflowInputParameter): +> array<InputParameter> | +> map<`id`, `type` | InputParameter> + + +``` +inputs: + workflow_input01: string + workflow_input02: # we use the longer way + type: File # because we want to specify the "format" too + format: http://edamontology.org/format_2572 +``` + +Mixed option 2 and 3 specific example using [SoftwareRequirement](#SoftwareRequirement).[packages](#SoftwarePackage): +> array<SoftwarePackage> | +> map<`package`, `specs` | SoftwarePackage> + + +``` +hints: + SoftwareRequirement: + packages: + sourmash: [ 
https://doi.org/10.21105/joss.00027 ] + screed: + specs: [ https://github.com/dib-lab/screed ] + version: [ "1.0" ] + python: {} +``` + +Note: The `map<…>` (compact) versions are optional for users, the verbose option #1 is +always allowed, but for presentation reasons option 3 and 2 may be preferred +by human readers. Consumers of CWL must support all three options. + +The normative explanation for these variations, aimed at implementors, is in the +[Schema Salad specification](SchemaSalad.html#Identifier_maps). + +## Identifiers + +If an object contains an `id` field, that is used to uniquely identify the +object in that document. The value of the `id` field must be unique over the +entire document. Identifiers may be resolved relative to either the document +base and/or other identifiers following the rules are described in the +[Schema Salad specification](SchemaSalad.html#Identifier_resolution). + +An implementation may choose to only honor references to object types for +which the `id` field is explicitly listed in this specification. + +## Document preprocessing + +An implementation must resolve [$import](SchemaSalad.html#Import) and +[$include](SchemaSalad.html#Import) directives as described in the +[Schema Salad specification](SchemaSalad.html). + +Another transformation defined in Schema salad is simplification of data type definitions. +Type `` ending with `?` should be transformed to `[, "null"]`. +Type `` ending with `[]` should be transformed to `{"type": "array", "items": }` + +## Extensions and metadata + +Input metadata (for example, a sample identifier) may be represented within +a tool or workflow using input parameters which are explicitly propagated to +output. Future versions of this specification may define additional facilities +for working with input/output metadata. 
+ +Implementation extensions not required for correct execution (for example, +fields related to GUI presentation) and metadata about the tool or workflow +itself (for example, authorship for use in citations) may be provided as +additional fields on any object. Such extension fields must use a namespace +prefix listed in the `$namespaces` section of the document as described in the +[Schema Salad specification](SchemaSalad.html#Explicit_context). + +It is recommended that concepts from schema.org are used whenever possible. +For the `$schema` field we recommend their RDF encoding: http://schema.org/version/latest/schema.rdf + +Implementation extensions which modify execution semantics must be [listed in +the `requirements` field](#Requirements_and_hints). + +# Execution model + +## Execution concepts + +A **parameter** is a named symbolic input or output of a process, with an +associated datatype or schema. During execution, values are assigned to +parameters to make the input object or output object used for concrete +process invocation. + +A **CommandLineTool** is a process characterized by the execution of a +standalone, non-interactive program which is invoked on some input, +produces output, and then terminates. + +A **workflow** is a process characterized by multiple subprocess steps, +where step outputs are connected to the inputs of downstream steps to +form a directed acyclic graph, and independent steps may run concurrently. + +A **runtime environment** is the actual hardware and software environment when +executing a command line tool. It includes, but is not limited to, the +hardware architecture, hardware resources, operating system, software runtime +(if applicable, such as the specific Python interpreter or the specific Java +virtual machine), libraries, modules, packages, utilities, and data files +required to run the tool. 
+ +A **workflow platform** is a specific hardware and software implementation +capable of interpreting CWL documents and executing the processes specified by +the document. The responsibilities of the workflow platform may include +scheduling process invocation, setting up the necessary runtime environment, +making input data available, invoking the tool process, and collecting output. + +A workflow platform may choose to only implement the Command Line Tool +Description part of the CWL specification. + +It is intended that the workflow platform has broad leeway outside of this +specification to optimize use of computing resources and enforce policies +not covered by this specification. Some areas that are currently out of +scope for CWL specification but may be handled by a specific workflow +platform include: + +* Data security and permissions +* Scheduling tool invocations on remote cluster or cloud compute nodes. +* Using virtual machines or operating system containers to manage the runtime +(except as described in [DockerRequirement](CommandLineTool.html#DockerRequirement)). +* Using remote or distributed file systems to manage input and output files. +* Transforming file paths. +* Pausing, resuming or checkpointing processes or workflows. + +Conforming CWL processes must not assume anything about the runtime +environment or workflow platform unless explicitly declared through the use +of [process requirements](#Requirements_and_hints). + +## Generic execution process + +The generic execution sequence of a CWL process (including workflows and +command line tools) is as follows. + +1. Load input object. +1. Load, process and validate a CWL document, yielding one or more process objects. +The [`$namespaces`](SchemaSalad.html#Explicit_context) present in the CWL document +are also used when validating and processing the input object. +1. 
If there are multiple process objects (due to [`$graph`](SchemaSalad.html#Document_graph)) +and which process object to start with is not specified in the input object (via +a [`cwl:tool`](#Executing_CWL_documents_as_scripts) entry) or by any other means +(like a URL fragment) then choose the process with the `id` of "#main" or "main". +1. Validate the input object against the `inputs` schema for the process. +1. Validate process requirements are met. +1. Perform any further setup required by the specific process type. +1. Execute the process. +1. Capture results of process execution into the output object. +1. Validate the output object against the `outputs` schema for the process. +1. Report the output object to the process caller. + +## Requirements and hints + +A **process requirement** modifies the semantics or runtime +environment of a process. If an implementation cannot satisfy all +requirements, or a requirement is listed which is not recognized by the +implementation, it is a fatal error and the implementation must not attempt +to run the process, unless overridden at user option. + +A **hint** is similar to a requirement; however, it is not an error if an +implementation cannot satisfy all hints. The implementation may report a +warning if a hint cannot be satisfied. + +Optionally, implementations may allow requirements to be specified in the input +object document as an array of requirements under the field name +`cwl:requirements`. If implementations allow this, then such requirements +should be combined with any requirements present in the corresponding Process +as if they were specified there. + +Requirements specified in a parent Workflow are inherited by step processes +if they are valid for that step. 
If the substep is a CommandLineTool +only the `InlineJavascriptRequirement`, `SchemaDefRequirement`, `DockerRequirement`, +`SoftwareRequirement`, `InitialWorkDirRequirement`, `EnvVarRequirement`, +`ShellCommandRequirement`, `ResourceRequirement` are valid. + +*As good practice, it is best to have process requirements be self-contained, +such that each process can run successfully by itself.* + +If the same process requirement appears at different levels of the +workflow, the most specific instance of the requirement is used, that is, +an entry in `requirements` on a process implementation such as +CommandLineTool will take precedence over an entry in `requirements` +specified in a workflow step, and an entry in `requirements` on a workflow +step takes precedence over the workflow. Entries in `hints` are resolved +the same way. + +Requirements override hints. If a process implementation provides a +process requirement in `hints` which is also provided in `requirements` by +an enclosing workflow or workflow step, the enclosing `requirements` takes +precedence. + +## Parameter references + +Parameter references are denoted by the syntax `$(...)` and may be used in any +field permitting the pseudo-type `Expression`, as specified by this document. +Conforming implementations must support parameter references. Parameter +references use the following subset of +[Javascript/ECMAScript 5.1](http://www.ecma-international.org/ecma-262/5.1/) +syntax, but they are designed to not require a Javascript engine for evaluation. + +In the following [BNF +grammar](https://en.wikipedia.org/wiki/Backus%E2%80%93Naur_Form), character +classes, and grammar rules are denoted in '{}', '-' denotes exclusion from a +character class, '(())' denotes grouping, '|' denotes alternates, trailing +'*' denotes zero or more repeats, '+' denote one or more repeats, '/' escapes +these special characters, and all other characters are literal values. + +

+ + + + + + + +
symbol:: {Unicode alphanumeric}+
singleq:: [' (( {character - '} | \' ))* ']
doubleq:: [" (( {character - "} | \" ))* "]
index:: [ {decimal digit}+ ]
segment:: . {symbol} | {singleq} | {doubleq} | {index}
parameter reference::$( {symbol} {segment}*)
+

+ +Use the following algorithm to resolve a parameter reference: + + 1. Match the leading symbol as the key + 2. Look up the key in the parameter context (described below) to get the current value. + It is an error if the key is not found in the parameter context. + 3. If there are no subsequent segments, terminate and return current value + 4. Else, match the next segment + 5. Extract the symbol, string, or index from the segment as the key + 6. Look up the key in current value and assign as new current value. If + the key is a symbol or string, the current value must be an object. + If the key is an index, the current value must be an array or string. + It is an error if the key does not match the required type, or the key is not found or out + of range. + 7. Repeat steps 3-6 + +The root namespace is the parameter context. The following parameters must +be provided: + + * `inputs`: The input object to the current Process. + * `self`: A context-specific value. The contextual values for 'self' are + documented for specific fields elsewhere in this specification. If + a contextual value of 'self' is not documented for a field, it + must be 'null'. + * `runtime`: An object containing configuration details. Specific to the + process type. An implementation may provide + opaque strings for any or all fields of `runtime`. These must be + filled in by the platform after processing the Tool but before actual + execution. Parameter references and expressions may only use the + literal string value of the field and must not perform computation on + the contents, except where noted otherwise. + +If the value of a field has no leading or trailing non-whitespace +characters around a parameter reference, the effective value of the field +becomes the value of the referenced parameter, preserving the return type. + +If the value of a field has non-whitespace leading or trailing characters +around a parameter reference, it is subject to string interpolation. 
The +effective value of the field is a string containing the leading characters, +followed by the string value of the parameter reference, followed by the +trailing characters. The string value of the parameter reference is its +textual JSON representation with the following rules: + + * Leading and trailing quotes are stripped from strings + * Objects entries are sorted by key + +Multiple parameter references may appear in a single field. This case +must be treated as a string interpolation. After interpolating the first +parameter reference, interpolation must be recursively applied to the +trailing characters to yield the final string value. + +## Expressions (Optional) + +An expression is a fragment of [Javascript/ECMAScript +5.1](http://www.ecma-international.org/ecma-262/5.1/) code evaluated by the +workflow platform to affect the inputs, outputs, or +behavior of a process. In the generic execution sequence, expressions may +be evaluated during step 5 (process setup), step 6 (execute process), +and/or step 7 (capture output). Expressions are distinct from regular +processes in that they are intended to modify the behavior of the workflow +itself rather than perform the primary work of the workflow. + +Expressions in CWL are an optional feature and are not required to be +implemented by all consumers of CWL documents. They should be used sparingly, +when there is no other way to achieve the desired outcome. Excessive use of +expressions may be a signal that other refactoring of the tools or workflows +would benefit the author, runtime, and users of the CWL document in question. + +To declare the use of expressions, the document must include the process +requirement `InlineJavascriptRequirement`. Expressions may be used in any +field permitting the pseudo-type `Expression`, as specified by this +document. + +Expressions are denoted by the syntax `$(...)` or `${...}`. 
A code +fragment wrapped in the `$(...)` syntax must be evaluated as a +[ECMAScript expression](http://www.ecma-international.org/ecma-262/5.1/#sec-11). A +code fragment wrapped in the `${...}` syntax must be evaluated as a +[ECMAScript function body](http://www.ecma-international.org/ecma-262/5.1/#sec-13) +for an anonymous, zero-argument function. Expressions must return a valid JSON +data type: one of null, string, number, boolean, array, object. Other return +values must result in a `permanentFailure`. Implementations must permit any +syntactically valid Javascript and account for nesting of parenthesis or braces +and that strings that may contain parenthesis or braces when scanning for +expressions. + +The runtime must include any code defined in the ["expressionLib" field of +InlineJavascriptRequirement](#InlineJavascriptRequirement) prior to +executing the actual expression. + +Before executing the expression, the runtime must initialize as global +variables the fields of the parameter context described above. + +The effective value of the field after expression evaluation follows the +same rules as parameter references discussed above. Multiple expressions +may appear in a single field. + +Expressions must be evaluated in an isolated context (a "sandbox") which +permits no side effects to leak outside the context. Expressions also must +be evaluated in [Javascript strict mode](http://www.ecma-international.org/ecma-262/5.1/#sec-4.2.2). + +The order in which expressions are evaluated is undefined except where +otherwise noted in this document. + +An implementation may choose to implement parameter references by +evaluating as a Javascript expression. The results of evaluating +parameter references must be identical whether implemented by Javascript +evaluation or some other means. 
+ +Implementations may apply other limits, such as process isolation, timeouts, +and operating system containers/jails to minimize the security risks associated +with running untrusted code embedded in a CWL document. + +Javascript exceptions thrown from a CWL expression must result in a +`permanentFailure` of the CWL process. + +## Executing CWL documents as scripts + +By convention, a CWL document may begin with `#!/usr/bin/env cwl-runner` +and be marked as executable (the POSIX "+x" permission bits) to enable it +to be executed directly. A workflow platform may support this mode of +operation; if so, it must provide `cwl-runner` as an alias for the +platform's CWL implementation. + +A CWL input object document may similarly begin with `#!/usr/bin/env +cwl-runner` and be marked as executable. In this case, the input object +must include the field `cwl:tool` supplying an IRI to the default CWL +document that should be executed using the fields of the input object as +input parameters. + +The `cwl-runner` interface is required for conformance testing and is +documented in [cwl-runner.cwl](cwl-runner.cwl). + +## Discovering CWL documents on a local filesystem + +To discover CWL documents look in the following locations: + +For each value in the `XDG_DATA_DIRS` environment variable (which is a `:` colon +separated list), check the `./commonwl` subdirectory. If `XDG_DATA_DIRS` is +unset or empty, then check using the default value for `XDG_DATA_DIRS`: +`/usr/local/share/:/usr/share/` (That is to say, check `/usr/share/commonwl/` +and `/usr/local/share/commonwl/`) + +Then check `$XDG_DATA_HOME/commonwl/`. 
+ +If the `XDG_DATA_HOME` environment variable is unset, its default value is +`$HOME/.local/share` (That is to say, check `$HOME/.local/share/commonwl`) + +`$XDG_DATA_HOME` and `$XDG_DATA_DIRS` are from the [XDG Base Directory +Specification](http://standards.freedesktop.org/basedir-spec/basedir-spec-0.6.html) diff -Nru cwltool-2.0.20200126090152+dfsg/cwltool/schemas/v1.2.0-dev1/contrib.md cwltool-2.0.20200224214940+dfsg/cwltool/schemas/v1.2.0-dev1/contrib.md --- cwltool-2.0.20200126090152+dfsg/cwltool/schemas/v1.2.0-dev1/contrib.md 1970-01-01 00:00:00.000000000 +0000 +++ cwltool-2.0.20200224214940+dfsg/cwltool/schemas/v1.2.0-dev1/contrib.md 2020-02-24 22:00:06.000000000 +0000 @@ -0,0 +1,28 @@ +Authors: + +* Peter Amstutz , Arvados Project, Veritas Genetics +* Michael R. Crusoe , Common Workflow Language + project + +Contributors: + +* Brad Chapman , Harvard Chan School of Public Health +* John Chilton , Galaxy Project, Pennsylvania State University +* Bogdan Gavrilovic , Seven Bridges Genomics +* Kaushik Ghose, Seven Bridges Genomics, Inc; https://orcid.org/0000-0003-2933-1260 +* Dan Leehr , Duke University +* Hervé Ménager , Institut Pasteur +* Maya Nedeljkovich , Seven Bridges Genomics +* Matt Scales , Institute of Cancer Research, London +* Stian Soiland-Reyes , University of Manchester +* Luka Stojanovic , Seven Bridges Genomics + +This specification was [approved on 6 June 2019](https://github.com/common-workflow-language/common-workflow-language/issues/861) by the CWL leadership team consisting of: + +* Peter Amstutz, Curoverse Inc. / Arvados; https://orcid.org/0000-0003-3566-7705 +* John Chilton, Pennsylvania State University / Galaxy Project; https://orcid.org/0000-0002-6794-0756 +* Michael R. 
Crusoe, CWL Project Lead; https://orcid.org/0000-0002-2961-9670 +* Brandi Davis Dusenbery, Seven Bridges Genomics, Inc.; https://orcid.org/0000-0001-7811-8613 +* Jeff Gentry, Broad Institute; https://orcid.org/0000-0001-5351-8442 +* Hervé Ménager, Institut Pasteur; https://orcid.org/0000-0002-7552-1009 +* Stian Soiland-Reyes, University of Manchester; https://orcid.org/0000-0001-9842-9718 diff -Nru cwltool-2.0.20200126090152+dfsg/cwltool/schemas/v1.2.0-dev1/intro.md cwltool-2.0.20200224214940+dfsg/cwltool/schemas/v1.2.0-dev1/intro.md --- cwltool-2.0.20200126090152+dfsg/cwltool/schemas/v1.2.0-dev1/intro.md 1970-01-01 00:00:00.000000000 +0000 +++ cwltool-2.0.20200224214940+dfsg/cwltool/schemas/v1.2.0-dev1/intro.md 2020-02-24 22:00:06.000000000 +0000 @@ -0,0 +1,23 @@ +# Status of this document + +This document is the product of the [Common Workflow Language working +group](https://www.commonwl.org/). The +source for the latest stable version of this document is available in the "v1.0" directory at + +https://github.com/common-workflow-language/common-workflow-language + +The current development version is at https://github.com/common-workflow-language/cwl-v1.2/ + +The products of the CWL working group (including this document) are made available +under the terms of the Apache License, version 2.0. + + + +# Introduction + +The Common Workflow Language (CWL) working group is an informal, multi-vendor +working group consisting of various organizations and individuals that have an +interest in portability of data analysis workflows. The goal is to create +specifications like this one that enable data scientists to describe analysis +tools and workflows that are powerful, easy to use, portable, and support +reproducibility. 
diff -Nru cwltool-2.0.20200126090152+dfsg/cwltool/schemas/v1.2.0-dev1/invocation.md cwltool-2.0.20200224214940+dfsg/cwltool/schemas/v1.2.0-dev1/invocation.md --- cwltool-2.0.20200126090152+dfsg/cwltool/schemas/v1.2.0-dev1/invocation.md 1970-01-01 00:00:00.000000000 +0000 +++ cwltool-2.0.20200224214940+dfsg/cwltool/schemas/v1.2.0-dev1/invocation.md 2020-02-24 22:00:06.000000000 +0000 @@ -0,0 +1,160 @@ +# Running a Command + +To accommodate the enormous variety in syntax and semantics for input, runtime +environment, invocation, and output of arbitrary programs, a CommandLineTool +defines an "input binding" that describes how to translate abstract input +parameters to an concrete program invocation, and an "output binding" that +describes how to generate output parameters from program output. + +## Input binding + +The tool command line is built by applying command line bindings to the +input object. Bindings are listed either as part of an [input +parameter](#CommandInputParameter) using the `inputBinding` field, or +separately using the `arguments` field of the CommandLineTool. + +The algorithm to build the command line is as follows. In this algorithm, +the sort key is a list consisting of one or more numeric or string +elements. Strings are sorted lexicographically based on UTF-8 encoding. + + 1. Collect `CommandLineBinding` objects from `arguments`. Assign a sorting + key `[position, i]` where `position` is + [`CommandLineBinding.position`](#CommandLineBinding) and `i` + is the index in the `arguments` list. + + 2. Collect `CommandLineBinding` objects from the `inputs` schema and + associate them with values from the input object. Where the input type + is a record, array, or map, recursively walk the schema and input object, + collecting nested `CommandLineBinding` objects and associating them with + values from the input object. + + 3. Create a sorting key by taking the value of the `position` field at + each level leading to each leaf binding object. 
If `position` is not + specified, it is not added to the sorting key. For bindings on arrays + and maps, the sorting key must include the array index or map key + following the position. If and only if two bindings have the same sort + key, the tie must be broken using the ordering of the field or parameter + name immediately containing the leaf binding. + + 4. Sort elements using the assigned sorting keys. Numeric entries sort + before strings. + + 5. In the sorted order, apply the rules defined in + [`CommandLineBinding`](#CommandLineBinding) to convert bindings to actual + command line elements. + + 6. Insert elements from `baseCommand` at the beginning of the command + line. + +## Runtime environment + +All files listed in the input object must be made available in the runtime +environment. The implementation may use a shared or distributed file +system or transfer files via explicit download to the host. Implementations +may choose not to provide access to files not explicitly specified in the input +object or process requirements. + +Output files produced by tool execution must be written to the +**designated output directory**. The initial current working +directory when executing the tool must be the designated output +directory. The designated output directory should be empty, except +for files or directories specified using +[InitialWorkDirRequirement](InitialWorkDirRequirement). + +Files may also be written to the **designated temporary directory**. This +directory must be isolated and not shared with other processes. Any files +written to the designated temporary directory may be automatically deleted by +the workflow platform immediately after the tool terminates. + +For compatibility, files may be written to the **system temporary directory** +which must be located at `/tmp`. 
Because the system temporary directory may be +shared with other processes on the system, files placed in the system temporary +directory are not guaranteed to be deleted automatically. A tool +must not use the system temporary directory as a backchannel communication with +other tools. It is valid for the system temporary directory to be the same as +the designated temporary directory. + +When executing the tool, the tool must execute in a new, empty environment +with only the environment variables described below; the child process must +not inherit environment variables from the parent process except as +specified or at user option. + + * `HOME` must be set to the designated output directory. + * `TMPDIR` must be set to the designated temporary directory. + * `PATH` may be inherited from the parent process, except when run in a + container that provides its own `PATH`. + * Variables defined by [EnvVarRequirement](#EnvVarRequirement) + * The default environment of the container, such as when using + [DockerRequirement](#DockerRequirement) + +An implementation may forbid the tool from writing to any location in the +runtime environment file system other than the designated temporary directory, +system temporary directory, and designated output directory. An implementation +may provide read-only input files, and disallow in-place update of input files. +The designated temporary directory, system temporary directory and designated +output directory may each reside on different mount points on different file +systems. + +An implementation may forbid the tool from directly accessing network +resources. Correct tools must not assume any network access unless they have +the 'networkAccess' field of a ['NetworkAccess'](#NetworkAccess) requirement set +to `true` but even then this does not imply a publically routable IP address or +the ability to accept inbound connections. 
+ +The `runtime` section available in [parameter references](#Parameter_references) +and [expressions](#Expressions) contains the following fields. As noted +earlier, an implementation may perform deferred resolution of runtime fields by providing +opaque strings for any or all of the following fields; parameter references +and expressions may only use the literal string value of the field and must +not perform computation on the contents. + + * `runtime.outdir`: an absolute path to the designated output directory + * `runtime.tmpdir`: an absolute path to the designated temporary directory + * `runtime.cores`: number of CPU cores reserved for the tool process + * `runtime.ram`: amount of RAM in mebibytes (2\*\*20) reserved for the tool process + * `runtime.outdirSize`: reserved storage space available in the designated output directory + * `runtime.tmpdirSize`: reserved storage space available in the designated temporary directory + +For `cores`, `ram`, `outdirSize` and `tmpdirSize`, if an implementation can't +provide the actual number of reserved resources during the expression evaluation time, +it should report back the minimal requested amount. + +See [ResourceRequirement](#ResourceRequirement) for details on how to +describe the hardware resources required by a tool. + +The standard input stream, the standard output stream, and/or the standard error +stream may be redirected as described in the [`stdin`](#stdin), +[`stdout`](#stdout), and [`stderr`](#stderr) fields. + +## Execution + +Once the command line is built and the runtime environment is created, the +actual tool is executed. + +The standard error stream and standard output stream may be captured by +platform logging facilities for storage and reporting. + +Tools may be multithreaded or spawn child processes; however, when the +parent process exits, the tool is considered finished regardless of whether +any detached child processes are still running. 
Tools must not require any +kind of console, GUI, or web based user interaction in order to start and +run to completion. + +The exit code of the process indicates if the process completed +successfully. By convention, an exit code of zero is treated as success +and non-zero exit codes are treated as failure. This may be customized by +providing the fields `successCodes`, `temporaryFailCodes`, and +`permanentFailCodes`. An implementation may choose to default unspecified +non-zero exit codes to either `temporaryFailure` or `permanentFailure`. + +The exit code of the process is available to expressions in +`outputEval` as `runtime.exitCode`. + +## Output binding + +If the output directory contains a file named "cwl.output.json", that file +must be loaded and used as the output object. Otherwise, the output object +must be generated by walking the parameters listed in `outputs` and +applying output bindings to the tool output. Output bindings are +associated with output parameters using the `outputBinding` field. See +[`CommandOutputBinding`](#CommandOutputBinding) for details. diff -Nru cwltool-2.0.20200126090152+dfsg/cwltool/schemas/v1.2.0-dev1/Operation.yml cwltool-2.0.20200224214940+dfsg/cwltool/schemas/v1.2.0-dev1/Operation.yml --- cwltool-2.0.20200126090152+dfsg/cwltool/schemas/v1.2.0-dev1/Operation.yml 1970-01-01 00:00:00.000000000 +0000 +++ cwltool-2.0.20200224214940+dfsg/cwltool/schemas/v1.2.0-dev1/Operation.yml 2020-02-24 22:00:06.000000000 +0000 @@ -0,0 +1,90 @@ +saladVersion: v1.1 +$base: "https://w3id.org/cwl/cwl#" + +$namespaces: + cwl: "https://w3id.org/cwl/cwl#" + +$graph: + +- name: OperationInputParameter + type: record + extends: InputParameter + docParent: "#Operation" + doc: | + Describe an input parameter of an operation. 
+ fields: + - name: type + type: + - CWLType + - InputRecordSchema + - InputEnumSchema + - InputArraySchema + - string + - type: array + items: + - CWLType + - InputRecordSchema + - InputEnumSchema + - InputArraySchema + - string + jsonldPredicate: + "_id": "sld:type" + "_type": "@vocab" + refScope: 2 + typeDSL: True + doc: | + Specify valid types of data that may be assigned to this parameter. + +- name: OperationOutputParameter + type: record + extends: OutputParameter + docParent: "#Operation" + doc: | + Describe an output parameter of an operation. + fields: + - name: type + type: + - CWLType + - OutputRecordSchema + - OutputEnumSchema + - OutputArraySchema + - string + - type: array + items: + - CWLType + - OutputRecordSchema + - OutputEnumSchema + - OutputArraySchema + - string + jsonldPredicate: + "_id": "sld:type" + "_type": "@vocab" + refScope: 2 + typeDSL: True + doc: | + Specify valid types of data that may be assigned to this parameter. + +- type: record + name: Operation + extends: Process + documentRoot: true + specialize: + - specializeFrom: InputParameter + specializeTo: OperationInputParameter + - specializeFrom: OutputParameter + specializeTo: OperationOutputParameter + doc: | + This record describes an abstract operation. It is a potential + step of a workflow that has not yet been bound to a concrete + implementation. It specifies an input and output signature, but + does not provide enough information to be executed. An + implementation (or other tooling) may provide a means of binding + an Operation to a concrete process (such as Workflow, + CommandLineTool, or ExpressionTool) with a compatible signature. 
+ + fields: + - name: class + jsonldPredicate: + "_id": "@type" + "_type": "@vocab" + type: string diff -Nru cwltool-2.0.20200126090152+dfsg/cwltool/schemas/v1.2.0-dev1/Process.yml cwltool-2.0.20200224214940+dfsg/cwltool/schemas/v1.2.0-dev1/Process.yml --- cwltool-2.0.20200126090152+dfsg/cwltool/schemas/v1.2.0-dev1/Process.yml 1970-01-01 00:00:00.000000000 +0000 +++ cwltool-2.0.20200224214940+dfsg/cwltool/schemas/v1.2.0-dev1/Process.yml 2020-02-24 22:00:06.000000000 +0000 @@ -0,0 +1,959 @@ +saladVersion: v1.1 +$base: "https://w3id.org/cwl/cwl#" + +$namespaces: + cwl: "https://w3id.org/cwl/cwl#" + sld: "https://w3id.org/cwl/salad#" + rdfs: "http://www.w3.org/2000/01/rdf-schema#" + +$graph: + +- name: "Common Workflow Language, v1.2.0-dev1" + type: documentation + doc: {$include: concepts.md} + +- $import: "salad/schema_salad/metaschema/metaschema_base.yml" + +- name: BaseTypesDoc + type: documentation + doc: | + ## Base types + docChild: + - "#CWLType" + - "#Process" + +- type: enum + name: CWLVersion + doc: "Version symbols for published CWL document versions." + symbols: + - cwl:draft-2 + - cwl:draft-3.dev1 + - cwl:draft-3.dev2 + - cwl:draft-3.dev3 + - cwl:draft-3.dev4 + - cwl:draft-3.dev5 + - cwl:draft-3 + - cwl:draft-4.dev1 + - cwl:draft-4.dev2 + - cwl:draft-4.dev3 + - cwl:v1.0.dev4 + - cwl:v1.0 + - cwl:v1.1.0-dev1 # a dash is required by the semver 2.0 rules + - cwl:v1.1 + - cwl:v1.2.0-dev1 + +- name: CWLType + type: enum + extends: "sld:PrimitiveType" + symbols: + - cwl:File + - cwl:Directory + doc: + - "Extends primitive types with the concept of a file and directory as a builtin type." + - "File: A File object" + - "Directory: A Directory object" + +- name: File + type: record + docParent: "#CWLType" + doc: | + Represents a file (or group of files when `secondaryFiles` is provided) that + will be accessible by tools using standard POSIX file system call API such as + open(2) and read(2). + + Files are represented as objects with `class` of `File`. 
File objects have + a number of properties that provide metadata about the file. + + The `location` property of a File is a URI that uniquely identifies the + file. Implementations must support the file:// URI scheme and may support + other schemes such as http://. The value of `location` may also be a + relative reference, in which case it must be resolved relative to the URI + of the document it appears in. Alternately to `location`, implementations + must also accept the `path` property on File, which must be a filesystem + path available on the same host as the CWL runner (for inputs) or the + runtime environment of a command line tool execution (for command line tool + outputs). + + If no `location` or `path` is specified, a file object must specify + `contents` with the UTF-8 text content of the file. This is a "file + literal". File literals do not correspond to external resources, but are + created on disk with `contents` with when needed for a executing a tool. + Where appropriate, expressions can return file literals to define new files + on a runtime. The maximum size of `contents` is 64 kilobytes. + + The `basename` property defines the filename on disk where the file is + staged. This may differ from the resource name. If not provided, + `basename` must be computed from the last path part of `location` and made + available to expressions. + + The `secondaryFiles` property is a list of File or Directory objects that + must be staged in the same directory as the primary file. It is an error + for file names to be duplicated in `secondaryFiles`. + + The `size` property is the size in bytes of the File. It must be computed + from the resource and made available to expressions. The `checksum` field + contains a cryptographic hash of the file content for use it verifying file + contents. Implementations may, at user option, enable or disable + computation of the `checksum` field for performance or other reasons. 
+ However, the ability to compute output checksums is required to pass the + CWL conformance test suite. + + When executing a CommandLineTool, the files and secondary files may be + staged to an arbitrary directory, but must use the value of `basename` for + the filename. The `path` property must be file path in the context of the + tool execution runtime (local to the compute node, or within the executing + container). All computed properties should be available to expressions. + File literals also must be staged and `path` must be set. + + When collecting CommandLineTool outputs, `glob` matching returns file paths + (with the `path` property) and the derived properties. This can all be + modified by `outputEval`. Alternately, if the file `cwl.output.json` is + present in the output, `outputBinding` is ignored. + + File objects in the output must provide either a `location` URI or a `path` + property in the context of the tool execution runtime (local to the compute + node, or within the executing container). + + When evaluating an ExpressionTool, file objects must be referenced via + `location` (the expression tool does not have access to files on disk so + `path` is meaningless) or as file literals. It is legal to return a file + object with an existing `location` but a different `basename`. The + `loadContents` field of ExpressionTool inputs behaves the same as on + CommandLineTool inputs, however it is not meaningful on the outputs. + + An ExpressionTool may forward file references from input to output by using + the same value for `location`. + + fields: + - name: class + type: + type: enum + name: File_class + symbols: + - cwl:File + jsonldPredicate: + _id: "@type" + _type: "@vocab" + doc: Must be `File` to indicate this object describes a file. + - name: location + type: string? + doc: | + An IRI that identifies the file resource. This may be a relative + reference, in which case it must be resolved using the base IRI of the + document. 
The location may refer to a local or remote resource; the + implementation must use the IRI to retrieve file content. If an + implementation is unable to retrieve the file content stored at a + remote resource (due to unsupported protocol, access denied, or other + issue) it must signal an error. + + If the `location` field is not provided, the `contents` field must be + provided. The implementation must assign a unique identifier for + the `location` field. + + If the `path` field is provided but the `location` field is not, an + implementation may assign the value of the `path` field to `location`, + then follow the rules above. + jsonldPredicate: + _id: "@id" + _type: "@id" + - name: path + type: string? + doc: | + The local host path where the File is available when a CommandLineTool is + executed. This field must be set by the implementation. The final + path component must match the value of `basename`. This field + must not be used in any other context. The command line tool being + executed must be able to to access the file at `path` using the POSIX + `open(2)` syscall. + + As a special case, if the `path` field is provided but the `location` + field is not, an implementation may assign the value of the `path` + field to `location`, and remove the `path` field. + + If the `path` contains [POSIX shell metacharacters](http://pubs.opengroup.org/onlinepubs/9699919799/utilities/V3_chap02.html#tag_18_02) + (`|`,`&`, `;`, `<`, `>`, `(`,`)`, `$`,`` ` ``, `\`, `"`, `'`, + ``, ``, and ``) or characters + [not allowed](http://www.iana.org/assignments/idna-tables-6.3.0/idna-tables-6.3.0.xhtml) + for [Internationalized Domain Names for Applications](https://tools.ietf.org/html/rfc6452) + then implementations may terminate the process with a + `permanentFailure`. + jsonldPredicate: + "_id": "cwl:path" + "_type": "@id" + - name: basename + type: string? + doc: | + The base name of the file, that is, the name of the file without any + leading directory path. 
The base name must not contain a slash `/`. + + If not provided, the implementation must set this field based on the + `location` field by taking the final path component after parsing + `location` as an IRI. If `basename` is provided, it is not required to + match the value from `location`. + + When this file is made available to a CommandLineTool, it must be named + with `basename`, i.e. the final component of the `path` field must match + `basename`. + jsonldPredicate: "cwl:basename" + - name: dirname + type: string? + doc: | + The name of the directory containing file, that is, the path leading up + to the final slash in the path such that `dirname + '/' + basename == + path`. + + The implementation must set this field based on the value of `path` + prior to evaluating parameter references or expressions in a + CommandLineTool document. This field must not be used in any other + context. + - name: nameroot + type: string? + doc: | + The basename root such that `nameroot + nameext == basename`, and + `nameext` is empty or begins with a period and contains at most one + period. For the purposess of path splitting leading periods on the + basename are ignored; a basename of `.cshrc` will have a nameroot of + `.cshrc`. + + The implementation must set this field automatically based on the value + of `basename` prior to evaluating parameter references or expressions. + - name: nameext + type: string? + doc: | + The basename extension such that `nameroot + nameext == basename`, and + `nameext` is empty or begins with a period and contains at most one + period. Leading periods on the basename are ignored; a basename of + `.cshrc` will have an empty `nameext`. + + The implementation must set this field automatically based on the value + of `basename` prior to evaluating parameter references or expressions. + - name: checksum + type: string? + doc: | + Optional hash code for validating file integrity. 
Currently must be in the form + "sha1$ + hexadecimal string" using the SHA-1 algorithm. + - name: size + type: long? + doc: Optional file size + - name: "secondaryFiles" + type: + - "null" + - type: array + items: [File, Directory] + jsonldPredicate: + _id: "cwl:secondaryFiles" + secondaryFilesDSL: true + doc: | + A list of additional files or directories that are associated with the + primary file and must be transferred alongside the primary file. + Examples include indexes of the primary file, or external references + which must be included when loading primary document. A file object + listed in `secondaryFiles` may itself include `secondaryFiles` for + which the same rules apply. + - name: format + type: string? + jsonldPredicate: + _id: cwl:format + _type: "@id" + identity: true + doc: | + The format of the file: this must be an IRI of a concept node that + represents the file format, preferrably defined within an ontology. + If no ontology is available, file formats may be tested by exact match. + + Reasoning about format compatability must be done by checking that an + input file format is the same, `owl:equivalentClass` or + `rdfs:subClassOf` the format required by the input parameter. + `owl:equivalentClass` is transitive with `rdfs:subClassOf`, e.g. if + ` owl:equivalentClass ` and ` owl:subclassOf ` then infer + ` owl:subclassOf `. + + File format ontologies may be provided in the "$schema" metadata at the + root of the document. If no ontologies are specified in `$schema`, the + runtime may perform exact file format matches. + - name: contents + type: string? + doc: | + File contents literal. Maximum of 64 KiB. + + If neither `location` nor `path` is provided, `contents` must be + non-null. The implementation must assign a unique identifier for the + `location` field. When the file is staged as input to CommandLineTool, + the value of `contents` must be written to a file. 
+ + If `loadContents` of `inputBinding` or `outputBinding` is true and + `location` is valid, the implementation must read up to the first 64 + KiB of text from the file and place it in the "contents" field. + + +- name: Directory + type: record + docAfter: "#File" + doc: | + Represents a directory to present to a command line tool. + + Directories are represented as objects with `class` of `Directory`. Directory objects have + a number of properties that provide metadata about the directory. + + The `location` property of a Directory is a URI that uniquely identifies + the directory. Implementations must support the file:// URI scheme and may + support other schemes such as http://. Alternately to `location`, + implementations must also accept the `path` property on Directory, which + must be a filesystem path available on the same host as the CWL runner (for + inputs) or the runtime environment of a command line tool execution (for + command line tool outputs). + + A Directory object may have a `listing` field. This is a list of File and + Directory objects that are contained in the Directory. For each entry in + `listing`, the `basename` property defines the name of the File or + Subdirectory when staged to disk. If `listing` is not provided, the + implementation must have some way of fetching the Directory listing at + runtime based on the `location` field. + + If a Directory does not have `location`, it is a Directory literal. A + Directory literal must provide `listing`. Directory literals must be + created on disk at runtime as needed. + + The resources in a Directory literal do not need to have any implied + relationship in their `location`. For example, a Directory listing may + contain two files located on different hosts. It is the responsibility of + the runtime to ensure that those files are staged to disk appropriately. + Secondary files associated with files in `listing` must also be staged to + the same Directory. 
+ + When executing a CommandLineTool, Directories must be recursively staged + first and have local values of `path` assigend. + + Directory objects in CommandLineTool output must provide either a + `location` URI or a `path` property in the context of the tool execution + runtime (local to the compute node, or within the executing container). + + An ExpressionTool may forward file references from input to output by using + the same value for `location`. + + Name conflicts (the same `basename` appearing multiple times in `listing` + or in any entry in `secondaryFiles` in the listing) is a fatal error. + + fields: + - name: class + type: + type: enum + name: Directory_class + symbols: + - cwl:Directory + jsonldPredicate: + _id: "@type" + _type: "@vocab" + doc: Must be `Directory` to indicate this object describes a Directory. + - name: location + type: string? + doc: | + An IRI that identifies the directory resource. This may be a relative + reference, in which case it must be resolved using the base IRI of the + document. The location may refer to a local or remote resource. If + the `listing` field is not set, the implementation must use the + location IRI to retrieve directory listing. If an implementation is + unable to retrieve the directory listing stored at a remote resource (due to + unsupported protocol, access denied, or other issue) it must signal an + error. + + If the `location` field is not provided, the `listing` field must be + provided. The implementation must assign a unique identifier for + the `location` field. + + If the `path` field is provided but the `location` field is not, an + implementation may assign the value of the `path` field to `location`, + then follow the rules above. + jsonldPredicate: + _id: "@id" + _type: "@id" + - name: path + type: string? + doc: | + The local path where the Directory is made available prior to executing a + CommandLineTool. This must be set by the implementation. 
This field + must not be used in any other context. The command line tool being + executed must be able to to access the directory at `path` using the POSIX + `opendir(2)` syscall. + + If the `path` contains [POSIX shell metacharacters](http://pubs.opengroup.org/onlinepubs/9699919799/utilities/V3_chap02.html#tag_18_02) + (`|`,`&`, `;`, `<`, `>`, `(`,`)`, `$`,`` ` ``, `\`, `"`, `'`, + ``, ``, and ``) or characters + [not allowed](http://www.iana.org/assignments/idna-tables-6.3.0/idna-tables-6.3.0.xhtml) + for [Internationalized Domain Names for Applications](https://tools.ietf.org/html/rfc6452) + then implementations may terminate the process with a + `permanentFailure`. + jsonldPredicate: + _id: "cwl:path" + _type: "@id" + - name: basename + type: string? + doc: | + The base name of the directory, that is, the name of the file without any + leading directory path. The base name must not contain a slash `/`. + + If not provided, the implementation must set this field based on the + `location` field by taking the final path component after parsing + `location` as an IRI. If `basename` is provided, it is not required to + match the value from `location`. + + When this file is made available to a CommandLineTool, it must be named + with `basename`, i.e. the final component of the `path` field must match + `basename`. + jsonldPredicate: "cwl:basename" + - name: listing + type: + - "null" + - type: array + items: [File, Directory] + doc: | + List of files or subdirectories contained in this directory. The name + of each file or subdirectory is determined by the `basename` field of + each `File` or `Directory` object. It is an error if a `File` shares a + `basename` with any other entry in `listing`. If two or more + `Directory` object share the same `basename`, this must be treated as + equivalent to a single subdirectory with the listings recursively + merged. 
+ jsonldPredicate: + _id: "cwl:listing" + +- name: Labeled + type: record + abstract: true + fields: + - name: label + type: + - "null" + - string + jsonldPredicate: "rdfs:label" + doc: "A short, human-readable label of this object." + + +- name: Identified + type: record + abstract: true + fields: + - name: id + type: string? + jsonldPredicate: "@id" + doc: "The unique identifier for this object." + + +- name: LoadListingEnum + type: enum + symbols: [no_listing, shallow_listing, deep_listing] + doc: + - | + Specify the desired behavior for loading the `listing` field of + a Directory object for use by expressions. + - "no_listing: Do not load the directory listing." + - "shallow_listing: Only load the top level listing, do not recurse into subdirectories." + - "deep_listing: Load the directory listing and recursively load all subdirectories as well." + + +- name: LoadContents + type: record + abstract: true + fields: + - name: loadContents + type: boolean? + jsonldPredicate: "cwl:loadContents" + doc: | + Only valid when `type: File` or is an array of `items: File`. + + Read up to the first 64 KiB of text from the file and place it in the + "contents" field of the file object for use by expressions. + - name: loadListing + type: LoadListingEnum? + jsonldPredicate: "cwl:loadListing" + doc: | + Only valid when `type: Directory` or is an array of `items: Directory`. + + Specify the desired behavior for loading the `listing` field of + a Directory object for use by expressions. + + The order of precedence for loadListing is: + + 1. `loadListing` on an individual parameter + 2. Inherited from `LoadListingRequirement` + 3. 
By default: `no_listing` + +- name: FieldBase + type: record + extends: Labeled + abstract: true + fields: + secondaryFiles: + type: + - "null" + - SecondaryFileSchema + - type: array + items: SecondaryFileSchema + jsonldPredicate: + _id: "cwl:secondaryFiles" + secondaryFilesDSL: true + doc: | + Only valid when `type: File` or is an array of `items: File`. + + Provides a pattern or expression specifying files or + directories that should be included alongside the primary + file. Secondary files may be required or optional. When not + explicitly specified, secondary files specified for `inputs` + are required and `outputs` are optional. An implementation + must include matching Files and Directories in the + `secondaryFiles` property of the primary file. These Files + and Directories must be transferred and staged alongside the + primary file. An implementation may fail workflow execution + if a required secondary file does not exist. + + If the value is an expression, the value of `self` in the expression + must be the primary input or output File object to which this binding + applies. The `basename`, `nameroot` and `nameext` fields must be + present in `self`. For `CommandLineTool` outputs the `path` field must + also be present. The expression must return a filename string relative + to the path to the primary File, a File or Directory object with either + `path` or `location` and `basename` fields set, or an array consisting + of strings or File or Directory objects. It is legal to reference an + unchanged File or Directory object taken from input as a secondaryFile. + The expression may return "null" in which case there is no secondaryFile + from that expression. + + To work on non-filename-preserving storage systems, portable tool + descriptions should avoid constructing new values from `location`, but + should construct relative references using `basename` or `nameroot` + instead. 
+ + If a value in `secondaryFiles` is a string that is not an expression, + it specifies that the following pattern should be applied to the path + of the primary file to yield a filename relative to the primary File: + + 1. If string ends with `?` character, remove the last `?` and mark + the resulting secondary file as optional. + 2. If string begins with one or more caret `^` characters, for each + caret, remove the last file extension from the path (the last + period `.` and all following characters). If there are no file + extensions, the path is unchanged. + 3. Append the remainder of the string to the end of the file path. + + streamable: + type: boolean? + doc: | + Only valid when `type: File` or is an array of `items: File`. + + A value of `true` indicates that the file is read or written + sequentially without seeking. An implementation may use this flag to + indicate whether it is valid to stream file contents using a named + pipe. Default: `false`. + + +- name: InputFormat + type: record + abstract: true + fields: + format: + type: + - "null" + - string + - type: array + items: string + - Expression + jsonldPredicate: + _id: cwl:format + _type: "@id" + identity: true + doc: | + Only valid when `type: File` or is an array of `items: File`. + + This must be one or more IRIs of concept nodes + that represents file formats which are allowed as input to this + parameter, preferrably defined within an ontology. If no ontology is + available, file formats may be tested by exact match. + + +- name: OutputFormat + type: record + abstract: true + fields: + format: + type: + - "null" + - string + - Expression + jsonldPredicate: + _id: cwl:format + _type: "@id" + identity: true + doc: | + Only valid when `type: File` or is an array of `items: File`. + + This is the file format that will be assigned to the output + File object. 
+ + +- name: Parameter + type: record + extends: [FieldBase, sld:Documented, Identified] + abstract: true + doc: | + Define an input or output parameter to a process. + + +- type: enum + name: Expression + doc: | + 'Expression' is not a real type. It indicates that a field must allow + runtime parameter references. If [InlineJavascriptRequirement](#InlineJavascriptRequirement) + is declared and supported by the platform, the field must also allow + Javascript expressions. + symbols: + - cwl:ExpressionPlaceholder + + +- name: InputBinding + type: record + fields: + - name: loadContents + type: + - "null" + - boolean + jsonldPredicate: "cwl:loadContents" + doc: | + Use of `loadContents` in `InputBinding` is deprecated. + Preserved for v1.0 backwards compatability. Will be removed in + CWL v2.0. Use `InputParameter.loadContents` instead. + + Only valid when `type: File` or is an array of `items: File`. + + Read up to the first 64 KiB of text from the file and place it in the + "contents" field of the file object for use by expressions. + + +- name: IOSchema + extends: [Labeled, sld:Documented] + type: record + abstract: true + fields: + - name: name + type: string? 
+ jsonldPredicate: "@id" + doc: "The identifier for this type" + +- name: InputSchema + extends: [IOSchema] + type: record + abstract: true + +- name: OutputSchema + extends: [IOSchema] + type: record + abstract: true + + +- name: InputRecordField + type: record + extends: [sld:RecordField, FieldBase, InputFormat, LoadContents] + specialize: + - specializeFrom: "sld:RecordSchema" + specializeTo: InputRecordSchema + - specializeFrom: "sld:EnumSchema" + specializeTo: InputEnumSchema + - specializeFrom: "sld:ArraySchema" + specializeTo: InputArraySchema + - specializeFrom: "sld:PrimitiveType" + specializeTo: CWLType + + +- name: InputRecordSchema + type: record + extends: ["sld:RecordSchema", InputSchema] + specialize: + - specializeFrom: "sld:RecordField" + specializeTo: InputRecordField + + +- name: InputEnumSchema + type: record + extends: ["sld:EnumSchema", InputSchema] + + +- name: InputArraySchema + type: record + extends: ["sld:ArraySchema", InputSchema] + specialize: + - specializeFrom: "sld:RecordSchema" + specializeTo: InputRecordSchema + - specializeFrom: "sld:EnumSchema" + specializeTo: InputEnumSchema + - specializeFrom: "sld:ArraySchema" + specializeTo: InputArraySchema + - specializeFrom: "sld:PrimitiveType" + specializeTo: CWLType + + +- name: OutputRecordField + type: record + extends: [sld:RecordField, FieldBase, OutputFormat] + specialize: + - specializeFrom: "sld:RecordSchema" + specializeTo: OutputRecordSchema + - specializeFrom: "sld:EnumSchema" + specializeTo: OutputEnumSchema + - specializeFrom: "sld:ArraySchema" + specializeTo: OutputArraySchema + - specializeFrom: "sld:PrimitiveType" + specializeTo: CWLType + + +- name: OutputRecordSchema + type: record + extends: ["sld:RecordSchema", "#OutputSchema"] + docParent: "#OutputParameter" + specialize: + - specializeFrom: "sld:RecordField" + specializeTo: OutputRecordField + + +- name: OutputEnumSchema + type: record + extends: ["sld:EnumSchema", OutputSchema] + docParent: "#OutputParameter" + + +- 
name: OutputArraySchema + type: record + extends: ["sld:ArraySchema", OutputSchema] + docParent: "#OutputParameter" + specialize: + - specializeFrom: "sld:RecordSchema" + specializeTo: OutputRecordSchema + - specializeFrom: "sld:EnumSchema" + specializeTo: OutputEnumSchema + - specializeFrom: "sld:ArraySchema" + specializeTo: OutputArraySchema + - specializeFrom: "sld:PrimitiveType" + specializeTo: CWLType + + +- name: InputParameter + type: record + abstract: true + extends: [Parameter, InputFormat, LoadContents] + fields: + - name: default + type: Any? + jsonldPredicate: + _id: sld:default + noLinkCheck: true + doc: | + The default value to use for this parameter if the parameter is missing + from the input object, or if the value of the parameter in the input + object is `null`. Default values are applied before evaluating expressions + (e.g. dependent `valueFrom` fields). + + +- name: OutputParameter + type: record + extends: [Parameter, OutputFormat] + abstract: true + + +- type: record + name: ProcessRequirement + abstract: true + doc: | + A process requirement declares a prerequisite that may or must be fulfilled + before executing a process. See [`Process.hints`](#process) and + [`Process.requirements`](#process). + + Process requirements are the primary mechanism for specifying extensions to + the CWL core specification. + + +- type: record + name: Process + extends: [Identified, Labeled, sld:Documented] + abstract: true + doc: | + + The base executable type in CWL is the `Process` object defined by the + document. Note that the `Process` object is abstract and cannot be + directly executed. + + fields: + - name: inputs + type: + type: array + items: InputParameter + jsonldPredicate: + _id: "cwl:inputs" + mapSubject: id + mapPredicate: type + doc: | + Defines the input parameters of the process. The process is ready to + run when all required input parameters are associated with concrete + values. 
Input parameters include a schema for each parameter which is + used to validate the input object. It may also be used to build a user + interface for constructing the input object. + + When accepting an input object, all input parameters must have a value. + If an input parameter is missing from the input object, it must be + assigned a value of `null` (or the value of `default` for that + parameter, if provided) for the purposes of validation and evaluation + of expressions. + + - name: outputs + type: + type: array + items: OutputParameter + jsonldPredicate: + _id: "cwl:outputs" + mapSubject: id + mapPredicate: type + doc: | + Defines the parameters representing the output of the process. May be + used to generate and/or validate the output object. + - name: requirements + type: ProcessRequirement[]? + jsonldPredicate: + _id: "cwl:requirements" + mapSubject: class + doc: | + Declares requirements that apply to either the runtime environment or the + workflow engine that must be met in order to execute this process. If + an implementation cannot satisfy all requirements, or a requirement is + listed which is not recognized by the implementation, it is a fatal + error and the implementation must not attempt to run the process, + unless overridden at user option. + - name: hints + type: Any[]? + doc: | + Declares hints applying to either the runtime environment or the + workflow engine that may be helpful in executing this process. It is + not an error if an implementation cannot satisfy all hints, however + the implementation may report a warning. + jsonldPredicate: + _id: cwl:hints + noLinkCheck: true + mapSubject: class + - name: cwlVersion + type: CWLVersion? + doc: | + CWL document version. Always required at the document root. Not + required for a Process embedded inside another Process. 
+ jsonldPredicate: + "_id": "cwl:cwlVersion" + "_type": "@vocab" + +- name: InlineJavascriptRequirement + type: record + extends: ProcessRequirement + doc: | + Indicates that the workflow platform must support inline Javascript expressions. + If this requirement is not present, the workflow platform must not perform expression + interpolatation. + fields: + - name: class + type: string + doc: "Always 'InlineJavascriptRequirement'" + jsonldPredicate: + "_id": "@type" + "_type": "@vocab" + - name: expressionLib + type: string[]? + doc: | + Additional code fragments that will also be inserted + before executing the expression code. Allows for function definitions that may + be called from CWL expressions. + + +- name: CommandInputSchema + type: record + abstract: true + +- name: SchemaDefRequirement + type: record + extends: ProcessRequirement + doc: | + This field consists of an array of type definitions which must be used when + interpreting the `inputs` and `outputs` fields. When a `type` field + contain a IRI, the implementation must check if the type is defined in + `schemaDefs` and use that definition. If the type is not found in + `schemaDefs`, it is an error. The entries in `schemaDefs` must be + processed in the order listed such that later schema definitions may refer + to earlier schema definitions. + fields: + - name: class + type: string + doc: "Always 'SchemaDefRequirement'" + jsonldPredicate: + "_id": "@type" + "_type": "@vocab" + - name: types + type: + type: array + items: CommandInputSchema + doc: The list of type definitions. + +- name: SecondaryFileSchema + type: record + fields: + - name: pattern + type: + - string + - Expression + doc: | + Provides a pattern or expression specifying files or directories that + should be included alongside the primary file. + + If the value is an expression, the value of `self` in the expression + must be the primary input or output File object to which this binding + applies. 
The `basename`, `nameroot` and `nameext` fields must be + present in `self`. For `CommandLineTool` outputs the `path` field must + also be present. The expression must return a filename string relative + to the path to the primary File, a File or Directory object with either + `path` or `location` and `basename` fields set, or an array consisting + of strings or File or Directory objects. It is legal to reference an + unchanged File or Directory object taken from input as a secondaryFile. + The expression may return "null" in which case there is no secondaryFile + from that expression. + + To work on non-filename-preserving storage systems, portable tool + descriptions should avoid constructing new values from `location`, but + should construct relative references using `basename` or `nameroot` + instead. + + If a value in `secondaryFiles` is a string that is not an expression, + it specifies that the following pattern should be applied to the path + of the primary file to yield a filename relative to the primary File: + + 1. If string ends with `?` character, remove the last `?` and mark + the resulting secondary file as optional. + 2. If string begins with one or more caret `^` characters, for each + caret, remove the last file extension from the path (the last + period `.` and all following characters). If there are no file + extensions, the path is unchanged. + 3. Append the remainder of the string to the end of the file path. + - name: required + type: ["null", boolean, Expression] + doc: | + An implementation must not fail workflow execution if `required` is + set to `false` and the expected secondary file does not exist. + Default value for `required` field is `true` for secondary files on + input and `false` for secondary files on output. + +- name: LoadListingRequirement + type: record + extends: ProcessRequirement + doc: | + Specify the desired behavior for loading the `listing` field of + a Directory object for use by expressions. 
+ fields: + class: + type: string + doc: "Always 'LoadListingRequirement'" + jsonldPredicate: + "_id": "@type" + "_type": "@vocab" + loadListing: + type: LoadListingEnum? + jsonldPredicate: "cwl:loadListing" diff -Nru cwltool-2.0.20200126090152+dfsg/cwltool/schemas/v1.2.0-dev1/README.md cwltool-2.0.20200224214940+dfsg/cwltool/schemas/v1.2.0-dev1/README.md --- cwltool-2.0.20200126090152+dfsg/cwltool/schemas/v1.2.0-dev1/README.md 1970-01-01 00:00:00.000000000 +0000 +++ cwltool-2.0.20200224214940+dfsg/cwltool/schemas/v1.2.0-dev1/README.md 2020-02-24 22:00:06.000000000 +0000 @@ -0,0 +1,7 @@ +Hello! + +This repo holds the unreleased v1.2 of the Common Workflow Language standard and the history of its development. + +You can render this using https://github.com/common-workflow-language/cwl-website/blob/master/website.sh + +You may browse the spec online at https://www.commonwl.org/v1.2/ diff -Nru cwltool-2.0.20200126090152+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/field_name_proc.yml cwltool-2.0.20200224214940+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/field_name_proc.yml --- cwltool-2.0.20200126090152+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/field_name_proc.yml 1970-01-01 00:00:00.000000000 +0000 +++ cwltool-2.0.20200224214940+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/field_name_proc.yml 2020-02-24 22:00:06.000000000 +0000 @@ -0,0 +1,8 @@ + { + "base": "one", + "form": { + "base": "two", + "http://example.com/three": "three", + }, + "http://example.com/acid#four": "four" + } diff -Nru cwltool-2.0.20200126090152+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/field_name_schema.yml cwltool-2.0.20200224214940+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/field_name_schema.yml --- cwltool-2.0.20200126090152+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/field_name_schema.yml 1970-01-01 00:00:00.000000000 +0000 +++ 
cwltool-2.0.20200224214940+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/field_name_schema.yml 2020-02-24 22:00:06.000000000 +0000 @@ -0,0 +1,14 @@ +{ + "$namespaces": { + "acid": "http://example.com/acid#" + }, + "$graph": [{ + "name": "ExampleType", + "type": "record", + "fields": [{ + "name": "base", + "type": "string", + "jsonldPredicate": "http://example.com/base" + }] + }] +} diff -Nru cwltool-2.0.20200126090152+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/field_name_src.yml cwltool-2.0.20200224214940+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/field_name_src.yml --- cwltool-2.0.20200126090152+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/field_name_src.yml 1970-01-01 00:00:00.000000000 +0000 +++ cwltool-2.0.20200224214940+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/field_name_src.yml 2020-02-24 22:00:06.000000000 +0000 @@ -0,0 +1,8 @@ + { + "base": "one", + "form": { + "http://example.com/base": "two", + "http://example.com/three": "three", + }, + "acid:four": "four" + } diff -Nru cwltool-2.0.20200126090152+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/field_name.yml cwltool-2.0.20200224214940+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/field_name.yml --- cwltool-2.0.20200126090152+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/field_name.yml 1970-01-01 00:00:00.000000000 +0000 +++ cwltool-2.0.20200224214940+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/field_name.yml 2020-02-24 22:00:06.000000000 +0000 @@ -0,0 +1,46 @@ +- | + ## Field name resolution + + The document schema declares the vocabulary of known field names. During + preprocessing traversal, field name in the document which are not part of + the schema vocabulary must be resolved to absolute URIs. 
Under "strict" + validation, it is an error for a document to include fields which are not + part of the vocabulary and not resolvable to absolute URIs. Fields names + which are not part of the vocabulary are resolved using the following + rules: + + * If an field name URI begins with a namespace prefix declared in the + document context (`@context`) followed by a colon `:`, the prefix and + colon must be replaced by the namespace declared in `@context`. + + * If there is a vocabulary term which maps to the URI of a resolved + field, the field name must be replace with the vocabulary term. + + * If a field name URI is an absolute URI consisting of a scheme and path + and is not part of the vocabulary, no processing occurs. + + Field name resolution is not relative. It must not be affected by the + base URI. + + ### Field name resolution example + + Given the following schema: + + ``` +- $include: field_name_schema.yml +- | + ``` + + Process the following example: + + ``` +- $include: field_name_src.yml +- | + ``` + + This becomes: + + ``` +- $include: field_name_proc.yml +- | + ``` diff -Nru cwltool-2.0.20200126090152+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/ident_res_proc.yml cwltool-2.0.20200224214940+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/ident_res_proc.yml --- cwltool-2.0.20200126090152+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/ident_res_proc.yml 1970-01-01 00:00:00.000000000 +0000 +++ cwltool-2.0.20200224214940+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/ident_res_proc.yml 2020-02-24 22:00:06.000000000 +0000 @@ -0,0 +1,25 @@ +{ + "id": "http://example.com/base", + "form": { + "id": "http://example.com/base#one", + "things": [ + { + "id": "http://example.com/base#one/two" + }, + { + "id": "http://example.com/base#three" + }, + { + "id": "http://example.com/four#five", + }, + { + "id": "http://example.com/acid#six", + }, + { + "subscopeField": { + "id": 
"http://example.com/base#one/thisIsASubscope/seven" + } + } + ], + } +} diff -Nru cwltool-2.0.20200126090152+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/ident_res_schema.yml cwltool-2.0.20200224214940+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/ident_res_schema.yml --- cwltool-2.0.20200126090152+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/ident_res_schema.yml 1970-01-01 00:00:00.000000000 +0000 +++ cwltool-2.0.20200224214940+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/ident_res_schema.yml 2020-02-24 22:00:06.000000000 +0000 @@ -0,0 +1,23 @@ +{ + "$namespaces": { + "acid": "http://example.com/acid#" + }, + "$graph": [{ + "name": "ExampleType", + "type": "record", + "fields": [{ + "name": "id", + "type": "string", + "jsonldPredicate": "@id" + }]}, { + "name": "SubscopeType", + "type": "record", + "fields": [{ + "name": "subscopeField", + "type": "ExampleType", + "jsonldPredicate": { + "subscope": "thisIsASubscope" + } + }] + }] +} diff -Nru cwltool-2.0.20200126090152+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/ident_res_src.yml cwltool-2.0.20200224214940+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/ident_res_src.yml --- cwltool-2.0.20200126090152+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/ident_res_src.yml 1970-01-01 00:00:00.000000000 +0000 +++ cwltool-2.0.20200224214940+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/ident_res_src.yml 2020-02-24 22:00:06.000000000 +0000 @@ -0,0 +1,25 @@ + { + "id": "http://example.com/base", + "form": { + "id": "one", + "things": [ + { + "id": "two" + }, + { + "id": "#three", + }, + { + "id": "four#five", + }, + { + "id": "acid:six", + }, + { + "subscopeField": { + "id": "seven" + } + } + ], + } + } diff -Nru cwltool-2.0.20200126090152+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/ident_res.yml 
cwltool-2.0.20200224214940+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/ident_res.yml --- cwltool-2.0.20200126090152+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/ident_res.yml 1970-01-01 00:00:00.000000000 +0000 +++ cwltool-2.0.20200224214940+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/ident_res.yml 2020-02-24 22:00:06.000000000 +0000 @@ -0,0 +1,70 @@ +- | + ## Identifier resolution + + The schema may designate one or more fields as identifier fields to identify + specific objects. Processing must resolve relative identifiers to absolute + identifiers using the following rules: + + * If an identifier URI begins with `#` it is a current document + fragment identifier. It is resolved relative to the base URI by + setting or replacing the fragment portion of the base URI. + + * If an identifier URI contains `#` in some other position is a + relative URI with fragment identifier. It is resolved relative + to the base URI by stripping the last path segment from the base + URI and adding the identifier followed by the fragment. + + * If an identifier URI does not contain a scheme and does not + contain `#` it is a parent relative fragment identifier. + + * If an identifier URI is a parent relative fragment identifier + and the base URI does not contain a document fragment, set the + document fragment on the base URI. + + * If an identifier URI is a parent relative fragment identifier + and the object containing this identifier is assigned to a + parent object field defined with `subscope` in + `jsonldPredicate`, append a slash `/` to the base URI fragment + followed by the value of the parent field `subscope`. Then + append the identifier as described in the next rule. + + * If an identifier URI is a parent relative fragment identifier + and the base URI contains a document fragment, append a slash + `/` to the fragment followed by the identifier field to the + fragment portion of the base URI. 
+ + * If an identifier URI begins with a namespace prefix declared in + `$namespaces` followed by a colon `:`, the prefix and colon must be + replaced by the namespace declared in `$namespaces`. + + * If an identifier URI is an absolute URI consisting of a scheme and path, + no processing occurs. + + When preprocessing visits a node containing an identifier, that identifier + must be used as the base URI to process child nodes. + + It is an error for more than one object in a document to have the same + absolute URI. + + ### Identifier resolution example + + Given the following schema: + + ``` +- $include: ident_res_schema.yml +- | + ``` + + Process the following example: + + ``` +- $include: ident_res_src.yml +- | + ``` + + This becomes: + + ``` +- $include: ident_res_proc.yml +- | + ``` diff -Nru cwltool-2.0.20200126090152+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/import_include.md cwltool-2.0.20200224214940+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/import_include.md --- cwltool-2.0.20200126090152+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/import_include.md 1970-01-01 00:00:00.000000000 +0000 +++ cwltool-2.0.20200224214940+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/import_include.md 2020-02-24 22:00:06.000000000 +0000 @@ -0,0 +1,112 @@ +## Import + +During preprocessing traversal, an implementation must resolve `$import` +directives. An `$import` directive is an object consisting of exactly one +field `$import` specifying resource by URI string. It is an error if there +are additional fields in the `$import` object, such additional fields must +be ignored. + +The URI string must be resolved to an absolute URI using the link +resolution rules described previously. Implementations must support +loading from `file`, `http` and `https` resources. The URI referenced by +`$import` must be loaded and recursively preprocessed as a Salad document. 
+The external imported document does not inherit the context of the +importing document, and the default base URI for processing the imported +document must be the URI used to retrieve the imported document. If the +`$import` URI includes a document fragment, the fragment must be excluded +from the base URI used to preprocess the imported document. + +Once loaded and processed, the `$import` node is replaced in the document +structure by the object or array yielded from the import operation. + +URIs may reference document fragments which refer to specific an object in +the target document. This indicates that the `$import` node must be +replaced by only the object with the appropriate fragment identifier. + +It is a fatal error if an import directive refers to an external resource +or resource fragment which does not exist or is not accessible. + +### Import example + +import.yml: +``` +{ + "hello": "world" +} + +``` + +parent.yml: +``` +{ + "form": { + "bar": { + "$import": "import.yml" + } + } +} + +``` + +This becomes: + +``` +{ + "form": { + "bar": { + "hello": "world" + } + } +} +``` + +## Include + +During preprocessing traversal, an implementation must resolve `$include` +directives. An `$include` directive is an object consisting of exactly one +field `$include` specifying a URI string. It is an error if there are +additional fields in the `$include` object, such additional fields must be +ignored. + +The URI string must be resolved to an absolute URI using the link +resolution rules described previously. The URI referenced by `$include` must +be loaded as a text data. Implementations must support loading from +`file`, `http` and `https` resources. Implementations may transcode the +character encoding of the text data to match that of the parent document, +but must not interpret or parse the text document in any other way. 
+ +Once loaded, the `$include` node is replaced in the document structure by a +string containing the text data loaded from the resource. + +It is a fatal error if an import directive refers to an external resource +which does not exist or is not accessible. + +### Include example + +parent.yml: +``` +{ + "form": { + "bar": { + "$include": "include.txt" + } + } +} + +``` + +include.txt: +``` +hello world + +``` + +This becomes: + +``` +{ + "form": { + "bar": "hello world" + } +} +``` diff -Nru cwltool-2.0.20200126090152+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/link_res_proc.yml cwltool-2.0.20200224214940+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/link_res_proc.yml --- cwltool-2.0.20200126090152+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/link_res_proc.yml 1970-01-01 00:00:00.000000000 +0000 +++ cwltool-2.0.20200224214940+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/link_res_proc.yml 2020-02-24 22:00:06.000000000 +0000 @@ -0,0 +1,21 @@ +{ + "$base": "http://example.com/base", + "link": "http://example.com/base/zero", + "form": { + "link": "http://example.com/one", + "things": [ + { + "link": "http://example.com/two" + }, + { + "link": "http://example.com/base#three" + }, + { + "link": "http://example.com/four#five", + }, + { + "link": "http://example.com/acid#six", + } + ] + } +} diff -Nru cwltool-2.0.20200126090152+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/link_res_schema.yml cwltool-2.0.20200224214940+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/link_res_schema.yml --- cwltool-2.0.20200126090152+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/link_res_schema.yml 1970-01-01 00:00:00.000000000 +0000 +++ cwltool-2.0.20200224214940+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/link_res_schema.yml 2020-02-24 22:00:06.000000000 +0000 @@ -0,0 +1,16 @@ +{ + "$namespaces": { + "acid": "http://example.com/acid#" + }, + 
"$graph": [{ + "name": "ExampleType", + "type": "record", + "fields": [{ + "name": "link", + "type": "string", + "jsonldPredicate": { + "_type": "@id" + } + }] + }] +} diff -Nru cwltool-2.0.20200126090152+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/link_res_src.yml cwltool-2.0.20200224214940+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/link_res_src.yml --- cwltool-2.0.20200126090152+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/link_res_src.yml 1970-01-01 00:00:00.000000000 +0000 +++ cwltool-2.0.20200224214940+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/link_res_src.yml 2020-02-24 22:00:06.000000000 +0000 @@ -0,0 +1,21 @@ +{ + "$base": "http://example.com/base", + "link": "http://example.com/base/zero", + "form": { + "link": "one", + "things": [ + { + "link": "two" + }, + { + "link": "#three", + }, + { + "link": "four#five", + }, + { + "link": "acid:six", + } + ] + } +} diff -Nru cwltool-2.0.20200126090152+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/link_res.yml cwltool-2.0.20200224214940+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/link_res.yml --- cwltool-2.0.20200126090152+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/link_res.yml 1970-01-01 00:00:00.000000000 +0000 +++ cwltool-2.0.20200224214940+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/link_res.yml 2020-02-24 22:00:06.000000000 +0000 @@ -0,0 +1,55 @@ +- | + ## Link resolution + + The schema may designate one or more fields as link fields reference other + objects. Processing must resolve links to either absolute URIs using the + following rules: + + * If a reference URI is prefixed with `#` it is a relative + fragment identifier. It is resolved relative to the base URI by setting + or replacing the fragment portion of the base URI. + + * If a reference URI does not contain a scheme and is not prefixed with `#` + it is a path relative reference. 
If the reference URI contains `#` in any + position other than the first character, the reference URI must be divided + into a path portion and a fragment portion split on the first instance of + `#`. The path portion is resolved relative to the base URI by the following + rule: if the path portion of the base URI ends in a slash `/`, append the + path portion of the reference URI to the path portion of the base URI. If + the path portion of the base URI does not end in a slash, replace the final + path segment with the path portion of the reference URI. Replace the + fragment portion of the base URI with the fragment portion of the reference + URI. + + * If a reference URI begins with a namespace prefix declared in `$namespaces` + followed by a colon `:`, the prefix and colon must be replaced by the + namespace declared in `$namespaces`. + + * If a reference URI is an absolute URI consisting of a scheme and path, + no processing occurs. + + Link resolution must not affect the base URI used to resolve identifiers + and other links. 
+ + ### Link resolution example + + Given the following schema: + + ``` +- $include: link_res_schema.yml +- | + ``` + + Process the following example: + + ``` +- $include: link_res_src.yml +- | + ``` + + This becomes: + + ``` +- $include: link_res_proc.yml +- | + ``` diff -Nru cwltool-2.0.20200126090152+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/map_res_proc.yml cwltool-2.0.20200224214940+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/map_res_proc.yml --- cwltool-2.0.20200126090152+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/map_res_proc.yml 1970-01-01 00:00:00.000000000 +0000 +++ cwltool-2.0.20200224214940+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/map_res_proc.yml 2020-02-24 22:00:06.000000000 +0000 @@ -0,0 +1,12 @@ +{ + "mapped": [ + { + "value": "daphne", + "key": "fred" + }, + { + "value": "scooby", + "key": "shaggy" + } + ] +} \ No newline at end of file diff -Nru cwltool-2.0.20200126090152+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/map_res_schema.yml cwltool-2.0.20200224214940+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/map_res_schema.yml --- cwltool-2.0.20200126090152+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/map_res_schema.yml 1970-01-01 00:00:00.000000000 +0000 +++ cwltool-2.0.20200224214940+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/map_res_schema.yml 2020-02-24 22:00:06.000000000 +0000 @@ -0,0 +1,30 @@ +{ + "$graph": [{ + "name": "MappedType", + "type": "record", + "documentRoot": true, + "fields": [{ + "name": "mapped", + "type": { + "type": "array", + "items": "ExampleRecord" + }, + "jsonldPredicate": { + "mapSubject": "key", + "mapPredicate": "value" + } + }], + }, + { + "name": "ExampleRecord", + "type": "record", + "fields": [{ + "name": "key", + "type": "string" + }, { + "name": "value", + "type": "string" + } + ] + }] +} diff -Nru 
cwltool-2.0.20200126090152+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/map_res_src.yml cwltool-2.0.20200224214940+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/map_res_src.yml --- cwltool-2.0.20200126090152+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/map_res_src.yml 1970-01-01 00:00:00.000000000 +0000 +++ cwltool-2.0.20200224214940+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/map_res_src.yml 2020-02-24 22:00:06.000000000 +0000 @@ -0,0 +1,8 @@ +{ + "mapped": { + "shaggy": { + "value": "scooby" + }, + "fred": "daphne" + } +} \ No newline at end of file diff -Nru cwltool-2.0.20200126090152+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/map_res.yml cwltool-2.0.20200224214940+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/map_res.yml --- cwltool-2.0.20200126090152+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/map_res.yml 1970-01-01 00:00:00.000000000 +0000 +++ cwltool-2.0.20200224214940+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/map_res.yml 2020-02-24 22:00:06.000000000 +0000 @@ -0,0 +1,36 @@ +- | + ## Identifier maps + + The schema may designate certain fields as having a `mapSubject`. If the + value of the field is a JSON object, it must be transformed into an array of + JSON objects. Each key-value pair from the source JSON object is a list + item, each list item must be a JSON objects, and the value of the key is + assigned to the field specified by `mapSubject`. + + Fields which have `mapSubject` specified may also supply a `mapPredicate`. + If the value of a map item is not a JSON object, the item is transformed to a + JSON object with the key assigned to the field specified by `mapSubject` and + the value assigned to the field specified by `mapPredicate`. 
+ + ### Identifier map example + + Given the following schema: + + ``` +- $include: map_res_schema.yml +- | + ``` + + Process the following example: + + ``` +- $include: map_res_src.yml +- | + ``` + + This becomes: + + ``` +- $include: map_res_proc.yml +- | + ``` diff -Nru cwltool-2.0.20200126090152+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/metaschema_base.yml cwltool-2.0.20200224214940+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/metaschema_base.yml --- cwltool-2.0.20200126090152+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/metaschema_base.yml 1970-01-01 00:00:00.000000000 +0000 +++ cwltool-2.0.20200224214940+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/metaschema_base.yml 2020-02-24 22:00:06.000000000 +0000 @@ -0,0 +1,176 @@ +$base: "https://w3id.org/cwl/salad#" + +$namespaces: + sld: "https://w3id.org/cwl/salad#" + dct: "http://purl.org/dc/terms/" + rdf: "http://www.w3.org/1999/02/22-rdf-syntax-ns#" + rdfs: "http://www.w3.org/2000/01/rdf-schema#" + xsd: "http://www.w3.org/2001/XMLSchema#" + +$graph: + +- name: "Schema" + type: documentation + doc: | + # Schema + +- name: Documented + type: record + abstract: true + docParent: "#Schema" + fields: + - name: doc + type: + - string? + - string[]? + doc: "A documentation string for this object, or an array of strings which should be concatenated." + jsonldPredicate: "rdfs:comment" + + +- name: PrimitiveType + type: enum + symbols: + - "sld:null" + - "xsd:boolean" + - "xsd:int" + - "xsd:long" + - "xsd:float" + - "xsd:double" + - "xsd:string" + doc: + - | + Salad data types are based on Avro schema declarations. Refer to the + [Avro schema declaration documentation](https://avro.apache.org/docs/current/spec.html#schemas) for + detailed information. 
+ - "null: no value" + - "boolean: a binary value" + - "int: 32-bit signed integer" + - "long: 64-bit signed integer" + - "float: single precision (32-bit) IEEE 754 floating-point number" + - "double: double precision (64-bit) IEEE 754 floating-point number" + - "string: Unicode character sequence" + + +- name: Any + type: enum + symbols: ["#Any"] + docAfter: "#PrimitiveType" + doc: | + The **Any** type validates for any non-null value. + + +- name: RecordField + type: record + extends: Documented + doc: A field of a record. + fields: + - name: name + type: string + jsonldPredicate: "@id" + doc: | + The name of the field + + - name: type + type: + - PrimitiveType + - RecordSchema + - EnumSchema + - ArraySchema + - string + - type: array + items: + - PrimitiveType + - RecordSchema + - EnumSchema + - ArraySchema + - string + jsonldPredicate: + _id: sld:type + _type: "@vocab" + typeDSL: true + refScope: 2 + doc: | + The field type + + +- name: RecordSchema + type: record + fields: + type: + doc: "Must be `record`" + type: + type: enum + symbols: + - "sld:record" + jsonldPredicate: + _id: "sld:type" + _type: "@vocab" + typeDSL: true + refScope: 2 + fields: + type: RecordField[]? + jsonldPredicate: + _id: sld:fields + mapSubject: name + mapPredicate: type + doc: "Defines the fields of the record." + + +- name: EnumSchema + type: record + doc: | + Define an enumerated type. + fields: + type: + doc: "Must be `enum`" + type: + type: enum + symbols: + - "sld:enum" + jsonldPredicate: + _id: "sld:type" + _type: "@vocab" + typeDSL: true + refScope: 2 + symbols: + type: string[] + jsonldPredicate: + _id: "sld:symbols" + _type: "@id" + identity: true + doc: "Defines the set of valid symbols." 
+ + +- name: ArraySchema + type: record + fields: + type: + doc: "Must be `array`" + type: + type: enum + symbols: + - "sld:array" + jsonldPredicate: + _id: "sld:type" + _type: "@vocab" + typeDSL: true + refScope: 2 + items: + type: + - PrimitiveType + - RecordSchema + - EnumSchema + - ArraySchema + - string + - type: array + items: + - PrimitiveType + - RecordSchema + - EnumSchema + - ArraySchema + - string + jsonldPredicate: + _id: "sld:items" + _type: "@vocab" + refScope: 2 + doc: "Defines the type of the array elements." diff -Nru cwltool-2.0.20200126090152+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/metaschema.yml cwltool-2.0.20200224214940+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/metaschema.yml --- cwltool-2.0.20200126090152+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/metaschema.yml 1970-01-01 00:00:00.000000000 +0000 +++ cwltool-2.0.20200224214940+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/metaschema.yml 2020-02-24 22:00:06.000000000 +0000 @@ -0,0 +1,377 @@ +$base: "https://w3id.org/cwl/salad#" + +$namespaces: + sld: "https://w3id.org/cwl/salad#" + dct: "http://purl.org/dc/terms/" + rdf: "http://www.w3.org/1999/02/22-rdf-syntax-ns#" + rdfs: "http://www.w3.org/2000/01/rdf-schema#" + xsd: "http://www.w3.org/2001/XMLSchema#" + +$graph: + +- name: "Semantic_Annotations_for_Linked_Avro_Data" + type: documentation + doc: + - $include: salad.md + - $import: field_name.yml + - $import: ident_res.yml + - $import: link_res.yml + - $import: vocab_res.yml + - $include: import_include.md + - $import: map_res.yml + - $import: typedsl_res.yml + - $import: sfdsl_res.yml + +- name: "Link_Validation" + type: documentation + doc: | + # Link validation + + Once a document has been preprocessed, an implementation may validate + links. 
The link validation traversal may visit fields which the schema + designates as link fields and check that each URI references an existing + object in the current document, an imported document, file system, or + network resource. Failure to validate links may be a fatal error. Link + validation behavior for individual fields may be modified by `identity` and + `noLinkCheck` in the `jsonldPredicate` section of the field schema. + + +- name: "Schema_Validation" + type: documentation + doc: | + # Validating a document against a schema + + To validate a document against the schema, first [apply + preprocessing](#Document_preprocessing), then, use the following + algorithm. + + 1. The document root must be an object or a list. If the document root is an + object containing the field `$graph` (which must be a list of + objects), then validation applies to each object in the list. + 2. For each object, attempt to validate as one of the record types + flagged with `documentRoot: true`. + 3. To validate a record, go through `fields` and recursively + validate each field of the object. + 4. For fields with a list of types (type union), go through each + type in the list and recursively validate the type. For the + field to be valid, at least one type in the union must be valid. + 5. Missing fields are considered `null`. To validate, the allowed types + for the field must include `null` + 6. Primitive types are null, boolean, int, long, float, double, + string. To validate, the value in the document must have one + of these type. For numerics, the value appearing in the + document must fit into the specified type. + 7. To validate an array, the value in the document must be a list, + and each item in the list must recursively validate as a type + in `items`. + 8. To validate an enum, the value in the document be a string, and + the value must be equal to the short name of one of the values + listed in `symbols`. + 9. 
As a special case, a field with the `Expression` type validates string values + which contain a CWL parameter reference or expression in the form + `$(...)` or `${...}` + +# - name: "JSON_LD_Context" +# type: documentation +# doc: | +# # Generating JSON-LD Context + +# How to generate the json-ld context... + + +- $import: metaschema_base.yml + +- name: JsonldPredicate + type: record + doc: | + Attached to a record field to define how the parent record field is handled for + URI resolution and JSON-LD context generation. + fields: + - name: _id + type: string? + jsonldPredicate: + _id: sld:_id + _type: "@id" + identity: true + doc: | + The predicate URI that this field corresponds to. + Corresponds to JSON-LD `@id` directive. + - name: _type + type: string? + doc: | + The context type hint, corresponds to JSON-LD `@type` directive. + + * If the value of this field is `@id` and `identity` is false or + unspecified, the parent field must be resolved using the link + resolution rules. If `identity` is true, the parent field must be + resolved using the identifier expansion rules. + + * If the value of this field is `@vocab`, the parent field must be + resolved using the vocabulary resolution rules. + + - name: _container + type: string? + doc: | + Structure hint, corresponds to JSON-LD `@container` directive. + - name: identity + type: boolean? + doc: | + If true and `_type` is `@id` this indicates that the parent field must + be resolved according to identity resolution rules instead of link + resolution rules. In addition, the field value is considered an + assertion that the linked value exists; absence of an object in the loaded document + with the URI is not an error. + - name: noLinkCheck + type: boolean? + doc: | + If true, this indicates that link validation traversal must stop at + this field. This field (it is is a URI) or any fields under it (if it + is an object or array) are not subject to link checking. + - name: mapSubject + type: string? 
+ doc: | + If the value of the field is a JSON object, it must be transformed + into an array of JSON objects, where each key-value pair from the + source JSON object is a list item, the list items must be JSON objects, + and the key is assigned to the field specified by `mapSubject`. + - name: mapPredicate + type: string? + doc: | + Only applies if `mapSubject` is also provided. If the value of the + field is a JSON object, it is transformed as described in `mapSubject`, + with the addition that when the value of a map item is not an object, + the item is transformed to a JSON object with the key assigned to the + field specified by `mapSubject` and the value assigned to the field + specified by `mapPredicate`. + - name: refScope + type: int? + doc: | + If the field contains a relative reference, it must be resolved by + searching for valid document references in each successive parent scope + in the document fragment. For example, a reference of `foo` in the + context `#foo/bar/baz` will first check for the existence of + `#foo/bar/baz/foo`, followed by `#foo/bar/foo`, then `#foo/foo` and + then finally `#foo`. The first valid URI in the search order shall be + used as the fully resolved value of the identifier. The value of the + refScope field is the specified number of levels from the containing + identifer scope before starting the search, so if `refScope: 2` then + "baz" and "bar" must be stripped to get the base `#foo` and search + `#foo/foo` and the `#foo`. The last scope searched must be the top + level scope before determining if the identifier cannot be resolved. + - name: typeDSL + type: boolean? + doc: | + Field must be expanded based on the the Schema Salad type DSL. + - name: secondaryFilesDSL + type: boolean? + doc: | + Field must be expanded based on the the Schema Salad secondary file DSL. + - name: subscope + type: string? + doc: | + Append the subscope to the current scope when performing + identifier resolution to objects under this field. 
+ +- name: SpecializeDef + type: record + fields: + - name: specializeFrom + type: string + doc: "The data type to be replaced" + jsonldPredicate: + _id: "sld:specializeFrom" + _type: "@id" + refScope: 1 + + - name: specializeTo + type: string + doc: "The new data type to replace with" + jsonldPredicate: + _id: "sld:specializeTo" + _type: "@id" + refScope: 1 + + +- name: NamedType + type: record + abstract: true + docParent: "#Schema" + fields: + - name: name + type: string + jsonldPredicate: "@id" + doc: "The identifier for this type" + - name: inVocab + type: boolean? + default: true + doc: | + If "true" (the default), include the short name of this type + in the vocabulary. The vocabulary are all the symbols (field + names and other identifiers, such as classes and enum values) + which can be used in the document without a namespace prefix. + These are the keys of the JSON-LD context. If false, do not + include the short name in the vocabulary. + + This is useful for specifying schema extensions that will be + included in validation without introducing ambiguity by + introducing non-standard terms into the vocabulary. + + +- name: DocType + type: record + extends: Documented + abstract: true + docParent: "#Schema" + fields: + - name: docParent + type: string? + doc: | + Hint to indicate that during documentation generation, documentation + for this type should appear in a subsection under `docParent`. + jsonldPredicate: + _id: "sld:docParent" + _type: "@id" + + - name: docChild + type: + - string? + - string[]? + doc: | + Hint to indicate that during documentation generation, documentation + for `docChild` should appear in a subsection under this type. + jsonldPredicate: + _id: "sld:docChild" + _type: "@id" + + - name: docAfter + type: string? + doc: | + Hint to indicate that during documentation generation, documentation + for this type should appear after the `docAfter` section at the same + level. 
+ jsonldPredicate: + _id: "sld:docAfter" + _type: "@id" + + +- name: SchemaDefinedType + type: record + extends: DocType + doc: | + Abstract base for schema-defined types. + abstract: true + fields: + - name: jsonldPredicate + type: + - string? + - JsonldPredicate? + doc: | + Annotate this type with linked data context. + jsonldPredicate: sld:jsonldPredicate + + - name: documentRoot + type: boolean? + doc: | + If true, indicates that the type is a valid at the document root. At + least one type in a schema must be tagged with `documentRoot: true`. + + +- name: SaladRecordField + type: record + extends: RecordField + doc: "A field of a record." + fields: + - name: jsonldPredicate + type: + - string? + - JsonldPredicate? + doc: | + Annotate this type with linked data context. + jsonldPredicate: "sld:jsonldPredicate" + - name: default + type: Any? + jsonldPredicate: + _id: sld:default + noLinkCheck: true + doc: | + The default value to use for this field if the field is missing or "null". + + +- name: SaladRecordSchema + docParent: "#Schema" + type: record + extends: [NamedType, RecordSchema, SchemaDefinedType] + documentRoot: true + specialize: + RecordField: SaladRecordField + fields: + - name: abstract + type: boolean? + doc: | + If true, this record is abstract and may be used as a base for other + records, but is not valid on its own. + + - name: extends + type: + - string? + - string[]? + jsonldPredicate: + _id: "sld:extends" + _type: "@id" + refScope: 1 + doc: | + Indicates that this record inherits fields from one or more base records. + + - name: specialize + type: + - SpecializeDef[]? + doc: | + Only applies if `extends` is declared. Apply type specialization using the + base record as a template. For each field inherited from the base + record, replace any instance of the type `specializeFrom` with + `specializeTo`. 
+ jsonldPredicate: + _id: "sld:specialize" + mapSubject: specializeFrom + mapPredicate: specializeTo + +- name: SaladEnumSchema + docParent: "#Schema" + type: record + extends: [NamedType, EnumSchema, SchemaDefinedType] + documentRoot: true + doc: | + Define an enumerated type. + fields: + - name: extends + type: + - string? + - string[]? + jsonldPredicate: + _id: "sld:extends" + _type: "@id" + refScope: 1 + doc: | + Indicates that this enum inherits symbols from a base enum. + + +- name: Documentation + type: record + docParent: "#Schema" + extends: [NamedType, DocType] + documentRoot: true + doc: | + A documentation section. This type exists to facilitate self-documenting + schemas but has no role in formal validation. + fields: + - name: type + doc: "Must be `documentation`" + type: + type: enum + symbols: + - "sld:documentation" + jsonldPredicate: + _id: "sld:type" + _type: "@vocab" + typeDSL: true + refScope: 2 diff -Nru cwltool-2.0.20200126090152+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/salad.md cwltool-2.0.20200224214940+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/salad.md --- cwltool-2.0.20200126090152+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/salad.md 1970-01-01 00:00:00.000000000 +0000 +++ cwltool-2.0.20200224214940+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/salad.md 2020-02-24 22:00:06.000000000 +0000 @@ -0,0 +1,306 @@ +# Semantic Annotations for Linked Avro Data (SALAD) + +Author: + +* Peter Amstutz , Veritas Genetics + +Contributors: + +* The developers of Apache Avro +* The developers of JSON-LD +* Nebojša Tijanić , Seven Bridges Genomics + +# Abstract + +Salad is a schema language for describing structured linked data documents +in JSON or YAML documents. A Salad schema provides rules for +preprocessing, structural validation, and link checking for documents +described by a Salad schema. 
Salad builds on JSON-LD and the Apache Avro +data serialization system, and extends Avro with features for rich data +modeling such as inheritance, template specialization, object identifiers, +and object references. Salad was developed to provide a bridge between the +record oriented data modeling supported by Apache Avro and the Semantic +Web. + +# Status of This Document + +This document is the product of the [Common Workflow Language working +group](https://groups.google.com/forum/#!forum/common-workflow-language). The +latest version of this document is available in the "schema_salad" repository at + +https://github.com/common-workflow-language/schema_salad + +The products of the CWL working group (including this document) are made available +under the terms of the Apache License, version 2.0. + + + +# Introduction + +The JSON data model is an extremely popular way to represent structured +data. It is attractive because of its relative simplicity and is a +natural fit with the standard types of many programming languages. +However, this simplicity means that basic JSON lacks expressive features +useful for working with complex data structures and document formats, such +as schemas, object references, and namespaces. + +JSON-LD is a W3C standard providing a way to describe how to interpret a +JSON document as Linked Data by means of a "context". JSON-LD provides a +powerful solution for representing object references and namespaces in JSON +based on standard web URIs, but is not itself a schema language. Without a +schema providing a well defined structure, it is difficult to process an +arbitrary JSON-LD document as idiomatic JSON because there are many ways to +express the same data that are logically equivalent but structurally +distinct. + +Several schema languages exist for describing and validating JSON data, +such as the Apache Avro data serialization system, however none understand +linked data. 
As a result, to fully take advantage of JSON-LD to build the
+next generation of linked data applications, one must maintain separate
+JSON schema, JSON-LD context, RDF schema, and human documentation, despite
+significant overlap of content and obvious need for these documents to stay
+synchronized.
+
+Schema Salad is designed to address this gap. It provides a schema
+language and processing rules for describing structured JSON content
+permitting URI resolution and strict document validation. The schema
+language supports linked data through annotations that describe the linked
+data interpretation of the content, enables generation of JSON-LD context
+and RDF schema, and production of RDF triples by applying the JSON-LD
+context. The schema language also provides for robust support of inline
+documentation.
+
+## Introduction to v1.1
+
+This is the third version of the Schema Salad specification. It is
+developed concurrently with v1.1 of the Common Workflow Language for use in
+specifying the Common Workflow Language, however Schema Salad is intended to be
+useful to a broader audience. Compared to the v1.0 schema salad
+specification, the following changes have been made:
+
+* Support for `default` values on record fields to specify default values
+* Add subscoped fields (fields which introduce a new inner scope for identifiers)
+* Add the *inVocab* flag (default true) to indicate if a type is added to the vocabulary of well known terms or must be prefixed
+* Add *secondaryFilesDSL* micro DSL (domain specific language) to convert text strings to a secondaryFiles record type used in CWL
+* The `$mixin` feature has been removed from the specification, as it
+  is poorly documented, not included in conformance testing,
+  and not widely supported.
+ +## References to Other Specifications + +**Javascript Object Notation (JSON)**: http://json.org + +**JSON Linked Data (JSON-LD)**: http://json-ld.org + +**YAML**: https://yaml.org/spec/1.2/spec.html + +**Avro**: https://avro.apache.org/docs/current/spec.html + +**Uniform Resource Identifier (URI) Generic Syntax**: https://tools.ietf.org/html/rfc3986) + +**Resource Description Framework (RDF)**: http://www.w3.org/RDF/ + +**UTF-8**: https://www.ietf.org/rfc/rfc2279.txt) + +## Scope + +This document describes the syntax, data model, algorithms, and schema +language for working with Salad documents. It is not intended to document +a specific implementation of Salad, however it may serve as a reference for +the behavior of conforming implementations. + +## Terminology + +The terminology used to describe Salad documents is defined in the Concepts +section of the specification. The terms defined in the following list are +used in building those definitions and in describing the actions of an +Salad implementation: + +**may**: Conforming Salad documents and Salad implementations are permitted but +not required to be interpreted as described. + +**must**: Conforming Salad documents and Salad implementations are required +to be interpreted as described; otherwise they are in error. + +**error**: A violation of the rules of this specification; results are +undefined. Conforming implementations may detect and report an error and may +recover from it. + +**fatal error**: A violation of the rules of this specification; results +are undefined. Conforming implementations must not continue to process the +document and may report an error. + +**at user option**: Conforming software may or must (depending on the modal verb in +the sentence) behave as described; if it does, it must provide users a means to +enable or disable the behavior described. 
+ +# Document model + +## Data concepts + +An **object** is a data structure equivalent to the "object" type in JSON, +consisting of a unordered set of name/value pairs (referred to here as +**fields**) and where the name is a string and the value is a string, number, +boolean, array, or object. + +A **document** is a file containing a serialized object, or an array of +objects. + +A **document type** is a class of files that share a common structure and +semantics. + +A **document schema** is a formal description of the grammar of a document type. + +A **base URI** is a context-dependent URI used to resolve relative references. + +An **identifier** is a URI that designates a single document or single +object within a document. + +A **vocabulary** is the set of symbolic field names and enumerated symbols defined +by a document schema, where each term maps to absolute URI. + +## Syntax + +Conforming Salad v1.1 documents are serialized and loaded using a +subset of YAML 1.2 syntax and UTF-8 text encoding. Salad documents +are written using the [JSON-compatible subset of YAML described in +section 10.2](https://yaml.org/spec/1.2/spec.html#id2803231). The +following features of YAML must not be used in conforming Salad +documents: + +* Use of explicit node tags with leading `!` or `!!` +* Use of anchors with leading `&` and aliases with leading `*` +* %YAML directives +* %TAG directives + +It is a fatal error if the document is not valid YAML. + +A Salad document must consist only of either a single root object or an +array of objects. + +## Document context + +### Implied context + +The implicit context consists of the vocabulary defined by the schema and +the base URI. By default, the base URI must be the URI that was used to +load the document. It may be overridden by an explicit context. 
+ +### Explicit context + +If a document consists of a root object, this object may contain the +fields `$base`, `$namespaces`, `$schemas`, and `$graph`: + + * `$base`: Must be a string. Set the base URI for the document used to + resolve relative references. + + * `$namespaces`: Must be an object with strings as values. The keys of + the object are namespace prefixes used in the document; the values of + the object are the prefix expansions. + + * `$schemas`: Must be an array of strings. This field may list URI + references to documents in RDF-XML format which will be queried for RDF + schema data. The subjects and predicates described by the RDF schema + may provide additional semantic context for the document, and may be + used for validation of prefixed extension fields found in the document. + +Other directives beginning with `$` must be ignored. + +## Document graph + +If a document consists of a single root object, this object may contain the +field `$graph`. This field must be an array of objects. If present, this +field holds the primary content of the document. A document that consists +of array of objects at the root is an implicit graph. + +## Document metadata + +If a document consists of a single root object, metadata about the +document, such as authorship, may be declared in the root object. + +## Document schema + +Document preprocessing, link validation and schema validation require a +document schema. A schema may consist of: + + * At least one record definition object which defines valid fields that + make up a record type. Record field definitions include the valid types + that may be assigned to each field and annotations to indicate fields + that represent identifiers and links, described below in "Semantic + Annotations". + + * Any number of enumerated type objects which define a set of finite set of symbols that are + valid value of the type. + + * Any number of documentation objects which allow in-line documentation of the schema. 
+
+The schema for defining a salad schema (the metaschema) is described in
+detail in the [Schema](#Schema) section.
+
+## Record field annotations
+
+In a document schema, record field definitions may include the field
+`jsonldPredicate`, which may be either a string or object. Implementations
+must preprocess fields according to the following
+rules:
+
+ * If the value of `jsonldPredicate` is `@id`, the field is an identifier
+ field.
+
+ * If the value of `jsonldPredicate` is an object, and that
+ object contains the field `_type` with the value `@id`, the field is a
+ link field subject to [link validation](#Link_validation).
+
+ * If the value of `jsonldPredicate` is an object which contains the
+ field `_type` with the value `@vocab`, the field value is subject to
+ [vocabulary resolution](#Vocabulary_resolution).
+
+## Document traversal
+
+To perform document preprocessing, link validation and schema
+validation, the document must be traversed starting from the fields or
+array items of the root object or array and recursively visiting each child
+item which contains an object or arrays.
+
+## Short names
+
+The "short name" of a fully qualified identifier is the portion of
+the identifier following the final slash `/` of either the fragment
+identifier following `#` or the path portion, if there is no fragment.
+Some examples:
+
+* the short name of `http://example.com/foo` is `foo`
+* the short name of `http://example.com/#bar` is `bar`
+* the short name of `http://example.com/foo/bar` is `bar`
+* the short name of `http://example.com/foo#bar` is `bar`
+* the short name of `http://example.com/#foo/bar` is `bar`
+* the short name of `http://example.com/foo#bar/baz` is `baz`
+
+## Inheritance and specialization
+
+A record definition may inherit from one or more record definitions
+with the `extends` field. This copies the fields defined in the
+parent record(s) as the base for the new record. A record definition
+may `specialize` type declarations of the fields inherited from the
+base record. For each field inherited from the base record, any
+instance of the type in `specializeFrom` is replaced with the type in
+`specializeTo`. The type in `specializeTo` should extend from the
+type in `specializeFrom`.
+
+A record definition may be `abstract`. This means the record
+definition is not used for validation on its own, but may be extended
+by other definitions. If an abstract type appears in a field
+definition, it is logically replaced with a union of all concrete
+subtypes of the abstract type. In other words, the field value does
+not validate as the abstract type, but must validate as some concrete
+type that inherits from the abstract type.
+
+# Document preprocessing
+
+After processing the explicit context (if any), document preprocessing
+begins. Starting from the document root, object field values or array
+items which contain objects or arrays are recursively traversed
+depth-first. For each visited object, field names, identifier fields, link
+fields, vocabulary fields, and `$import` and `$include` directives must be
+processed as described in this section. The order of traversal of child
+nodes within a parent node is undefined.
diff -Nru cwltool-2.0.20200126090152+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/sfdsl_res_proc.yml cwltool-2.0.20200224214940+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/sfdsl_res_proc.yml --- cwltool-2.0.20200126090152+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/sfdsl_res_proc.yml 1970-01-01 00:00:00.000000000 +0000 +++ cwltool-2.0.20200224214940+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/sfdsl_res_proc.yml 2020-02-24 22:00:06.000000000 +0000 @@ -0,0 +1,21 @@ +[ + { + "secondaryFiles": { + "pattern": ".bai", + "required": null + }, + { + "secondaryFiles": { + "pattern": ".bai", + "required": false + }, + { + "secondaryFiles": { + "pattern": ".bai?" + }, + { + "secondaryFiles": { + "pattern": ".bai?", + "required": true + }, +] diff -Nru cwltool-2.0.20200126090152+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/sfdsl_res_schema.yml cwltool-2.0.20200224214940+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/sfdsl_res_schema.yml --- cwltool-2.0.20200126090152+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/sfdsl_res_schema.yml 1970-01-01 00:00:00.000000000 +0000 +++ cwltool-2.0.20200224214940+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/sfdsl_res_schema.yml 2020-02-24 22:00:06.000000000 +0000 @@ -0,0 +1,16 @@ +{ + "$graph": [ + { + "name": "SecondaryFilesDSLExample", + "type": "record", + "documentRoot": true, + "fields": [{ + "name": "secondaryFiles", + "type": "string", + "jsonldPredicate": { + _type: "@vocab", + "secondaryFilesDSL": true + } + }] + }] +} diff -Nru cwltool-2.0.20200126090152+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/sfdsl_res_src.yml cwltool-2.0.20200224214940+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/sfdsl_res_src.yml --- cwltool-2.0.20200126090152+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/sfdsl_res_src.yml 1970-01-01 00:00:00.000000000 +0000 +++ 
cwltool-2.0.20200224214940+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/sfdsl_res_src.yml 2020-02-24 22:00:06.000000000 +0000 @@ -0,0 +1,12 @@ +[{ + "secondaryFiles": ".bai" +}, { + "secondaryFiles": ".bai?" +}, { + "secondaryFiles": { + "pattern": ".bai?" +}, { + "secondaryFiles": { + "pattern": ".bai?", + "required": true +}] diff -Nru cwltool-2.0.20200126090152+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/sfdsl_res.yml cwltool-2.0.20200224214940+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/sfdsl_res.yml --- cwltool-2.0.20200126090152+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/sfdsl_res.yml 1970-01-01 00:00:00.000000000 +0000 +++ cwltool-2.0.20200224214940+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/sfdsl_res.yml 2020-02-24 22:00:06.000000000 +0000 @@ -0,0 +1,34 @@ +- | + ## Domain Specific Language for secondary files + + Fields may be tagged `secondaryFilesDSL: true` in `jsonldPredicate`. If so, the field is expanded using the + following micro-DSL for secondary files: + + * If the value is a string, it is transformed to an object with two fields `pattern` and `required` + * By default, the value of `required` is `null` (this indicates default behavior, which may be based on the context) + * If the value ends with a question mark `?` the question mark is + stripped off and the value of the field `required` is set to `False` + * The remaining value is assigned to the field `pattern` + + ### Type DSL example + + Given the following schema: + + ``` +- $include: sfdsl_res_schema.yml +- | + ``` + + Process the following example: + + ``` +- $include: sfdsl_res_src.yml +- | + ``` + + This becomes: + + ``` +- $include: sfdsl_res_proc.yml +- | + ``` diff -Nru cwltool-2.0.20200126090152+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/typedsl_res_proc.yml cwltool-2.0.20200224214940+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/typedsl_res_proc.yml 
--- cwltool-2.0.20200126090152+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/typedsl_res_proc.yml 1970-01-01 00:00:00.000000000 +0000 +++ cwltool-2.0.20200224214940+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/typedsl_res_proc.yml 2020-02-24 22:00:06.000000000 +0000 @@ -0,0 +1,26 @@ +[ + { + "extype": "string" + }, + { + "extype": [ + "null", + "string" + ] + }, + { + "extype": { + "type": "array", + "items": "string" + } + }, + { + "extype": [ + "null", + { + "type": "array", + "items": "string" + } + ] + } +] diff -Nru cwltool-2.0.20200126090152+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/typedsl_res_schema.yml cwltool-2.0.20200224214940+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/typedsl_res_schema.yml --- cwltool-2.0.20200126090152+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/typedsl_res_schema.yml 1970-01-01 00:00:00.000000000 +0000 +++ cwltool-2.0.20200224214940+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/typedsl_res_schema.yml 2020-02-24 22:00:06.000000000 +0000 @@ -0,0 +1,17 @@ +{ + "$graph": [ + {"$import": "metaschema_base.yml"}, + { + "name": "TypeDSLExample", + "type": "record", + "documentRoot": true, + "fields": [{ + "name": "extype", + "type": "string", + "jsonldPredicate": { + _type: "@vocab", + "typeDSL": true + } + }] + }] +} diff -Nru cwltool-2.0.20200126090152+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/typedsl_res_src.yml cwltool-2.0.20200224214940+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/typedsl_res_src.yml --- cwltool-2.0.20200126090152+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/typedsl_res_src.yml 1970-01-01 00:00:00.000000000 +0000 +++ cwltool-2.0.20200224214940+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/typedsl_res_src.yml 2020-02-24 22:00:06.000000000 +0000 @@ -0,0 +1,9 @@ +[{ + "extype": "string" +}, { + "extype": "string?" 
+}, {
+  "extype": "string[]"
+}, {
+  "extype": "string[]?"
+}]
diff -Nru cwltool-2.0.20200126090152+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/typedsl_res.yml cwltool-2.0.20200224214940+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/typedsl_res.yml
--- cwltool-2.0.20200126090152+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/typedsl_res.yml	1970-01-01 00:00:00.000000000 +0000
+++ cwltool-2.0.20200224214940+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/typedsl_res.yml	2020-02-24 22:00:06.000000000 +0000
@@ -0,0 +1,33 @@
+- |
+  ## Domain Specific Language for types
+
+  Fields may be tagged `typeDSL: true` in `jsonldPredicate`. If so, the field is expanded using the
+  following micro-DSL for schema salad types:
+
+  * If the type ends with a question mark `?`, the question mark is stripped off and the type is expanded to a union with `null`
+  * If the type ends with square brackets `[]` it is expanded to an array with items of the preceding type symbol
+  * The type may end with both `[]?` to indicate it is an optional array.
+  * Identifier resolution is applied after type DSL expansion.
+ + ### Type DSL example + + Given the following schema: + + ``` +- $include: typedsl_res_schema.yml +- | + ``` + + Process the following example: + + ``` +- $include: typedsl_res_src.yml +- | + ``` + + This becomes: + + ``` +- $include: typedsl_res_proc.yml +- | + ``` diff -Nru cwltool-2.0.20200126090152+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/vocab_res_proc.yml cwltool-2.0.20200224214940+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/vocab_res_proc.yml --- cwltool-2.0.20200126090152+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/vocab_res_proc.yml 1970-01-01 00:00:00.000000000 +0000 +++ cwltool-2.0.20200224214940+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/vocab_res_proc.yml 2020-02-24 22:00:06.000000000 +0000 @@ -0,0 +1,15 @@ + { + "form": { + "things": [ + { + "voc": "red", + }, + { + "voc": "red", + }, + { + "voc": "http://example.com/acid#blue", + } + ] + } + } diff -Nru cwltool-2.0.20200126090152+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/vocab_res_schema.yml cwltool-2.0.20200224214940+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/vocab_res_schema.yml --- cwltool-2.0.20200126090152+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/vocab_res_schema.yml 1970-01-01 00:00:00.000000000 +0000 +++ cwltool-2.0.20200224214940+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/vocab_res_schema.yml 2020-02-24 22:00:06.000000000 +0000 @@ -0,0 +1,21 @@ +{ + "$namespaces": { + "acid": "http://example.com/acid#" + }, + "$graph": [{ + "name": "Colors", + "type": "enum", + "symbols": ["acid:red"] + }, + { + "name": "ExampleType", + "type": "record", + "fields": [{ + "name": "voc", + "type": "string", + "jsonldPredicate": { + "_type": "@vocab" + } + }] + }] +} diff -Nru cwltool-2.0.20200126090152+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/vocab_res_src.yml 
cwltool-2.0.20200224214940+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/vocab_res_src.yml
--- cwltool-2.0.20200126090152+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/vocab_res_src.yml	1970-01-01 00:00:00.000000000 +0000
+++ cwltool-2.0.20200224214940+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/vocab_res_src.yml	2020-02-24 22:00:06.000000000 +0000
@@ -0,0 +1,15 @@
+  {
+    "form": {
+      "things": [
+        {
+          "voc": "red",
+        },
+        {
+          "voc": "http://example.com/acid#red",
+        },
+        {
+          "voc": "http://example.com/acid#blue",
+        }
+      ]
+    }
+  }
diff -Nru cwltool-2.0.20200126090152+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/vocab_res.yml cwltool-2.0.20200224214940+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/vocab_res.yml
--- cwltool-2.0.20200126090152+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/vocab_res.yml	1970-01-01 00:00:00.000000000 +0000
+++ cwltool-2.0.20200224214940+dfsg/cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/vocab_res.yml	2020-02-24 22:00:06.000000000 +0000
@@ -0,0 +1,36 @@
+- |
+  ## Vocabulary resolution
+
+  The schema may designate one or more vocabulary fields which use
+  terms defined in the vocabulary. The vocabulary are the short
+  names of all the identifiers in the schema. Processing must
+  resolve vocabulary fields to either vocabulary terms or absolute
+  URIs by first applying the link resolution rules defined above,
+  then applying the following additional rule:
+
+  * If a reference URI is a vocabulary field, and there is a vocabulary
+  term which maps to the resolved URI, the reference must be replaced with
+  the vocabulary term.
+ + ### Vocabulary resolution example + + Given the following schema: + + ``` +- $include: vocab_res_schema.yml +- | + ``` + + Process the following example: + + ``` +- $include: vocab_res_src.yml +- | + ``` + + This becomes: + + ``` +- $include: vocab_res_proc.yml +- | + ``` diff -Nru cwltool-2.0.20200126090152+dfsg/cwltool/schemas/v1.2.0-dev1/Workflow.yml cwltool-2.0.20200224214940+dfsg/cwltool/schemas/v1.2.0-dev1/Workflow.yml --- cwltool-2.0.20200126090152+dfsg/cwltool/schemas/v1.2.0-dev1/Workflow.yml 1970-01-01 00:00:00.000000000 +0000 +++ cwltool-2.0.20200224214940+dfsg/cwltool/schemas/v1.2.0-dev1/Workflow.yml 2020-02-24 22:00:06.000000000 +0000 @@ -0,0 +1,738 @@ +saladVersion: v1.1 +$base: "https://w3id.org/cwl/cwl#" + +$namespaces: + cwl: "https://w3id.org/cwl/cwl#" + rdfs: "http://www.w3.org/2000/01/rdf-schema#" + +$graph: + +- name: "WorkflowDoc" + type: documentation + doc: + - | + # Common Workflow Language (CWL) Workflow Description, v1.2.0-dev1 + + This version: + * https://w3id.org/cwl/v1.2.0-dev1/ + + Current version: + * https://w3id.org/cwl/ + - "\n\n" + - {$include: contrib.md} + - "\n\n" + - | + # Abstract + + This specification defines the Common Workflow Language (CWL) + Workflow description, a vendor-neutral standard for representing + analysis tasks where a sequence of operations are described + using a directed graph of operations to transform input to + output. CWL is portable across a variety of computing + platforms. + + - {$include: intro.md} + + - | + + ## Introduction to CWL Workflow standard v1.2.0-dev1 + + This specification represents the latest stable release from the + CWL group. Since the v1.1 release, v1.2.0-dev1 introduces the + following updates to the CWL Workflow standard. + Documents should to use `cwlVersion: v1.2.0-dev1` to make use of new + syntax and features introduced in v1.2.0-dev1. 
Existing v1.1 documents + should be trivially updatable by changing `cwlVersion`, however + CWL documents that relied on previously undefined or + underspecified behavior may have slightly different behavior in + v1.2.0-dev1. + + ## Changelog + + * Adds `when` field to [WorkflowStep](#WorkflowStep) for conditional + execution + * Adds `pickValue` field to [WorkflowStepInput](#WorkflowStepInput) and + [WorkflowOutputParameter](#WorkflowOutputParameter) for selecting among null and + non-null source values + + See also the [CWL Command Line Tool Description, v1.2.0-dev1 changelog](CommandLineTool.html#Changelog). + + ## Purpose + + The Common Workflow Language Command Line Tool Description express + workflows for data-intensive science, such as Bioinformatics, Chemistry, + Physics, and Astronomy. This specification is intended to define a data + and execution model for Workflows that can be implemented on top of a + variety of computing platforms, ranging from an individual workstation to + cluster, grid, cloud, and high performance computing systems. Details related + to execution of these workflow not laid out in this specification are open to + interpretation by the computing platform implementing this specification. + + - {$include: concepts.md} + +- name: ExpressionToolOutputParameter + type: record + extends: OutputParameter + fields: + - name: type + type: + - CWLType + - OutputRecordSchema + - OutputEnumSchema + - OutputArraySchema + - string + - type: array + items: + - CWLType + - OutputRecordSchema + - OutputEnumSchema + - OutputArraySchema + - string + jsonldPredicate: + "_id": "sld:type" + "_type": "@vocab" + refScope: 2 + typeDSL: True + doc: | + Specify valid types of data that may be assigned to this parameter. 
+ + +- name: WorkflowInputParameter + type: record + extends: InputParameter + docParent: "#Workflow" + fields: + - name: type + type: + - CWLType + - InputRecordSchema + - InputEnumSchema + - InputArraySchema + - string + - type: array + items: + - CWLType + - InputRecordSchema + - InputEnumSchema + - InputArraySchema + - string + jsonldPredicate: + "_id": "sld:type" + "_type": "@vocab" + refScope: 2 + typeDSL: True + doc: | + Specify valid types of data that may be assigned to this parameter. + - name: inputBinding + type: InputBinding? + doc: | + Deprecated. Preserved for v1.0 backwards compatability. Will be removed in + CWL v2.0. Use `WorkflowInputParameter.loadContents` instead. + jsonldPredicate: "cwl:inputBinding" + + +- type: record + name: ExpressionTool + extends: Process + specialize: + - specializeFrom: InputParameter + specializeTo: WorkflowInputParameter + - specializeFrom: OutputParameter + specializeTo: ExpressionToolOutputParameter + documentRoot: true + doc: | + An ExpressionTool is a type of Process object that can be run by itself + or as a Workflow step. It executes a pure Javascript expression that has + access to the same input parameters as a workflow. It is meant to be used + sparingly as a way to isolate complex Javascript expressions that need to + operate on input data and produce some result; perhaps just a + rearrangement of the inputs. No Docker software container is required + or allowed. + fields: + - name: class + jsonldPredicate: + "_id": "@type" + "_type": "@vocab" + type: string + - name: expression + type: Expression + doc: | + The expression to execute. The expression must return a JSON object which + matches the output parameters of the ExpressionTool. + +- name: LinkMergeMethod + type: enum + docParent: "#WorkflowStepInput" + doc: The input link merge method, described in [WorkflowStepInput](#WorkflowStepInput). 
+ symbols: + - merge_nested + - merge_flattened + + +- name: PickValueMethod + type: enum + docParent: "#WorkflowStepInput" + doc: | + Picking non-null values among inbound data links, described in [WorkflowStepInput](#WorkflowStepInput). + symbols: + - first_non_null + - only_non_null + - all_non_null + + +- name: WorkflowOutputParameter + type: record + extends: OutputParameter + docParent: "#Workflow" + doc: | + Describe an output parameter of a workflow. The parameter must be + connected to one or more parameters defined in the workflow that + will provide the value of the output parameter. It is legal to + connect a WorkflowInputParameter to a WorkflowOutputParameter. + + See [WorkflowStepInput](#WorkflowStepInput) for discussion of + `linkMerge` and `pickValue`. + fields: + - name: outputSource + doc: | + Specifies one or more workflow parameters that supply the value of to + the output parameter. + jsonldPredicate: + "_id": "cwl:outputSource" + "_type": "@id" + refScope: 0 + type: + - string? + - string[]? + - name: linkMerge + type: ["null", LinkMergeMethod] + jsonldPredicate: "cwl:linkMerge" + default: merge_nested + doc: | + The method to use to merge multiple sources into a single array. + If not specified, the default method is "merge_nested". + + - name: pickValue + type: ["null", PickValueMethod] + jsonldPredicate: "cwl:pickValue" + doc: | + The method to use to choose non-null elements among multiple sources. + + - name: type + type: + - CWLType + - OutputRecordSchema + - OutputEnumSchema + - OutputArraySchema + - string + - type: array + items: + - CWLType + - OutputRecordSchema + - OutputEnumSchema + - OutputArraySchema + - string + jsonldPredicate: + "_id": "sld:type" + "_type": "@vocab" + refScope: 2 + typeDSL: True + doc: | + Specify valid types of data that may be assigned to this parameter. 
+ + +- name: Sink + type: record + abstract: true + fields: + - name: source + doc: | + Specifies one or more workflow parameters that will provide input to + the underlying step parameter. + jsonldPredicate: + "_id": "cwl:source" + "_type": "@id" + refScope: 2 + type: + - string? + - string[]? + - name: linkMerge + type: LinkMergeMethod? + jsonldPredicate: "cwl:linkMerge" + default: merge_nested + doc: | + The method to use to merge multiple inbound links into a single array. + If not specified, the default method is "merge_nested". + - name: pickValue + type: ["null", PickValueMethod] + jsonldPredicate: "cwl:pickValue" + doc: | + The method to use to choose non-null elements among multiple sources. + + +- type: record + name: WorkflowStepInput + extends: [Identified, Sink, LoadContents, Labeled] + docParent: "#WorkflowStep" + doc: | + The input of a workflow step connects an upstream parameter (from the + workflow inputs, or the outputs of other workflows steps) with the input + parameters of the process specified by the `run` field. Only input parameters + declared by the target process will be passed through at runtime to the process + though additonal parameters may be specified (for use within `valueFrom` + expressions for instance) - unconnected or unused parameters do not represent an + error condition. + + # Input object + + A WorkflowStepInput object must contain an `id` field in the form + `#fieldname` or `#prefix/fieldname`. When the `id` field contains a slash + `/` the field name consists of the characters following the final slash + (the prefix portion may contain one or more slashes to indicate scope). + This defines a field of the workflow step input object with the value of + the `source` parameter(s). + + # Merging multiple inbound data links + + To merge multiple inbound data links, + [MultipleInputFeatureRequirement](#MultipleInputFeatureRequirement) must be specified + in the workflow or workflow step requirements. 
+
+ If the sink parameter is an array, or named in a [workflow
+ scatter](#WorkflowStep) operation, there may be multiple inbound data links
+ listed in the `source` field. The values from the input links are merged
+ depending on the method specified in the `linkMerge` field. If not
+ specified, the default method is "merge_nested".
+
+ * **merge_nested**
+
+ The input must be an array consisting of exactly one entry for each
+ input link. If "merge_nested" is specified with a single link, the value
+ from the link must be wrapped in a single-item list.
+
+ * **merge_flattened**
+
+ 1. The source and sink parameters must be compatible types, or the source
+ type must be compatible with single element from the "items" type of
+ the destination array parameter.
+ 2. Source parameters which are arrays are concatenated.
+ Source parameters which are single element types are appended as
+ single elements.
+
+ # Picking non-null values among inbound data links
+
+ If present, `pickValue` specifies how to pick non-null values among inbound data links.
+
+ `pickValue` is evaluated
+ 1. Once all source values from upstream step or parameters are available.
+ 2. After `linkMerge`.
+ 3. Before `scatter` or `valueFrom`.
+
+ This is specifically intended to be useful in combination with
+ [conditional execution](#WorkflowStep), where several upstream
+ steps may be connected to a single input (`source` is a list), and
+ skipped steps produce null values.
+
+ Static type checkers should check for type consistency after inferring what the type
+ will be after `pickValue` is applied, just as they do currently for `linkMerge`.
+
+ * **first_non_null**
+
+ For the first level of a list input, pick the first non-null element. The result is a scalar.
+ It is an error if there is no non-null element.
Examples: + * `[null, x, null, y] -> x` + * `[null, [null], null, y] -> [null]` + * `[null, null, null] -> Runtime Error` + + *Intended use case*: If-else pattern where the + value comes either from a conditional step or from a default or + fallback value. The conditional step(s) should be placed first in + the list. + + * **only_non_null** + + For the first level of a list input, pick the single non-null element. The result is a scalar. + It is an error if there is more than one non-null element. Examples: + + * `[null, x, null] -> x` + * `[null, x, null, y] -> Runtime Error` + * `[null, [null], null] -> [null]` + * `[null, null, null] -> Runtime Error` + + *Intended use case*: Switch type patterns where developer considers + more than one active code path as a workflow error + (possibly indicating an error in writing `when` condition expressions). + + * **all_non_null** + + For the first level of a list input, pick all non-null values. + The result is a list, which may be empty. Examples: + + * `[null, x, null] -> [x]` + * `[x, null, y] -> [x, y]` + * `[null, [x], [null]] -> [[x], [null]]` + * `[null, null, null] -> []` + + *Intended use case*: It is valid to have more than one source, but + sources are conditional, so null sources (from skipped steps) + should be filtered out. + + fields: + - name: default + type: ["null", Any] + doc: | + The default value for this parameter to use if either there is no + `source` field, or the value produced by the `source` is `null`. The + default must be applied prior to scattering or evaluating `valueFrom`. + jsonldPredicate: + _id: "sld:default" + noLinkCheck: true + - name: valueFrom + type: + - "null" + - string + - Expression + jsonldPredicate: "cwl:valueFrom" + doc: | + To use valueFrom, [StepInputExpressionRequirement](#StepInputExpressionRequirement) must + be specified in the workflow or workflow step requirements. + + If `valueFrom` is a constant string value, use this as the value for + this input parameter. 
+ + If `valueFrom` is a parameter reference or expression, it must be + evaluated to yield the actual value to be assiged to the input field. + + The `self` value in the parameter reference or expression must be + 1. `null` if there is no `source` field + 2. the value of the parameter(s) specified in the `source` field when this + workflow input parameter **is not** specified in this workflow step's `scatter` field. + 3. an element of the parameter specified in the `source` field when this workflow input + parameter **is** specified in this workflow step's `scatter` field. + + The value of `inputs` in the parameter reference or expression must be + the input object to the workflow step after assigning the `source` + values, applying `default`, and then scattering. The order of + evaluating `valueFrom` among step input parameters is undefined and the + result of evaluating `valueFrom` on a parameter must not be visible to + evaluation of `valueFrom` on other parameters. + + +- type: record + name: WorkflowStepOutput + docParent: "#WorkflowStep" + extends: Identified + doc: | + Associate an output parameter of the underlying process with a workflow + parameter. The workflow parameter (given in the `id` field) be may be used + as a `source` to connect with input parameters of other workflow steps, or + with an output parameter of the process. + + A unique identifier for this workflow output parameter. This is + the identifier to use in the `source` field of `WorkflowStepInput` + to connect the output value to downstream parameters. + + +- name: ScatterMethod + type: enum + docParent: "#WorkflowStep" + doc: The scatter method, as described in [workflow step scatter](#WorkflowStep). + symbols: + - dotproduct + - nested_crossproduct + - flat_crossproduct + + +- name: WorkflowStep + type: record + extends: [Identified, Labeled, sld:Documented] + docParent: "#Workflow" + doc: | + A workflow step is an executable element of a workflow. 
It specifies the
+ underlying process implementation (such as `CommandLineTool` or another
+ `Workflow`) in the `run` field and connects the input and output parameters
+ of the underlying process to workflow parameters.
+
+ # Scatter/gather
+
+ To use scatter/gather,
+ [ScatterFeatureRequirement](#ScatterFeatureRequirement) must be specified
+ in the workflow or workflow step requirements.
+
+ A "scatter" operation specifies that the associated workflow step or
+ subworkflow should execute separately over a list of input elements. Each
+ job making up a scatter operation is independent and may be executed
+ concurrently.
+
+ The `scatter` field specifies one or more input parameters which will be
+ scattered. An input parameter may be listed more than once. The declared
+ type of each input parameter implicitly becomes an array of items of the
+ input parameter type. If a parameter is listed more than once, it becomes
+ a nested array. As a result, upstream parameters which are connected to
+ scattered parameters must be arrays.
+
+ All output parameter types are also implicitly wrapped in arrays. Each job
+ in the scatter results in an entry in the output array.
+
+ If any scattered parameter runtime value is an empty array, all outputs are
+ set to empty arrays and no work is done for the step, according to
+ applicable scattering rules.
+
+ If `scatter` declares more than one input parameter, `scatterMethod`
+ describes how to decompose the input into a discrete set of jobs.
+
+ * **dotproduct** specifies that each of the input arrays are aligned and one
+ element taken from each array to construct each job. It is an error
+ if all input arrays are not the same length.
+
+ * **nested_crossproduct** specifies the Cartesian product of the inputs,
+ producing a job for every combination of the scattered inputs. The
+ output must be nested arrays for each level of scattering, in the
+ order that the input arrays are listed in the `scatter` field.
+ + * **flat_crossproduct** specifies the Cartesian product of the inputs, + producing a job for every combination of the scattered inputs. The + output arrays must be flattened to a single level, but otherwise listed in the + order that the input arrays are listed in the `scatter` field. + + # Conditional execution + + Conditional execution makes execution of a step conditional on an + expression. A step that is not executed is "skipped". A skipped + step produces `null` for all output parameters. + + The condition is evaluated after `scatter`, using the input object + of each individual scatter job. This means over a set of scatter + jobs, some may be executed and some may be skipped. When the + results are gathered, skipped steps must be `null` in the output + arrays. + + The `when` field controls conditional execution. This is an + expression that must be evaluated with `inputs` bound to the step + input object (or individual scatter job), and returns a boolean + value. It is an error if this expression returns a value other + than `true` or `false`. + + # Subworkflows + + To specify a nested workflow as part of a workflow step, + [SubworkflowFeatureRequirement](#SubworkflowFeatureRequirement) must be + specified in the workflow or workflow step requirements. + + It is a fatal error if a workflow directly or indirectly invokes itself as + a subworkflow (recursive workflows are not allowed). + + fields: + - name: in + type: WorkflowStepInput[] + jsonldPredicate: + _id: "cwl:in" + mapSubject: id + mapPredicate: source + doc: | + Defines the input parameters of the workflow step. The process is ready to + run when all required input parameters are associated with concrete + values. Input parameters include a schema for each parameter which is + used to validate the input object. It may also be used build a user + interface for constructing the input object. 
+ - name: out + type: + - type: array + items: [string, WorkflowStepOutput] + jsonldPredicate: + _id: "cwl:out" + _type: "@id" + identity: true + doc: | + Defines the parameters representing the output of the process. May be + used to generate and/or validate the output object. + - name: requirements + type: ProcessRequirement[]? + jsonldPredicate: + _id: "cwl:requirements" + mapSubject: class + doc: | + Declares requirements that apply to either the runtime environment or the + workflow engine that must be met in order to execute this workflow step. If + an implementation cannot satisfy all requirements, or a requirement is + listed which is not recognized by the implementation, it is a fatal + error and the implementation must not attempt to run the process, + unless overridden at user option. + - name: hints + type: Any[]? + jsonldPredicate: + _id: "cwl:hints" + noLinkCheck: true + mapSubject: class + doc: | + Declares hints applying to either the runtime environment or the + workflow engine that may be helpful in executing this workflow step. It is + not an error if an implementation cannot satisfy all hints, however + the implementation may report a warning. + - name: run + type: [string, Process] + jsonldPredicate: + _id: "cwl:run" + _type: "@id" + subscope: run + doc: | + Specifies the process to run. + - name: when + type: + - "null" + - Expression + jsonldPredicate: "cwl:when" + doc: | + If defined, only run the step when the expression evaluates to + `true`. If `false` the step is skipped. A skipped step + produces a `null` on each output. + - name: scatter + type: + - string? + - string[]? + jsonldPredicate: + "_id": "cwl:scatter" + "_type": "@id" + "_container": "@list" + refScope: 0 + - name: scatterMethod + doc: | + Required if `scatter` is an array of more than one element. + type: ScatterMethod? 
+
+ jsonldPredicate:
+ "_id": "cwl:scatterMethod"
+ "_type": "@vocab"
+
+
+- name: Workflow
+ type: record
+ extends: "#Process"
+ documentRoot: true
+ specialize:
+ - specializeFrom: InputParameter
+ specializeTo: WorkflowInputParameter
+ - specializeFrom: OutputParameter
+ specializeTo: WorkflowOutputParameter
+ doc: |
+ A workflow describes a set of **steps** and the **dependencies** between
+ those steps. When a step produces output that will be consumed by a
+ second step, the first step is a dependency of the second step.
+
+ When there is a dependency, the workflow engine must execute the preceding
+ step and wait for it to successfully produce output before executing the
+ dependent step. If two steps are defined in the workflow graph that
+ are not directly or indirectly dependent, these steps are **independent**,
+ and may execute in any order or execute concurrently. A workflow is
+ complete when all steps have been executed.
+
+ Dependencies between parameters are expressed using the `source` field on
+ [workflow step input parameters](#WorkflowStepInput) and [workflow output
+ parameters](#WorkflowOutputParameter).
+
+ The `source` field expresses the dependency of one parameter on another
+ such that when a value is associated with the parameter specified by
+ `source`, that value is propagated to the destination parameter. When all
+ data links inbound to a given step are fulfilled, the step is ready to
+ execute.
+
+ ## Workflow success and failure
+
+ A completed step must result in one of `success`, `temporaryFailure` or
+ `permanentFailure` states. An implementation may choose to retry a step
+ execution which resulted in `temporaryFailure`. An implementation may
+ choose to either continue running other steps of a workflow, or terminate
+ immediately upon `permanentFailure`.
+
+ * If any step of a workflow execution results in `permanentFailure`, then
+ the workflow status is `permanentFailure`.
+ + * If one or more steps result in `temporaryFailure` and all other steps + complete `success` or are not executed, then the workflow status is + `temporaryFailure`. + + * If all workflow steps are executed and complete with `success`, then the + workflow status is `success`. + + # Extensions + + [ScatterFeatureRequirement](#ScatterFeatureRequirement) and + [SubworkflowFeatureRequirement](#SubworkflowFeatureRequirement) are + available as standard [extensions](#Extensions_and_Metadata) to core + workflow semantics. + + fields: + - name: "class" + jsonldPredicate: + "_id": "@type" + "_type": "@vocab" + type: string + - name: steps + doc: | + The individual steps that make up the workflow. Each step is executed when all of its + input data links are fufilled. An implementation may choose to execute + the steps in a different order than listed and/or execute steps + concurrently, provided that dependencies between steps are met. + type: + - type: array + items: "#WorkflowStep" + jsonldPredicate: + mapSubject: id + + +- type: record + name: SubworkflowFeatureRequirement + extends: ProcessRequirement + doc: | + Indicates that the workflow platform must support nested workflows in + the `run` field of [WorkflowStep](#WorkflowStep). + fields: + - name: "class" + type: "string" + doc: "Always 'SubworkflowFeatureRequirement'" + jsonldPredicate: + "_id": "@type" + "_type": "@vocab" + +- name: ScatterFeatureRequirement + type: record + extends: ProcessRequirement + doc: | + Indicates that the workflow platform must support the `scatter` and + `scatterMethod` fields of [WorkflowStep](#WorkflowStep). 
+ fields: + - name: "class" + type: "string" + doc: "Always 'ScatterFeatureRequirement'" + jsonldPredicate: + "_id": "@type" + "_type": "@vocab" + +- name: MultipleInputFeatureRequirement + type: record + extends: ProcessRequirement + doc: | + Indicates that the workflow platform must support multiple inbound data links + listed in the `source` field of [WorkflowStepInput](#WorkflowStepInput). + fields: + - name: "class" + type: "string" + doc: "Always 'MultipleInputFeatureRequirement'" + jsonldPredicate: + "_id": "@type" + "_type": "@vocab" + +- type: record + name: StepInputExpressionRequirement + extends: ProcessRequirement + doc: | + Indicate that the workflow platform must support the `valueFrom` field + of [WorkflowStepInput](#WorkflowStepInput). + fields: + - name: "class" + type: "string" + doc: "Always 'StepInputExpressionRequirement'" + jsonldPredicate: + "_id": "@type" + "_type": "@vocab" + +- {$import: Operation.yml} diff -Nru cwltool-2.0.20200126090152+dfsg/cwltool/update.py cwltool-2.0.20200224214940+dfsg/cwltool/update.py --- cwltool-2.0.20200126090152+dfsg/cwltool/update.py 2020-01-27 14:51:43.000000000 +0000 +++ cwltool-2.0.20200224214940+dfsg/cwltool/update.py 2020-02-24 22:00:06.000000000 +0000 @@ -22,6 +22,22 @@ from .utils import aslist, visit_class, visit_field +def v1_1to1_2(doc, loader, baseuri): # pylint: disable=unused-argument + # type: (Any, Loader, str) -> Tuple[Any, str] + """Public updater for v1.1 to v1.2""" + + doc = copy.deepcopy(doc) + + upd = doc + if isinstance(upd, MutableMapping) and "$graph" in upd: + upd = upd["$graph"] + for proc in aslist(upd): + if "cwlVersion" in proc: + del proc["cwlVersion"] + + return doc, "v1.2.0-dev1" + + def v1_0to1_1( doc: Any, loader: Loader, baseuri: str ) -> Tuple[Any, str]: # pylint: disable=unused-argument @@ -129,20 +145,22 @@ UPDATES = { - "v1.0": v1_0to1_1, - "v1.1": None, + u"v1.0": v1_0to1_1, + u"v1.1": v1_1to1_2, } # type: Dict[str, Optional[Callable[[Any, Loader, str], Tuple[Any, 
str]]]] DEVUPDATES = { - "v1.0": v1_0to1_1, - "v1.1.0-dev1": v1_1_0dev1to1_1, - "v1.1": None, + u"v1.1.0-dev1": v1_1_0dev1to1_1, + u"v1.2.0-dev1": None, } # type: Dict[str, Optional[Callable[[Any, Loader, str], Tuple[Any, str]]]] + ALLUPDATES = UPDATES.copy() ALLUPDATES.update(DEVUPDATES) -INTERNAL_VERSION = "v1.1" +INTERNAL_VERSION = u"v1.2.0-dev1" + +ORIGINAL_CWLVERSION = "http://commonwl.org/cwltool#original_cwlVersion" def identity(doc, loader, baseuri): # pylint: disable=unused-argument @@ -179,7 +197,15 @@ version = metadata["cwlVersion"] cdoc["cwlVersion"] = version - if version not in UPDATES: + updated_from = metadata.get(ORIGINAL_CWLVERSION) or cdoc.get(ORIGINAL_CWLVERSION) + + if updated_from: + if version != INTERNAL_VERSION: + raise validate.ValidationException( + "original_cwlVersion is set (%s) but cwlVersion is '%s', expected '%s' " + % (updated_from, version, INTERNAL_VERSION) + ) + elif version not in UPDATES: if version in DEVUPDATES: if enable_dev: pass @@ -187,7 +213,7 @@ keys = list(UPDATES.keys()) keys.sort() raise validate.ValidationException( - "Version '%s' is a development or deprecated version.\n " + u"Version '%s' is a development or deprecated version.\n " "Update your document to a stable version (%s) or use " "--enable-dev to enable support for development and " "deprecated versions." 
% (version, ", ".join(keys)) @@ -201,12 +227,6 @@ def update(doc, loader, baseuri, enable_dev, metadata): # type: (Union[CommentedSeq, CommentedMap], Loader, str, bool, Any) -> CommentedMap - if isinstance(doc, CommentedMap): - if metadata.get("http://commonwl.org/cwltool#original_cwlVersion") or doc.get( - "http://commonwl.org/cwltool#original_cwlVersion" - ): - return doc - (cdoc, version) = checkversion(doc, metadata, enable_dev) originalversion = copy.copy(version) diff -Nru cwltool-2.0.20200126090152+dfsg/cwltool/workflow.py cwltool-2.0.20200224214940+dfsg/cwltool/workflow.py --- cwltool-2.0.20200126090152+dfsg/cwltool/workflow.py 2020-01-27 14:51:43.000000000 +0000 +++ cwltool-2.0.20200224214940+dfsg/cwltool/workflow.py 2020-02-24 22:00:06.000000000 +0000 @@ -61,6 +61,8 @@ return Workflow(toolpath_object, loadingContext) if toolpath_object["class"] == "ProcessGenerator": return procgenerator.ProcessGenerator(toolpath_object, loadingContext) + if toolpath_object["class"] == "Operation": + return command_line_tool.AbstractOperation(toolpath_object, loadingContext) raise WorkflowException( "Missing or invalid 'class' field in " @@ -146,7 +148,7 @@ supportsMultipleInput, # type: bool sourceField, # type: str incomplete=False, # type: bool -): # type: (...) -> Optional[Dict[str, Any]] +): # type: (...) 
-> Optional[MutableMapping[str, Any]] inputobj = {} # type: Dict[str, Any] for inp in parms: iid = inp["id"] @@ -162,7 +164,9 @@ ) for src in connections: a_state = state.get(src, None) - if a_state is not None and (a_state.success == "success" or incomplete): + if a_state is not None and ( + a_state.success in ("success", "skipped") or incomplete + ): if not match_types( inp["type"], a_state, @@ -187,6 +191,37 @@ elif not incomplete: return None + if "pickValue" in inp and isinstance(inputobj.get(iid), MutableSequence): + seq = cast(MutableSequence[Any], inputobj.get(iid)) + if inp["pickValue"] == "first_non_null": + found = False + for v in seq: + if v is not None: + found = True + inputobj[iid] = v + break + if not found: + raise WorkflowException( + u"All sources for '%s' are null" % (shortname(inp["id"])) + ) + elif inp["pickValue"] == "only_non_null": + found = False + for v in seq: + if v is not None: + if found: + raise WorkflowException( + u"Expected only one source for '%s' to be non-null, got %s" + % (shortname(inp["id"]), seq) + ) + found = True + inputobj[iid] = v + if not found: + raise WorkflowException( + u"All sources for '%s' are null" % (shortname(inp["id"])) + ) + elif inp["pickValue"] == "all_non_null": + inputobj[iid] = [v for v in seq if v is not None] + if inputobj.get(iid) is None and "default" in inp: inputobj[iid] = inp["default"] @@ -277,7 +312,7 @@ self.workflow.get_requirement("MultipleInputFeatureRequirement")[0] ) - wo = None # type: Optional[Dict[str, str]] + wo = None # type: Optional[MutableMapping[str, str]] try: wo = object_from_state( self.state, @@ -313,7 +348,7 @@ _logger.info("[%s] completed %s", self.name, self.processStatus) if _logger.isEnabledFor(logging.DEBUG): - _logger.debug("[%s] %s", self.name, json_dumps(wo, indent=4)) + _logger.debug("[%s] outputs %s", self.name, json_dumps(wo, indent=4)) self.did_callback = True @@ -340,7 +375,7 @@ "[%s] produced output %s", step.name, json_dumps(jobout, indent=4) ) - if 
processStatus != "success": + if processStatus not in ("success", "skipped"): if self.processStatus != "permanentFail": self.processStatus = processStatus @@ -365,6 +400,9 @@ runtimeContext, # type: RuntimeContext ): # type: (...) -> Generator[Union[ExpressionTool.ExpressionJob, JobBase, CallbackJob, None], None, None] + if step.submitted: + return + inputparms = step.tool["inputs"] outputparms = step.tool["outputs"] @@ -406,7 +444,7 @@ vfinputs = {shortname(k): v for k, v in inputobj.items()} def postScatterEval(io): - # type: (MutableMapping[str, Any]) -> Dict[str, Any] + # type: (MutableMapping[str, Any]) -> Optional[MutableMapping[str, Any]] shortio = {shortname(k): v for k, v in io.items()} fs_access = getdefault(runtimeContext.make_fs_access, StdFsAccess)("") @@ -434,7 +472,41 @@ ) return v - return {k: valueFromFunc(k, v) for k, v in io.items()} + psio = {k: valueFromFunc(k, v) for k, v in io.items()} + if "when" in step.tool: + evalinputs = {shortname(k): v for k, v in psio.items()} + whenval = expression.do_eval( + step.tool["when"], + evalinputs, + self.workflow.requirements, + None, + None, + {}, + context=v, + debug=runtimeContext.debug, + js_console=runtimeContext.js_console, + timeout=runtimeContext.eval_timeout, + ) + if whenval is True: + pass + elif whenval is False: + _logger.debug( + "[%s] conditional %s evaluated to %s", + step.name, + step.tool["when"], + whenval, + ) + _logger.debug( + "[%s] inputs was %s", + step.name, + json_dumps(evalinputs, indent=2), + ) + return None + else: + raise WorkflowException( + "Conditional 'when' must evaluate to 'true' or 'false'" + ) + return psio if "scatter" in step.tool: scatter = aslist(step.tool["scatter"]) @@ -470,20 +542,23 @@ else: if _logger.isEnabledFor(logging.DEBUG): _logger.debug( - "[job %s] job input %s", - step.name, - json_dumps(inputobj, indent=4), + u"[%s] job input %s", step.name, json_dumps(inputobj, indent=4) ) inputobj = postScatterEval(inputobj) - - if 
_logger.isEnabledFor(logging.DEBUG): - _logger.debug( - "[job %s] evaluated job input to %s", - step.name, - json_dumps(inputobj, indent=4), - ) - jobs = step.job(inputobj, callback, runtimeContext) + if inputobj is not None: + if _logger.isEnabledFor(logging.DEBUG): + _logger.debug( + u"[%s] evaluated job input to %s", + step.name, + json_dumps(inputobj, indent=4), + ) + jobs = step.job(inputobj, callback, runtimeContext) + else: + _logger.info(u"[%s] will be skipped", step.name) + callback({k["id"]: None for k in outputparms}, "skipped") + step.completed = True + jobs = (_ for _ in ()) step.submitted = True @@ -514,7 +589,7 @@ self.processStatus = "success" if _logger.isEnabledFor(logging.DEBUG): - _logger.debug("[%s] %s", self.name, json_dumps(joborder, indent=4)) + _logger.debug("[%s] inputs %s", self.name, json_dumps(joborder, indent=4)) runtimeContext = runtimeContext.copy() runtimeContext.outdir = None @@ -665,6 +740,8 @@ step_outputs.extend(step.tool["outputs"]) for s in step.tool["inputs"]: param_to_step[s["id"]] = step.tool + for s in step.tool["outputs"]: + param_to_step[s["id"]] = step.tool if getdefault(loadingContext.do_validate, True): static_checker( @@ -714,6 +791,17 @@ step.visit(op) +def used_by_step(step: MutableMapping[str, Any], shortinputid: str) -> bool: + for st in step["in"]: + if st.get("valueFrom"): + if ("inputs.%s" % shortinputid) in st.get("valueFrom"): + return True + if step.get("when"): + if ("inputs.%s" % shortinputid) in cast(str, step.get("when")): + return True + return False + + class WorkflowStep(Process): def __init__( self, @@ -794,6 +882,7 @@ if not found: if stepfield == "in": param["type"] = "Any" + param["used_by_step"] = used_by_step(self.tool, shortinputid) param["not_connected"] = True else: if isinstance(step_entry, Mapping): @@ -1025,19 +1114,17 @@ while rc.completed < rc.total: made_progress = False for index, step in enumerate(steps): - if ( - getdefault(runtimeContext.on_error, "stop") == "stop" - and 
rc.processStatus != "success" - ): + if getdefault( + runtimeContext.on_error, "stop" + ) == "stop" and rc.processStatus not in ("success", "skipped"): break if step is None: continue try: for j in step: - if ( - getdefault(runtimeContext.on_error, "stop") == "stop" - and rc.processStatus != "success" - ): + if getdefault( + runtimeContext.on_error, "stop" + ) == "stop" and rc.processStatus not in ("success", "skipped"): break if j is not None: made_progress = True @@ -1083,20 +1170,19 @@ [] ) # type: List[Optional[Generator[Union[ExpressionTool.ExpressionJob, JobBase, CallbackJob, None], None, None]]] for index in range(0, jobl): - sjobo = copy.copy(joborder) + sjobo = copy.copy(joborder) # type: Optional[MutableMapping[str, Any]] + assert sjobo is not None # nosec for key in scatter_keys: sjobo[key] = joborder[key][index] if runtimeContext.postScatterEval is not None: sjobo = runtimeContext.postScatterEval(sjobo) - - steps.append( - process.job( - sjobo, - functools.partial(rc.receive_scatter_output, index), - runtimeContext, - ) - ) + curriedcallback = functools.partial(rc.receive_scatter_output, index) + if sjobo is not None: + steps.append(process.job(sjobo, curriedcallback, runtimeContext)) + else: + curriedcallback({}, "skipped") + steps.append(None) rc.setTotal(jobl, steps) return parallel_steps(steps, rc, runtimeContext) @@ -1123,19 +1209,19 @@ [] ) # type: List[Optional[Generator[Union[ExpressionTool.ExpressionJob, JobBase, CallbackJob, None], None, None]]] for index in range(0, jobl): - sjob = copy.copy(joborder) + sjob = copy.copy(joborder) # type: Optional[MutableMapping[str, Any]] + assert sjob is not None # nosec sjob[scatter_key] = joborder[scatter_key][index] if len(scatter_keys) == 1: if runtimeContext.postScatterEval is not None: sjob = runtimeContext.postScatterEval(sjob) - steps.append( - process.job( - sjob, - functools.partial(rc.receive_scatter_output, index), - runtimeContext, - ) - ) + curriedcallback = 
functools.partial(rc.receive_scatter_output, index) + if sjob is not None: + steps.append(process.job(sjob, curriedcallback, runtimeContext)) + else: + curriedcallback({}, "skipped") + steps.append(None) else: steps.append( nested_crossproduct_scatter( @@ -1199,19 +1285,19 @@ ) # type: List[Optional[Generator[Union[ExpressionTool.ExpressionJob, JobBase, CallbackJob, None], None, None]]] put = startindex for index in range(0, jobl): - sjob = copy.copy(joborder) + sjob = copy.copy(joborder) # type: Optional[MutableMapping[str, Any]] + assert sjob is not None # nosec sjob[scatter_key] = joborder[scatter_key][index] if len(scatter_keys) == 1: if runtimeContext.postScatterEval is not None: sjob = runtimeContext.postScatterEval(sjob) - steps.append( - process.job( - sjob, - functools.partial(callback.receive_scatter_output, put), - runtimeContext, - ) - ) + curriedcallback = functools.partial(callback.receive_scatter_output, put) + if sjob is not None: + steps.append(process.job(sjob, curriedcallback, runtimeContext)) + else: + curriedcallback({}, "skipped") + steps.append(None) put += 1 else: (add, _) = _flat_crossproduct_scatter( diff -Nru cwltool-2.0.20200126090152+dfsg/cwltool.egg-info/PKG-INFO cwltool-2.0.20200224214940+dfsg/cwltool.egg-info/PKG-INFO --- cwltool-2.0.20200126090152+dfsg/cwltool.egg-info/PKG-INFO 2020-01-27 14:51:56.000000000 +0000 +++ cwltool-2.0.20200224214940+dfsg/cwltool.egg-info/PKG-INFO 2020-02-24 22:00:20.000000000 +0000 @@ -1,6 +1,6 @@ Metadata-Version: 2.1 Name: cwltool -Version: 2.0.20200126090152 +Version: 2.0.20200224214940 Summary: Common workflow language reference implementation Home-page: https://github.com/common-workflow-language/cwltool Author: Common workflow language working group @@ -33,49 +33,49 @@ .. |Downloads| image:: https://pepy.tech/badge/cwltool/month :target: https://pepy.tech/project/cwltool - .. 
|CommandLineTool Support| image:: https://flat.badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/command_line_tool.json + .. |CommandLineTool Support| image:: https://badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/command_line_tool.json?icon=commonwl :target: https://ci.commonwl.org/job/cwltool-conformance/ - .. |DockerRequirement Support| image:: https://flat.badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/docker.json + .. |DockerRequirement Support| image:: https://badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/docker.json?icon=commonwl :target: https://ci.commonwl.org/job/cwltool-conformance/ - .. |EnvVarRequirement Support| image:: https://flat.badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/env_var.json + .. |EnvVarRequirement Support| image:: https://badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/env_var.json?icon=commonwl :target: https://ci.commonwl.org/job/cwltool-conformance/ - .. |ExpressionTool Support| image:: https://flat.badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/expression_tool.json + .. |ExpressionTool Support| image:: https://badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/expression_tool.json?icon=commonwl :target: https://ci.commonwl.org/job/cwltool-conformance/ - .. |InitialWorkDirRequirement Support| image:: https://flat.badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/initial_work_dir.json + .. 
|InitialWorkDirRequirement Support| image:: https://badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/initial_work_dir.json?icon=commonwl :target: https://ci.commonwl.org/job/cwltool-conformance/ - .. |InlineJavascriptRequirement Support| image:: https://flat.badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/inline_javascript.json + .. |InlineJavascriptRequirement Support| image:: https://badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/inline_javascript.json?icon=commonwl :target: https://ci.commonwl.org/job/cwltool-conformance/ - .. |MultipleInputRequirement Support| image:: https://flat.badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/multiple_input.json + .. |MultipleInputRequirement Support| image:: https://badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/multiple_input.json?icon=commonwl :target: https://ci.commonwl.org/job/cwltool-conformance/ - .. |Core Support| image:: https://flat.badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/required.json + .. |Core Support| image:: https://badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/required.json?icon=commonwl :target: https://ci.commonwl.org/job/cwltool-conformance/ - .. |ResourceRequirement Support| image:: https://flat.badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/resource.json + .. 
|ResourceRequirement Support| image:: https://badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/resource.json?icon=commonwl :target: https://ci.commonwl.org/job/cwltool-conformance/ - .. |ScatterRequirement Support| image:: https://flat.badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/scatter.json + .. |ScatterRequirement Support| image:: https://badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/scatter.json?icon=commonwl :target: https://ci.commonwl.org/job/cwltool-conformance/ - .. |SchemaDefRequirement Support| image:: https://flat.badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/schema_def.json + .. |SchemaDefRequirement Support| image:: https://badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/schema_def.json?icon=commonwl :target: https://ci.commonwl.org/job/cwltool-conformance/ - .. |ShellCommandequirement Support| image:: https://flat.badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/shell_command.json + .. |ShellCommandequirement Support| image:: https://badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/shell_command.json?icon=commonwl :target: https://ci.commonwl.org/job/cwltool-conformance/ - .. |StepInputRequirement Support| image:: https://flat.badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/step_input.json + .. 
|StepInputRequirement Support| image:: https://badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/step_input.json?icon=commonwl :target: https://ci.commonwl.org/job/cwltool-conformance/ - .. |SubWorkflowRequirement Support| image:: https://flat.badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/subworkflow.json + .. |SubWorkflowRequirement Support| image:: https://badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/subworkflow.json?icon=commonwl :target: https://ci.commonwl.org/job/cwltool-conformance/ - .. |Workflow Support| image:: https://flat.badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/workflow.json + .. |Workflow Support| image:: https://badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/workflow.json?icon=commonwl :target: https://ci.commonwl.org/job/cwltool-conformance/ diff -Nru cwltool-2.0.20200126090152+dfsg/cwltool.egg-info/SOURCES.txt cwltool-2.0.20200224214940+dfsg/cwltool.egg-info/SOURCES.txt --- cwltool-2.0.20200126090152+dfsg/cwltool.egg-info/SOURCES.txt 2020-01-27 14:51:56.000000000 +0000 +++ cwltool-2.0.20200224214940+dfsg/cwltool.egg-info/SOURCES.txt 2020-02-24 22:00:20.000000000 +0000 @@ -176,6 +176,49 @@ cwltool/schemas/v1.1/salad/schema_salad/metaschema/vocab_res_proc.yml cwltool/schemas/v1.1/salad/schema_salad/metaschema/vocab_res_schema.yml cwltool/schemas/v1.1/salad/schema_salad/metaschema/vocab_res_src.yml +cwltool/schemas/v1.2.0-dev1/CommandLineTool-standalone.yml +cwltool/schemas/v1.2.0-dev1/CommandLineTool.yml +cwltool/schemas/v1.2.0-dev1/CommonWorkflowLanguage.yml +cwltool/schemas/v1.2.0-dev1/Operation.yml +cwltool/schemas/v1.2.0-dev1/Process.yml +cwltool/schemas/v1.2.0-dev1/README.md 
+cwltool/schemas/v1.2.0-dev1/Workflow.yml +cwltool/schemas/v1.2.0-dev1/concepts.md +cwltool/schemas/v1.2.0-dev1/contrib.md +cwltool/schemas/v1.2.0-dev1/intro.md +cwltool/schemas/v1.2.0-dev1/invocation.md +cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/field_name.yml +cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/field_name_proc.yml +cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/field_name_schema.yml +cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/field_name_src.yml +cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/ident_res.yml +cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/ident_res_proc.yml +cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/ident_res_schema.yml +cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/ident_res_src.yml +cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/import_include.md +cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/link_res.yml +cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/link_res_proc.yml +cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/link_res_schema.yml +cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/link_res_src.yml +cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/map_res.yml +cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/map_res_proc.yml +cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/map_res_schema.yml +cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/map_res_src.yml +cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/metaschema.yml +cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/metaschema_base.yml +cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/salad.md +cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/sfdsl_res.yml +cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/sfdsl_res_proc.yml +cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/sfdsl_res_schema.yml 
+cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/sfdsl_res_src.yml +cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/typedsl_res.yml +cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/typedsl_res_proc.yml +cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/typedsl_res_schema.yml +cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/typedsl_res_src.yml +cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/vocab_res.yml +cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/vocab_res_proc.yml +cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/vocab_res_schema.yml +cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/vocab_res_src.yml tests/2.fasta tests/2.fastq tests/__init__.py @@ -217,6 +260,7 @@ tests/test_deps_mapping.yml tests/test_docker.py tests/test_docker_info.py +tests/test_docker_paths_with_colons.py tests/test_docker_warning.py tests/test_empty_input.py tests/test_examples.py @@ -245,6 +289,8 @@ tests/test_validate_js.py tests/utf_doc_example.cwl tests/util.py +tests/with_doc.cwl +tests/without_doc.cwl tests/checker_wf/broken-wf.cwl tests/checker_wf/broken-wf2.cwl tests/checker_wf/broken-wf3.cwl diff -Nru cwltool-2.0.20200126090152+dfsg/debian/changelog cwltool-2.0.20200224214940+dfsg/debian/changelog --- cwltool-2.0.20200126090152+dfsg/debian/changelog 2020-02-14 12:16:02.000000000 +0000 +++ cwltool-2.0.20200224214940+dfsg/debian/changelog 2020-02-26 11:50:44.000000000 +0000 @@ -1,3 +1,10 @@ +cwltool (2.0.20200224214940+dfsg-1) unstable; urgency=medium + + * New upstream version + * Standards-Version: 4.5.0 (routine-update) + + -- Michael R. 
Crusoe Wed, 26 Feb 2020 12:50:44 +0100 + cwltool (2.0.20200126090152+dfsg-1) unstable; urgency=medium * New upstream version diff -Nru cwltool-2.0.20200126090152+dfsg/debian/control cwltool-2.0.20200224214940+dfsg/debian/control --- cwltool-2.0.20200126090152+dfsg/debian/control 2020-01-28 13:00:58.000000000 +0000 +++ cwltool-2.0.20200224214940+dfsg/debian/control 2020-02-26 11:29:19.000000000 +0000 @@ -28,7 +28,7 @@ python3-prov, nodejs, help2man -Standards-Version: 4.4.1 +Standards-Version: 4.5.0 Vcs-Browser: https://salsa.debian.org/med-team/cwltool Vcs-Git: https://salsa.debian.org/med-team/cwltool.git Homepage: https://www.commonwl.org diff -Nru cwltool-2.0.20200126090152+dfsg/MANIFEST.in cwltool-2.0.20200224214940+dfsg/MANIFEST.in --- cwltool-2.0.20200126090152+dfsg/MANIFEST.in 2020-01-27 14:51:43.000000000 +0000 +++ cwltool-2.0.20200224214940+dfsg/MANIFEST.in 2020-02-24 22:00:06.000000000 +0000 @@ -24,6 +24,10 @@ include cwltool/schemas/v1.1.0-dev1/*.md include cwltool/schemas/v1.1.0-dev1/salad/schema_salad/metaschema/*.yml include cwltool/schemas/v1.1.0-dev1/salad/schema_salad/metaschema/*.md +include cwltool/schemas/v1.2.0-dev1/*.yml +include cwltool/schemas/v1.2.0-dev1/*.md +include cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/*.yml +include cwltool/schemas/v1.2.0-dev1/salad/schema_salad/metaschema/*.md include cwltool/cwlNodeEngine.js include cwltool/cwlNodeEngineJSConsole.js include cwltool/cwlNodeEngineWithContext.js diff -Nru cwltool-2.0.20200126090152+dfsg/PKG-INFO cwltool-2.0.20200224214940+dfsg/PKG-INFO --- cwltool-2.0.20200126090152+dfsg/PKG-INFO 2020-01-27 14:51:56.000000000 +0000 +++ cwltool-2.0.20200224214940+dfsg/PKG-INFO 2020-02-24 22:00:20.000000000 +0000 @@ -1,6 +1,6 @@ Metadata-Version: 2.1 Name: cwltool -Version: 2.0.20200126090152 +Version: 2.0.20200224214940 Summary: Common workflow language reference implementation Home-page: https://github.com/common-workflow-language/cwltool Author: Common workflow language working group 
@@ -33,49 +33,49 @@ .. |Downloads| image:: https://pepy.tech/badge/cwltool/month :target: https://pepy.tech/project/cwltool - .. |CommandLineTool Support| image:: https://flat.badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/command_line_tool.json + .. |CommandLineTool Support| image:: https://badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/command_line_tool.json?icon=commonwl :target: https://ci.commonwl.org/job/cwltool-conformance/ - .. |DockerRequirement Support| image:: https://flat.badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/docker.json + .. |DockerRequirement Support| image:: https://badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/docker.json?icon=commonwl :target: https://ci.commonwl.org/job/cwltool-conformance/ - .. |EnvVarRequirement Support| image:: https://flat.badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/env_var.json + .. |EnvVarRequirement Support| image:: https://badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/env_var.json?icon=commonwl :target: https://ci.commonwl.org/job/cwltool-conformance/ - .. |ExpressionTool Support| image:: https://flat.badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/expression_tool.json + .. |ExpressionTool Support| image:: https://badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/expression_tool.json?icon=commonwl :target: https://ci.commonwl.org/job/cwltool-conformance/ - .. 
|InitialWorkDirRequirement Support| image:: https://flat.badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/initial_work_dir.json + .. |InitialWorkDirRequirement Support| image:: https://badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/initial_work_dir.json?icon=commonwl :target: https://ci.commonwl.org/job/cwltool-conformance/ - .. |InlineJavascriptRequirement Support| image:: https://flat.badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/inline_javascript.json + .. |InlineJavascriptRequirement Support| image:: https://badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/inline_javascript.json?icon=commonwl :target: https://ci.commonwl.org/job/cwltool-conformance/ - .. |MultipleInputRequirement Support| image:: https://flat.badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/multiple_input.json + .. |MultipleInputRequirement Support| image:: https://badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/multiple_input.json?icon=commonwl :target: https://ci.commonwl.org/job/cwltool-conformance/ - .. |Core Support| image:: https://flat.badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/required.json + .. |Core Support| image:: https://badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/required.json?icon=commonwl :target: https://ci.commonwl.org/job/cwltool-conformance/ - .. 
|ResourceRequirement Support| image:: https://flat.badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/resource.json + .. |ResourceRequirement Support| image:: https://badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/resource.json?icon=commonwl :target: https://ci.commonwl.org/job/cwltool-conformance/ - .. |ScatterRequirement Support| image:: https://flat.badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/scatter.json + .. |ScatterRequirement Support| image:: https://badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/scatter.json?icon=commonwl :target: https://ci.commonwl.org/job/cwltool-conformance/ - .. |SchemaDefRequirement Support| image:: https://flat.badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/schema_def.json + .. |SchemaDefRequirement Support| image:: https://badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/schema_def.json?icon=commonwl :target: https://ci.commonwl.org/job/cwltool-conformance/ - .. |ShellCommandequirement Support| image:: https://flat.badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/shell_command.json + .. |ShellCommandequirement Support| image:: https://badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/shell_command.json?icon=commonwl :target: https://ci.commonwl.org/job/cwltool-conformance/ - .. |StepInputRequirement Support| image:: https://flat.badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/step_input.json + .. 
|StepInputRequirement Support| image:: https://badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/step_input.json?icon=commonwl :target: https://ci.commonwl.org/job/cwltool-conformance/ - .. |SubWorkflowRequirement Support| image:: https://flat.badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/subworkflow.json + .. |SubWorkflowRequirement Support| image:: https://badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/subworkflow.json?icon=commonwl :target: https://ci.commonwl.org/job/cwltool-conformance/ - .. |Workflow Support| image:: https://flat.badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/workflow.json + .. |Workflow Support| image:: https://badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/workflow.json?icon=commonwl :target: https://ci.commonwl.org/job/cwltool-conformance/ diff -Nru cwltool-2.0.20200126090152+dfsg/README.rst cwltool-2.0.20200224214940+dfsg/README.rst --- cwltool-2.0.20200126090152+dfsg/README.rst 2020-01-27 14:51:43.000000000 +0000 +++ cwltool-2.0.20200224214940+dfsg/README.rst 2020-02-24 22:00:06.000000000 +0000 @@ -24,49 +24,49 @@ .. |Downloads| image:: https://pepy.tech/badge/cwltool/month :target: https://pepy.tech/project/cwltool -.. |CommandLineTool Support| image:: https://flat.badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/command_line_tool.json +.. |CommandLineTool Support| image:: https://badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/command_line_tool.json?icon=commonwl :target: https://ci.commonwl.org/job/cwltool-conformance/ -.. 
|DockerRequirement Support| image:: https://flat.badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/docker.json +.. |DockerRequirement Support| image:: https://badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/docker.json?icon=commonwl :target: https://ci.commonwl.org/job/cwltool-conformance/ -.. |EnvVarRequirement Support| image:: https://flat.badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/env_var.json +.. |EnvVarRequirement Support| image:: https://badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/env_var.json?icon=commonwl :target: https://ci.commonwl.org/job/cwltool-conformance/ -.. |ExpressionTool Support| image:: https://flat.badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/expression_tool.json +.. |ExpressionTool Support| image:: https://badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/expression_tool.json?icon=commonwl :target: https://ci.commonwl.org/job/cwltool-conformance/ -.. |InitialWorkDirRequirement Support| image:: https://flat.badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/initial_work_dir.json +.. |InitialWorkDirRequirement Support| image:: https://badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/initial_work_dir.json?icon=commonwl :target: https://ci.commonwl.org/job/cwltool-conformance/ -.. |InlineJavascriptRequirement Support| image:: https://flat.badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/inline_javascript.json +.. 
|InlineJavascriptRequirement Support| image:: https://badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/inline_javascript.json?icon=commonwl :target: https://ci.commonwl.org/job/cwltool-conformance/ -.. |MultipleInputRequirement Support| image:: https://flat.badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/multiple_input.json +.. |MultipleInputRequirement Support| image:: https://badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/multiple_input.json?icon=commonwl :target: https://ci.commonwl.org/job/cwltool-conformance/ -.. |Core Support| image:: https://flat.badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/required.json +.. |Core Support| image:: https://badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/required.json?icon=commonwl :target: https://ci.commonwl.org/job/cwltool-conformance/ -.. |ResourceRequirement Support| image:: https://flat.badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/resource.json +.. |ResourceRequirement Support| image:: https://badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/resource.json?icon=commonwl :target: https://ci.commonwl.org/job/cwltool-conformance/ -.. |ScatterRequirement Support| image:: https://flat.badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/scatter.json +.. 
|ScatterRequirement Support| image:: https://badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/scatter.json?icon=commonwl :target: https://ci.commonwl.org/job/cwltool-conformance/ -.. |SchemaDefRequirement Support| image:: https://flat.badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/schema_def.json +.. |SchemaDefRequirement Support| image:: https://badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/schema_def.json?icon=commonwl :target: https://ci.commonwl.org/job/cwltool-conformance/ -.. |ShellCommandequirement Support| image:: https://flat.badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/shell_command.json +.. |ShellCommandequirement Support| image:: https://badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/shell_command.json?icon=commonwl :target: https://ci.commonwl.org/job/cwltool-conformance/ -.. |StepInputRequirement Support| image:: https://flat.badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/step_input.json +.. |StepInputRequirement Support| image:: https://badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/step_input.json?icon=commonwl :target: https://ci.commonwl.org/job/cwltool-conformance/ -.. |SubWorkflowRequirement Support| image:: https://flat.badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/subworkflow.json +.. 
|SubWorkflowRequirement Support| image:: https://badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/subworkflow.json?icon=commonwl :target: https://ci.commonwl.org/job/cwltool-conformance/ -.. |Workflow Support| image:: https://flat.badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/workflow.json +.. |Workflow Support| image:: https://badgen.net/https/raw.githubusercontent.com/common-workflow-language/conformance/master/cwltool/cwl_v1.0/cwltool_latest/workflow.json?icon=commonwl :target: https://ci.commonwl.org/job/cwltool-conformance/ diff -Nru cwltool-2.0.20200126090152+dfsg/setup.cfg cwltool-2.0.20200224214940+dfsg/setup.cfg --- cwltool-2.0.20200126090152+dfsg/setup.cfg 2020-01-27 14:51:56.000000000 +0000 +++ cwltool-2.0.20200224214940+dfsg/setup.cfg 2020-02-24 22:00:20.000000000 +0000 @@ -10,6 +10,6 @@ testpaths = tests [egg_info] -tag_build = .20200126090152 +tag_build = .20200224214940 tag_date = 0 diff -Nru cwltool-2.0.20200126090152+dfsg/setup.py cwltool-2.0.20200224214940+dfsg/setup.py --- cwltool-2.0.20200126090152+dfsg/setup.py 2020-01-27 14:51:43.000000000 +0000 +++ cwltool-2.0.20200224214940+dfsg/setup.py 2020-02-24 22:00:06.000000000 +0000 @@ -79,7 +79,7 @@ setup_requires=PYTEST_RUNNER, test_suite="tests", tests_require=[ - "pytest < 4.3.0", + "pytest < 6", "mock >= 2.0.0", "pytest-mock >= 1.10.0", "arcp >= 0.2.0", diff -Nru cwltool-2.0.20200126090152+dfsg/tests/test_docker_paths_with_colons.py cwltool-2.0.20200224214940+dfsg/tests/test_docker_paths_with_colons.py --- cwltool-2.0.20200126090152+dfsg/tests/test_docker_paths_with_colons.py 1970-01-01 00:00:00.000000000 +0000 +++ cwltool-2.0.20200224214940+dfsg/tests/test_docker_paths_with_colons.py 2020-02-24 22:00:06.000000000 +0000 @@ -0,0 +1,35 @@ +import pytest + +from cwltool.docker import DockerCommandLineJob +from cwltool.main import main + +from .util import 
needs_docker + + +def test_docker_append_volume_read_only(mocker): + mocker.patch("os.mkdir") + runtime = ["runtime"] + characters = ":,\"'" + DockerCommandLineJob.append_volume( + runtime, "/source" + characters, "/target" + characters + ) + assert runtime == [ + "runtime", + "--mount=type=bind," + '"source=/source:,""\'",' + '"target=/target:,""\'",' + "readonly", + ] + + +def test_docker_append_volume_read_write(mocker): + mocker.patch("os.mkdir") + runtime = ["runtime"] + characters = ":,\"'" + DockerCommandLineJob.append_volume( + runtime, "/source" + characters, "/target" + characters, True + ) + assert runtime == [ + "runtime", + "--mount=type=bind," '"source=/source:,""\'",' '"target=/target:,""\'"', + ] diff -Nru cwltool-2.0.20200126090152+dfsg/tests/test_docker.py cwltool-2.0.20200224214940+dfsg/tests/test_docker.py --- cwltool-2.0.20200126090152+dfsg/tests/test_docker.py 2020-01-27 14:51:43.000000000 +0000 +++ cwltool-2.0.20200224214940+dfsg/tests/test_docker.py 2020-02-24 22:00:06.000000000 +0000 @@ -22,6 +22,7 @@ ] ) assert "completed success" in stderr + assert (tmpdir / "response.txt").read_text("utf-8") == "hello" assert result_code == 0 diff -Nru cwltool-2.0.20200126090152+dfsg/tests/test_toolargparse.py cwltool-2.0.20200224214940+dfsg/tests/test_toolargparse.py --- cwltool-2.0.20200126090152+dfsg/tests/test_toolargparse.py 2020-01-27 14:51:43.000000000 +0000 +++ cwltool-2.0.20200224214940+dfsg/tests/test_toolargparse.py 2020-02-24 22:00:06.000000000 +0000 @@ -1,3 +1,4 @@ +import argparse import os import sys from io import BytesIO, StringIO @@ -5,7 +6,10 @@ import pytest +from cwltool.argparser import generate_parser +from cwltool.context import LoadingContext import cwltool.executors +from cwltool.load_tool import load_tool from cwltool.main import main from .util import get_data, needs_docker @@ -162,3 +166,21 @@ finally: if script and script.name and os.path.exists(script.name): os.unlink(script.name) + + +def test_argparser_with_doc(): + 
"""The `desription` field is set if `doc` field is provided.""" + loadingContext = LoadingContext() + tool = load_tool(get_data("tests/with_doc.cwl"), loadingContext) + p = argparse.ArgumentParser() + parser = generate_parser(p, tool, {}, [], False) + assert parser.description is not None + + +def test_argparser_without_doc(): + """The `desription` field is None if `doc` field is not provided.""" + loadingContext = LoadingContext() + tool = load_tool(get_data("tests/without_doc.cwl"), loadingContext) + p = argparse.ArgumentParser() + parser = generate_parser(p, tool, {}, [], False) + assert parser.description is None diff -Nru cwltool-2.0.20200126090152+dfsg/tests/with_doc.cwl cwltool-2.0.20200224214940+dfsg/tests/with_doc.cwl --- cwltool-2.0.20200126090152+dfsg/tests/with_doc.cwl 1970-01-01 00:00:00.000000000 +0000 +++ cwltool-2.0.20200224214940+dfsg/tests/with_doc.cwl 2020-02-24 22:00:06.000000000 +0000 @@ -0,0 +1,7 @@ +cwlVersion: v1.0 +class: CommandLineTool +inputs: [] +baseCommand: echo +outputs: [] + +doc: This should be shown in help message diff -Nru cwltool-2.0.20200126090152+dfsg/tests/without_doc.cwl cwltool-2.0.20200224214940+dfsg/tests/without_doc.cwl --- cwltool-2.0.20200126090152+dfsg/tests/without_doc.cwl 1970-01-01 00:00:00.000000000 +0000 +++ cwltool-2.0.20200224214940+dfsg/tests/without_doc.cwl 2020-02-24 22:00:06.000000000 +0000 @@ -0,0 +1,5 @@ +cwlVersion: v1.0 +class: CommandLineTool +inputs: [] +baseCommand: echo +outputs: []