diff --git a/Makefile b/Makefile
index dcc4a6ad2..6865cf8e6 100644
--- a/Makefile
+++ b/Makefile
@@ -44,7 +44,7 @@ help: Makefile
 ## cleanup : shortcut for "make sort_imports format flake8 diff_pydocstyle_report"
 cleanup: sort_imports format flake8 diff_pydocstyle_report
 
-## install-dep : install most of the development dependencies via pip
+## install-dep : install most of the development dependencies via pip
 install-dep: install-dependencies
 
 install-dependencies: FORCE
@@ -79,18 +79,18 @@ clean: FORCE
 # Linting and code style related targets
 
 ## sort_import : sorting imports using isort: https://github.com/timothycrosley/isort
-sort_imports: $(filter-out schema_salad/metaschema.py,$(PYSOURCES)) mypy-stubs
+sort_imports: $(filter-out $(EXCLUDE_FILES),$(PYSOURCES)) mypy-stubs
 	isort $^
 
-remove_unused_imports: $(filter-out schema_salad/metaschema.py,$(PYSOURCES))
+remove_unused_imports: $(filter-out $(EXCLUDE_FILES),$(PYSOURCES))
 	autoflake --in-place --remove-all-unused-imports $^
 
 pep257: pydocstyle
 
 ## pydocstyle : check Python docstring style
-pydocstyle: $(filter-out schema_salad/metaschema.py,$(PYSOURCES))
+pydocstyle: $(filter-out $(EXCLUDE_FILES),$(PYSOURCES))
 	pydocstyle --add-ignore=D100,D101,D102,D103 $^ || true
 
-pydocstyle_report.txt: $(filter-out schema_salad/metaschema.py,$(PYSOURCES))
+pydocstyle_report.txt: $(filter-out $(EXCLUDE_FILES),$(PYSOURCES))
 	pydocstyle setup.py $^ > $@ 2>&1 || true
 
 ## diff_pydocstyle_report : check Python docstring style for changed files only
@@ -103,10 +103,10 @@ codespell:
 
 ## format : check/fix all code indentation and formatting (runs black)
 format:
-	black --force-exclude metaschema.py --exclude _version.py schema_salad setup.py mypy-stubs
+	black --force-exclude "metaschema.py|schema_salad/tests/cwl_v1*" --exclude _version.py schema_salad setup.py mypy-stubs
 
 format-check:
-	black --diff --check --force-exclude metaschema.py --exclude _version.py schema_salad setup.py mypy-stubs
+	black --diff --check --force-exclude "metaschema.py|schema_salad/tests/cwl_v1*" --exclude _version.py schema_salad setup.py mypy-stubs
 
 ## pylint : run static code analysis on Python code
 pylint: $(PYSOURCES)
diff --git a/cwl_v1_2.py b/cwl_v1_2.py
new file mode 100644
index 000000000..3ebc174e9
--- /dev/null
+++ b/cwl_v1_2.py
@@ -0,0 +1,26301 @@
+#
+# This file was autogenerated using schema-salad-tool --codegen=python
+# The code itself is released under the Apache 2.0 license and the help text is
+# subject to the license of the original schema.
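+#
+# Minimal usage sketch (assumptions: ``load_document`` is defined further down
+# in this generated module, beyond this excerpt, and "workflow.cwl" is a
+# hypothetical input path, not part of this changeset):
+#
+#     import cwl_v1_2
+#
+#     uri = cwl_v1_2.file_uri("/home/user/workflow.cwl")
+#     wf = cwl_v1_2.load_document(uri)           # parse into typed objects
+#     as_dict = cwl_v1_2.save(wf, base_url=uri)  # round-trip, keeping line info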
+import copy +import logging +import os +import pathlib +import re +import tempfile +import uuid as _uuid__ # pylint: disable=unused-import # noqa: F401 +import xml.sax # nosec +from abc import ABC, abstractmethod +from io import StringIO +from typing import ( + Any, + Dict, + List, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, + no_type_check, +) +from urllib.parse import quote, urldefrag, urlparse, urlsplit, urlunsplit +from urllib.request import pathname2url + +from rdflib import Graph +from rdflib.plugins.parsers.notation3 import BadSyntax +from ruamel.yaml.comments import CommentedMap, CommentedSeq + +from schema_salad.exceptions import SchemaSaladException, ValidationException +from schema_salad.fetcher import DefaultFetcher, Fetcher, MemoryCachingFetcher +from schema_salad.sourceline import SourceLine, add_lc_filename +from schema_salad.utils import CacheType, yaml_no_ts # requires schema-salad v8.2+ + +_vocab: Dict[str, str] = {} +_rvocab: Dict[str, str] = {} + +_logger = logging.getLogger("salad") + + +IdxType = MutableMapping[str, Tuple[Any, "LoadingOptions"]] + +doc_line_info = CommentedMap() + + +class LoadingOptions: + idx: IdxType + fileuri: Optional[str] + baseuri: str + namespaces: MutableMapping[str, str] + schemas: MutableSequence[str] + original_doc: Optional[Any] + addl_metadata: MutableMapping[str, Any] + fetcher: Fetcher + vocab: Dict[str, str] + rvocab: Dict[str, str] + cache: CacheType + imports: List[str] + includes: List[str] + + def __init__( + self, + fetcher: Optional[Fetcher] = None, + namespaces: Optional[Dict[str, str]] = None, + schemas: Optional[List[str]] = None, + fileuri: Optional[str] = None, + copyfrom: Optional["LoadingOptions"] = None, + original_doc: Optional[Any] = None, + addl_metadata: Optional[Dict[str, str]] = None, + baseuri: Optional[str] = None, + idx: Optional[IdxType] = None, + imports: Optional[List[str]] = None, + includes: Optional[List[str]] = None, + ) -> None: + """Create a LoadingOptions object.""" + self.original_doc = original_doc + + if idx is not None: + self.idx = idx + else: + self.idx = copyfrom.idx if copyfrom is not None else {} + + if fileuri is not None: + self.fileuri = fileuri + else: + self.fileuri = copyfrom.fileuri if copyfrom is not None else None + + if baseuri is not None: + self.baseuri = baseuri + else: + self.baseuri = copyfrom.baseuri if copyfrom is not None else "" + + if namespaces is not None: + self.namespaces = namespaces + else: + self.namespaces = copyfrom.namespaces if copyfrom is not None else {} + + if schemas is not None: + self.schemas = schemas + else: + self.schemas = copyfrom.schemas if copyfrom is not None else [] + + if addl_metadata is not None: + self.addl_metadata = addl_metadata + else: + self.addl_metadata = copyfrom.addl_metadata if copyfrom is not None else {} + + if imports is not None: + self.imports = imports + else: + self.imports = copyfrom.imports if copyfrom is not None else [] + + if includes is not None: + self.includes = includes + else: + self.includes = copyfrom.includes if copyfrom is not None else [] + + if fetcher is not None: + self.fetcher = fetcher + elif copyfrom is not None: + self.fetcher = copyfrom.fetcher + else: + import requests + from cachecontrol.caches import FileCache + from cachecontrol.wrapper import CacheControl + + root = pathlib.Path(os.environ.get("HOME", tempfile.gettempdir())) + session = CacheControl( + requests.Session(), + cache=FileCache(root / ".cache" / "salad"), + ) + self.fetcher: Fetcher = 
DefaultFetcher({}, session)
+
+        self.cache = self.fetcher.cache if isinstance(self.fetcher, MemoryCachingFetcher) else {}
+
+        self.vocab = _vocab
+        self.rvocab = _rvocab
+
+        if namespaces is not None:
+            self.vocab = self.vocab.copy()
+            self.rvocab = self.rvocab.copy()
+            for k, v in namespaces.items():
+                self.vocab[k] = v
+                self.rvocab[v] = k
+
+    @property
+    def graph(self) -> Graph:
+        """Generate a merged rdflib.Graph from all entries in self.schemas."""
+        graph = Graph()
+        if not self.schemas:
+            return graph
+        key = str(hash(tuple(self.schemas)))
+        if key in self.cache:
+            return cast(Graph, self.cache[key])
+        for schema in self.schemas:
+            fetchurl = (
+                self.fetcher.urljoin(self.fileuri, schema)
+                if self.fileuri is not None
+                else pathlib.Path(schema).resolve().as_uri()
+            )
+            if fetchurl not in self.cache or self.cache[fetchurl] is True:
+                _logger.debug("Getting external schema %s", fetchurl)
+                try:
+                    content = self.fetcher.fetch_text(fetchurl)
+                except Exception as e:
+                    _logger.warning("Could not load extension schema %s: %s", fetchurl, str(e))
+                    continue
+                newGraph = Graph()
+                err_msg = "unknown error"
+                for fmt in ["xml", "turtle"]:
+                    try:
+                        newGraph.parse(data=content, format=fmt, publicID=str(fetchurl))
+                        self.cache[fetchurl] = newGraph
+                        graph += newGraph
+                        break
+                    except (xml.sax.SAXParseException, TypeError, BadSyntax) as e:
+                        err_msg = str(e)
+                else:
+                    _logger.warning("Could not load extension schema %s: %s", fetchurl, err_msg)
+        self.cache[key] = graph
+        return graph
+
+
+class Saveable(ABC):
+    """Mark classes that have a save() and fromDoc() function."""
+
+    @classmethod
+    @abstractmethod
+    def fromDoc(
+        cls,
+        _doc: Any,
+        baseuri: str,
+        loadingOptions: LoadingOptions,
+        docRoot: Optional[str] = None,
+    ) -> "Saveable":
+        """Construct this object from the result of yaml.load()."""
+
+    @abstractmethod
+    def save(
+        self,
+        top: bool = False,
+        base_url: str = "",
+        relative_uris: bool = True,
+        keys: Optional[List[Any]] = None,
+        inserted_line_info: Optional[Dict[int, int]] = None,
+        shift: int = 0,
+    ) -> CommentedMap:
+        """Convert this object to a JSON/YAML friendly dictionary."""
+
+
+def load_field(val, fieldtype, baseuri, loadingOptions):
+    # type: (Union[str, Dict[str, str]], _Loader, str, LoadingOptions) -> Any
+    if isinstance(val, MutableMapping):
+        if "$import" in val:
+            if loadingOptions.fileuri is None:
+                raise SchemaSaladException("Cannot load $import without fileuri")
+            url = loadingOptions.fetcher.urljoin(loadingOptions.fileuri, val["$import"])
+            result, metadata = _document_load_by_url(
+                fieldtype,
+                url,
+                loadingOptions,
+            )
+            loadingOptions.imports.append(url)
+            return result
+        if "$include" in val:
+            if loadingOptions.fileuri is None:
+                raise SchemaSaladException("Cannot load $include without fileuri")
+            url = loadingOptions.fetcher.urljoin(loadingOptions.fileuri, val["$include"])
+            val = loadingOptions.fetcher.fetch_text(url)
+            loadingOptions.includes.append(url)
+    return fieldtype.load(val, baseuri, loadingOptions)
+
+
+save_type = Optional[Union[MutableMapping[str, Any], MutableSequence[Any], int, float, bool, str]]
+
+
+def add_kv(
+    old_doc: CommentedMap,
+    new_doc: CommentedMap,
+    line_numbers: Dict[Any, Dict[str, int]],
+    key: str,
+    val: Any,
+    max_len: int,
+    cols: Dict[int, int],
+    min_col: int = 0,
+    inserted_line_info: Optional[Dict[int, int]] = None,
+    shift: int = 0,
+) -> Tuple[int, Optional[Dict[int, int]]]:
+    """Add a key/value pair into a CommentedMap.
+
+    Adds the key/value pair to ``new_doc``, given the old CommentedMap, the line numbers
+    for each key/value pair in the old CommentedMap, the key/value pair to insert, the
+    maximum line count of the old CommentedMap, and the maximum column taken for each line.
+    """
+    if inserted_line_info is None:
+        inserted_line_info = {}
+
+    if len(inserted_line_info.keys()) >= 1:
+        max_line = max(inserted_line_info.keys()) + 1
+    else:
+        max_line = 0
+
+    if key in line_numbers:  # If the passed key to insert is in the original CommentedMap as a key
+        line_info = old_doc.lc.data[key]  # Get the line information for the key
+        if (
+            line_info[0] + shift not in inserted_line_info
+        ):  # If the line of the key + shift isn't taken, add it
+            new_doc.lc.add_kv_line_col(
+                key,
+                [
+                    old_doc.lc.data[key][0] + shift,
+                    old_doc.lc.data[key][1],
+                    old_doc.lc.data[key][2] + shift,
+                    old_doc.lc.data[key][3],
+                ],
+            )
+            inserted_line_info[old_doc.lc.data[key][0] + shift] = old_doc.lc.data[key][1]
+        else:  # If the line is already taken
+            line = line_info[0] + shift
+            while line in inserted_line_info.keys():  # Find the closest free line
+                line += 1
+            new_doc.lc.add_kv_line_col(
+                key,
+                [
+                    line,
+                    old_doc.lc.data[key][1],
+                    line + (line - old_doc.lc.data[key][2]),
+                    old_doc.lc.data[key][3],
+                ],
+            )
+            inserted_line_info[line] = old_doc.lc.data[key][1]
+        return max_len, inserted_line_info
+    elif isinstance(val, (int, float, str)) and not isinstance(
+        val, bool
+    ):  # If the value is hashable
+        if val in line_numbers:  # If the value is in the original CommentedMap
+            line = line_numbers[val]["line"] + shift  # Get the line info for the value
+            if line in inserted_line_info:  # Get the appropriate line to place value on
+                line = max_line
+
+            col = line_numbers[val]["col"]
+            new_doc.lc.add_kv_line_col(key, [line, col, line, col + len(key) + 2])
+            inserted_line_info[line] = col + len(key) + 2
+            return max_len, inserted_line_info
+        elif isinstance(val, str):  # Logic for DSL expansion with "?"
+            if val + "?" in line_numbers:
+                line = line_numbers[val + "?"]["line"] + shift
+                if line in inserted_line_info:
+                    line = max_line
+                col = line_numbers[val + "?"]["col"]
+                new_doc.lc.add_kv_line_col(key, [line, col, line, col + len(key) + 2])
+                inserted_line_info[line] = col + len(key) + 2
+                return max_len, inserted_line_info
+    elif old_doc:
+        if val in old_doc:
+            index = old_doc.lc.data.index(val)
+            line_info = old_doc.lc.data[index]
+            if line_info[0] + shift not in inserted_line_info:
+                new_doc.lc.add_kv_line_col(
+                    key,
+                    [
+                        old_doc.lc.data[index][0] + shift,
+                        old_doc.lc.data[index][1],
+                        old_doc.lc.data[index][2] + shift,
+                        old_doc.lc.data[index][3],
+                    ],
+                )
+                inserted_line_info[old_doc.lc.data[index][0] + shift] = old_doc.lc.data[index][
+                    1
+                ]
+            else:
+                new_doc.lc.add_kv_line_col(
+                    key,
+                    [
+                        max_line + shift,
+                        old_doc.lc.data[index][1],
+                        max_line + (max_line - old_doc.lc.data[index][2]) + shift,
+                        old_doc.lc.data[index][3],
+                    ],
+                )
+                inserted_line_info[max_line + shift] = old_doc.lc.data[index][1]
+    # If neither the key nor the value is in the original CommentedMap/old doc (or the value is not hashable)
+    new_doc.lc.add_kv_line_col(key, [max_line, min_col, max_line, min_col + len(key) + 2])
+    inserted_line_info[max_line] = min_col + len(key) + 2
+    return max_len + 1, inserted_line_info
+
+
+@no_type_check
+def iterate_through_doc(keys: List[Any]) -> Optional[CommentedMap]:
+    doc = doc_line_info
+    for key in keys:
+        if isinstance(doc, CommentedMap):
+            doc = doc.get(key)
+        elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int):
+            if key < len(doc):
+                doc = doc[key]
+            else:
+                return None
+        else:
+            return None
+    if isinstance(doc, CommentedSeq):
+        to_return = CommentedMap()
+        for index, key in enumerate(doc):
+            to_return[key] = ""
+            to_return.lc.add_kv_line_col(
+                key,
+                [
+                    doc.lc.data[index][0],
+                    doc.lc.data[index][1],
+                    doc.lc.data[index][0],
+                    doc.lc.data[index][1],
+                ],
+            )
+        return to_return
+    return doc
+
+
+def get_line_numbers(doc: Optional[CommentedMap]) -> Dict[Any, Dict[str, int]]:
+    """Get line numbers for kv pairs in CommentedMap.
+
+    For each key/value pair in a CommentedMap, save the line/col info into a
+    dictionary; value info is saved only if the value is hashable.
+    """
+    line_numbers: Dict[Any, Dict[str, int]] = {}
+    if doc is None:
+        return {}
+    if doc.lc.data is None:
+        return {}
+    for key, value in doc.lc.data.items():
+        line_numbers[key] = {}
+
+        line_numbers[key]["line"] = doc.lc.data[key][0]
+        line_numbers[key]["col"] = doc.lc.data[key][1]
+        if isinstance(value, (int, float, bool, str)):
+            line_numbers[value] = {}
+            line_numbers[value]["line"] = doc.lc.data[key][2]
+            line_numbers[value]["col"] = doc.lc.data[key][3]
+    return line_numbers
+
+
+def get_min_col(line_numbers: Dict[Any, Dict[str, int]]) -> int:
+    """Return the largest column taken by any key/value pair; new entries are placed at this column."""
+    min_col = 0
+    for line in line_numbers:
+        if line_numbers[line]["col"] > min_col:
+            min_col = line_numbers[line]["col"]
+    return min_col
+
+
+def get_max_line_num(doc: CommentedMap) -> int:
+    """Get the max line number for a CommentedMap.
+
+    Iterate through the key with the highest line number until reaching a
+    non-CommentedMap value or an empty CommentedMap.
+ """ + max_line = 0 + max_key = "" + cur = doc + while isinstance(cur, CommentedMap) and len(cur) > 0: + for key in cur.lc.data.keys(): + if cur.lc.data[key][2] >= max_line: + max_line = cur.lc.data[key][2] + max_key = key + cur = cur[max_key] + return max_line + 1 + + +def save( + val: Any, + top: bool = True, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, +) -> save_type: + """Save a val of any type. + + Recursively calls save method from class if val is of type Saveable. + Otherwise, saves val to CommentedMap or CommentedSeq. + """ + if keys is None: + keys = [] + + doc = iterate_through_doc(keys) + + if isinstance(val, Saveable): + return val.save( + top=top, + base_url=base_url, + relative_uris=relative_uris, + keys=keys, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if isinstance(val, MutableSequence): + r = CommentedSeq() + r.lc.data = {} + for i in range(0, len(val)): + new_keys = keys + if doc: + if str(i) in doc: + r.lc.data[i] = doc.lc.data[i] + new_keys.append(i) + r.append( + save( + val[i], + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=new_keys, + inserted_line_info=inserted_line_info, + shift=shift, + ) + ) + return r + + if isinstance(val, MutableMapping): + newdict = CommentedMap() + new_keys = keys + for key in val: + + if doc: + if key in doc: + newdict.lc.add_kv_line_col(key, doc.lc.data[key]) + new_keys.append(key) + + newdict[key] = save( + val[key], + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=new_keys, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + return newdict + if val is None or isinstance(val, (int, float, bool, str)): + return val + raise Exception("Not Saveable: %s" % type(val)) + + +def save_with_metadata( + val: Any, + valLoadingOpts: LoadingOptions, + top: bool = True, + base_url: str = "", + relative_uris: bool = True, +) -> save_type: + """Save and set $namespaces, $schemas, $base and any other metadata fields at the top level.""" + saved_val = save(val, top, base_url, relative_uris) + newdict: MutableMapping[str, Any] = {} + if isinstance(saved_val, MutableSequence): + newdict = {"$graph": saved_val} + elif isinstance(saved_val, MutableMapping): + newdict = saved_val + + if valLoadingOpts.namespaces: + newdict["$namespaces"] = valLoadingOpts.namespaces + if valLoadingOpts.schemas: + newdict["$schemas"] = valLoadingOpts.schemas + if valLoadingOpts.baseuri: + newdict["$base"] = valLoadingOpts.baseuri + for k, v in valLoadingOpts.addl_metadata.items(): + if k not in newdict: + newdict[k] = v + + return newdict + + +def expand_url( + url, # type: str + base_url, # type: str + loadingOptions, # type: LoadingOptions + scoped_id=False, # type: bool + vocab_term=False, # type: bool + scoped_ref=None, # type: Optional[int] +): + # type: (...) 
-> str + if url in ("@id", "@type"): + return url + + if vocab_term and url in loadingOptions.vocab: + return url + + if bool(loadingOptions.vocab) and ":" in url: + prefix = url.split(":")[0] + if prefix in loadingOptions.vocab: + url = loadingOptions.vocab[prefix] + url[len(prefix) + 1 :] + + split = urlsplit(url) + + if ( + (bool(split.scheme) and split.scheme in loadingOptions.fetcher.supported_schemes()) + or url.startswith("$(") + or url.startswith("${") + ): + pass + elif scoped_id and not bool(split.fragment): + splitbase = urlsplit(base_url) + frg = "" + if bool(splitbase.fragment): + frg = splitbase.fragment + "/" + split.path + else: + frg = split.path + pt = splitbase.path if splitbase.path != "" else "/" + url = urlunsplit((splitbase.scheme, splitbase.netloc, pt, splitbase.query, frg)) + elif scoped_ref is not None and not bool(split.fragment): + splitbase = urlsplit(base_url) + sp = splitbase.fragment.split("/") + n = scoped_ref + while n > 0 and len(sp) > 0: + sp.pop() + n -= 1 + sp.append(url) + url = urlunsplit( + ( + splitbase.scheme, + splitbase.netloc, + splitbase.path, + splitbase.query, + "/".join(sp), + ) + ) + else: + url = loadingOptions.fetcher.urljoin(base_url, url) + + if vocab_term: + split = urlsplit(url) + if bool(split.scheme): + if url in loadingOptions.rvocab: + return loadingOptions.rvocab[url] + else: + raise ValidationException(f"Term {url!r} not in vocabulary") + + return url + + +class _Loader: + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, str, LoadingOptions, Optional[str]) -> Any + pass + + +class _AnyLoader(_Loader): + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, str, LoadingOptions, Optional[str]) -> Any + if doc is not None: + return doc + raise ValidationException("Expected non-null") + + +class _PrimitiveLoader(_Loader): + def __init__(self, tp): + # type: (Union[type, Tuple[Type[str], Type[str]]]) -> None + self.tp = tp + + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, str, LoadingOptions, Optional[str]) -> Any + if not isinstance(doc, self.tp): + raise ValidationException( + "Expected a {} but got {}".format( + self.tp.__class__.__name__, doc.__class__.__name__ + ) + ) + return doc + + def __repr__(self): # type: () -> str + return str(self.tp) + + +class _ArrayLoader(_Loader): + def __init__(self, items): + # type: (_Loader) -> None + self.items = items + + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, str, LoadingOptions, Optional[str]) -> Any + if not isinstance(doc, MutableSequence): + raise ValidationException(f"Expected a list, was {type(doc)}") + r = [] # type: List[Any] + errors = [] # type: List[SchemaSaladException] + for i in range(0, len(doc)): + try: + lf = load_field(doc[i], _UnionLoader((self, self.items)), baseuri, loadingOptions) + if isinstance(lf, MutableSequence): + r.extend(lf) + else: + r.append(lf) + except ValidationException as e: + errors.append(e.with_sourceline(SourceLine(doc, i, str))) + if errors: + raise ValidationException("", None, errors) + return r + + def __repr__(self): # type: () -> str + return f"array<{self.items}>" + + +class _EnumLoader(_Loader): + def __init__(self, symbols: Sequence[str], name: str) -> None: + self.symbols = symbols + self.name = name + + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, str, LoadingOptions, Optional[str]) -> Any + if doc in self.symbols: + return doc + raise ValidationException(f"Expected one of {self.symbols}") + + 
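+    # A small sketch of how this loader behaves (the symbol values below are
+    # hypothetical, not taken from the schema):
+    #
+    #     loader = _EnumLoader(("null", "boolean", "int"), "PrimitiveType")
+    #     loader.load("int", "", LoadingOptions())      # returns "int"
+    #     loader.load("float64", "", LoadingOptions())  # raises ValidationException
+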
def __repr__(self): # type: () -> str + return self.name + + +class _SecondaryDSLLoader(_Loader): + def __init__(self, inner): + # type: (_Loader) -> None + self.inner = inner + + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, str, LoadingOptions, Optional[str]) -> Any + r: List[Dict[str, Any]] = [] + if isinstance(doc, MutableSequence): + for d in doc: + if isinstance(d, str): + if d.endswith("?"): + r.append({"pattern": d[:-1], "required": False}) + else: + r.append({"pattern": d}) + elif isinstance(d, dict): + new_dict: Dict[str, Any] = {} + dict_copy = copy.deepcopy(d) + if "pattern" in dict_copy: + new_dict["pattern"] = dict_copy.pop("pattern") + else: + raise ValidationException( + f"Missing pattern in secondaryFiles specification entry: {d}" + ) + new_dict["required"] = ( + dict_copy.pop("required") if "required" in dict_copy else None + ) + + if len(dict_copy): + raise ValidationException( + "Unallowed values in secondaryFiles specification entry: {}".format( + dict_copy + ) + ) + r.append(new_dict) + + else: + raise ValidationException( + "Expected a string or sequence of (strings or mappings)." + ) + elif isinstance(doc, MutableMapping): + new_dict = {} + doc_copy = copy.deepcopy(doc) + if "pattern" in doc_copy: + new_dict["pattern"] = doc_copy.pop("pattern") + else: + raise ValidationException( + f"Missing pattern in secondaryFiles specification entry: {doc}" + ) + new_dict["required"] = doc_copy.pop("required") if "required" in doc_copy else None + + if len(doc_copy): + raise ValidationException( + f"Unallowed values in secondaryFiles specification entry: {doc_copy}" + ) + r.append(new_dict) + + elif isinstance(doc, str): + if doc.endswith("?"): + r.append({"pattern": doc[:-1], "required": False}) + else: + r.append({"pattern": doc}) + else: + raise ValidationException("Expected str or sequence of str") + return self.inner.load(r, baseuri, loadingOptions, docRoot) + + +class _RecordLoader(_Loader): + def __init__(self, classtype): + # type: (Type[Saveable]) -> None + self.classtype = classtype + + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, str, LoadingOptions, Optional[str]) -> Any + if not isinstance(doc, MutableMapping): + raise ValidationException(f"Expected a dict, was {type(doc)}") + return self.classtype.fromDoc(doc, baseuri, loadingOptions, docRoot=docRoot) + + def __repr__(self): # type: () -> str + return str(self.classtype.__name__) + + +class _ExpressionLoader(_Loader): + def __init__(self, items: Type[str]) -> None: + self.items = items + + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, str, LoadingOptions, Optional[str]) -> Any + if not isinstance(doc, str): + raise ValidationException(f"Expected a str, was {type(doc)}") + return doc + + +class _UnionLoader(_Loader): + def __init__(self, alternates: Sequence[_Loader]) -> None: + self.alternates = alternates + + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, str, LoadingOptions, Optional[str]) -> Any + errors = [] + for t in self.alternates: + try: + return t.load(doc, baseuri, loadingOptions, docRoot=docRoot) + except ValidationException as e: + errors.append(ValidationException(f"tried {t} but", None, [e])) + raise ValidationException("", None, errors, "-") + + def __repr__(self): # type: () -> str + return " | ".join(str(a) for a in self.alternates) + + +class _URILoader(_Loader): + def __init__(self, inner, scoped_id, vocab_term, scoped_ref): + # type: (_Loader, bool, bool, Union[int, None]) -> 
None + self.inner = inner + self.scoped_id = scoped_id + self.vocab_term = vocab_term + self.scoped_ref = scoped_ref + + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, str, LoadingOptions, Optional[str]) -> Any + if isinstance(doc, MutableSequence): + newdoc = [] + for i in doc: + if isinstance(i, str): + newdoc.append( + expand_url( + i, + baseuri, + loadingOptions, + self.scoped_id, + self.vocab_term, + self.scoped_ref, + ) + ) + else: + newdoc.append(i) + doc = newdoc + elif isinstance(doc, str): + doc = expand_url( + doc, + baseuri, + loadingOptions, + self.scoped_id, + self.vocab_term, + self.scoped_ref, + ) + return self.inner.load(doc, baseuri, loadingOptions) + + +class _TypeDSLLoader(_Loader): + typeDSLregex = re.compile(r"^([^[?]+)(\[\])?(\?)?$") + + def __init__(self, inner, refScope): + # type: (_Loader, Union[int, None]) -> None + self.inner = inner + self.refScope = refScope + + def resolve( + self, + doc, # type: str + baseuri, # type: str + loadingOptions, # type: LoadingOptions + ): + # type: (...) -> Union[List[Union[Dict[str, str], str]], Dict[str, str], str] + m = self.typeDSLregex.match(doc) + if m: + group1 = m.group(1) + assert group1 is not None # nosec + first = expand_url(group1, baseuri, loadingOptions, False, True, self.refScope) + second = third = None + if bool(m.group(2)): + second = {"type": "array", "items": first} + # second = CommentedMap((("type", "array"), + # ("items", first))) + # second.lc.add_kv_line_col("type", lc) + # second.lc.add_kv_line_col("items", lc) + # second.lc.filename = filename + if bool(m.group(3)): + third = ["null", second or first] + # third = CommentedSeq(["null", second or first]) + # third.lc.add_kv_line_col(0, lc) + # third.lc.add_kv_line_col(1, lc) + # third.lc.filename = filename + return third or second or first + return doc + + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, str, LoadingOptions, Optional[str]) -> Any + if isinstance(doc, MutableSequence): + r = [] # type: List[Any] + for d in doc: + if isinstance(d, str): + resolved = self.resolve(d, baseuri, loadingOptions) + if isinstance(resolved, MutableSequence): + for i in resolved: + if i not in r: + r.append(i) + else: + if resolved not in r: + r.append(resolved) + else: + r.append(d) + doc = r + elif isinstance(doc, str): + doc = self.resolve(doc, baseuri, loadingOptions) + + return self.inner.load(doc, baseuri, loadingOptions) + + +class _IdMapLoader(_Loader): + def __init__(self, inner, mapSubject, mapPredicate): + # type: (_Loader, str, Union[str, None]) -> None + self.inner = inner + self.mapSubject = mapSubject + self.mapPredicate = mapPredicate + + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, str, LoadingOptions, Optional[str]) -> Any + if isinstance(doc, MutableMapping): + r = [] # type: List[Any] + for k in sorted(doc.keys()): + val = doc[k] + if isinstance(val, CommentedMap): + v = copy.copy(val) + v.lc.data = val.lc.data + v.lc.filename = val.lc.filename + v[self.mapSubject] = k + r.append(v) + elif isinstance(val, MutableMapping): + v2 = copy.copy(val) + v2[self.mapSubject] = k + r.append(v2) + else: + if self.mapPredicate: + v3 = {self.mapPredicate: val} + v3[self.mapSubject] = k + r.append(v3) + else: + raise ValidationException("No mapPredicate") + doc = r + return self.inner.load(doc, baseuri, loadingOptions) + + +def _document_load( + loader: _Loader, + doc: Union[CommentedMap, str, MutableMapping[str, Any], MutableSequence[Any]], + baseuri: str, + 
loadingOptions: LoadingOptions,
+    addl_metadata_fields: Optional[MutableSequence[str]] = None,
+) -> Tuple[Any, LoadingOptions]:
+    if isinstance(doc, str):
+        return _document_load_by_url(
+            loader,
+            loadingOptions.fetcher.urljoin(baseuri, doc),
+            loadingOptions,
+            addl_metadata_fields=addl_metadata_fields,
+        )
+
+    if isinstance(doc, MutableMapping):
+        addl_metadata = {}
+        if addl_metadata_fields is not None:
+            for mf in addl_metadata_fields:
+                if mf in doc:
+                    addl_metadata[mf] = doc[mf]
+
+        docuri = baseuri
+        if "$base" in doc:
+            baseuri = doc["$base"]
+
+        loadingOptions = LoadingOptions(
+            copyfrom=loadingOptions,
+            namespaces=doc.get("$namespaces", None),
+            schemas=doc.get("$schemas", None),
+            baseuri=doc.get("$base", None),
+            addl_metadata=addl_metadata,
+        )
+
+        doc = copy.copy(doc)
+        if "$namespaces" in doc:
+            doc.pop("$namespaces")
+        if "$schemas" in doc:
+            doc.pop("$schemas")
+        if "$base" in doc:
+            doc.pop("$base")
+
+        if isinstance(doc, CommentedMap):
+            global doc_line_info
+            doc_line_info = doc
+
+        if "$graph" in doc:
+            loadingOptions.idx[baseuri] = (
+                loader.load(doc["$graph"], baseuri, loadingOptions),
+                loadingOptions,
+            )
+        else:
+            loadingOptions.idx[baseuri] = (
+                loader.load(doc, baseuri, loadingOptions, docRoot=baseuri),
+                loadingOptions,
+            )
+
+        if docuri != baseuri:
+            loadingOptions.idx[docuri] = loadingOptions.idx[baseuri]
+
+        return loadingOptions.idx[baseuri]
+    if isinstance(doc, MutableSequence):
+        loadingOptions.idx[baseuri] = (
+            loader.load(doc, baseuri, loadingOptions),
+            loadingOptions,
+        )
+        return loadingOptions.idx[baseuri]
+
+    raise ValidationException(
+        "Expected URI string, MutableMapping or MutableSequence, got %s" % type(doc)
+    )
+
+
+def _document_load_by_url(
+    loader: _Loader,
+    url: str,
+    loadingOptions: LoadingOptions,
+    addl_metadata_fields: Optional[MutableSequence[str]] = None,
+) -> Tuple[Any, LoadingOptions]:
+    if url in loadingOptions.idx:
+        return loadingOptions.idx[url]
+
+    doc_url, frg = urldefrag(url)
+
+    text = loadingOptions.fetcher.fetch_text(doc_url)
+    textIO = StringIO(text)
+    textIO.name = str(doc_url)
+    yaml = yaml_no_ts()
+    result = yaml.load(textIO)
+    add_lc_filename(result, doc_url)
+
+    loadingOptions = LoadingOptions(copyfrom=loadingOptions, fileuri=doc_url)
+
+    _document_load(
+        loader,
+        result,
+        doc_url,
+        loadingOptions,
+        addl_metadata_fields=addl_metadata_fields,
+    )
+
+    return loadingOptions.idx[url]
+
+
+def file_uri(path, split_frag=False):  # type: (str, bool) -> str
+    if path.startswith("file://"):
+        return path
+    if split_frag:
+        pathsp = path.split("#", 2)
+        frag = "#" + quote(str(pathsp[1])) if len(pathsp) == 2 else ""
+        urlpath = pathname2url(str(pathsp[0]))
+    else:
+        urlpath = pathname2url(path)
+        frag = ""
+    if urlpath.startswith("//"):
+        return f"file:{urlpath}{frag}"
+    return f"file://{urlpath}{frag}"
+
+
+def prefix_url(url: str, namespaces: Dict[str, str]) -> str:
+    """Compact the given URL, replacing a known namespace base with its short prefix."""
+    for k, v in namespaces.items():
+        if url.startswith(v):
+            return k + ":" + url[len(v) :]
+    return url
+
+
+def save_relative_uri(
+    uri: Any,
+    base_url: str,
+    scoped_id: bool,
+    ref_scope: Optional[int],
+    relative_uris: bool,
+) -> Any:
+    """Convert any URI to a relative one, obeying the scoping rules."""
+    if isinstance(uri, MutableSequence):
+        return [save_relative_uri(u, base_url, scoped_id, ref_scope, relative_uris) for u in uri]
+    elif isinstance(uri, str):
+        if not relative_uris or uri == base_url:
+            return uri
+        urisplit = urlsplit(uri)
+        basesplit =
urlsplit(base_url) + if urisplit.scheme == basesplit.scheme and urisplit.netloc == basesplit.netloc: + if urisplit.path != basesplit.path: + p = os.path.relpath(urisplit.path, os.path.dirname(basesplit.path)) + if urisplit.fragment: + p = p + "#" + urisplit.fragment + return p + + basefrag = basesplit.fragment + "/" + if ref_scope: + sp = basefrag.split("/") + i = 0 + while i < ref_scope: + sp.pop() + i += 1 + basefrag = "/".join(sp) + + if urisplit.fragment.startswith(basefrag): + return urisplit.fragment[len(basefrag) :] + return urisplit.fragment + return uri + else: + return save(uri, top=False, base_url=base_url, relative_uris=relative_uris) + + +def shortname(inputid: str) -> str: + """ + Compute the shortname of a fully qualified identifier. + + See https://w3id.org/cwl/v1.2/SchemaSalad.html#Short_names. + """ + parsed_id = urlparse(inputid) + if parsed_id.fragment: + return parsed_id.fragment.split("/")[-1] + return parsed_id.path.split("/")[-1] + + +def parser_info() -> str: + return "org.w3id.cwl.v1_2" + + +class Documented(Saveable): + pass + + +class RecordField(Documented): + """ + A field of a record. + """ + + def __init__( + self, + name: Any, + type: Any, + doc: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.doc = doc + self.name = name + self.type = type + + def __eq__(self, other: Any) -> bool: + if isinstance(other, RecordField): + return bool( + self.doc == other.doc + and self.name == other.name + and self.type == other.type + ) + return False + + def __hash__(self) -> int: + return hash((self.doc, self.name, self.type)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "RecordField": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "name" in _doc: + try: + name = load_field( + _doc.get("name"), + uri_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'name' field is not valid because:", + SourceLine(_doc, "name", str), + [e], + ) + ) + else: + name = None + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + raise ValidationException("Missing name") + if not __original_name_is_none: + baseuri = name + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'doc' field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + try: + type = load_field( + _doc.get("type"), + typedsl_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'type' field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + 
extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `doc`, `name`, `type`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'RecordField'", None, _errors__) + _constructed = cls( + doc=doc, + name=name, + type=type, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[name] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.name is not None and "name" not in r: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="name", + val=r.get("name"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + 
key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["doc", "name", "type"]) + + +class RecordSchema(Saveable): + def __init__( + self, + type: Any, + fields: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.fields = fields + self.type = type + + def __eq__(self, other: Any) -> bool: + if isinstance(other, RecordSchema): + return bool(self.fields == other.fields and self.type == other.type) + return False + + def __hash__(self) -> int: + return hash((self.fields, self.type)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "RecordSchema": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "fields" in _doc: + try: + fields = load_field( + _doc.get("fields"), + idmap_fields_union_of_None_type_or_array_of_RecordFieldLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'fields' field is not valid because:", + SourceLine(_doc, "fields", str), + [e], + ) + ) + else: + fields = None + try: + type = load_field( + _doc.get("type"), + typedsl_Record_nameLoader_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'type' field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `fields`, `type`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'RecordSchema'", None, _errors__) + _constructed = cls( + fields=fields, + type=type, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for 
key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.fields is not None and "fields" not in r: + r["fields"] = save( + self.fields, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="fields", + val=r.get("fields"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["fields", "type"]) + + +class EnumSchema(Saveable): + """ + Define an enumerated type. 
+ + """ + + def __init__( + self, + symbols: Any, + type: Any, + name: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.name = name + self.symbols = symbols + self.type = type + + def __eq__(self, other: Any) -> bool: + if isinstance(other, EnumSchema): + return bool( + self.name == other.name + and self.symbols == other.symbols + and self.type == other.type + ) + return False + + def __hash__(self) -> int: + return hash((self.name, self.symbols, self.type)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "EnumSchema": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "name" in _doc: + try: + name = load_field( + _doc.get("name"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'name' field is not valid because:", + SourceLine(_doc, "name", str), + [e], + ) + ) + else: + name = None + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + name = "_:" + str(_uuid__.uuid4()) + if not __original_name_is_none: + baseuri = name + try: + symbols = load_field( + _doc.get("symbols"), + uri_array_of_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'symbols' field is not valid because:", + SourceLine(_doc, "symbols", str), + [e], + ) + ) + try: + type = load_field( + _doc.get("type"), + typedsl_Enum_nameLoader_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'type' field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `name`, `symbols`, `type`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'EnumSchema'", None, _errors__) + _constructed = cls( + name=name, + symbols=symbols, + type=type, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[name] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in 
self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.name is not None and "name" not in r: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="name", + val=r.get("name"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.symbols is not None and "symbols" not in r: + u = save_relative_uri( + self.symbols, str(self.name), True, None, relative_uris + ) + r["symbols"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="symbols", + val=r.get("symbols"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["name", "symbols", "type"]) + + +class ArraySchema(Saveable): + def __init__( + self, + items: Any, + type: Any, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.items = items + self.type = type + + def __eq__(self, other: Any) -> bool: + if isinstance(other, ArraySchema): + return bool(self.items == other.items and self.type == other.type) + return False + + def __hash__(self) -> int: + return hash((self.items, self.type)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "ArraySchema": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = 
doc.lc.filename + _errors__ = [] + try: + items = load_field( + _doc.get("items"), + uri_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_False_True_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'items' field is not valid because:", + SourceLine(_doc, "items", str), + [e], + ) + ) + try: + type = load_field( + _doc.get("type"), + typedsl_Array_nameLoader_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'type' field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `items`, `type`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'ArraySchema'", None, _errors__) + _constructed = cls( + items=items, + type=type, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.items is not None and "items" not in r: + u = save_relative_uri(self.items, base_url, False, 2, relative_uris) + r["items"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="items", + val=r.get("items"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.type is not 
None and "type" not in r: + r["type"] = save( + self.type, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["items", "type"]) + + +class File(Saveable): + """ + Represents a file (or group of files when `secondaryFiles` is provided) that + will be accessible by tools using standard POSIX file system call API such as + open(2) and read(2). + + Files are represented as objects with `class` of `File`. File objects have + a number of properties that provide metadata about the file. + + The `location` property of a File is a IRI that uniquely identifies the + file. Implementations must support the `file://` IRI scheme and may support + other schemes such as `http://` and `https://`. The value of `location` may also be a + relative reference, in which case it must be resolved relative to the IRI + of the document it appears in. Alternately to `location`, implementations + must also accept the `path` property on File, which must be a filesystem + path available on the same host as the CWL runner (for inputs) or the + runtime environment of a command line tool execution (for command line tool + outputs). + + If no `location` or `path` is specified, a file object must specify + `contents` with the UTF-8 text content of the file. This is a "file + literal". File literals do not correspond to external resources, but are + created on disk with `contents` with when needed for executing a tool. + Where appropriate, expressions can return file literals to define new files + on a runtime. The maximum size of `contents` is 64 kilobytes. + + The `basename` property defines the filename on disk where the file is + staged. This may differ from the resource name. If not provided, + `basename` must be computed from the last path part of `location` and made + available to expressions. + + The `secondaryFiles` property is a list of File or Directory objects that + must be staged in the same directory as the primary file. It is an error + for file names to be duplicated in `secondaryFiles`. + + The `size` property is the size in bytes of the File. It must be computed + from the resource and made available to expressions. The `checksum` field + contains a cryptographic hash of the file content for use it verifying file + contents. Implementations may, at user option, enable or disable + computation of the `checksum` field for performance or other reasons. + However, the ability to compute output checksums is required to pass the + CWL conformance test suite. + + When executing a CommandLineTool, the files and secondary files may be + staged to an arbitrary directory, but must use the value of `basename` for + the filename. The `path` property must be file path in the context of the + tool execution runtime (local to the compute node, or within the executing + container). All computed properties should be available to expressions. + File literals also must be staged and `path` must be set. 
+ + When collecting CommandLineTool outputs, `glob` matching returns file paths + (with the `path` property) and the derived properties. This can all be + modified by `outputEval`. Alternately, if the file `cwl.output.json` is + present in the output, `outputBinding` is ignored. + + File objects in the output must provide either a `location` IRI or a `path` + property in the context of the tool execution runtime (local to the compute + node, or within the executing container). + + When evaluating an ExpressionTool, file objects must be referenced via + `location` (the expression tool does not have access to files on disk, so + `path` is meaningless) or as file literals. It is legal to return a file + object with an existing `location` but a different `basename`. The + `loadContents` field of ExpressionTool inputs behaves the same as on + CommandLineTool inputs; however, it is not meaningful on the outputs. + + An ExpressionTool may forward file references from input to output by using + the same value for `location`. + + """ + + def __init__( + self, + location: Optional[Any] = None, + path: Optional[Any] = None, + basename: Optional[Any] = None, + dirname: Optional[Any] = None, + nameroot: Optional[Any] = None, + nameext: Optional[Any] = None, + checksum: Optional[Any] = None, + size: Optional[Any] = None, + secondaryFiles: Optional[Any] = None, + format: Optional[Any] = None, + contents: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "File" + self.location = location + self.path = path + self.basename = basename + self.dirname = dirname + self.nameroot = nameroot + self.nameext = nameext + self.checksum = checksum + self.size = size + self.secondaryFiles = secondaryFiles + self.format = format + self.contents = contents + + def __eq__(self, other: Any) -> bool: + if isinstance(other, File): + return bool( + self.class_ == other.class_ + and self.location == other.location + and self.path == other.path + and self.basename == other.basename + and self.dirname == other.dirname + and self.nameroot == other.nameroot + and self.nameext == other.nameext + and self.checksum == other.checksum + and self.size == other.size + and self.secondaryFiles == other.secondaryFiles + and self.format == other.format + and self.contents == other.contents + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.class_, + self.location, + self.path, + self.basename, + self.dirname, + self.nameroot, + self.nameext, + self.checksum, + self.size, + self.secondaryFiles, + self.format, + self.contents, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "File": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "File": + raise ValidationException("Not a File") + + if "location" in _doc: + try: + location = load_field( + _doc.get("location"), + uri_union_of_None_type_or_strtype_False_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'location' field is not valid because:", + SourceLine(_doc, 
"location", str), + [e], + ) + ) + else: + location = None + if "path" in _doc: + try: + path = load_field( + _doc.get("path"), + uri_union_of_None_type_or_strtype_False_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'path' field is not valid because:", + SourceLine(_doc, "path", str), + [e], + ) + ) + else: + path = None + if "basename" in _doc: + try: + basename = load_field( + _doc.get("basename"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'basename' field is not valid because:", + SourceLine(_doc, "basename", str), + [e], + ) + ) + else: + basename = None + if "dirname" in _doc: + try: + dirname = load_field( + _doc.get("dirname"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'dirname' field is not valid because:", + SourceLine(_doc, "dirname", str), + [e], + ) + ) + else: + dirname = None + if "nameroot" in _doc: + try: + nameroot = load_field( + _doc.get("nameroot"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'nameroot' field is not valid because:", + SourceLine(_doc, "nameroot", str), + [e], + ) + ) + else: + nameroot = None + if "nameext" in _doc: + try: + nameext = load_field( + _doc.get("nameext"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'nameext' field is not valid because:", + SourceLine(_doc, "nameext", str), + [e], + ) + ) + else: + nameext = None + if "checksum" in _doc: + try: + checksum = load_field( + _doc.get("checksum"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'checksum' field is not valid because:", + SourceLine(_doc, "checksum", str), + [e], + ) + ) + else: + checksum = None + if "size" in _doc: + try: + size = load_field( + _doc.get("size"), + union_of_None_type_or_inttype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'size' field is not valid because:", + SourceLine(_doc, "size", str), + [e], + ) + ) + else: + size = None + if "secondaryFiles" in _doc: + try: + secondaryFiles = load_field( + _doc.get("secondaryFiles"), + secondaryfilesdsl_union_of_None_type_or_array_of_union_of_FileLoader_or_DirectoryLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'secondaryFiles' field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [e], + ) + ) + else: + secondaryFiles = None + if "format" in _doc: + try: + format = load_field( + _doc.get("format"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'format' field is not valid because:", + SourceLine(_doc, "format", str), + [e], + ) + ) + else: + format = None + if "contents" in _doc: + try: + contents = load_field( + _doc.get("contents"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'contents' field is not valid because:", 
+ SourceLine(_doc, "contents", str), + [e], + ) + ) + else: + contents = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`, `location`, `path`, `basename`, `dirname`, `nameroot`, `nameext`, `checksum`, `size`, `secondaryFiles`, `format`, `contents`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'File'", None, _errors__) + _constructed = cls( + location=location, + path=path, + basename=basename, + dirname=dirname, + nameroot=nameroot, + nameext=nameext, + checksum=checksum, + size=size, + secondaryFiles=secondaryFiles, + format=format, + contents=contents, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "File" + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.location is not None and "location" not in r: + u = save_relative_uri(self.location, base_url, False, None, relative_uris) + r["location"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="location", + val=r.get("location"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.path is not None and "path" not in r: + u = save_relative_uri(self.path, base_url, False, None, relative_uris) + r["path"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="path", + val=r.get("path"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.basename is not 
None and "basename" not in r: + r["basename"] = save( + self.basename, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="basename", + val=r.get("basename"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.dirname is not None and "dirname" not in r: + r["dirname"] = save( + self.dirname, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="dirname", + val=r.get("dirname"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.nameroot is not None and "nameroot" not in r: + r["nameroot"] = save( + self.nameroot, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="nameroot", + val=r.get("nameroot"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.nameext is not None and "nameext" not in r: + r["nameext"] = save( + self.nameext, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="nameext", + val=r.get("nameext"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.checksum is not None and "checksum" not in r: + r["checksum"] = save( + self.checksum, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="checksum", + val=r.get("checksum"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.size is not None and "size" not in r: + r["size"] = save( + self.size, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="size", + val=r.get("size"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.secondaryFiles is not None and "secondaryFiles" not in r: + r["secondaryFiles"] = save( + self.secondaryFiles, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="secondaryFiles", + val=r.get("secondaryFiles"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.format is not None and "format" not in r: + u = save_relative_uri(self.format, base_url, True, None, relative_uris) + r["format"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="format", + 
val=r.get("format"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.contents is not None and "contents" not in r: + r["contents"] = save( + self.contents, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="contents", + val=r.get("contents"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "class", + "location", + "path", + "basename", + "dirname", + "nameroot", + "nameext", + "checksum", + "size", + "secondaryFiles", + "format", + "contents", + ] + ) + + +class Directory(Saveable): + """ + Represents a directory to present to a command line tool. + + Directories are represented as objects with `class` of `Directory`. Directory objects have + a number of properties that provide metadata about the directory. + + The `location` property of a Directory is a IRI that uniquely identifies + the directory. Implementations must support the file:// IRI scheme and may + support other schemes such as http://. Alternately to `location`, + implementations must also accept the `path` property on Directory, which + must be a filesystem path available on the same host as the CWL runner (for + inputs) or the runtime environment of a command line tool execution (for + command line tool outputs). + + A Directory object may have a `listing` field. This is a list of File and + Directory objects that are contained in the Directory. For each entry in + `listing`, the `basename` property defines the name of the File or + Subdirectory when staged to disk. If `listing` is not provided, the + implementation must have some way of fetching the Directory listing at + runtime based on the `location` field. + + If a Directory does not have `location`, it is a Directory literal. A + Directory literal must provide `listing`. Directory literals must be + created on disk at runtime as needed. + + The resources in a Directory literal do not need to have any implied + relationship in their `location`. For example, a Directory listing may + contain two files located on different hosts. It is the responsibility of + the runtime to ensure that those files are staged to disk appropriately. + Secondary files associated with files in `listing` must also be staged to + the same Directory. + + When executing a CommandLineTool, Directories must be recursively staged + first and have local values of `path` assigned. + + Directory objects in CommandLineTool output must provide either a + `location` IRI or a `path` property in the context of the tool execution + runtime (local to the compute node, or within the executing container). + + An ExpressionTool may forward file references from input to output by using + the same value for `location`. + + Name conflicts (the same `basename` appearing multiple times in `listing` + or in any entry in `secondaryFiles` in the listing) is a fatal error. 
+ + """ + + def __init__( + self, + location: Optional[Any] = None, + path: Optional[Any] = None, + basename: Optional[Any] = None, + listing: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "Directory" + self.location = location + self.path = path + self.basename = basename + self.listing = listing + + def __eq__(self, other: Any) -> bool: + if isinstance(other, Directory): + return bool( + self.class_ == other.class_ + and self.location == other.location + and self.path == other.path + and self.basename == other.basename + and self.listing == other.listing + ) + return False + + def __hash__(self) -> int: + return hash( + (self.class_, self.location, self.path, self.basename, self.listing) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "Directory": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "Directory": + raise ValidationException("Not a Directory") + + if "location" in _doc: + try: + location = load_field( + _doc.get("location"), + uri_union_of_None_type_or_strtype_False_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'location' field is not valid because:", + SourceLine(_doc, "location", str), + [e], + ) + ) + else: + location = None + if "path" in _doc: + try: + path = load_field( + _doc.get("path"), + uri_union_of_None_type_or_strtype_False_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'path' field is not valid because:", + SourceLine(_doc, "path", str), + [e], + ) + ) + else: + path = None + if "basename" in _doc: + try: + basename = load_field( + _doc.get("basename"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'basename' field is not valid because:", + SourceLine(_doc, "basename", str), + [e], + ) + ) + else: + basename = None + if "listing" in _doc: + try: + listing = load_field( + _doc.get("listing"), + union_of_None_type_or_array_of_union_of_FileLoader_or_DirectoryLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'listing' field is not valid because:", + SourceLine(_doc, "listing", str), + [e], + ) + ) + else: + listing = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`, `location`, `path`, `basename`, `listing`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'Directory'", None, _errors__) + _constructed = cls( + location=location, + path=path, + basename=basename, + listing=listing, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + 
return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "Directory" + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.location is not None and "location" not in r: + u = save_relative_uri(self.location, base_url, False, None, relative_uris) + r["location"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="location", + val=r.get("location"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.path is not None and "path" not in r: + u = save_relative_uri(self.path, base_url, False, None, relative_uris) + r["path"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="path", + val=r.get("path"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.basename is not None and "basename" not in r: + r["basename"] = save( + self.basename, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="basename", + val=r.get("basename"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.listing is not None and "listing" not in r: + r["listing"] = save( + self.listing, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="listing", + val=r.get("listing"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if 
self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["class", "location", "path", "basename", "listing"]) + + +class Labeled(Saveable): + pass + + +class Identified(Saveable): + pass + + +class LoadContents(Saveable): + pass + + +class FieldBase(Labeled): + pass + + +class InputFormat(Saveable): + pass + + +class OutputFormat(Saveable): + pass + + +class Parameter(FieldBase, Documented, Identified): + """ + Define an input or output parameter to a process. + + """ + + pass + + +class InputBinding(Saveable): + def __init__( + self, + loadContents: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.loadContents = loadContents + + def __eq__(self, other: Any) -> bool: + if isinstance(other, InputBinding): + return bool(self.loadContents == other.loadContents) + return False + + def __hash__(self) -> int: + return hash((self.loadContents)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "InputBinding": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "loadContents" in _doc: + try: + loadContents = load_field( + _doc.get("loadContents"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'loadContents' field is not valid because:", + SourceLine(_doc, "loadContents", str), + [e], + ) + ) + else: + loadContents = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `loadContents`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'InputBinding'", None, _errors__) + _constructed = cls( + loadContents=loadContents, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: 
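+ # Re-save each populated attribute in the source document's key order, + # skipping the 'class' discriminator; the target line is bumped past + # any lines already claimed in inserted_line_info.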
+ if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.loadContents is not None and "loadContents" not in r: + r["loadContents"] = save( + self.loadContents, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="loadContents", + val=r.get("loadContents"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["loadContents"]) + + +class IOSchema(Labeled, Documented): + pass + + +class InputSchema(IOSchema): + pass + + +class OutputSchema(IOSchema): + pass + + +class InputRecordField(RecordField, FieldBase, InputFormat, LoadContents): + def __init__( + self, + name: Any, + type: Any, + doc: Optional[Any] = None, + label: Optional[Any] = None, + secondaryFiles: Optional[Any] = None, + streamable: Optional[Any] = None, + format: Optional[Any] = None, + loadContents: Optional[Any] = None, + loadListing: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.doc = doc + self.name = name + self.type = type + self.label = label + self.secondaryFiles = secondaryFiles + self.streamable = streamable + self.format = format + self.loadContents = loadContents + self.loadListing = loadListing + + def __eq__(self, other: Any) -> bool: + if isinstance(other, InputRecordField): + return bool( + self.doc == other.doc + and self.name == other.name + and self.type == other.type + and self.label == other.label + and self.secondaryFiles == other.secondaryFiles + and self.streamable == other.streamable + and self.format == other.format + and self.loadContents == other.loadContents + and self.loadListing == other.loadListing + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.doc, + self.name, + self.type, + self.label, + self.secondaryFiles, + self.streamable, + self.format, + self.loadContents, + self.loadListing, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "InputRecordField": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + 
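# Accumulate per-field ValidationExceptions so that every invalid + # field is reported together in one combined error below. +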
_errors__ = [] + if "name" in _doc: + try: + name = load_field( + _doc.get("name"), + uri_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'name' field is not valid because:", + SourceLine(_doc, "name", str), + [e], + ) + ) + else: + name = None + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + raise ValidationException("Missing name") + if not __original_name_is_none: + baseuri = name + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'doc' field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + try: + type = load_field( + _doc.get("type"), + typedsl_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'type' field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'label' field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "secondaryFiles" in _doc: + try: + secondaryFiles = load_field( + _doc.get("secondaryFiles"), + secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'secondaryFiles' field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [e], + ) + ) + else: + secondaryFiles = None + if "streamable" in _doc: + try: + streamable = load_field( + _doc.get("streamable"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'streamable' field is not valid because:", + SourceLine(_doc, "streamable", str), + [e], + ) + ) + else: + streamable = None + if "format" in _doc: + try: + format = load_field( + _doc.get("format"), + uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'format' field is not valid because:", + SourceLine(_doc, "format", str), + [e], + ) + ) + else: + format = None + if "loadContents" in _doc: + try: + loadContents = load_field( + _doc.get("loadContents"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'loadContents' field is not valid because:", + SourceLine(_doc, "loadContents", str), + [e], + ) + ) + else: + loadContents = None + if "loadListing" in _doc: + try: + loadListing = load_field( + _doc.get("loadListing"), + union_of_None_type_or_LoadListingEnumLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + 
ValidationException( + "the 'loadListing' field is not valid because:", + SourceLine(_doc, "loadListing", str), + [e], + ) + ) + else: + loadListing = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `doc`, `name`, `type`, `label`, `secondaryFiles`, `streamable`, `format`, `loadContents`, `loadListing`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'InputRecordField'", None, _errors__) + _constructed = cls( + doc=doc, + name=name, + type=type, + label=label, + secondaryFiles=secondaryFiles, + streamable=streamable, + format=format, + loadContents=loadContents, + loadListing=loadListing, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[name] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.name is not None and "name" not in r: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="name", + val=r.get("name"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + 
max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.secondaryFiles is not None and "secondaryFiles" not in r: + r["secondaryFiles"] = save( + self.secondaryFiles, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="secondaryFiles", + val=r.get("secondaryFiles"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.streamable is not None and "streamable" not in r: + r["streamable"] = save( + self.streamable, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="streamable", + val=r.get("streamable"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.format is not None and "format" not in r: + u = save_relative_uri( + self.format, str(self.name), True, None, relative_uris + ) + r["format"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="format", + val=r.get("format"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.loadContents is not None and "loadContents" not in r: + r["loadContents"] = save( + self.loadContents, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="loadContents", + val=r.get("loadContents"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.loadListing is not None and "loadListing" not in r: + r["loadListing"] = save( + self.loadListing, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="loadListing", + val=r.get("loadListing"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if 
self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "doc", + "name", + "type", + "label", + "secondaryFiles", + "streamable", + "format", + "loadContents", + "loadListing", + ] + ) + + +class InputRecordSchema(RecordSchema, InputSchema): + def __init__( + self, + type: Any, + fields: Optional[Any] = None, + label: Optional[Any] = None, + doc: Optional[Any] = None, + name: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.fields = fields + self.type = type + self.label = label + self.doc = doc + self.name = name + + def __eq__(self, other: Any) -> bool: + if isinstance(other, InputRecordSchema): + return bool( + self.fields == other.fields + and self.type == other.type + and self.label == other.label + and self.doc == other.doc + and self.name == other.name + ) + return False + + def __hash__(self) -> int: + return hash((self.fields, self.type, self.label, self.doc, self.name)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "InputRecordSchema": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "name" in _doc: + try: + name = load_field( + _doc.get("name"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'name' field is not valid because:", + SourceLine(_doc, "name", str), + [e], + ) + ) + else: + name = None + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + name = "_:" + str(_uuid__.uuid4()) + if not __original_name_is_none: + baseuri = name + if "fields" in _doc: + try: + fields = load_field( + _doc.get("fields"), + idmap_fields_union_of_None_type_or_array_of_InputRecordFieldLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'fields' field is not valid because:", + SourceLine(_doc, "fields", str), + [e], + ) + ) + else: + fields = None + try: + type = load_field( + _doc.get("type"), + typedsl_Record_nameLoader_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'type' field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'label' field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'doc' field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + 
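# Keys containing ':' are prefixed extension fields and are expanded + # to absolute URIs; unrecognized keys without a prefix are errors. +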
ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `fields`, `type`, `label`, `doc`, `name`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'InputRecordSchema'", None, _errors__) + _constructed = cls( + fields=fields, + type=type, + label=label, + doc=doc, + name=name, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[name] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.name is not None and "name" not in r: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="name", + val=r.get("name"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.fields is not None and "fields" not in r: + r["fields"] = save( + self.fields, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="fields", + val=r.get("fields"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), 
+ cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["fields", "type", "label", "doc", "name"]) + + +class InputEnumSchema(EnumSchema, InputSchema): + def __init__( + self, + symbols: Any, + type: Any, + name: Optional[Any] = None, + label: Optional[Any] = None, + doc: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.name = name + self.symbols = symbols + self.type = type + self.label = label + self.doc = doc + + def __eq__(self, other: Any) -> bool: + if isinstance(other, InputEnumSchema): + return bool( + self.name == other.name + and self.symbols == other.symbols + and self.type == other.type + and self.label == other.label + and self.doc == other.doc + ) + return False + + def __hash__(self) -> int: + return hash((self.name, self.symbols, self.type, self.label, self.doc)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "InputEnumSchema": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "name" in _doc: + try: + name = load_field( + _doc.get("name"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'name' field is not valid because:", + SourceLine(_doc, "name", str), + [e], + ) + ) + else: + name = None + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + name = "_:" + str(_uuid__.uuid4()) + if not __original_name_is_none: + baseuri = name + try: + symbols = load_field( + _doc.get("symbols"), + uri_array_of_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'symbols' field is not valid because:", + SourceLine(_doc, "symbols", str), + [e], + ) + ) + try: + type = load_field( + _doc.get("type"), + typedsl_Enum_nameLoader_2, + 
baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'type' field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'label' field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'doc' field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `name`, `symbols`, `type`, `label`, `doc`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'InputEnumSchema'", None, _errors__) + _constructed = cls( + name=name, + symbols=symbols, + type=type, + label=label, + doc=doc, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[name] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.name is not None and "name" not in r: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + 
line_numbers=line_numbers, + key="name", + val=r.get("name"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.symbols is not None and "symbols" not in r: + u = save_relative_uri( + self.symbols, str(self.name), True, None, relative_uris + ) + r["symbols"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="symbols", + val=r.get("symbols"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["name", "symbols", "type", "label", "doc"]) + + +class InputArraySchema(ArraySchema, InputSchema): + def __init__( + self, + items: Any, + type: Any, + label: Optional[Any] = None, + doc: Optional[Any] = None, + name: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.items = items + self.type = type + self.label = label + self.doc = doc + self.name = name + + def __eq__(self, other: Any) -> bool: + if isinstance(other, InputArraySchema): + return bool( + self.items == other.items + and self.type == other.type + and self.label == other.label + and self.doc == other.doc + and self.name == other.name + ) + return False + + def __hash__(self) -> int: + return hash((self.items, self.type, self.label, self.doc, self.name)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "InputArraySchema": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "name" 
in _doc: + try: + name = load_field( + _doc.get("name"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'name' field is not valid because:", + SourceLine(_doc, "name", str), + [e], + ) + ) + else: + name = None + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + name = "_:" + str(_uuid__.uuid4()) + if not __original_name_is_none: + baseuri = name + try: + items = load_field( + _doc.get("items"), + uri_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_False_True_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'items' field is not valid because:", + SourceLine(_doc, "items", str), + [e], + ) + ) + try: + type = load_field( + _doc.get("type"), + typedsl_Array_nameLoader_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'type' field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'label' field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'doc' field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `items`, `type`, `label`, `doc`, `name`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'InputArraySchema'", None, _errors__) + _constructed = cls( + items=items, + type=type, + label=label, + doc=doc, + name=name, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[name] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = 
self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.name is not None and "name" not in r: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="name", + val=r.get("name"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.items is not None and "items" not in r: + u = save_relative_uri(self.items, str(self.name), False, 2, relative_uris) + r["items"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="items", + val=r.get("items"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["items", "type", "label", "doc", "name"]) + + +class OutputRecordField(RecordField, FieldBase, OutputFormat): + def __init__( + self, + name: Any, + type: Any, + doc: Optional[Any] = None, + label: Optional[Any] = None, + secondaryFiles: 
Optional[Any] = None, + streamable: Optional[Any] = None, + format: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.doc = doc + self.name = name + self.type = type + self.label = label + self.secondaryFiles = secondaryFiles + self.streamable = streamable + self.format = format + + def __eq__(self, other: Any) -> bool: + if isinstance(other, OutputRecordField): + return bool( + self.doc == other.doc + and self.name == other.name + and self.type == other.type + and self.label == other.label + and self.secondaryFiles == other.secondaryFiles + and self.streamable == other.streamable + and self.format == other.format + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.doc, + self.name, + self.type, + self.label, + self.secondaryFiles, + self.streamable, + self.format, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "OutputRecordField": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "name" in _doc: + try: + name = load_field( + _doc.get("name"), + uri_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'name' field is not valid because:", + SourceLine(_doc, "name", str), + [e], + ) + ) + else: + name = None + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + raise ValidationException("Missing name") + if not __original_name_is_none: + baseuri = name + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'doc' field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + try: + type = load_field( + _doc.get("type"), + typedsl_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'type' field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'label' field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "secondaryFiles" in _doc: + try: + secondaryFiles = load_field( + _doc.get("secondaryFiles"), + secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'secondaryFiles' field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + 
[e], + ) + ) + else: + secondaryFiles = None + if "streamable" in _doc: + try: + streamable = load_field( + _doc.get("streamable"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'streamable' field is not valid because:", + SourceLine(_doc, "streamable", str), + [e], + ) + ) + else: + streamable = None + if "format" in _doc: + try: + format = load_field( + _doc.get("format"), + uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'format' field is not valid because:", + SourceLine(_doc, "format", str), + [e], + ) + ) + else: + format = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `doc`, `name`, `type`, `label`, `secondaryFiles`, `streamable`, `format`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'OutputRecordField'", None, _errors__) + _constructed = cls( + doc=doc, + name=name, + type=type, + label=label, + secondaryFiles=secondaryFiles, + streamable=streamable, + format=format, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[name] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.name is not None and "name" not in r: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + 
line_numbers=line_numbers, + key="name", + val=r.get("name"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.secondaryFiles is not None and "secondaryFiles" not in r: + r["secondaryFiles"] = save( + self.secondaryFiles, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="secondaryFiles", + val=r.get("secondaryFiles"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.streamable is not None and "streamable" not in r: + r["streamable"] = save( + self.streamable, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="streamable", + val=r.get("streamable"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.format is not None and "format" not in r: + u = save_relative_uri( + self.format, str(self.name), True, None, relative_uris + ) + r["format"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="format", + val=r.get("format"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + ["doc", "name", "type", "label", "secondaryFiles", "streamable", "format"] + ) + + +class OutputRecordSchema(RecordSchema, OutputSchema): + def __init__( + self, + type: Any, + fields: Optional[Any] = None, + label: Optional[Any] = None, + doc: Optional[Any] = None, + name: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + 
loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.fields = fields + self.type = type + self.label = label + self.doc = doc + self.name = name + + def __eq__(self, other: Any) -> bool: + if isinstance(other, OutputRecordSchema): + return bool( + self.fields == other.fields + and self.type == other.type + and self.label == other.label + and self.doc == other.doc + and self.name == other.name + ) + return False + + def __hash__(self) -> int: + return hash((self.fields, self.type, self.label, self.doc, self.name)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "OutputRecordSchema": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "name" in _doc: + try: + name = load_field( + _doc.get("name"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'name' field is not valid because:", + SourceLine(_doc, "name", str), + [e], + ) + ) + else: + name = None + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + name = "_:" + str(_uuid__.uuid4()) + if not __original_name_is_none: + baseuri = name + if "fields" in _doc: + try: + fields = load_field( + _doc.get("fields"), + idmap_fields_union_of_None_type_or_array_of_OutputRecordFieldLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'fields' field is not valid because:", + SourceLine(_doc, "fields", str), + [e], + ) + ) + else: + fields = None + try: + type = load_field( + _doc.get("type"), + typedsl_Record_nameLoader_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'type' field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'label' field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'doc' field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `fields`, `type`, `label`, `doc`, `name`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'OutputRecordSchema'", None, _errors__) + _constructed = cls( + fields=fields, + type=type, + label=label, + doc=doc, + name=name, + 
extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[name] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.name is not None and "name" not in r: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="name", + val=r.get("name"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.fields is not None and "fields" not in r: + r["fields"] = save( + self.fields, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="fields", + val=r.get("fields"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, 
+ max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["fields", "type", "label", "doc", "name"]) + + +class OutputEnumSchema(EnumSchema, OutputSchema): + def __init__( + self, + symbols: Any, + type: Any, + name: Optional[Any] = None, + label: Optional[Any] = None, + doc: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.name = name + self.symbols = symbols + self.type = type + self.label = label + self.doc = doc + + def __eq__(self, other: Any) -> bool: + if isinstance(other, OutputEnumSchema): + return bool( + self.name == other.name + and self.symbols == other.symbols + and self.type == other.type + and self.label == other.label + and self.doc == other.doc + ) + return False + + def __hash__(self) -> int: + return hash((self.name, self.symbols, self.type, self.label, self.doc)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "OutputEnumSchema": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "name" in _doc: + try: + name = load_field( + _doc.get("name"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'name' field is not valid because:", + SourceLine(_doc, "name", str), + [e], + ) + ) + else: + name = None + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + name = "_:" + str(_uuid__.uuid4()) + if not __original_name_is_none: + baseuri = name + try: + symbols = load_field( + _doc.get("symbols"), + uri_array_of_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'symbols' field is not valid because:", + SourceLine(_doc, "symbols", str), + [e], + ) + ) + try: + type = load_field( + _doc.get("type"), + typedsl_Enum_nameLoader_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'type' field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'label' field is not valid because:", + SourceLine(_doc, "label", 
str), + [e], + ) + ) + else: + label = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'doc' field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `name`, `symbols`, `type`, `label`, `doc`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'OutputEnumSchema'", None, _errors__) + _constructed = cls( + name=name, + symbols=symbols, + type=type, + label=label, + doc=doc, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[name] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.name is not None and "name" not in r: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="name", + val=r.get("name"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.symbols is not None and "symbols" not in r: + u = save_relative_uri( + self.symbols, str(self.name), True, None, relative_uris + ) + r["symbols"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="symbols", + val=r.get("symbols"), + cols=cols, + 
min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["name", "symbols", "type", "label", "doc"]) + + +class OutputArraySchema(ArraySchema, OutputSchema): + def __init__( + self, + items: Any, + type: Any, + label: Optional[Any] = None, + doc: Optional[Any] = None, + name: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.items = items + self.type = type + self.label = label + self.doc = doc + self.name = name + + def __eq__(self, other: Any) -> bool: + if isinstance(other, OutputArraySchema): + return bool( + self.items == other.items + and self.type == other.type + and self.label == other.label + and self.doc == other.doc + and self.name == other.name + ) + return False + + def __hash__(self) -> int: + return hash((self.items, self.type, self.label, self.doc, self.name)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "OutputArraySchema": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "name" in _doc: + try: + name = load_field( + _doc.get("name"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'name' field is not valid because:", + SourceLine(_doc, "name", str), + [e], + ) + ) + else: + name = None + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + name = "_:" + 
str(_uuid__.uuid4()) + if not __original_name_is_none: + baseuri = name + try: + items = load_field( + _doc.get("items"), + uri_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_False_True_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'items' field is not valid because:", + SourceLine(_doc, "items", str), + [e], + ) + ) + try: + type = load_field( + _doc.get("type"), + typedsl_Array_nameLoader_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'type' field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'label' field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'doc' field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `items`, `type`, `label`, `doc`, `name`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'OutputArraySchema'", None, _errors__) + _constructed = cls( + items=items, + type=type, + label=label, + doc=doc, + name=name, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[name] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + 
inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.name is not None and "name" not in r: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="name", + val=r.get("name"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.items is not None and "items" not in r: + u = save_relative_uri(self.items, str(self.name), False, 2, relative_uris) + r["items"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="items", + val=r.get("items"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["items", "type", "label", "doc", "name"]) + + +class InputParameter(Parameter, InputFormat, LoadContents): + pass + + +class OutputParameter(Parameter, OutputFormat): + pass + + +class ProcessRequirement(Saveable): + """ + A process requirement declares a prerequisite that may or must be fulfilled + before executing a process. See [`Process.hints`](#process) and + [`Process.requirements`](#process). + + Process requirements are the primary mechanism for specifying extensions to + the CWL core specification. + + """ + + pass + + +class Process(Identified, Labeled, Documented): + """ + + The base executable type in CWL is the `Process` object defined by the + document. 
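+ In CWL v1.2 the concrete process types are `CommandLineTool`,
+ `ExpressionTool`, `Workflow`, and `Operation`. For example (a minimal
+ sketch, assuming the `load_document_by_uri` helper that these generated
+ parsers typically define later in the module):
+ 
+     import cwl_v1_2
+     process = cwl_v1_2.load_document_by_uri("path/to/tool.cwl")
+ 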
Note that the `Process` object is abstract and cannot be + directly executed. + + """ + + pass + + +class InlineJavascriptRequirement(ProcessRequirement): + """ + Indicates that the workflow platform must support inline Javascript expressions. + If this requirement is not present, the workflow platform must not perform expression + interpolation. + + """ + + def __init__( + self, + expressionLib: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "InlineJavascriptRequirement" + self.expressionLib = expressionLib + + def __eq__(self, other: Any) -> bool: + if isinstance(other, InlineJavascriptRequirement): + return bool( + self.class_ == other.class_ + and self.expressionLib == other.expressionLib + ) + return False + + def __hash__(self) -> int: + return hash((self.class_, self.expressionLib)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "InlineJavascriptRequirement": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "InlineJavascriptRequirement": + raise ValidationException("Not a InlineJavascriptRequirement") + + if "expressionLib" in _doc: + try: + expressionLib = load_field( + _doc.get("expressionLib"), + union_of_None_type_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'expressionLib' field is not valid because:", + SourceLine(_doc, "expressionLib", str), + [e], + ) + ) + else: + expressionLib = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`, `expressionLib`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'InlineJavascriptRequirement'", None, _errors__ + ) + _constructed = cls( + expressionLib=expressionLib, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "InlineJavascriptRequirement" + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if 
hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.expressionLib is not None and "expressionLib" not in r: + r["expressionLib"] = save( + self.expressionLib, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="expressionLib", + val=r.get("expressionLib"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["class", "expressionLib"]) + + +class CommandInputSchema(Saveable): + pass + + +class SchemaDefRequirement(ProcessRequirement): + """ + This field consists of an array of type definitions which must be used when + interpreting the `inputs` and `outputs` fields. When a `type` field + contains a IRI, the implementation must check if the type is defined in + `schemaDefs` and use that definition. If the type is not found in + `schemaDefs`, it is an error. The entries in `schemaDefs` must be + processed in the order listed such that later schema definitions may refer + to earlier schema definitions. + + - **Type definitions are allowed for `enum` and `record` types only.** + - Type definitions may be shared by defining them in a file and then + `$include`-ing them in the `types` field. 
+ - A file can contain a list of type definitions + + """ + + def __init__( + self, + types: Any, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "SchemaDefRequirement" + self.types = types + + def __eq__(self, other: Any) -> bool: + if isinstance(other, SchemaDefRequirement): + return bool(self.class_ == other.class_ and self.types == other.types) + return False + + def __hash__(self) -> int: + return hash((self.class_, self.types)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "SchemaDefRequirement": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "SchemaDefRequirement": + raise ValidationException("Not a SchemaDefRequirement") + + try: + types = load_field( + _doc.get("types"), + array_of_union_of_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'types' field is not valid because:", + SourceLine(_doc, "types", str), + [e], + ) + ) + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`, `types`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'SchemaDefRequirement'", None, _errors__) + _constructed = cls( + types=types, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "SchemaDefRequirement" + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list 
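+ # (Editorial note, an assumption about the generator's intent: unwrapping a
+ # one-element list here keeps a value that round-trips as a scalar from
+ # being emitted as a one-element YAML sequence.)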
+ if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.types is not None and "types" not in r: + r["types"] = save( + self.types, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="types", + val=r.get("types"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["class", "types"]) + + +class SecondaryFileSchema(Saveable): + """ + Secondary files are specified using the following micro-DSL for secondary files: + + * If the value is a string, it is transformed to an object with two fields + `pattern` and `required` + * By default, the value of `required` is `null` + (this indicates default behavior, which may be based on the context) + * If the value ends with a question mark `?` the question mark is + stripped off and the value of the field `required` is set to `False` + * The remaining value is assigned to the field `pattern` + + For implementation details and examples, please see + [this section](SchemaSalad.html#Domain_Specific_Language_for_secondary_files) + in the Schema Salad specification. 
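+ 
+     For example (an illustrative reading of the rules above, not part of
+     the original schema text):
+ 
+         ".bai?"  ->  {"pattern": ".bai", "required": False}
+         ".bai"   ->  {"pattern": ".bai", "required": None}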
+ + """ + + def __init__( + self, + pattern: Any, + required: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.pattern = pattern + self.required = required + + def __eq__(self, other: Any) -> bool: + if isinstance(other, SecondaryFileSchema): + return bool( + self.pattern == other.pattern and self.required == other.required + ) + return False + + def __hash__(self) -> int: + return hash((self.pattern, self.required)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "SecondaryFileSchema": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + try: + pattern = load_field( + _doc.get("pattern"), + union_of_strtype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'pattern' field is not valid because:", + SourceLine(_doc, "pattern", str), + [e], + ) + ) + if "required" in _doc: + try: + required = load_field( + _doc.get("required"), + union_of_None_type_or_booltype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'required' field is not valid because:", + SourceLine(_doc, "required", str), + [e], + ) + ) + else: + required = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `pattern`, `required`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'SecondaryFileSchema'", None, _errors__) + _constructed = cls( + pattern=pattern, + required=required, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + 
relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.pattern is not None and "pattern" not in r: + r["pattern"] = save( + self.pattern, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="pattern", + val=r.get("pattern"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.required is not None and "required" not in r: + r["required"] = save( + self.required, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="required", + val=r.get("required"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["pattern", "required"]) + + +class LoadListingRequirement(ProcessRequirement): + """ + Specify the desired behavior for loading the `listing` field of + a Directory object for use by expressions. 
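+
+    For example (illustrative; the accepted values come from the
+    `LoadListingEnum` type used by the `loadListing` field): `deep_listing`
+    requests a recursive listing, `shallow_listing` only the first level,
+    and `no_listing` none at all.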
+ + """ + + def __init__( + self, + loadListing: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "LoadListingRequirement" + self.loadListing = loadListing + + def __eq__(self, other: Any) -> bool: + if isinstance(other, LoadListingRequirement): + return bool( + self.class_ == other.class_ and self.loadListing == other.loadListing + ) + return False + + def __hash__(self) -> int: + return hash((self.class_, self.loadListing)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "LoadListingRequirement": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "LoadListingRequirement": + raise ValidationException("Not a LoadListingRequirement") + + if "loadListing" in _doc: + try: + loadListing = load_field( + _doc.get("loadListing"), + union_of_None_type_or_LoadListingEnumLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'loadListing' field is not valid because:", + SourceLine(_doc, "loadListing", str), + [e], + ) + ) + else: + loadListing = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`, `loadListing`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'LoadListingRequirement'", None, _errors__ + ) + _constructed = cls( + loadListing=loadListing, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "LoadListingRequirement" + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned 
value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.loadListing is not None and "loadListing" not in r: + r["loadListing"] = save( + self.loadListing, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="loadListing", + val=r.get("loadListing"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["class", "loadListing"]) + + +class EnvironmentDef(Saveable): + """ + Define an environment variable that will be set in the runtime environment + by the workflow platform when executing the command line tool. May be the + result of executing an expression, such as getting a parameter from input. + + """ + + def __init__( + self, + envName: Any, + envValue: Any, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.envName = envName + self.envValue = envValue + + def __eq__(self, other: Any) -> bool: + if isinstance(other, EnvironmentDef): + return bool( + self.envName == other.envName and self.envValue == other.envValue + ) + return False + + def __hash__(self) -> int: + return hash((self.envName, self.envValue)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "EnvironmentDef": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + try: + envName = load_field( + _doc.get("envName"), + strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'envName' field is not valid because:", + SourceLine(_doc, "envName", str), + [e], + ) + ) + try: + envValue = load_field( + _doc.get("envValue"), + union_of_strtype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'envValue' field is not valid because:", + SourceLine(_doc, "envValue", str), + [e], + ) + ) + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `envName`, `envValue`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'EnvironmentDef'", None, _errors__) + _constructed = cls( + 
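+        # (descriptive note) hand the validated fields to the constructor;
+        # keys containing a colon (extension fields) travel along unchanged
+        # via extension_fields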
envName=envName, + envValue=envValue, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.envName is not None and "envName" not in r: + r["envName"] = save( + self.envName, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="envName", + val=r.get("envName"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.envValue is not None and "envValue" not in r: + r["envValue"] = save( + self.envValue, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="envValue", + val=r.get("envValue"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["envName", "envValue"]) + + +class CommandLineBinding(InputBinding): + """ + + When listed under `inputBinding` in the input schema, the term + "value" refers to the corresponding value in the input object. For + binding objects listed in `CommandLineTool.arguments`, the term "value" + refers to the effective value after evaluating `valueFrom`. + + The binding behavior when building the command line depends on the data + type of the value. 
If there is a mismatch between the type described by + the input schema and the effective value, such as resulting from an + expression evaluation, an implementation must use the data type of the + effective value. + + - **string**: Add `prefix` and the string to the command line. + + - **number**: Add `prefix` and decimal representation to command line. + + - **boolean**: If true, add `prefix` to the command line. If false, add + nothing. + + - **File**: Add `prefix` and the value of + [`File.path`](#File) to the command line. + + - **Directory**: Add `prefix` and the value of + [`Directory.path`](#Directory) to the command line. + + - **array**: If `itemSeparator` is specified, add `prefix` and the join + the array into a single string with `itemSeparator` separating the + items. Otherwise, first add `prefix`, then recursively process + individual elements. + If the array is empty, it does not add anything to command line. + + - **object**: Add `prefix` only, and recursively add object fields for + which `inputBinding` is specified. + + - **null**: Add nothing. + + """ + + def __init__( + self, + loadContents: Optional[Any] = None, + position: Optional[Any] = None, + prefix: Optional[Any] = None, + separate: Optional[Any] = None, + itemSeparator: Optional[Any] = None, + valueFrom: Optional[Any] = None, + shellQuote: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.loadContents = loadContents + self.position = position + self.prefix = prefix + self.separate = separate + self.itemSeparator = itemSeparator + self.valueFrom = valueFrom + self.shellQuote = shellQuote + + def __eq__(self, other: Any) -> bool: + if isinstance(other, CommandLineBinding): + return bool( + self.loadContents == other.loadContents + and self.position == other.position + and self.prefix == other.prefix + and self.separate == other.separate + and self.itemSeparator == other.itemSeparator + and self.valueFrom == other.valueFrom + and self.shellQuote == other.shellQuote + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.loadContents, + self.position, + self.prefix, + self.separate, + self.itemSeparator, + self.valueFrom, + self.shellQuote, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "CommandLineBinding": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "loadContents" in _doc: + try: + loadContents = load_field( + _doc.get("loadContents"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'loadContents' field is not valid because:", + SourceLine(_doc, "loadContents", str), + [e], + ) + ) + else: + loadContents = None + if "position" in _doc: + try: + position = load_field( + _doc.get("position"), + union_of_None_type_or_inttype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'position' field is not valid because:", + SourceLine(_doc, "position", str), + [e], + ) + ) + else: + position = None + if 
"prefix" in _doc: + try: + prefix = load_field( + _doc.get("prefix"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'prefix' field is not valid because:", + SourceLine(_doc, "prefix", str), + [e], + ) + ) + else: + prefix = None + if "separate" in _doc: + try: + separate = load_field( + _doc.get("separate"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'separate' field is not valid because:", + SourceLine(_doc, "separate", str), + [e], + ) + ) + else: + separate = None + if "itemSeparator" in _doc: + try: + itemSeparator = load_field( + _doc.get("itemSeparator"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'itemSeparator' field is not valid because:", + SourceLine(_doc, "itemSeparator", str), + [e], + ) + ) + else: + itemSeparator = None + if "valueFrom" in _doc: + try: + valueFrom = load_field( + _doc.get("valueFrom"), + union_of_None_type_or_strtype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'valueFrom' field is not valid because:", + SourceLine(_doc, "valueFrom", str), + [e], + ) + ) + else: + valueFrom = None + if "shellQuote" in _doc: + try: + shellQuote = load_field( + _doc.get("shellQuote"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'shellQuote' field is not valid because:", + SourceLine(_doc, "shellQuote", str), + [e], + ) + ) + else: + shellQuote = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `loadContents`, `position`, `prefix`, `separate`, `itemSeparator`, `valueFrom`, `shellQuote`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'CommandLineBinding'", None, _errors__) + _constructed = cls( + loadContents=loadContents, + position=position, + prefix=prefix, + separate=separate, + itemSeparator=itemSeparator, + valueFrom=valueFrom, + shellQuote=shellQuote, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if 
hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.loadContents is not None and "loadContents" not in r: + r["loadContents"] = save( + self.loadContents, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="loadContents", + val=r.get("loadContents"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.position is not None and "position" not in r: + r["position"] = save( + self.position, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="position", + val=r.get("position"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.prefix is not None and "prefix" not in r: + r["prefix"] = save( + self.prefix, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="prefix", + val=r.get("prefix"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.separate is not None and "separate" not in r: + r["separate"] = save( + self.separate, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="separate", + val=r.get("separate"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.itemSeparator is not None and "itemSeparator" not in r: + r["itemSeparator"] = save( + self.itemSeparator, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="itemSeparator", + val=r.get("itemSeparator"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.valueFrom is not None and "valueFrom" not in r: + r["valueFrom"] = save( + self.valueFrom, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + 
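+            # (descriptive note) add_kv places the key/value pair at its
+            # recorded source line, returning the updated max_len and
+            # inserted-line map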
key="valueFrom", + val=r.get("valueFrom"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.shellQuote is not None and "shellQuote" not in r: + r["shellQuote"] = save( + self.shellQuote, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="shellQuote", + val=r.get("shellQuote"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "loadContents", + "position", + "prefix", + "separate", + "itemSeparator", + "valueFrom", + "shellQuote", + ] + ) + + +class CommandOutputBinding(LoadContents): + """ + Describes how to generate an output parameter based on the files produced + by a CommandLineTool. + + The output parameter value is generated by applying these operations in the + following order: + + - glob + - loadContents + - outputEval + - secondaryFiles + + """ + + def __init__( + self, + loadContents: Optional[Any] = None, + loadListing: Optional[Any] = None, + glob: Optional[Any] = None, + outputEval: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.loadContents = loadContents + self.loadListing = loadListing + self.glob = glob + self.outputEval = outputEval + + def __eq__(self, other: Any) -> bool: + if isinstance(other, CommandOutputBinding): + return bool( + self.loadContents == other.loadContents + and self.loadListing == other.loadListing + and self.glob == other.glob + and self.outputEval == other.outputEval + ) + return False + + def __hash__(self) -> int: + return hash((self.loadContents, self.loadListing, self.glob, self.outputEval)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "CommandOutputBinding": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "loadContents" in _doc: + try: + loadContents = load_field( + _doc.get("loadContents"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'loadContents' field is not valid because:", + SourceLine(_doc, "loadContents", str), + [e], + ) + ) + else: + loadContents = None + if "loadListing" in _doc: + try: + loadListing = load_field( + _doc.get("loadListing"), + union_of_None_type_or_LoadListingEnumLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'loadListing' field is not valid because:", + SourceLine(_doc, "loadListing", str), + [e], + ) + ) + else: + loadListing = None + if "glob" in _doc: + try: + glob = load_field( + _doc.get("glob"), + union_of_None_type_or_strtype_or_ExpressionLoader_or_array_of_strtype, + 
baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'glob' field is not valid because:", + SourceLine(_doc, "glob", str), + [e], + ) + ) + else: + glob = None + if "outputEval" in _doc: + try: + outputEval = load_field( + _doc.get("outputEval"), + union_of_None_type_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'outputEval' field is not valid because:", + SourceLine(_doc, "outputEval", str), + [e], + ) + ) + else: + outputEval = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `loadContents`, `loadListing`, `glob`, `outputEval`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'CommandOutputBinding'", None, _errors__) + _constructed = cls( + loadContents=loadContents, + loadListing=loadListing, + glob=glob, + outputEval=outputEval, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.loadContents is not None and "loadContents" not in r: + r["loadContents"] = save( + self.loadContents, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="loadContents", + val=r.get("loadContents"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if 
self.loadListing is not None and "loadListing" not in r: + r["loadListing"] = save( + self.loadListing, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="loadListing", + val=r.get("loadListing"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.glob is not None and "glob" not in r: + r["glob"] = save( + self.glob, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="glob", + val=r.get("glob"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.outputEval is not None and "outputEval" not in r: + r["outputEval"] = save( + self.outputEval, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="outputEval", + val=r.get("outputEval"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["loadContents", "loadListing", "glob", "outputEval"]) + + +class CommandLineBindable(Saveable): + def __init__( + self, + inputBinding: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.inputBinding = inputBinding + + def __eq__(self, other: Any) -> bool: + if isinstance(other, CommandLineBindable): + return bool(self.inputBinding == other.inputBinding) + return False + + def __hash__(self) -> int: + return hash((self.inputBinding)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "CommandLineBindable": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "inputBinding" in _doc: + try: + inputBinding = load_field( + _doc.get("inputBinding"), + union_of_None_type_or_CommandLineBindingLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'inputBinding' field is not valid because:", + SourceLine(_doc, "inputBinding", str), + [e], + ) + ) + else: + inputBinding = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `inputBinding`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise 
ValidationException("Trying 'CommandLineBindable'", None, _errors__) + _constructed = cls( + inputBinding=inputBinding, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.inputBinding is not None and "inputBinding" not in r: + r["inputBinding"] = save( + self.inputBinding, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="inputBinding", + val=r.get("inputBinding"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["inputBinding"]) + + +class CommandInputRecordField(InputRecordField, CommandLineBindable): + def __init__( + self, + name: Any, + type: Any, + doc: Optional[Any] = None, + label: Optional[Any] = None, + secondaryFiles: Optional[Any] = None, + streamable: Optional[Any] = None, + format: Optional[Any] = None, + loadContents: Optional[Any] = None, + loadListing: Optional[Any] = None, + inputBinding: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.doc = doc + self.name = name + self.type = type + self.label = label + self.secondaryFiles = 
secondaryFiles + self.streamable = streamable + self.format = format + self.loadContents = loadContents + self.loadListing = loadListing + self.inputBinding = inputBinding + + def __eq__(self, other: Any) -> bool: + if isinstance(other, CommandInputRecordField): + return bool( + self.doc == other.doc + and self.name == other.name + and self.type == other.type + and self.label == other.label + and self.secondaryFiles == other.secondaryFiles + and self.streamable == other.streamable + and self.format == other.format + and self.loadContents == other.loadContents + and self.loadListing == other.loadListing + and self.inputBinding == other.inputBinding + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.doc, + self.name, + self.type, + self.label, + self.secondaryFiles, + self.streamable, + self.format, + self.loadContents, + self.loadListing, + self.inputBinding, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "CommandInputRecordField": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "name" in _doc: + try: + name = load_field( + _doc.get("name"), + uri_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'name' field is not valid because:", + SourceLine(_doc, "name", str), + [e], + ) + ) + else: + name = None + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + raise ValidationException("Missing name") + if not __original_name_is_none: + baseuri = name + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'doc' field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + try: + type = load_field( + _doc.get("type"), + typedsl_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'type' field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'label' field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "secondaryFiles" in _doc: + try: + secondaryFiles = load_field( + _doc.get("secondaryFiles"), + secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'secondaryFiles' field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [e], + ) + ) + else: + secondaryFiles = None + if "streamable" in _doc: + try: + streamable = load_field( + _doc.get("streamable"), + union_of_None_type_or_booltype, + baseuri, + 
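+                    # (descriptive note) validated as an optional boolean
+                    # (union_of_None_type_or_booltype)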
loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'streamable' field is not valid because:", + SourceLine(_doc, "streamable", str), + [e], + ) + ) + else: + streamable = None + if "format" in _doc: + try: + format = load_field( + _doc.get("format"), + uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'format' field is not valid because:", + SourceLine(_doc, "format", str), + [e], + ) + ) + else: + format = None + if "loadContents" in _doc: + try: + loadContents = load_field( + _doc.get("loadContents"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'loadContents' field is not valid because:", + SourceLine(_doc, "loadContents", str), + [e], + ) + ) + else: + loadContents = None + if "loadListing" in _doc: + try: + loadListing = load_field( + _doc.get("loadListing"), + union_of_None_type_or_LoadListingEnumLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'loadListing' field is not valid because:", + SourceLine(_doc, "loadListing", str), + [e], + ) + ) + else: + loadListing = None + if "inputBinding" in _doc: + try: + inputBinding = load_field( + _doc.get("inputBinding"), + union_of_None_type_or_CommandLineBindingLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'inputBinding' field is not valid because:", + SourceLine(_doc, "inputBinding", str), + [e], + ) + ) + else: + inputBinding = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `doc`, `name`, `type`, `label`, `secondaryFiles`, `streamable`, `format`, `loadContents`, `loadListing`, `inputBinding`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'CommandInputRecordField'", None, _errors__ + ) + _constructed = cls( + doc=doc, + name=name, + type=type, + label=label, + secondaryFiles=secondaryFiles, + streamable=streamable, + format=format, + loadContents=loadContents, + loadListing=loadListing, + inputBinding=inputBinding, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[name] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + 
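+                # (descriptive note) with relative_uris off, extension field
+                # keys are emitted fully expanded, without vocabulary prefixes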
r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.name is not None and "name" not in r: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="name", + val=r.get("name"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.secondaryFiles is not None and "secondaryFiles" not in r: + r["secondaryFiles"] = save( + self.secondaryFiles, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="secondaryFiles", + val=r.get("secondaryFiles"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.streamable is not None and "streamable" not in r: + r["streamable"] = save( + self.streamable, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="streamable", + 
val=r.get("streamable"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.format is not None and "format" not in r: + u = save_relative_uri( + self.format, str(self.name), True, None, relative_uris + ) + r["format"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="format", + val=r.get("format"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.loadContents is not None and "loadContents" not in r: + r["loadContents"] = save( + self.loadContents, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="loadContents", + val=r.get("loadContents"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.loadListing is not None and "loadListing" not in r: + r["loadListing"] = save( + self.loadListing, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="loadListing", + val=r.get("loadListing"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.inputBinding is not None and "inputBinding" not in r: + r["inputBinding"] = save( + self.inputBinding, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="inputBinding", + val=r.get("inputBinding"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "doc", + "name", + "type", + "label", + "secondaryFiles", + "streamable", + "format", + "loadContents", + "loadListing", + "inputBinding", + ] + ) + + +class CommandInputRecordSchema( + InputRecordSchema, CommandInputSchema, CommandLineBindable +): + def __init__( + self, + type: Any, + fields: Optional[Any] = None, + label: Optional[Any] = None, + doc: Optional[Any] = None, + name: Optional[Any] = None, + inputBinding: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.fields = fields + self.type = type + self.label = label + self.doc = doc + self.name = name + self.inputBinding = inputBinding + + def __eq__(self, other: Any) -> bool: + if isinstance(other, CommandInputRecordSchema): + return bool( + self.fields == other.fields + and self.type == other.type + and self.label == other.label + and self.doc == other.doc + and self.name == other.name + and self.inputBinding == other.inputBinding + ) + return False + + def 
__hash__(self) -> int: + return hash( + (self.fields, self.type, self.label, self.doc, self.name, self.inputBinding) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "CommandInputRecordSchema": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "name" in _doc: + try: + name = load_field( + _doc.get("name"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'name' field is not valid because:", + SourceLine(_doc, "name", str), + [e], + ) + ) + else: + name = None + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + name = "_:" + str(_uuid__.uuid4()) + if not __original_name_is_none: + baseuri = name + if "fields" in _doc: + try: + fields = load_field( + _doc.get("fields"), + idmap_fields_union_of_None_type_or_array_of_CommandInputRecordFieldLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'fields' field is not valid because:", + SourceLine(_doc, "fields", str), + [e], + ) + ) + else: + fields = None + try: + type = load_field( + _doc.get("type"), + typedsl_Record_nameLoader_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'type' field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'label' field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'doc' field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + if "inputBinding" in _doc: + try: + inputBinding = load_field( + _doc.get("inputBinding"), + union_of_None_type_or_CommandLineBindingLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'inputBinding' field is not valid because:", + SourceLine(_doc, "inputBinding", str), + [e], + ) + ) + else: + inputBinding = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `fields`, `type`, `label`, `doc`, `name`, `inputBinding`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'CommandInputRecordSchema'", None, _errors__ + ) + _constructed = cls( + fields=fields, + type=type, + label=label, + doc=doc, + name=name, + inputBinding=inputBinding, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[name] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, 
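+        # (descriptive note) top=True marks the document root; only then are
+        # $namespaces and $schemas attached to the output (see method end)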
+ base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.name is not None and "name" not in r: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="name", + val=r.get("name"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.fields is not None and "fields" not in r: + r["fields"] = save( + self.fields, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="fields", + val=r.get("fields"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, + top=False, + base_url=str(self.name), 
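+                # (descriptive note) children resolve relative URIs against
+                # this schema's `name`, hence base_url=str(self.name)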
+ relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.inputBinding is not None and "inputBinding" not in r: + r["inputBinding"] = save( + self.inputBinding, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="inputBinding", + val=r.get("inputBinding"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["fields", "type", "label", "doc", "name", "inputBinding"]) + + +class CommandInputEnumSchema(InputEnumSchema, CommandInputSchema, CommandLineBindable): + def __init__( + self, + symbols: Any, + type: Any, + name: Optional[Any] = None, + label: Optional[Any] = None, + doc: Optional[Any] = None, + inputBinding: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.name = name + self.symbols = symbols + self.type = type + self.label = label + self.doc = doc + self.inputBinding = inputBinding + + def __eq__(self, other: Any) -> bool: + if isinstance(other, CommandInputEnumSchema): + return bool( + self.name == other.name + and self.symbols == other.symbols + and self.type == other.type + and self.label == other.label + and self.doc == other.doc + and self.inputBinding == other.inputBinding + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.name, + self.symbols, + self.type, + self.label, + self.doc, + self.inputBinding, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "CommandInputEnumSchema": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "name" in _doc: + try: + name = load_field( + _doc.get("name"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'name' field is not valid because:", + SourceLine(_doc, "name", str), + [e], + ) + ) + else: + name = None + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + name = "_:" + str(_uuid__.uuid4()) + if not __original_name_is_none: + baseuri = name + try: + symbols = load_field( + _doc.get("symbols"), + uri_array_of_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'symbols' field is not valid because:", + SourceLine(_doc, "symbols", str), + [e], + ) + ) + try: + type = load_field( + 
_doc.get("type"), + typedsl_Enum_nameLoader_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'type' field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'label' field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'doc' field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + if "inputBinding" in _doc: + try: + inputBinding = load_field( + _doc.get("inputBinding"), + union_of_None_type_or_CommandLineBindingLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'inputBinding' field is not valid because:", + SourceLine(_doc, "inputBinding", str), + [e], + ) + ) + else: + inputBinding = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `name`, `symbols`, `type`, `label`, `doc`, `inputBinding`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'CommandInputEnumSchema'", None, _errors__ + ) + _constructed = cls( + name=name, + symbols=symbols, + type=type, + label=label, + doc=doc, + inputBinding=inputBinding, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[name] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + 
): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.name is not None and "name" not in r: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="name", + val=r.get("name"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.symbols is not None and "symbols" not in r: + u = save_relative_uri( + self.symbols, str(self.name), True, None, relative_uris + ) + r["symbols"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="symbols", + val=r.get("symbols"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.inputBinding is not None and "inputBinding" not in r: + r["inputBinding"] = save( + self.inputBinding, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="inputBinding", + val=r.get("inputBinding"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["name", "symbols", "type", "label", "doc", "inputBinding"]) + + +class CommandInputArraySchema( + InputArraySchema, CommandInputSchema, CommandLineBindable +): + def __init__( + self, + items: Any, + type: Any, + label: Optional[Any] = None, + doc: Optional[Any] = None, + name: Optional[Any] = None, + inputBinding: Optional[Any] = None, + 
extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.items = items + self.type = type + self.label = label + self.doc = doc + self.name = name + self.inputBinding = inputBinding + + def __eq__(self, other: Any) -> bool: + if isinstance(other, CommandInputArraySchema): + return bool( + self.items == other.items + and self.type == other.type + and self.label == other.label + and self.doc == other.doc + and self.name == other.name + and self.inputBinding == other.inputBinding + ) + return False + + def __hash__(self) -> int: + return hash( + (self.items, self.type, self.label, self.doc, self.name, self.inputBinding) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "CommandInputArraySchema": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "name" in _doc: + try: + name = load_field( + _doc.get("name"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'name' field is not valid because:", + SourceLine(_doc, "name", str), + [e], + ) + ) + else: + name = None + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + name = "_:" + str(_uuid__.uuid4()) + if not __original_name_is_none: + baseuri = name + try: + items = load_field( + _doc.get("items"), + uri_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_False_True_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'items' field is not valid because:", + SourceLine(_doc, "items", str), + [e], + ) + ) + try: + type = load_field( + _doc.get("type"), + typedsl_Array_nameLoader_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'type' field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'label' field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'doc' field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + if "inputBinding" in _doc: + try: + inputBinding = load_field( + _doc.get("inputBinding"), + union_of_None_type_or_CommandLineBindingLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'inputBinding' 
field is not valid because:", + SourceLine(_doc, "inputBinding", str), + [e], + ) + ) + else: + inputBinding = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `items`, `type`, `label`, `doc`, `name`, `inputBinding`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'CommandInputArraySchema'", None, _errors__ + ) + _constructed = cls( + items=items, + type=type, + label=label, + doc=doc, + name=name, + inputBinding=inputBinding, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[name] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.name is not None and "name" not in r: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="name", + val=r.get("name"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.items is not None and "items" not in r: + u = save_relative_uri(self.items, str(self.name), False, 2, relative_uris) + r["items"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="items", + val=r.get("items"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, + top=False, + base_url=str(self.name), + 
relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.inputBinding is not None and "inputBinding" not in r: + r["inputBinding"] = save( + self.inputBinding, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="inputBinding", + val=r.get("inputBinding"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["items", "type", "label", "doc", "name", "inputBinding"]) + + +class CommandOutputRecordField(OutputRecordField): + def __init__( + self, + name: Any, + type: Any, + doc: Optional[Any] = None, + label: Optional[Any] = None, + secondaryFiles: Optional[Any] = None, + streamable: Optional[Any] = None, + format: Optional[Any] = None, + outputBinding: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.doc = doc + self.name = name + self.type = type + self.label = label + self.secondaryFiles = secondaryFiles + self.streamable = streamable + self.format = format + self.outputBinding = outputBinding + + def __eq__(self, other: Any) -> bool: + if isinstance(other, CommandOutputRecordField): + return bool( + self.doc == other.doc + and self.name == other.name + and self.type == other.type + and self.label == other.label + and self.secondaryFiles == other.secondaryFiles + and self.streamable == other.streamable + and self.format == other.format + and self.outputBinding == other.outputBinding + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.doc, + self.name, + self.type, + self.label, + self.secondaryFiles, + self.streamable, + self.format, + self.outputBinding, + ) + ) + + @classmethod + def fromDoc( 
+ cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "CommandOutputRecordField": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "name" in _doc: + try: + name = load_field( + _doc.get("name"), + uri_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'name' field is not valid because:", + SourceLine(_doc, "name", str), + [e], + ) + ) + else: + name = None + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + raise ValidationException("Missing name") + if not __original_name_is_none: + baseuri = name + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'doc' field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + try: + type = load_field( + _doc.get("type"), + typedsl_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'type' field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'label' field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "secondaryFiles" in _doc: + try: + secondaryFiles = load_field( + _doc.get("secondaryFiles"), + secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'secondaryFiles' field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [e], + ) + ) + else: + secondaryFiles = None + if "streamable" in _doc: + try: + streamable = load_field( + _doc.get("streamable"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'streamable' field is not valid because:", + SourceLine(_doc, "streamable", str), + [e], + ) + ) + else: + streamable = None + if "format" in _doc: + try: + format = load_field( + _doc.get("format"), + uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'format' field is not valid because:", + SourceLine(_doc, "format", str), + [e], + ) + ) + else: + format = None + if "outputBinding" in _doc: + try: + outputBinding = load_field( + _doc.get("outputBinding"), + union_of_None_type_or_CommandOutputBindingLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'outputBinding' field is not valid because:", + 
SourceLine(_doc, "outputBinding", str), + [e], + ) + ) + else: + outputBinding = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `doc`, `name`, `type`, `label`, `secondaryFiles`, `streamable`, `format`, `outputBinding`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'CommandOutputRecordField'", None, _errors__ + ) + _constructed = cls( + doc=doc, + name=name, + type=type, + label=label, + secondaryFiles=secondaryFiles, + streamable=streamable, + format=format, + outputBinding=outputBinding, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[name] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.name is not None and "name" not in r: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="name", + val=r.get("name"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.type is 
not None and "type" not in r: + r["type"] = save( + self.type, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.secondaryFiles is not None and "secondaryFiles" not in r: + r["secondaryFiles"] = save( + self.secondaryFiles, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="secondaryFiles", + val=r.get("secondaryFiles"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.streamable is not None and "streamable" not in r: + r["streamable"] = save( + self.streamable, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="streamable", + val=r.get("streamable"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.format is not None and "format" not in r: + u = save_relative_uri( + self.format, str(self.name), True, None, relative_uris + ) + r["format"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="format", + val=r.get("format"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.outputBinding is not None and "outputBinding" not in r: + r["outputBinding"] = save( + self.outputBinding, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="outputBinding", + val=r.get("outputBinding"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "doc", + "name", + "type", + "label", + "secondaryFiles", + "streamable", + "format", + "outputBinding", + ] + ) + + +class CommandOutputRecordSchema(OutputRecordSchema): + def __init__( + self, + type: Any, + fields: Optional[Any] = None, + label: Optional[Any] = None, + doc: Optional[Any] = None, + name: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + 
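+ # extension_fields carries any namespaced ("prefix:key") entries that
+ # are not declared schema fields; fromDoc() collects them separately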
self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.fields = fields + self.type = type + self.label = label + self.doc = doc + self.name = name + + def __eq__(self, other: Any) -> bool: + if isinstance(other, CommandOutputRecordSchema): + return bool( + self.fields == other.fields + and self.type == other.type + and self.label == other.label + and self.doc == other.doc + and self.name == other.name + ) + return False + + def __hash__(self) -> int: + return hash((self.fields, self.type, self.label, self.doc, self.name)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "CommandOutputRecordSchema": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "name" in _doc: + try: + name = load_field( + _doc.get("name"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'name' field is not valid because:", + SourceLine(_doc, "name", str), + [e], + ) + ) + else: + name = None + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + name = "_:" + str(_uuid__.uuid4()) + if not __original_name_is_none: + baseuri = name + if "fields" in _doc: + try: + fields = load_field( + _doc.get("fields"), + idmap_fields_union_of_None_type_or_array_of_CommandOutputRecordFieldLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'fields' field is not valid because:", + SourceLine(_doc, "fields", str), + [e], + ) + ) + else: + fields = None + try: + type = load_field( + _doc.get("type"), + typedsl_Record_nameLoader_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'type' field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'label' field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'doc' field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `fields`, `type`, `label`, `doc`, `name`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'CommandOutputRecordSchema'", None, _errors__ + ) + _constructed = cls( + fields=fields, + type=type, + label=label, + doc=doc, + name=name, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) 
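+ # register the constructed schema in the shared document index so that
+ # later references to this name resolve to the same object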
+ loadingOptions.idx[name] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.name is not None and "name" not in r: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="name", + val=r.get("name"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.fields is not None and "fields" not in r: + r["fields"] = save( + self.fields, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="fields", + val=r.get("fields"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + 
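+ # shift accounts for lines already inserted above this point, keeping
+ # the original line numbers aligned with the output document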
shift=shift, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["fields", "type", "label", "doc", "name"]) + + +class CommandOutputEnumSchema(OutputEnumSchema): + def __init__( + self, + symbols: Any, + type: Any, + name: Optional[Any] = None, + label: Optional[Any] = None, + doc: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.name = name + self.symbols = symbols + self.type = type + self.label = label + self.doc = doc + + def __eq__(self, other: Any) -> bool: + if isinstance(other, CommandOutputEnumSchema): + return bool( + self.name == other.name + and self.symbols == other.symbols + and self.type == other.type + and self.label == other.label + and self.doc == other.doc + ) + return False + + def __hash__(self) -> int: + return hash((self.name, self.symbols, self.type, self.label, self.doc)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "CommandOutputEnumSchema": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "name" in _doc: + try: + name = load_field( + _doc.get("name"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'name' field is not valid because:", + SourceLine(_doc, "name", str), + [e], + ) + ) + else: + name = None + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + name = "_:" + str(_uuid__.uuid4()) + if not __original_name_is_none: + baseuri = name + try: + symbols = load_field( + _doc.get("symbols"), + uri_array_of_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'symbols' field is not valid because:", + SourceLine(_doc, "symbols", str), + [e], + ) + ) + try: + type = load_field( + _doc.get("type"), + typedsl_Enum_nameLoader_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'type' field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'label' field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if 
"doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'doc' field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `name`, `symbols`, `type`, `label`, `doc`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'CommandOutputEnumSchema'", None, _errors__ + ) + _constructed = cls( + name=name, + symbols=symbols, + type=type, + label=label, + doc=doc, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[name] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.name is not None and "name" not in r: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="name", + val=r.get("name"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.symbols is not None and "symbols" not in r: + u = save_relative_uri( + self.symbols, str(self.name), True, None, relative_uris + ) + r["symbols"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="symbols", + val=r.get("symbols"), + cols=cols, + min_col=min_col, + max_len=max_len, + 
inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["name", "symbols", "type", "label", "doc"]) + + +class CommandOutputArraySchema(OutputArraySchema): + def __init__( + self, + items: Any, + type: Any, + label: Optional[Any] = None, + doc: Optional[Any] = None, + name: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.items = items + self.type = type + self.label = label + self.doc = doc + self.name = name + + def __eq__(self, other: Any) -> bool: + if isinstance(other, CommandOutputArraySchema): + return bool( + self.items == other.items + and self.type == other.type + and self.label == other.label + and self.doc == other.doc + and self.name == other.name + ) + return False + + def __hash__(self) -> int: + return hash((self.items, self.type, self.label, self.doc, self.name)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "CommandOutputArraySchema": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "name" in _doc: + try: + name = load_field( + _doc.get("name"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'name' field is not valid because:", + SourceLine(_doc, "name", str), + [e], + ) + ) + else: + name = None + + __original_name_is_none = name is None + if name is None: + if docRoot is not None: + name = docRoot + else: + name = "_:" + str(_uuid__.uuid4()) + if not 
__original_name_is_none: + baseuri = name + try: + items = load_field( + _doc.get("items"), + uri_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_False_True_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'items' field is not valid because:", + SourceLine(_doc, "items", str), + [e], + ) + ) + try: + type = load_field( + _doc.get("type"), + typedsl_Array_nameLoader_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'type' field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'label' field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'doc' field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `items`, `type`, `label`, `doc`, `name`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'CommandOutputArraySchema'", None, _errors__ + ) + _constructed = cls( + items=items, + type=type, + label=label, + doc=doc, + name=name, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[name] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + 
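+ # extend the key path so the recursive save() can look up this value's
+ # original line/column information in the source document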
inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.name is not None and "name" not in r: + u = save_relative_uri(self.name, base_url, True, None, relative_uris) + r["name"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="name", + val=r.get("name"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.items is not None and "items" not in r: + u = save_relative_uri(self.items, str(self.name), False, 2, relative_uris) + r["items"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="items", + val=r.get("items"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["items", "type", "label", "doc", "name"]) + + +class CommandInputParameter(InputParameter): + """ + An input parameter for a CommandLineTool. 
+ """ + + def __init__( + self, + type: Any, + label: Optional[Any] = None, + secondaryFiles: Optional[Any] = None, + streamable: Optional[Any] = None, + doc: Optional[Any] = None, + id: Optional[Any] = None, + format: Optional[Any] = None, + loadContents: Optional[Any] = None, + loadListing: Optional[Any] = None, + default: Optional[Any] = None, + inputBinding: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.label = label + self.secondaryFiles = secondaryFiles + self.streamable = streamable + self.doc = doc + self.id = id + self.format = format + self.loadContents = loadContents + self.loadListing = loadListing + self.default = default + self.type = type + self.inputBinding = inputBinding + + def __eq__(self, other: Any) -> bool: + if isinstance(other, CommandInputParameter): + return bool( + self.label == other.label + and self.secondaryFiles == other.secondaryFiles + and self.streamable == other.streamable + and self.doc == other.doc + and self.id == other.id + and self.format == other.format + and self.loadContents == other.loadContents + and self.loadListing == other.loadListing + and self.default == other.default + and self.type == other.type + and self.inputBinding == other.inputBinding + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.label, + self.secondaryFiles, + self.streamable, + self.doc, + self.id, + self.format, + self.loadContents, + self.loadListing, + self.default, + self.type, + self.inputBinding, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "CommandInputParameter": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "id" in _doc: + try: + id = load_field( + _doc.get("id"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'id' field is not valid because:", + SourceLine(_doc, "id", str), + [e], + ) + ) + else: + id = None + + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot + else: + id = "_:" + str(_uuid__.uuid4()) + if not __original_id_is_none: + baseuri = id + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'label' field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "secondaryFiles" in _doc: + try: + secondaryFiles = load_field( + _doc.get("secondaryFiles"), + secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'secondaryFiles' field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [e], + ) + ) + else: + secondaryFiles = None + if "streamable" in _doc: + try: + streamable = load_field( + _doc.get("streamable"), + union_of_None_type_or_booltype, + baseuri, + 
loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'streamable' field is not valid because:", + SourceLine(_doc, "streamable", str), + [e], + ) + ) + else: + streamable = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'doc' field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + if "format" in _doc: + try: + format = load_field( + _doc.get("format"), + uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'format' field is not valid because:", + SourceLine(_doc, "format", str), + [e], + ) + ) + else: + format = None + if "loadContents" in _doc: + try: + loadContents = load_field( + _doc.get("loadContents"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'loadContents' field is not valid because:", + SourceLine(_doc, "loadContents", str), + [e], + ) + ) + else: + loadContents = None + if "loadListing" in _doc: + try: + loadListing = load_field( + _doc.get("loadListing"), + union_of_None_type_or_LoadListingEnumLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'loadListing' field is not valid because:", + SourceLine(_doc, "loadListing", str), + [e], + ) + ) + else: + loadListing = None + if "default" in _doc: + try: + default = load_field( + _doc.get("default"), + union_of_None_type_or_FileLoader_or_DirectoryLoader_or_Any_type, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'default' field is not valid because:", + SourceLine(_doc, "default", str), + [e], + ) + ) + else: + default = None + try: + type = load_field( + _doc.get("type"), + typedsl_union_of_CWLTypeLoader_or_stdinLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'type' field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + if "inputBinding" in _doc: + try: + inputBinding = load_field( + _doc.get("inputBinding"), + union_of_None_type_or_CommandLineBindingLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'inputBinding' field is not valid because:", + SourceLine(_doc, "inputBinding", str), + [e], + ) + ) + else: + inputBinding = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `format`, `loadContents`, `loadListing`, `default`, `type`, `inputBinding`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + 
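+                    # A key without a namespace prefix that is not a known
+                    # attribute is a hard validation error; scanning stops
+                    # at the first such key.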
break + + if _errors__: + raise ValidationException("Trying 'CommandInputParameter'", None, _errors__) + _constructed = cls( + label=label, + secondaryFiles=secondaryFiles, + streamable=streamable, + doc=doc, + id=id, + format=format, + loadContents=loadContents, + loadListing=loadListing, + default=default, + type=type, + inputBinding=inputBinding, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[id] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc: + if self.id: + temp_id = self.id + if len(temp_id.split('#')) > 1: + temp_id = self.id.split("#")[1] + if temp_id in doc: + keys.append(temp_id) + temp_doc = doc.get(temp_id) + if isinstance(temp_doc, CommentedMap): + doc = temp_doc + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + base_url_to_save = base_url + if self.id: + base_url_to_save = self.id + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url_to_save, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.id is not None and "id" not in r: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="id", + val=r.get("id"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.secondaryFiles is not None and "secondaryFiles" not in r: + r["secondaryFiles"] = save( + self.secondaryFiles, + top=False, + base_url=str(self.id), + 
relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="secondaryFiles", + val=r.get("secondaryFiles"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.streamable is not None and "streamable" not in r: + r["streamable"] = save( + self.streamable, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="streamable", + val=r.get("streamable"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.format is not None and "format" not in r: + u = save_relative_uri(self.format, str(self.id), True, None, relative_uris) + r["format"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="format", + val=r.get("format"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.loadContents is not None and "loadContents" not in r: + r["loadContents"] = save( + self.loadContents, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="loadContents", + val=r.get("loadContents"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.loadListing is not None and "loadListing" not in r: + r["loadListing"] = save( + self.loadListing, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="loadListing", + val=r.get("loadListing"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.default is not None and "default" not in r: + r["default"] = save( + self.default, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="default", + val=r.get("default"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + 
inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.inputBinding is not None and "inputBinding" not in r: + r["inputBinding"] = save( + self.inputBinding, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="inputBinding", + val=r.get("inputBinding"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "label", + "secondaryFiles", + "streamable", + "doc", + "id", + "format", + "loadContents", + "loadListing", + "default", + "type", + "inputBinding", + ] + ) + + +class CommandOutputParameter(OutputParameter): + """ + An output parameter for a CommandLineTool. + """ + + def __init__( + self, + type: Any, + label: Optional[Any] = None, + secondaryFiles: Optional[Any] = None, + streamable: Optional[Any] = None, + doc: Optional[Any] = None, + id: Optional[Any] = None, + format: Optional[Any] = None, + outputBinding: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.label = label + self.secondaryFiles = secondaryFiles + self.streamable = streamable + self.doc = doc + self.id = id + self.format = format + self.type = type + self.outputBinding = outputBinding + + def __eq__(self, other: Any) -> bool: + if isinstance(other, CommandOutputParameter): + return bool( + self.label == other.label + and self.secondaryFiles == other.secondaryFiles + and self.streamable == other.streamable + and self.doc == other.doc + and self.id == other.id + and self.format == other.format + and self.type == other.type + and self.outputBinding == other.outputBinding + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.label, + self.secondaryFiles, + self.streamable, + self.doc, + self.id, + self.format, + self.type, + self.outputBinding, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "CommandOutputParameter": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "id" in _doc: + try: + id = load_field( + _doc.get("id"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'id' field is not valid because:", + SourceLine(_doc, "id", str), + [e], + ) + ) + else: + id = None + + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot + else: + id = "_:" + str(_uuid__.uuid4()) + if not __original_id_is_none: + baseuri = id + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'label' 
field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "secondaryFiles" in _doc: + try: + secondaryFiles = load_field( + _doc.get("secondaryFiles"), + secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'secondaryFiles' field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [e], + ) + ) + else: + secondaryFiles = None + if "streamable" in _doc: + try: + streamable = load_field( + _doc.get("streamable"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'streamable' field is not valid because:", + SourceLine(_doc, "streamable", str), + [e], + ) + ) + else: + streamable = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'doc' field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + if "format" in _doc: + try: + format = load_field( + _doc.get("format"), + uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'format' field is not valid because:", + SourceLine(_doc, "format", str), + [e], + ) + ) + else: + format = None + try: + type = load_field( + _doc.get("type"), + typedsl_union_of_CWLTypeLoader_or_stdoutLoader_or_stderrLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'type' field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + if "outputBinding" in _doc: + try: + outputBinding = load_field( + _doc.get("outputBinding"), + union_of_None_type_or_CommandOutputBindingLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'outputBinding' field is not valid because:", + SourceLine(_doc, "outputBinding", str), + [e], + ) + ) + else: + outputBinding = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `format`, `type`, `outputBinding`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'CommandOutputParameter'", None, _errors__ + ) + _constructed = cls( + label=label, + secondaryFiles=secondaryFiles, + streamable=streamable, + doc=doc, + id=id, + format=format, + type=type, + outputBinding=outputBinding, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[id] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool 
= False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc: + if self.id: + temp_id = self.id + if len(temp_id.split('#')) > 1: + temp_id = self.id.split("#")[1] + if temp_id in doc: + keys.append(temp_id) + temp_doc = doc.get(temp_id) + if isinstance(temp_doc, CommentedMap): + doc = temp_doc + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + base_url_to_save = base_url + if self.id: + base_url_to_save = self.id + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url_to_save, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.id is not None and "id" not in r: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="id", + val=r.get("id"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.secondaryFiles is not None and "secondaryFiles" not in r: + r["secondaryFiles"] = save( + self.secondaryFiles, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="secondaryFiles", + val=r.get("secondaryFiles"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.streamable is not None and "streamable" not in r: + r["streamable"] = save( + self.streamable, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + 
inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="streamable", + val=r.get("streamable"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.format is not None and "format" not in r: + u = save_relative_uri(self.format, str(self.id), True, None, relative_uris) + r["format"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="format", + val=r.get("format"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.outputBinding is not None and "outputBinding" not in r: + r["outputBinding"] = save( + self.outputBinding, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="outputBinding", + val=r.get("outputBinding"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "label", + "secondaryFiles", + "streamable", + "doc", + "id", + "format", + "type", + "outputBinding", + ] + ) + + +class CommandLineTool(Process): + """ + This defines the schema of the CWL Command Line Tool Description document. 
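+
+    Loading is normally done through the module-level helpers rather than
+    by calling ``fromDoc`` directly. A minimal sketch (``load_document`` is
+    assumed to be the usual schema-salad codegen entry point, and
+    ``echo.cwl`` is a hypothetical path):
+
+        from cwl_v1_2 import load_document
+
+        tool = load_document("echo.cwl")
+        assert tool.class_ == "CommandLineTool"
+        print([inp.id for inp in tool.inputs])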
+ + """ + + def __init__( + self, + inputs: Any, + outputs: Any, + id: Optional[Any] = None, + label: Optional[Any] = None, + doc: Optional[Any] = None, + requirements: Optional[Any] = None, + hints: Optional[Any] = None, + cwlVersion: Optional[Any] = None, + intent: Optional[Any] = None, + baseCommand: Optional[Any] = None, + arguments: Optional[Any] = None, + stdin: Optional[Any] = None, + stderr: Optional[Any] = None, + stdout: Optional[Any] = None, + successCodes: Optional[Any] = None, + temporaryFailCodes: Optional[Any] = None, + permanentFailCodes: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.id = id + self.label = label + self.doc = doc + self.inputs = inputs + self.outputs = outputs + self.requirements = requirements + self.hints = hints + self.cwlVersion = cwlVersion + self.intent = intent + self.class_ = "CommandLineTool" + self.baseCommand = baseCommand + self.arguments = arguments + self.stdin = stdin + self.stderr = stderr + self.stdout = stdout + self.successCodes = successCodes + self.temporaryFailCodes = temporaryFailCodes + self.permanentFailCodes = permanentFailCodes + + def __eq__(self, other: Any) -> bool: + if isinstance(other, CommandLineTool): + return bool( + self.id == other.id + and self.label == other.label + and self.doc == other.doc + and self.inputs == other.inputs + and self.outputs == other.outputs + and self.requirements == other.requirements + and self.hints == other.hints + and self.cwlVersion == other.cwlVersion + and self.intent == other.intent + and self.class_ == other.class_ + and self.baseCommand == other.baseCommand + and self.arguments == other.arguments + and self.stdin == other.stdin + and self.stderr == other.stderr + and self.stdout == other.stdout + and self.successCodes == other.successCodes + and self.temporaryFailCodes == other.temporaryFailCodes + and self.permanentFailCodes == other.permanentFailCodes + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.id, + self.label, + self.doc, + self.inputs, + self.outputs, + self.requirements, + self.hints, + self.cwlVersion, + self.intent, + self.class_, + self.baseCommand, + self.arguments, + self.stdin, + self.stderr, + self.stdout, + self.successCodes, + self.temporaryFailCodes, + self.permanentFailCodes, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "CommandLineTool": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "CommandLineTool": + raise ValidationException("Not a CommandLineTool") + + if "id" in _doc: + try: + id = load_field( + _doc.get("id"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'id' field is not valid because:", + SourceLine(_doc, "id", str), + [e], + ) + ) + else: + id = None + + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot + else: + id = "_:" + str(_uuid__.uuid4()) + if not __original_id_is_none: + baseuri = id + if "label" in _doc: + try: + label = 
load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'label' field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'doc' field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + try: + inputs = load_field( + _doc.get("inputs"), + idmap_inputs_array_of_CommandInputParameterLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'inputs' field is not valid because:", + SourceLine(_doc, "inputs", str), + [e], + ) + ) + try: + outputs = load_field( + _doc.get("outputs"), + idmap_outputs_array_of_CommandOutputParameterLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'outputs' field is not valid because:", + SourceLine(_doc, "outputs", str), + [e], + ) + ) + if "requirements" in _doc: + try: + requirements = load_field( + _doc.get("requirements"), + idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'requirements' field is not valid because:", + SourceLine(_doc, "requirements", str), + [e], + ) + ) + else: + requirements = None + if "hints" in _doc: + try: + hints = load_field( + _doc.get("hints"), + idmap_hints_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'hints' field is not valid because:", + SourceLine(_doc, "hints", str), + [e], + ) + ) + else: + hints = None + if "cwlVersion" in _doc: + try: + cwlVersion = load_field( + _doc.get("cwlVersion"), + uri_union_of_None_type_or_CWLVersionLoader_False_True_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'cwlVersion' field is not valid because:", + SourceLine(_doc, "cwlVersion", str), + [e], + ) + ) + else: + cwlVersion = None + if "intent" in _doc: + try: + intent = load_field( + 
_doc.get("intent"), + uri_union_of_None_type_or_array_of_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'intent' field is not valid because:", + SourceLine(_doc, "intent", str), + [e], + ) + ) + else: + intent = None + if "baseCommand" in _doc: + try: + baseCommand = load_field( + _doc.get("baseCommand"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'baseCommand' field is not valid because:", + SourceLine(_doc, "baseCommand", str), + [e], + ) + ) + else: + baseCommand = None + if "arguments" in _doc: + try: + arguments = load_field( + _doc.get("arguments"), + union_of_None_type_or_array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'arguments' field is not valid because:", + SourceLine(_doc, "arguments", str), + [e], + ) + ) + else: + arguments = None + if "stdin" in _doc: + try: + stdin = load_field( + _doc.get("stdin"), + union_of_None_type_or_strtype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'stdin' field is not valid because:", + SourceLine(_doc, "stdin", str), + [e], + ) + ) + else: + stdin = None + if "stderr" in _doc: + try: + stderr = load_field( + _doc.get("stderr"), + union_of_None_type_or_strtype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'stderr' field is not valid because:", + SourceLine(_doc, "stderr", str), + [e], + ) + ) + else: + stderr = None + if "stdout" in _doc: + try: + stdout = load_field( + _doc.get("stdout"), + union_of_None_type_or_strtype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'stdout' field is not valid because:", + SourceLine(_doc, "stdout", str), + [e], + ) + ) + else: + stdout = None + if "successCodes" in _doc: + try: + successCodes = load_field( + _doc.get("successCodes"), + union_of_None_type_or_array_of_inttype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'successCodes' field is not valid because:", + SourceLine(_doc, "successCodes", str), + [e], + ) + ) + else: + successCodes = None + if "temporaryFailCodes" in _doc: + try: + temporaryFailCodes = load_field( + _doc.get("temporaryFailCodes"), + union_of_None_type_or_array_of_inttype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'temporaryFailCodes' field is not valid because:", + SourceLine(_doc, "temporaryFailCodes", str), + [e], + ) + ) + else: + temporaryFailCodes = None + if "permanentFailCodes" in _doc: + try: + permanentFailCodes = load_field( + _doc.get("permanentFailCodes"), + union_of_None_type_or_array_of_inttype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'permanentFailCodes' field is not valid because:", + SourceLine(_doc, "permanentFailCodes", str), + [e], + ) + ) + else: + permanentFailCodes = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", 
loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `id`, `label`, `doc`, `inputs`, `outputs`, `requirements`, `hints`, `cwlVersion`, `intent`, `class`, `baseCommand`, `arguments`, `stdin`, `stderr`, `stdout`, `successCodes`, `temporaryFailCodes`, `permanentFailCodes`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'CommandLineTool'", None, _errors__) + _constructed = cls( + id=id, + label=label, + doc=doc, + inputs=inputs, + outputs=outputs, + requirements=requirements, + hints=hints, + cwlVersion=cwlVersion, + intent=intent, + baseCommand=baseCommand, + arguments=arguments, + stdin=stdin, + stderr=stderr, + stdout=stdout, + successCodes=successCodes, + temporaryFailCodes=temporaryFailCodes, + permanentFailCodes=permanentFailCodes, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[id] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc: + if self.id: + temp_id = self.id + if len(temp_id.split('#')) > 1: + temp_id = self.id.split("#")[1] + if temp_id in doc: + keys.append(temp_id) + temp_doc = doc.get(temp_id) + if isinstance(temp_doc, CommentedMap): + doc = temp_doc + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "CommandLineTool" + + if doc: + base_url_to_save = base_url + if self.id: + base_url_to_save = self.id + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url_to_save, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.id is not None and "id" not in r: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="id", + val=r.get("id"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if 
self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.inputs is not None and "inputs" not in r: + r["inputs"] = save( + self.inputs, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="inputs", + val=r.get("inputs"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.outputs is not None and "outputs" not in r: + r["outputs"] = save( + self.outputs, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="outputs", + val=r.get("outputs"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.requirements is not None and "requirements" not in r: + r["requirements"] = save( + self.requirements, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="requirements", + val=r.get("requirements"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.hints is not None and "hints" not in r: + r["hints"] = save( + self.hints, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="hints", + val=r.get("hints"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.cwlVersion is not None and "cwlVersion" not in r: + u = save_relative_uri( + self.cwlVersion, str(self.id), False, None, relative_uris + ) + r["cwlVersion"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="cwlVersion", + val=r.get("cwlVersion"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.intent is not None and "intent" not in r: + u = save_relative_uri(self.intent, str(self.id), True, None, relative_uris) + r["intent"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="intent", + val=r.get("intent"), + cols=cols, + min_col=min_col, + 
max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.baseCommand is not None and "baseCommand" not in r: + r["baseCommand"] = save( + self.baseCommand, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="baseCommand", + val=r.get("baseCommand"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.arguments is not None and "arguments" not in r: + r["arguments"] = save( + self.arguments, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="arguments", + val=r.get("arguments"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.stdin is not None and "stdin" not in r: + r["stdin"] = save( + self.stdin, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="stdin", + val=r.get("stdin"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.stderr is not None and "stderr" not in r: + r["stderr"] = save( + self.stderr, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="stderr", + val=r.get("stderr"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.stdout is not None and "stdout" not in r: + r["stdout"] = save( + self.stdout, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="stdout", + val=r.get("stdout"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.successCodes is not None and "successCodes" not in r: + r["successCodes"] = save( + self.successCodes, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="successCodes", + val=r.get("successCodes"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.temporaryFailCodes is not None and "temporaryFailCodes" not in r: + r["temporaryFailCodes"] = save( + self.temporaryFailCodes, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="temporaryFailCodes", + val=r.get("temporaryFailCodes"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.permanentFailCodes is not None and "permanentFailCodes" not in 
r: + r["permanentFailCodes"] = save( + self.permanentFailCodes, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="permanentFailCodes", + val=r.get("permanentFailCodes"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "id", + "label", + "doc", + "inputs", + "outputs", + "requirements", + "hints", + "cwlVersion", + "intent", + "class", + "baseCommand", + "arguments", + "stdin", + "stderr", + "stdout", + "successCodes", + "temporaryFailCodes", + "permanentFailCodes", + ] + ) + + +class DockerRequirement(ProcessRequirement): + """ + Indicates that a workflow component should be run in a + [Docker](https://docker.com) or Docker-compatible (such as + [Singularity](https://www.sylabs.io/) and [udocker](https://github.com/indigo-dc/udocker)) container environment and + specifies how to fetch or build the image. + + If a CommandLineTool lists `DockerRequirement` under + `hints` (or `requirements`), it may (or must) be run in the specified Docker + container. + + The platform must first acquire or install the correct Docker image as + specified by `dockerPull`, `dockerImport`, `dockerLoad` or `dockerFile`. + + The platform must execute the tool in the container using `docker run` with + the appropriate Docker image and tool command line. + + The workflow platform may provide input files and the designated output + directory through the use of volume bind mounts. The platform should rewrite + file paths in the input object to correspond to the Docker bind mounted + locations. That is, the platform should rewrite values in the parameter context + such as `runtime.outdir`, `runtime.tmpdir` and others to be valid paths + within the container. The platform must ensure that `runtime.outdir` and + `runtime.tmpdir` are distinct directories. + + When running a tool contained in Docker, the workflow platform must not + assume anything about the contents of the Docker container, such as the + presence or absence of specific software, except to assume that the + generated command line represents a valid command within the runtime + environment of the container. + + A container image may specify an + [ENTRYPOINT](https://docs.docker.com/engine/reference/builder/#entrypoint) + and/or + [CMD](https://docs.docker.com/engine/reference/builder/#cmd). + Command line arguments will be appended after all elements of + ENTRYPOINT, and will override all elements specified using CMD (in + other words, CMD is only used when the CommandLineTool definition + produces an empty command line). + + Use of implicit ENTRYPOINT or CMD are discouraged due to reproducibility + concerns of the implicit hidden execution point (For further discussion, see + [https://doi.org/10.12688/f1000research.15140.1](https://doi.org/10.12688/f1000research.15140.1)). Portable + CommandLineTool wrappers in which use of a container is optional must not rely on ENTRYPOINT or CMD. + CommandLineTools which do rely on ENTRYPOINT or CMD must list `DockerRequirement` in the + `requirements` section. 
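+
+    For illustration (this example is not part of the original schema help
+    text, and the image name is hypothetical), the equivalent requirement
+    can be built directly through this module:
+
+        req = DockerRequirement(dockerPull="docker.io/alpine:latest")
+        assert req.class_ == "DockerRequirement"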
+ + ## Interaction with other requirements + + If [EnvVarRequirement](#EnvVarRequirement) is specified alongside a + DockerRequirement, the environment variables must be provided to Docker + using `--env` or `--env-file` and interact with the container's preexisting + environment as defined by Docker. + + """ + + def __init__( + self, + dockerPull: Optional[Any] = None, + dockerLoad: Optional[Any] = None, + dockerFile: Optional[Any] = None, + dockerImport: Optional[Any] = None, + dockerImageId: Optional[Any] = None, + dockerOutputDirectory: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "DockerRequirement" + self.dockerPull = dockerPull + self.dockerLoad = dockerLoad + self.dockerFile = dockerFile + self.dockerImport = dockerImport + self.dockerImageId = dockerImageId + self.dockerOutputDirectory = dockerOutputDirectory + + def __eq__(self, other: Any) -> bool: + if isinstance(other, DockerRequirement): + return bool( + self.class_ == other.class_ + and self.dockerPull == other.dockerPull + and self.dockerLoad == other.dockerLoad + and self.dockerFile == other.dockerFile + and self.dockerImport == other.dockerImport + and self.dockerImageId == other.dockerImageId + and self.dockerOutputDirectory == other.dockerOutputDirectory + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.class_, + self.dockerPull, + self.dockerLoad, + self.dockerFile, + self.dockerImport, + self.dockerImageId, + self.dockerOutputDirectory, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "DockerRequirement": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "DockerRequirement": + raise ValidationException("Not a DockerRequirement") + + if "dockerPull" in _doc: + try: + dockerPull = load_field( + _doc.get("dockerPull"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'dockerPull' field is not valid because:", + SourceLine(_doc, "dockerPull", str), + [e], + ) + ) + else: + dockerPull = None + if "dockerLoad" in _doc: + try: + dockerLoad = load_field( + _doc.get("dockerLoad"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'dockerLoad' field is not valid because:", + SourceLine(_doc, "dockerLoad", str), + [e], + ) + ) + else: + dockerLoad = None + if "dockerFile" in _doc: + try: + dockerFile = load_field( + _doc.get("dockerFile"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'dockerFile' field is not valid because:", + SourceLine(_doc, "dockerFile", str), + [e], + ) + ) + else: + dockerFile = None + if "dockerImport" in _doc: + try: + dockerImport = load_field( + _doc.get("dockerImport"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'dockerImport' 
field is not valid because:", + SourceLine(_doc, "dockerImport", str), + [e], + ) + ) + else: + dockerImport = None + if "dockerImageId" in _doc: + try: + dockerImageId = load_field( + _doc.get("dockerImageId"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'dockerImageId' field is not valid because:", + SourceLine(_doc, "dockerImageId", str), + [e], + ) + ) + else: + dockerImageId = None + if "dockerOutputDirectory" in _doc: + try: + dockerOutputDirectory = load_field( + _doc.get("dockerOutputDirectory"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'dockerOutputDirectory' field is not valid because:", + SourceLine(_doc, "dockerOutputDirectory", str), + [e], + ) + ) + else: + dockerOutputDirectory = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`, `dockerPull`, `dockerLoad`, `dockerFile`, `dockerImport`, `dockerImageId`, `dockerOutputDirectory`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'DockerRequirement'", None, _errors__) + _constructed = cls( + dockerPull=dockerPull, + dockerLoad=dockerLoad, + dockerFile=dockerFile, + dockerImport=dockerImport, + dockerImageId=dockerImageId, + dockerOutputDirectory=dockerOutputDirectory, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "DockerRequirement" + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) 
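+
+        # Keys present in the source document were emitted above in their
+        # original order (preserving line/column information); the blocks
+        # below emit fields that are set on this object but absent from the
+        # source document, or all fields when there is no source document.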
+ if self.dockerPull is not None and "dockerPull" not in r: + r["dockerPull"] = save( + self.dockerPull, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="dockerPull", + val=r.get("dockerPull"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.dockerLoad is not None and "dockerLoad" not in r: + r["dockerLoad"] = save( + self.dockerLoad, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="dockerLoad", + val=r.get("dockerLoad"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.dockerFile is not None and "dockerFile" not in r: + r["dockerFile"] = save( + self.dockerFile, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="dockerFile", + val=r.get("dockerFile"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.dockerImport is not None and "dockerImport" not in r: + r["dockerImport"] = save( + self.dockerImport, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="dockerImport", + val=r.get("dockerImport"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.dockerImageId is not None and "dockerImageId" not in r: + r["dockerImageId"] = save( + self.dockerImageId, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="dockerImageId", + val=r.get("dockerImageId"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.dockerOutputDirectory is not None and "dockerOutputDirectory" not in r: + r["dockerOutputDirectory"] = save( + self.dockerOutputDirectory, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="dockerOutputDirectory", + val=r.get("dockerOutputDirectory"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "class", + "dockerPull", + "dockerLoad", + "dockerFile", + "dockerImport", + "dockerImageId", + "dockerOutputDirectory", + ] + ) + + +class SoftwareRequirement(ProcessRequirement): + """ + A list of software packages that should be configured in the environment of + the defined process. 
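+
+    A sketch of direct construction (illustrative only; the package name
+    and version are hypothetical, and ``SoftwarePackage`` is defined later
+    in this module):
+
+        pkg = SoftwarePackage(package="samtools", version=["1.19"])
+        req = SoftwareRequirement(packages=[pkg])
+        assert req.class_ == "SoftwareRequirement"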
+ + """ + + def __init__( + self, + packages: Any, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "SoftwareRequirement" + self.packages = packages + + def __eq__(self, other: Any) -> bool: + if isinstance(other, SoftwareRequirement): + return bool(self.class_ == other.class_ and self.packages == other.packages) + return False + + def __hash__(self) -> int: + return hash((self.class_, self.packages)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "SoftwareRequirement": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "SoftwareRequirement": + raise ValidationException("Not a SoftwareRequirement") + + try: + packages = load_field( + _doc.get("packages"), + idmap_packages_array_of_SoftwarePackageLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'packages' field is not valid because:", + SourceLine(_doc, "packages", str), + [e], + ) + ) + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`, `packages`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'SoftwareRequirement'", None, _errors__) + _constructed = cls( + packages=packages, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "SoftwareRequirement" + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = 
saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.packages is not None and "packages" not in r: + r["packages"] = save( + self.packages, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="packages", + val=r.get("packages"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["class", "packages"]) + + +class SoftwarePackage(Saveable): + def __init__( + self, + package: Any, + version: Optional[Any] = None, + specs: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.package = package + self.version = version + self.specs = specs + + def __eq__(self, other: Any) -> bool: + if isinstance(other, SoftwarePackage): + return bool( + self.package == other.package + and self.version == other.version + and self.specs == other.specs + ) + return False + + def __hash__(self) -> int: + return hash((self.package, self.version, self.specs)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "SoftwarePackage": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + try: + package = load_field( + _doc.get("package"), + strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'package' field is not valid because:", + SourceLine(_doc, "package", str), + [e], + ) + ) + if "version" in _doc: + try: + version = load_field( + _doc.get("version"), + union_of_None_type_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'version' field is not valid because:", + SourceLine(_doc, "version", str), + [e], + ) + ) + else: + version = None + if "specs" in _doc: + try: + specs = load_field( + _doc.get("specs"), + uri_union_of_None_type_or_array_of_strtype_False_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'specs' field is not valid because:", + SourceLine(_doc, "specs", str), + [e], + ) + ) + else: + specs = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `package`, `version`, `specs`".format( + k + ), + SourceLine(_doc, k, str), + ) 
+ ) + break + + if _errors__: + raise ValidationException("Trying 'SoftwarePackage'", None, _errors__) + _constructed = cls( + package=package, + version=version, + specs=specs, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.package is not None and "package" not in r: + r["package"] = save( + self.package, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="package", + val=r.get("package"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.version is not None and "version" not in r: + r["version"] = save( + self.version, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="version", + val=r.get("version"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.specs is not None and "specs" not in r: + u = save_relative_uri(self.specs, base_url, False, None, relative_uris) + r["specs"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="specs", + val=r.get("specs"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = 
frozenset(["package", "version", "specs"]) + + +class Dirent(Saveable): + """ + Define a file or subdirectory that must be staged to a particular + place prior to executing the command line tool. May be the result + of executing an expression, such as building a configuration file + from a template. + + Usually files are staged within the [designated output directory](#Runtime_environment). + However, under certain circumstances, files may be staged at + arbitrary locations, see discussion for `entryname`. + + """ + + def __init__( + self, + entry: Any, + entryname: Optional[Any] = None, + writable: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.entryname = entryname + self.entry = entry + self.writable = writable + + def __eq__(self, other: Any) -> bool: + if isinstance(other, Dirent): + return bool( + self.entryname == other.entryname + and self.entry == other.entry + and self.writable == other.writable + ) + return False + + def __hash__(self) -> int: + return hash((self.entryname, self.entry, self.writable)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "Dirent": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "entryname" in _doc: + try: + entryname = load_field( + _doc.get("entryname"), + union_of_None_type_or_strtype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'entryname' field is not valid because:", + SourceLine(_doc, "entryname", str), + [e], + ) + ) + else: + entryname = None + try: + entry = load_field( + _doc.get("entry"), + union_of_strtype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'entry' field is not valid because:", + SourceLine(_doc, "entry", str), + [e], + ) + ) + if "writable" in _doc: + try: + writable = load_field( + _doc.get("writable"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'writable' field is not valid because:", + SourceLine(_doc, "writable", str), + [e], + ) + ) + else: + writable = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `entryname`, `entry`, `writable`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'Dirent'", None, _errors__) + _constructed = cls( + entryname=entryname, + entry=entry, + writable=writable, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if 
keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.entryname is not None and "entryname" not in r: + r["entryname"] = save( + self.entryname, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="entryname", + val=r.get("entryname"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.entry is not None and "entry" not in r: + r["entry"] = save( + self.entry, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="entry", + val=r.get("entry"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.writable is not None and "writable" not in r: + r["writable"] = save( + self.writable, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="writable", + val=r.get("writable"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["entryname", "entry", "writable"]) + + +class InitialWorkDirRequirement(ProcessRequirement): + """ + Define a list of files and subdirectories that must be staged by the workflow platform prior to executing the command line tool. + Normally files are staged within the designated output directory. 
However, when running inside containers, files may be staged at arbitrary locations, see discussion for [`Dirent.entryname`](#Dirent). Together with `DockerRequirement.dockerOutputDirectory` it is possible to control the locations of both input and output files when running in containers. + """ + + def __init__( + self, + listing: Any, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "InitialWorkDirRequirement" + self.listing = listing + + def __eq__(self, other: Any) -> bool: + if isinstance(other, InitialWorkDirRequirement): + return bool(self.class_ == other.class_ and self.listing == other.listing) + return False + + def __hash__(self) -> int: + return hash((self.class_, self.listing)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "InitialWorkDirRequirement": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "InitialWorkDirRequirement": + raise ValidationException("Not a InitialWorkDirRequirement") + + try: + listing = load_field( + _doc.get("listing"), + union_of_ExpressionLoader_or_array_of_union_of_None_type_or_DirentLoader_or_ExpressionLoader_or_FileLoader_or_DirectoryLoader_or_array_of_union_of_FileLoader_or_DirectoryLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'listing' field is not valid because:", + SourceLine(_doc, "listing", str), + [e], + ) + ) + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`, `listing`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'InitialWorkDirRequirement'", None, _errors__ + ) + _constructed = cls( + listing=listing, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "InitialWorkDirRequirement" + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = 
doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.listing is not None and "listing" not in r: + r["listing"] = save( + self.listing, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="listing", + val=r.get("listing"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["class", "listing"]) + + +class EnvVarRequirement(ProcessRequirement): + """ + Define a list of environment variables which will be set in the + execution environment of the tool. See `EnvironmentDef` for details. + + """ + + def __init__( + self, + envDef: Any, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "EnvVarRequirement" + self.envDef = envDef + + def __eq__(self, other: Any) -> bool: + if isinstance(other, EnvVarRequirement): + return bool(self.class_ == other.class_ and self.envDef == other.envDef) + return False + + def __hash__(self) -> int: + return hash((self.class_, self.envDef)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "EnvVarRequirement": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "EnvVarRequirement": + raise ValidationException("Not a EnvVarRequirement") + + try: + envDef = load_field( + _doc.get("envDef"), + idmap_envDef_array_of_EnvironmentDefLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'envDef' field is not valid because:", + SourceLine(_doc, "envDef", str), + [e], + ) + ) + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`, `envDef`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'EnvVarRequirement'", None, _errors__) 
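+        # Field-level errors are accumulated in `_errors__` and raised together
+        # as one nested ValidationException, so a single parse reports every
+        # invalid field rather than stopping at the first. Sketch, with a
+        # hypothetical `bad_doc` (illustration only):
+        #
+        #     try:
+        #         EnvVarRequirement.fromDoc(bad_doc, baseuri, loadingOptions)
+        #     except ValidationException as exc:
+        #         print(str(exc))  # nested message listing the per-field causes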
+ _constructed = cls( + envDef=envDef, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "EnvVarRequirement" + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.envDef is not None and "envDef" not in r: + r["envDef"] = save( + self.envDef, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="envDef", + val=r.get("envDef"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["class", "envDef"]) + + +class ShellCommandRequirement(ProcessRequirement): + """ + Modify the behavior of CommandLineTool to generate a single string + containing a shell command line. Each item in the `arguments` list must + be joined into a string separated by single spaces and quoted to prevent + interpretation by the shell, unless `CommandLineBinding` for that argument + contains `shellQuote: false`. If `shellQuote: false` is specified, the + argument is joined into the command string without quoting, which allows + the use of shell metacharacters such as `|` for pipes. 
+ + """ + + def __init__( + self, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "ShellCommandRequirement" + + def __eq__(self, other: Any) -> bool: + if isinstance(other, ShellCommandRequirement): + return bool(self.class_ == other.class_) + return False + + def __hash__(self) -> int: + return hash((self.class_)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "ShellCommandRequirement": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "ShellCommandRequirement": + raise ValidationException("Not a ShellCommandRequirement") + + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`".format(k), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'ShellCommandRequirement'", None, _errors__ + ) + _constructed = cls( + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "ShellCommandRequirement" + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if 
self.loadingOptions.schemas:
+ r["$schemas"] = self.loadingOptions.schemas
+ return r
+
+ attrs = frozenset(["class"])
+
+
+class ResourceRequirement(ProcessRequirement):
+ """
+ Specify basic hardware resource requirements.
+
+ "min" is the minimum amount of a resource that must be reserved to
+ schedule a job. If "min" cannot be satisfied, the job should not
+ be run.
+
+ "max" is the maximum amount of a resource that the job shall be
+ allocated. If a node has sufficient resources, multiple jobs may
+ be scheduled on a single node provided each job's "max" resource
+ requirements are met. If a job attempts to exceed its resource
+ allocation, an implementation may deny additional resources, which
+ may result in job failure.
+
+ If both "min" and "max" are specified, an implementation may
+ choose to allocate any amount between "min" and "max", with the
+ actual allocation provided in the `runtime` object.
+
+ If "min" is specified but "max" is not, then "max" == "min".
+ If "max" is specified but "min" is not, then "min" == "max".
+
+ It is an error if max < min.
+
+ It is an error if the value of any of these fields is negative.
+
+ If neither "min" nor "max" is specified for a resource, use the default values below.
+
+ """
+
+ def __init__(
+ self,
+ coresMin: Optional[Any] = None,
+ coresMax: Optional[Any] = None,
+ ramMin: Optional[Any] = None,
+ ramMax: Optional[Any] = None,
+ tmpdirMin: Optional[Any] = None,
+ tmpdirMax: Optional[Any] = None,
+ outdirMin: Optional[Any] = None,
+ outdirMax: Optional[Any] = None,
+ extension_fields: Optional[Dict[str, Any]] = None,
+ loadingOptions: Optional[LoadingOptions] = None,
+ ) -> None:
+
+ if extension_fields:
+ self.extension_fields = extension_fields
+ else:
+ self.extension_fields = CommentedMap()
+ if loadingOptions:
+ self.loadingOptions = loadingOptions
+ else:
+ self.loadingOptions = LoadingOptions()
+ self.class_ = "ResourceRequirement"
+ self.coresMin = coresMin
+ self.coresMax = coresMax
+ self.ramMin = ramMin
+ self.ramMax = ramMax
+ self.tmpdirMin = tmpdirMin
+ self.tmpdirMax = tmpdirMax
+ self.outdirMin = outdirMin
+ self.outdirMax = outdirMax
+
+ def __eq__(self, other: Any) -> bool:
+ if isinstance(other, ResourceRequirement):
+ return bool(
+ self.class_ == other.class_
+ and self.coresMin == other.coresMin
+ and self.coresMax == other.coresMax
+ and self.ramMin == other.ramMin
+ and self.ramMax == other.ramMax
+ and self.tmpdirMin == other.tmpdirMin
+ and self.tmpdirMax == other.tmpdirMax
+ and self.outdirMin == other.outdirMin
+ and self.outdirMax == other.outdirMax
+ )
+ return False
+
+ def __hash__(self) -> int:
+ return hash(
+ (
+ self.class_,
+ self.coresMin,
+ self.coresMax,
+ self.ramMin,
+ self.ramMax,
+ self.tmpdirMin,
+ self.tmpdirMax,
+ self.outdirMin,
+ self.outdirMax,
+ )
+ )
+
+ @classmethod
+ def fromDoc(
+ cls,
+ doc: Any,
+ baseuri: str,
+ loadingOptions: LoadingOptions,
+ docRoot: Optional[str] = None,
+ ) -> "ResourceRequirement":
+ _doc = copy.copy(doc)
+ if hasattr(doc, "lc"):
+ _doc.lc.data = doc.lc.data
+ _doc.lc.filename = doc.lc.filename
+ _errors__ = []
+
+ if _doc.get("class") != "ResourceRequirement":
+ raise ValidationException("Not a ResourceRequirement")
+
+ if "coresMin" in _doc:
+ try:
+ coresMin = load_field(
+ _doc.get("coresMin"),
+ union_of_None_type_or_inttype_or_floattype_or_ExpressionLoader,
+ baseuri,
+ loadingOptions,
+ )
+ except ValidationException as e:
+ _errors__.append(
+ ValidationException(
+ "the 'coresMin' field is not valid because:",
+ SourceLine(_doc, "coresMin",
str), + [e], + ) + ) + else: + coresMin = None + if "coresMax" in _doc: + try: + coresMax = load_field( + _doc.get("coresMax"), + union_of_None_type_or_inttype_or_floattype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'coresMax' field is not valid because:", + SourceLine(_doc, "coresMax", str), + [e], + ) + ) + else: + coresMax = None + if "ramMin" in _doc: + try: + ramMin = load_field( + _doc.get("ramMin"), + union_of_None_type_or_inttype_or_floattype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'ramMin' field is not valid because:", + SourceLine(_doc, "ramMin", str), + [e], + ) + ) + else: + ramMin = None + if "ramMax" in _doc: + try: + ramMax = load_field( + _doc.get("ramMax"), + union_of_None_type_or_inttype_or_floattype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'ramMax' field is not valid because:", + SourceLine(_doc, "ramMax", str), + [e], + ) + ) + else: + ramMax = None + if "tmpdirMin" in _doc: + try: + tmpdirMin = load_field( + _doc.get("tmpdirMin"), + union_of_None_type_or_inttype_or_floattype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'tmpdirMin' field is not valid because:", + SourceLine(_doc, "tmpdirMin", str), + [e], + ) + ) + else: + tmpdirMin = None + if "tmpdirMax" in _doc: + try: + tmpdirMax = load_field( + _doc.get("tmpdirMax"), + union_of_None_type_or_inttype_or_floattype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'tmpdirMax' field is not valid because:", + SourceLine(_doc, "tmpdirMax", str), + [e], + ) + ) + else: + tmpdirMax = None + if "outdirMin" in _doc: + try: + outdirMin = load_field( + _doc.get("outdirMin"), + union_of_None_type_or_inttype_or_floattype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'outdirMin' field is not valid because:", + SourceLine(_doc, "outdirMin", str), + [e], + ) + ) + else: + outdirMin = None + if "outdirMax" in _doc: + try: + outdirMax = load_field( + _doc.get("outdirMax"), + union_of_None_type_or_inttype_or_floattype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'outdirMax' field is not valid because:", + SourceLine(_doc, "outdirMax", str), + [e], + ) + ) + else: + outdirMax = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`, `coresMin`, `coresMax`, `ramMin`, `ramMax`, `tmpdirMin`, `tmpdirMax`, `outdirMin`, `outdirMax`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'ResourceRequirement'", None, _errors__) + _constructed = cls( + coresMin=coresMin, + coresMax=coresMax, + ramMin=ramMin, + ramMax=ramMax, + tmpdirMin=tmpdirMin, + tmpdirMax=tmpdirMax, + outdirMin=outdirMin, + outdirMax=outdirMax, + extension_fields=extension_fields, + 
loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "ResourceRequirement" + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.coresMin is not None and "coresMin" not in r: + r["coresMin"] = save( + self.coresMin, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="coresMin", + val=r.get("coresMin"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.coresMax is not None and "coresMax" not in r: + r["coresMax"] = save( + self.coresMax, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="coresMax", + val=r.get("coresMax"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.ramMin is not None and "ramMin" not in r: + r["ramMin"] = save( + self.ramMin, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="ramMin", + val=r.get("ramMin"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.ramMax is not None and "ramMax" not in r: + r["ramMax"] = save( + self.ramMax, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="ramMax", + 
val=r.get("ramMax"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.tmpdirMin is not None and "tmpdirMin" not in r: + r["tmpdirMin"] = save( + self.tmpdirMin, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="tmpdirMin", + val=r.get("tmpdirMin"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.tmpdirMax is not None and "tmpdirMax" not in r: + r["tmpdirMax"] = save( + self.tmpdirMax, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="tmpdirMax", + val=r.get("tmpdirMax"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.outdirMin is not None and "outdirMin" not in r: + r["outdirMin"] = save( + self.outdirMin, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="outdirMin", + val=r.get("outdirMin"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.outdirMax is not None and "outdirMax" not in r: + r["outdirMax"] = save( + self.outdirMax, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="outdirMax", + val=r.get("outdirMax"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "class", + "coresMin", + "coresMax", + "ramMin", + "ramMax", + "tmpdirMin", + "tmpdirMax", + "outdirMin", + "outdirMax", + ] + ) + + +class WorkReuse(ProcessRequirement): + """ + For implementations that support reusing output from past work (on + the assumption that same code and same input produce same + results), control whether to enable or disable the reuse behavior + for a particular tool or step (to accommodate situations where that + assumption is incorrect). A reused step is not executed but + instead returns the same output as the original execution. + + If `WorkReuse` is not specified, correct tools should assume it + is enabled by default. 
+ + """ + + def __init__( + self, + enableReuse: Any, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "WorkReuse" + self.enableReuse = enableReuse + + def __eq__(self, other: Any) -> bool: + if isinstance(other, WorkReuse): + return bool( + self.class_ == other.class_ and self.enableReuse == other.enableReuse + ) + return False + + def __hash__(self) -> int: + return hash((self.class_, self.enableReuse)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "WorkReuse": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "WorkReuse": + raise ValidationException("Not a WorkReuse") + + try: + enableReuse = load_field( + _doc.get("enableReuse"), + union_of_booltype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'enableReuse' field is not valid because:", + SourceLine(_doc, "enableReuse", str), + [e], + ) + ) + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`, `enableReuse`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'WorkReuse'", None, _errors__) + _constructed = cls( + enableReuse=enableReuse, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "WorkReuse" + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + 
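+                            # `add_kv` records where this re-emitted key lands in
+                            # the output document: `inserted_line_info` maps output
+                            # lines that are already claimed, and the
+                            # `while line in inserted_line_info` probe above bumps
+                            # `line` and `shift` past claimed lines so replayed
+                            # keys never collide with earlier insertions.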
max_len, inserted_line_info = add_kv(
+ old_doc=doc,
+ new_doc=r,
+ line_numbers=line_numbers,
+ key=key,
+ val=r.get(key),
+ cols=cols,
+ min_col=min_col,
+ max_len=max_len,
+ inserted_line_info=inserted_line_info,
+ shift=shift
+ )
+ if self.enableReuse is not None and "enableReuse" not in r:
+ r["enableReuse"] = save(
+ self.enableReuse,
+ top=False,
+ base_url=base_url,
+ relative_uris=relative_uris,
+ inserted_line_info=inserted_line_info,
+ shift=shift,
+ )
+ max_len, inserted_line_info = add_kv(
+ old_doc=doc,
+ new_doc=r,
+ line_numbers=line_numbers,
+ key="enableReuse",
+ val=r.get("enableReuse"),
+ cols=cols,
+ min_col=min_col,
+ max_len=max_len,
+ inserted_line_info=inserted_line_info,
+ shift=shift,
+ )
+
+ # top refers to the directory level
+ if top:
+ if self.loadingOptions.namespaces:
+ r["$namespaces"] = self.loadingOptions.namespaces
+ if self.loadingOptions.schemas:
+ r["$schemas"] = self.loadingOptions.schemas
+ return r
+
+ attrs = frozenset(["class", "enableReuse"])
+
+
+class NetworkAccess(ProcessRequirement):
+ """
+ Indicate whether a process requires outgoing IPv4/IPv6 network
+ access. Choice of IPv4 or IPv6 is implementation and site
+ specific; correct tools must support both.
+
+ If `networkAccess` is false or not specified, tools must not
+ assume network access, except for localhost (the loopback device).
+
+ If `networkAccess` is true, the tool must be able to make outgoing
+ connections to network resources. Resources may be on a private
+ subnet or the public Internet. However, implementations and sites
+ may apply their own security policies to restrict what is
+ accessible by the tool.
+
+ Enabling network access does not imply a publicly routable IP
+ address or the ability to accept inbound connections.
+
+ """
+
+ def __init__(
+ self,
+ networkAccess: Any,
+ extension_fields: Optional[Dict[str, Any]] = None,
+ loadingOptions: Optional[LoadingOptions] = None,
+ ) -> None:
+
+ if extension_fields:
+ self.extension_fields = extension_fields
+ else:
+ self.extension_fields = CommentedMap()
+ if loadingOptions:
+ self.loadingOptions = loadingOptions
+ else:
+ self.loadingOptions = LoadingOptions()
+ self.class_ = "NetworkAccess"
+ self.networkAccess = networkAccess
+
+ def __eq__(self, other: Any) -> bool:
+ if isinstance(other, NetworkAccess):
+ return bool(
+ self.class_ == other.class_
+ and self.networkAccess == other.networkAccess
+ )
+ return False
+
+ def __hash__(self) -> int:
+ return hash((self.class_, self.networkAccess))
+
+ @classmethod
+ def fromDoc(
+ cls,
+ doc: Any,
+ baseuri: str,
+ loadingOptions: LoadingOptions,
+ docRoot: Optional[str] = None,
+ ) -> "NetworkAccess":
+ _doc = copy.copy(doc)
+ if hasattr(doc, "lc"):
+ _doc.lc.data = doc.lc.data
+ _doc.lc.filename = doc.lc.filename
+ _errors__ = []
+
+ if _doc.get("class") != "NetworkAccess":
+ raise ValidationException("Not a NetworkAccess")
+
+ try:
+ networkAccess = load_field(
+ _doc.get("networkAccess"),
+ union_of_booltype_or_ExpressionLoader,
+ baseuri,
+ loadingOptions,
+ )
+ except ValidationException as e:
+ _errors__.append(
+ ValidationException(
+ "the 'networkAccess' field is not valid because:",
+ SourceLine(_doc, "networkAccess", str),
+ [e],
+ )
+ )
+ extension_fields: Dict[str, Any] = {}
+ for k in _doc.keys():
+ if k not in cls.attrs:
+ if ":" in k:
+ ex = expand_url(
+ k, "", loadingOptions, scoped_id=False, vocab_term=False
+ )
+ extension_fields[ex] = _doc[k]
+ else:
+ _errors__.append(
+ ValidationException(
+ "invalid field `{}`, expected one of: `class`, 
`networkAccess`".format(
+ k
+ ),
+ SourceLine(_doc, k, str),
+ )
+ )
+ break
+
+ if _errors__:
+ raise ValidationException("Trying 'NetworkAccess'", None, _errors__)
+ _constructed = cls(
+ networkAccess=networkAccess,
+ extension_fields=extension_fields,
+ loadingOptions=loadingOptions,
+ )
+ return _constructed
+
+ def save(
+ self,
+ top: bool = False,
+ base_url: str = "",
+ relative_uris: bool = True,
+ keys: Optional[List[Any]] = None,
+ inserted_line_info: Optional[Dict[int, int]] = None,
+ shift: int = 0
+ ) -> CommentedMap:
+ if keys is None:
+ keys = []
+ r = CommentedMap()
+ keys = copy.copy(keys)
+
+ doc = iterate_through_doc(keys)
+
+ if inserted_line_info is None:
+ inserted_line_info = {}
+
+ if doc is not None:
+ r._yaml_set_line_col(doc.lc.line, doc.lc.col)
+ line_numbers = get_line_numbers(doc)
+ max_len = get_max_line_num(doc)
+ min_col = get_min_col(line_numbers)
+ cols: Dict[int, int] = {}
+
+ if relative_uris:
+ for ef in self.extension_fields:
+ r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]
+ else:
+ for ef in self.extension_fields:
+ r[ef] = self.extension_fields[ef]
+
+ r["class"] = "NetworkAccess"
+
+ if doc:
+ for key in doc.lc.data.keys():
+ if isinstance(key, str):
+ if hasattr(self, key):
+ if getattr(self, key) is not None:
+ if key != 'class':
+ line = doc.lc.data[key][0] + shift
+ if inserted_line_info:
+ while line in inserted_line_info:
+ line += 1
+ shift += 1
+ saved_val = save(
+ getattr(self, key),
+ top=False,
+ base_url=base_url,
+ relative_uris=relative_uris,
+ keys=keys + [key],
+ inserted_line_info=inserted_line_info,
+ shift=shift
+ )
+
+ # If the returned value is a list of size 1, just save the value in the list
+ if type(saved_val) == list:
+ if (
+ len(saved_val) == 1
+ ):
+ saved_val = saved_val[0]
+
+ r[key] = saved_val
+
+ max_len, inserted_line_info = add_kv(
+ old_doc=doc,
+ new_doc=r,
+ line_numbers=line_numbers,
+ key=key,
+ val=r.get(key),
+ cols=cols,
+ min_col=min_col,
+ max_len=max_len,
+ inserted_line_info=inserted_line_info,
+ shift=shift
+ )
+ if self.networkAccess is not None and "networkAccess" not in r:
+ r["networkAccess"] = save(
+ self.networkAccess,
+ top=False,
+ base_url=base_url,
+ relative_uris=relative_uris,
+ inserted_line_info=inserted_line_info,
+ shift=shift,
+ )
+ max_len, inserted_line_info = add_kv(
+ old_doc=doc,
+ new_doc=r,
+ line_numbers=line_numbers,
+ key="networkAccess",
+ val=r.get("networkAccess"),
+ cols=cols,
+ min_col=min_col,
+ max_len=max_len,
+ inserted_line_info=inserted_line_info,
+ shift=shift,
+ )
+
+ # top refers to the directory level
+ if top:
+ if self.loadingOptions.namespaces:
+ r["$namespaces"] = self.loadingOptions.namespaces
+ if self.loadingOptions.schemas:
+ r["$schemas"] = self.loadingOptions.schemas
+ return r
+
+ attrs = frozenset(["class", "networkAccess"])
+
+
+class InplaceUpdateRequirement(ProcessRequirement):
+ """
+
+ If `inplaceUpdate` is true, then an implementation supporting this
+ feature may permit tools to directly update files with `writable:
+ true` in InitialWorkDirRequirement. That is, as an optimization,
+ files may be destructively modified in place as opposed to copied
+ and updated.
+
+ An implementation must ensure that only one workflow step may
+ access a writable file at a time. It is an error if a file which
+ is writable by one workflow step is accessed (for reading or
+ writing) by any other workflow step running independently.
+ However, a file which has been updated in a previous completed + step may be used as input to multiple steps, provided it is + read-only in every step. + + Workflow steps which modify a file must produce the modified file + as output. Downstream steps which further process the file must + use the output of previous steps, and not refer to a common input + (this is necessary for both ordering and correctness). + + Workflow authors should provide this in the `hints` section. The + intent of this feature is that workflows produce the same results + whether or not InplaceUpdateRequirement is supported by the + implementation, and this feature is primarily available as an + optimization for particular environments. + + Users and implementers should be aware that workflows that + destructively modify inputs may not be repeatable or reproducible. + In particular, enabling this feature implies that WorkReuse should + not be enabled. + + """ + + def __init__( + self, + inplaceUpdate: Any, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "InplaceUpdateRequirement" + self.inplaceUpdate = inplaceUpdate + + def __eq__(self, other: Any) -> bool: + if isinstance(other, InplaceUpdateRequirement): + return bool( + self.class_ == other.class_ + and self.inplaceUpdate == other.inplaceUpdate + ) + return False + + def __hash__(self) -> int: + return hash((self.class_, self.inplaceUpdate)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "InplaceUpdateRequirement": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "InplaceUpdateRequirement": + raise ValidationException("Not a InplaceUpdateRequirement") + + try: + inplaceUpdate = load_field( + _doc.get("inplaceUpdate"), + booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'inplaceUpdate' field is not valid because:", + SourceLine(_doc, "inplaceUpdate", str), + [e], + ) + ) + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`, `inplaceUpdate`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'InplaceUpdateRequirement'", None, _errors__ + ) + _constructed = cls( + inplaceUpdate=inplaceUpdate, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + 
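+        # `get_line_numbers`, `get_max_line_num` and `get_min_col` (module-level
+        # helpers) capture the source layout of `doc` -- the per-key (line, col)
+        # positions, the last occupied line, and the leftmost key column -- which
+        # the emission code below consults to keep the round-tripped YAML close
+        # to the original formatting.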
line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "InplaceUpdateRequirement" + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.inplaceUpdate is not None and "inplaceUpdate" not in r: + r["inplaceUpdate"] = save( + self.inplaceUpdate, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="inplaceUpdate", + val=r.get("inplaceUpdate"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["class", "inplaceUpdate"]) + + +class ToolTimeLimit(ProcessRequirement): + """ + Set an upper limit on the execution time of a CommandLineTool. + A CommandLineTool whose execution duration exceeds the time + limit may be preemptively terminated and considered failed. + May also be used by batch systems to make scheduling decisions. + The execution duration excludes external operations, such as + staging of files, pulling a docker image etc, and only counts + wall-time for the execution of the command line itself. 
+ + """ + + def __init__( + self, + timelimit: Any, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "ToolTimeLimit" + self.timelimit = timelimit + + def __eq__(self, other: Any) -> bool: + if isinstance(other, ToolTimeLimit): + return bool( + self.class_ == other.class_ and self.timelimit == other.timelimit + ) + return False + + def __hash__(self) -> int: + return hash((self.class_, self.timelimit)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "ToolTimeLimit": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "ToolTimeLimit": + raise ValidationException("Not a ToolTimeLimit") + + try: + timelimit = load_field( + _doc.get("timelimit"), + union_of_inttype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'timelimit' field is not valid because:", + SourceLine(_doc, "timelimit", str), + [e], + ) + ) + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`, `timelimit`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'ToolTimeLimit'", None, _errors__) + _constructed = cls( + timelimit=timelimit, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "ToolTimeLimit" + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + 
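+                                # add_kv() merges the freshly saved pair into
+                                # the new CommentedMap while updating max_len
+                                # and inserted_line_info, so that later keys
+                                # are not placed on lines already claimed.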
max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.timelimit is not None and "timelimit" not in r: + r["timelimit"] = save( + self.timelimit, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="timelimit", + val=r.get("timelimit"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["class", "timelimit"]) + + +class ExpressionToolOutputParameter(OutputParameter): + def __init__( + self, + type: Any, + label: Optional[Any] = None, + secondaryFiles: Optional[Any] = None, + streamable: Optional[Any] = None, + doc: Optional[Any] = None, + id: Optional[Any] = None, + format: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.label = label + self.secondaryFiles = secondaryFiles + self.streamable = streamable + self.doc = doc + self.id = id + self.format = format + self.type = type + + def __eq__(self, other: Any) -> bool: + if isinstance(other, ExpressionToolOutputParameter): + return bool( + self.label == other.label + and self.secondaryFiles == other.secondaryFiles + and self.streamable == other.streamable + and self.doc == other.doc + and self.id == other.id + and self.format == other.format + and self.type == other.type + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.label, + self.secondaryFiles, + self.streamable, + self.doc, + self.id, + self.format, + self.type, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "ExpressionToolOutputParameter": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "id" in _doc: + try: + id = load_field( + _doc.get("id"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'id' field is not valid because:", + SourceLine(_doc, "id", str), + [e], + ) + ) + else: + id = None + + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot + else: + id = "_:" + str(_uuid__.uuid4()) + if not __original_id_is_none: + baseuri = id + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'label' field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "secondaryFiles" in _doc: + try: + secondaryFiles = load_field( + 
_doc.get("secondaryFiles"), + secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'secondaryFiles' field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [e], + ) + ) + else: + secondaryFiles = None + if "streamable" in _doc: + try: + streamable = load_field( + _doc.get("streamable"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'streamable' field is not valid because:", + SourceLine(_doc, "streamable", str), + [e], + ) + ) + else: + streamable = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'doc' field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + if "format" in _doc: + try: + format = load_field( + _doc.get("format"), + uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'format' field is not valid because:", + SourceLine(_doc, "format", str), + [e], + ) + ) + else: + format = None + try: + type = load_field( + _doc.get("type"), + typedsl_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'type' field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `format`, `type`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'ExpressionToolOutputParameter'", None, _errors__ + ) + _constructed = cls( + label=label, + secondaryFiles=secondaryFiles, + streamable=streamable, + doc=doc, + id=id, + format=format, + type=type, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[id] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc: + if self.id: + temp_id = self.id + if len(temp_id.split('#')) > 1: + temp_id = self.id.split("#")[1] + if temp_id in doc: + keys.append(temp_id) + temp_doc = doc.get(temp_id) + if isinstance(temp_doc, CommentedMap): + doc = temp_doc + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + 
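+        # The lookup above resolves this node's original CommentedMap from
+        # the module-level doc_line_info via the accumulated key path
+        # (iterate_through_doc), then narrows to this parameter's own
+        # sub-map through the fragment of self.id ("file#name" -> "name")
+        # before copying its line/column information.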
line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + base_url_to_save = base_url + if self.id: + base_url_to_save = self.id + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url_to_save, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.id is not None and "id" not in r: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="id", + val=r.get("id"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.secondaryFiles is not None and "secondaryFiles" not in r: + r["secondaryFiles"] = save( + self.secondaryFiles, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="secondaryFiles", + val=r.get("secondaryFiles"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.streamable is not None and "streamable" not in r: + r["streamable"] = save( + self.streamable, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="streamable", + val=r.get("streamable"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + 
inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.format is not None and "format" not in r: + u = save_relative_uri(self.format, str(self.id), True, None, relative_uris) + r["format"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="format", + val=r.get("format"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + ["label", "secondaryFiles", "streamable", "doc", "id", "format", "type"] + ) + + +class WorkflowInputParameter(InputParameter): + def __init__( + self, + type: Any, + label: Optional[Any] = None, + secondaryFiles: Optional[Any] = None, + streamable: Optional[Any] = None, + doc: Optional[Any] = None, + id: Optional[Any] = None, + format: Optional[Any] = None, + loadContents: Optional[Any] = None, + loadListing: Optional[Any] = None, + default: Optional[Any] = None, + inputBinding: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.label = label + self.secondaryFiles = secondaryFiles + self.streamable = streamable + self.doc = doc + self.id = id + self.format = format + self.loadContents = loadContents + self.loadListing = loadListing + self.default = default + self.type = type + self.inputBinding = inputBinding + + def __eq__(self, other: Any) -> bool: + if isinstance(other, WorkflowInputParameter): + return bool( + self.label == other.label + and self.secondaryFiles == other.secondaryFiles + and self.streamable == other.streamable + and self.doc == other.doc + and self.id == other.id + and self.format == other.format + and self.loadContents == other.loadContents + and self.loadListing == other.loadListing + and self.default == other.default + and self.type == other.type + and self.inputBinding == other.inputBinding + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.label, + self.secondaryFiles, + self.streamable, + self.doc, + self.id, + self.format, + self.loadContents, + self.loadListing, + self.default, + self.type, + self.inputBinding, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "WorkflowInputParameter": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "id" in _doc: + try: + id = load_field( + _doc.get("id"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except 
ValidationException as e: + _errors__.append( + ValidationException( + "the 'id' field is not valid because:", + SourceLine(_doc, "id", str), + [e], + ) + ) + else: + id = None + + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot + else: + id = "_:" + str(_uuid__.uuid4()) + if not __original_id_is_none: + baseuri = id + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'label' field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "secondaryFiles" in _doc: + try: + secondaryFiles = load_field( + _doc.get("secondaryFiles"), + secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'secondaryFiles' field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [e], + ) + ) + else: + secondaryFiles = None + if "streamable" in _doc: + try: + streamable = load_field( + _doc.get("streamable"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'streamable' field is not valid because:", + SourceLine(_doc, "streamable", str), + [e], + ) + ) + else: + streamable = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'doc' field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + if "format" in _doc: + try: + format = load_field( + _doc.get("format"), + uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'format' field is not valid because:", + SourceLine(_doc, "format", str), + [e], + ) + ) + else: + format = None + if "loadContents" in _doc: + try: + loadContents = load_field( + _doc.get("loadContents"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'loadContents' field is not valid because:", + SourceLine(_doc, "loadContents", str), + [e], + ) + ) + else: + loadContents = None + if "loadListing" in _doc: + try: + loadListing = load_field( + _doc.get("loadListing"), + union_of_None_type_or_LoadListingEnumLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'loadListing' field is not valid because:", + SourceLine(_doc, "loadListing", str), + [e], + ) + ) + else: + loadListing = None + if "default" in _doc: + try: + default = load_field( + _doc.get("default"), + union_of_None_type_or_FileLoader_or_DirectoryLoader_or_Any_type, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'default' field is not valid because:", + SourceLine(_doc, "default", str), + [e], + ) + ) + else: + default = None + try: + type = load_field( + _doc.get("type"), + 
typedsl_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'type' field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + if "inputBinding" in _doc: + try: + inputBinding = load_field( + _doc.get("inputBinding"), + union_of_None_type_or_InputBindingLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'inputBinding' field is not valid because:", + SourceLine(_doc, "inputBinding", str), + [e], + ) + ) + else: + inputBinding = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `format`, `loadContents`, `loadListing`, `default`, `type`, `inputBinding`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'WorkflowInputParameter'", None, _errors__ + ) + _constructed = cls( + label=label, + secondaryFiles=secondaryFiles, + streamable=streamable, + doc=doc, + id=id, + format=format, + loadContents=loadContents, + loadListing=loadListing, + default=default, + type=type, + inputBinding=inputBinding, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[id] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc: + if self.id: + temp_id = self.id + if len(temp_id.split('#')) > 1: + temp_id = self.id.split("#")[1] + if temp_id in doc: + keys.append(temp_id) + temp_doc = doc.get(temp_id) + if isinstance(temp_doc, CommentedMap): + doc = temp_doc + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + base_url_to_save = base_url + if self.id: + base_url_to_save = self.id + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url_to_save, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if 
type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.id is not None and "id" not in r: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="id", + val=r.get("id"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.secondaryFiles is not None and "secondaryFiles" not in r: + r["secondaryFiles"] = save( + self.secondaryFiles, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="secondaryFiles", + val=r.get("secondaryFiles"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.streamable is not None and "streamable" not in r: + r["streamable"] = save( + self.streamable, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="streamable", + val=r.get("streamable"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.format is not None and "format" not in r: + u = save_relative_uri(self.format, str(self.id), True, None, relative_uris) + r["format"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="format", + val=r.get("format"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.loadContents is not None and "loadContents" not in r: + r["loadContents"] = save( + self.loadContents, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="loadContents", + val=r.get("loadContents"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.loadListing is not None 
and "loadListing" not in r: + r["loadListing"] = save( + self.loadListing, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="loadListing", + val=r.get("loadListing"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.default is not None and "default" not in r: + r["default"] = save( + self.default, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="default", + val=r.get("default"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.inputBinding is not None and "inputBinding" not in r: + r["inputBinding"] = save( + self.inputBinding, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="inputBinding", + val=r.get("inputBinding"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "label", + "secondaryFiles", + "streamable", + "doc", + "id", + "format", + "loadContents", + "loadListing", + "default", + "type", + "inputBinding", + ] + ) + + +class ExpressionTool(Process): + """ + An ExpressionTool is a type of Process object that can be run by itself + or as a Workflow step. It executes a pure Javascript expression that has + access to the same input parameters as a workflow. It is meant to be used + sparingly as a way to isolate complex Javascript expressions that need to + operate on input data and produce some result; perhaps just a + rearrangement of the inputs. No Docker software container is required + or allowed. 
+ + """ + + def __init__( + self, + inputs: Any, + outputs: Any, + expression: Any, + id: Optional[Any] = None, + label: Optional[Any] = None, + doc: Optional[Any] = None, + requirements: Optional[Any] = None, + hints: Optional[Any] = None, + cwlVersion: Optional[Any] = None, + intent: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.id = id + self.label = label + self.doc = doc + self.inputs = inputs + self.outputs = outputs + self.requirements = requirements + self.hints = hints + self.cwlVersion = cwlVersion + self.intent = intent + self.class_ = "ExpressionTool" + self.expression = expression + + def __eq__(self, other: Any) -> bool: + if isinstance(other, ExpressionTool): + return bool( + self.id == other.id + and self.label == other.label + and self.doc == other.doc + and self.inputs == other.inputs + and self.outputs == other.outputs + and self.requirements == other.requirements + and self.hints == other.hints + and self.cwlVersion == other.cwlVersion + and self.intent == other.intent + and self.class_ == other.class_ + and self.expression == other.expression + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.id, + self.label, + self.doc, + self.inputs, + self.outputs, + self.requirements, + self.hints, + self.cwlVersion, + self.intent, + self.class_, + self.expression, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "ExpressionTool": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "ExpressionTool": + raise ValidationException("Not a ExpressionTool") + + if "id" in _doc: + try: + id = load_field( + _doc.get("id"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'id' field is not valid because:", + SourceLine(_doc, "id", str), + [e], + ) + ) + else: + id = None + + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot + else: + id = "_:" + str(_uuid__.uuid4()) + if not __original_id_is_none: + baseuri = id + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'label' field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'doc' field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + try: + inputs = load_field( + _doc.get("inputs"), + idmap_inputs_array_of_WorkflowInputParameterLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'inputs' field is not valid because:", + SourceLine(_doc, "inputs", str), + [e], + ) + ) 
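+        # Every field follows the same pattern: load_field() with a
+        # generated union loader; failures are appended to _errors__ rather
+        # than raised immediately, so the ValidationException raised at the
+        # end can report all invalid fields with their SourceLine positions.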
+ try: + outputs = load_field( + _doc.get("outputs"), + idmap_outputs_array_of_ExpressionToolOutputParameterLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'outputs' field is not valid because:", + SourceLine(_doc, "outputs", str), + [e], + ) + ) + if "requirements" in _doc: + try: + requirements = load_field( + _doc.get("requirements"), + idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'requirements' field is not valid because:", + SourceLine(_doc, "requirements", str), + [e], + ) + ) + else: + requirements = None + if "hints" in _doc: + try: + hints = load_field( + _doc.get("hints"), + idmap_hints_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'hints' field is not valid because:", + SourceLine(_doc, "hints", str), + [e], + ) + ) + else: + hints = None + if "cwlVersion" in _doc: + try: + cwlVersion = load_field( + _doc.get("cwlVersion"), + uri_union_of_None_type_or_CWLVersionLoader_False_True_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'cwlVersion' field is not valid because:", + SourceLine(_doc, "cwlVersion", str), + [e], + ) + ) + else: + cwlVersion = None + if "intent" in _doc: + try: + intent = load_field( + _doc.get("intent"), + uri_union_of_None_type_or_array_of_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'intent' field is not valid because:", + SourceLine(_doc, "intent", str), + [e], + ) + ) + else: + intent = None + try: + expression = load_field( + _doc.get("expression"), + ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'expression' field is not valid because:", + SourceLine(_doc, "expression", str), + [e], + ) + ) + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `id`, `label`, `doc`, `inputs`, 
`outputs`, `requirements`, `hints`, `cwlVersion`, `intent`, `class`, `expression`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'ExpressionTool'", None, _errors__) + _constructed = cls( + id=id, + label=label, + doc=doc, + inputs=inputs, + outputs=outputs, + requirements=requirements, + hints=hints, + cwlVersion=cwlVersion, + intent=intent, + expression=expression, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[id] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc: + if self.id: + temp_id = self.id + if len(temp_id.split('#')) > 1: + temp_id = self.id.split("#")[1] + if temp_id in doc: + keys.append(temp_id) + temp_doc = doc.get(temp_id) + if isinstance(temp_doc, CommentedMap): + doc = temp_doc + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "ExpressionTool" + + if doc: + base_url_to_save = base_url + if self.id: + base_url_to_save = self.id + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url_to_save, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.id is not None and "id" not in r: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="id", + val=r.get("id"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.doc is not None and "doc" not in r: + 
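+            # Fallback for fields that have no position in the source
+            # document (e.g. set programmatically): they are appended after
+            # the keys recovered from the original layout above.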
r["doc"] = save( + self.doc, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.inputs is not None and "inputs" not in r: + r["inputs"] = save( + self.inputs, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="inputs", + val=r.get("inputs"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.outputs is not None and "outputs" not in r: + r["outputs"] = save( + self.outputs, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="outputs", + val=r.get("outputs"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.requirements is not None and "requirements" not in r: + r["requirements"] = save( + self.requirements, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="requirements", + val=r.get("requirements"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.hints is not None and "hints" not in r: + r["hints"] = save( + self.hints, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="hints", + val=r.get("hints"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.cwlVersion is not None and "cwlVersion" not in r: + u = save_relative_uri( + self.cwlVersion, str(self.id), False, None, relative_uris + ) + r["cwlVersion"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="cwlVersion", + val=r.get("cwlVersion"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.intent is not None and "intent" not in r: + u = save_relative_uri(self.intent, str(self.id), True, None, relative_uris) + r["intent"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="intent", + val=r.get("intent"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.expression is not None and "expression" not in r: + r["expression"] = save( + self.expression, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="expression", + val=r.get("expression"), + cols=cols, + min_col=min_col, + max_len=max_len, + 
inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "id", + "label", + "doc", + "inputs", + "outputs", + "requirements", + "hints", + "cwlVersion", + "intent", + "class", + "expression", + ] + ) + + +class WorkflowOutputParameter(OutputParameter): + """ + Describe an output parameter of a workflow. The parameter must be + connected to one or more parameters defined in the workflow that + will provide the value of the output parameter. It is legal to + connect a WorkflowInputParameter to a WorkflowOutputParameter. + + See [WorkflowStepInput](#WorkflowStepInput) for discussion of + `linkMerge` and `pickValue`. + + """ + + def __init__( + self, + type: Any, + label: Optional[Any] = None, + secondaryFiles: Optional[Any] = None, + streamable: Optional[Any] = None, + doc: Optional[Any] = None, + id: Optional[Any] = None, + format: Optional[Any] = None, + outputSource: Optional[Any] = None, + linkMerge: Optional[Any] = None, + pickValue: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.label = label + self.secondaryFiles = secondaryFiles + self.streamable = streamable + self.doc = doc + self.id = id + self.format = format + self.outputSource = outputSource + self.linkMerge = linkMerge + self.pickValue = pickValue + self.type = type + + def __eq__(self, other: Any) -> bool: + if isinstance(other, WorkflowOutputParameter): + return bool( + self.label == other.label + and self.secondaryFiles == other.secondaryFiles + and self.streamable == other.streamable + and self.doc == other.doc + and self.id == other.id + and self.format == other.format + and self.outputSource == other.outputSource + and self.linkMerge == other.linkMerge + and self.pickValue == other.pickValue + and self.type == other.type + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.label, + self.secondaryFiles, + self.streamable, + self.doc, + self.id, + self.format, + self.outputSource, + self.linkMerge, + self.pickValue, + self.type, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "WorkflowOutputParameter": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "id" in _doc: + try: + id = load_field( + _doc.get("id"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'id' field is not valid because:", + SourceLine(_doc, "id", str), + [e], + ) + ) + else: + id = None + + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot + else: + id = "_:" + str(_uuid__.uuid4()) + if not __original_id_is_none: + baseuri = id + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + 
_errors__.append( + ValidationException( + "the 'label' field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "secondaryFiles" in _doc: + try: + secondaryFiles = load_field( + _doc.get("secondaryFiles"), + secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'secondaryFiles' field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [e], + ) + ) + else: + secondaryFiles = None + if "streamable" in _doc: + try: + streamable = load_field( + _doc.get("streamable"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'streamable' field is not valid because:", + SourceLine(_doc, "streamable", str), + [e], + ) + ) + else: + streamable = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'doc' field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + if "format" in _doc: + try: + format = load_field( + _doc.get("format"), + uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'format' field is not valid because:", + SourceLine(_doc, "format", str), + [e], + ) + ) + else: + format = None + if "outputSource" in _doc: + try: + outputSource = load_field( + _doc.get("outputSource"), + uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_1, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'outputSource' field is not valid because:", + SourceLine(_doc, "outputSource", str), + [e], + ) + ) + else: + outputSource = None + if "linkMerge" in _doc: + try: + linkMerge = load_field( + _doc.get("linkMerge"), + union_of_None_type_or_LinkMergeMethodLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'linkMerge' field is not valid because:", + SourceLine(_doc, "linkMerge", str), + [e], + ) + ) + else: + linkMerge = None + if "pickValue" in _doc: + try: + pickValue = load_field( + _doc.get("pickValue"), + union_of_None_type_or_PickValueMethodLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'pickValue' field is not valid because:", + SourceLine(_doc, "pickValue", str), + [e], + ) + ) + else: + pickValue = None + try: + type = load_field( + _doc.get("type"), + typedsl_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'type' field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + 
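+                    # Keys containing ":" are treated as namespaced extension
+                    # fields and expanded to absolute URIs; any other unknown
+                    # key falls through to the invalid-field error below.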
extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `format`, `outputSource`, `linkMerge`, `pickValue`, `type`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'WorkflowOutputParameter'", None, _errors__ + ) + _constructed = cls( + label=label, + secondaryFiles=secondaryFiles, + streamable=streamable, + doc=doc, + id=id, + format=format, + outputSource=outputSource, + linkMerge=linkMerge, + pickValue=pickValue, + type=type, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[id] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc: + if self.id: + temp_id = self.id + if len(temp_id.split('#')) > 1: + temp_id = self.id.split("#")[1] + if temp_id in doc: + keys.append(temp_id) + temp_doc = doc.get(temp_id) + if isinstance(temp_doc, CommentedMap): + doc = temp_doc + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + base_url_to_save = base_url + if self.id: + base_url_to_save = self.id + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url_to_save, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.id is not None and "id" not in r: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="id", + val=r.get("id"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + 
min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.secondaryFiles is not None and "secondaryFiles" not in r: + r["secondaryFiles"] = save( + self.secondaryFiles, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="secondaryFiles", + val=r.get("secondaryFiles"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.streamable is not None and "streamable" not in r: + r["streamable"] = save( + self.streamable, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="streamable", + val=r.get("streamable"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.format is not None and "format" not in r: + u = save_relative_uri(self.format, str(self.id), True, None, relative_uris) + r["format"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="format", + val=r.get("format"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.outputSource is not None and "outputSource" not in r: + u = save_relative_uri( + self.outputSource, str(self.id), False, 1, relative_uris + ) + r["outputSource"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="outputSource", + val=r.get("outputSource"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.linkMerge is not None and "linkMerge" not in r: + r["linkMerge"] = save( + self.linkMerge, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="linkMerge", + val=r.get("linkMerge"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.pickValue is not None and "pickValue" not in r: + r["pickValue"] = save( + self.pickValue, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="pickValue", + val=r.get("pickValue"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + 
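+            # add_kv records where this key/value pair lands in the
+            # round-tripped document: it returns updated max_len and
+            # inserted_line_info, which subsequent fields reuse so each
+            # key is emitted on a fresh line.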
max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "label", + "secondaryFiles", + "streamable", + "doc", + "id", + "format", + "outputSource", + "linkMerge", + "pickValue", + "type", + ] + ) + + +class Sink(Saveable): + pass + + +class WorkflowStepInput(Identified, Sink, LoadContents, Labeled): + """ + The input of a workflow step connects an upstream parameter (from the + workflow inputs, or the outputs of other workflow steps) with the input + parameters of the process specified by the `run` field. Only input parameters + declared by the target process will be passed through at runtime to the process, + though additional parameters may be specified (for use within `valueFrom` + expressions, for instance) - unconnected or unused parameters do not represent an + error condition. + + # Input object + + A WorkflowStepInput object must contain an `id` field in the form + `#fieldname` or `#prefix/fieldname`. When the `id` field contains a slash + `/`, the field name consists of the characters following the final slash + (the prefix portion may contain one or more slashes to indicate scope). + This defines a field of the workflow step input object with the value of + the `source` parameter(s). + + # Merging multiple inbound data links + + To merge multiple inbound data links, + [MultipleInputFeatureRequirement](#MultipleInputFeatureRequirement) must be specified + in the workflow or workflow step requirements. + + If the sink parameter is an array, or named in a [workflow + scatter](#WorkflowStep) operation, there may be multiple inbound + data links listed in the `source` field. The values from the + input links are merged depending on the method specified in the + `linkMerge` field. If both `linkMerge` and `pickValue` are null + or not specified, and there is more than one element in the + `source` array, the default method is "merge_nested". + + If both `linkMerge` and `pickValue` are null or not specified, and + there is only a single element in the `source`, then the input + parameter takes the scalar value from the single input link (it is + *not* wrapped in a single-item list). + + * **merge_nested** + + The input must be an array consisting of exactly one entry for each + input link. If "merge_nested" is specified with a single link, the value + from the link must be wrapped in a single-item list. + + * **merge_flattened** + + 1. The source and sink parameters must be compatible types, or the source + type must be compatible with a single element from the "items" type of + the destination array parameter. + 2. Source parameters which are arrays are concatenated. + Source parameters which are single element types are appended as + single elements. + + # Picking non-null values among inbound data links + + If present, `pickValue` specifies how to pick non-null values among inbound data links. + + `pickValue` is evaluated + 1. Once all source values from upstream steps or parameters are available. + 2. After `linkMerge`. + 3. Before `scatter` or `valueFrom`.
+ + This is specifically intended to be useful in combination with + [conditional execution](#WorkflowStep), where several upstream + steps may be connected to a single input (`source` is a list), and + skipped steps produce null values. + + Static type checkers should check for type consistency after inferring what the type + will be after `pickValue` is applied, just as they do currently for `linkMerge`. + + * **first_non_null** + + For the first level of a list input, pick the first non-null element. The result is a scalar. + It is an error if there is no non-null element. Examples: + * `[null, x, null, y] -> x` + * `[null, [null], null, y] -> [null]` + * `[null, null, null] -> Runtime Error` + + *Intended use case*: If-else pattern where the + value comes either from a conditional step or from a default or + fallback value. The conditional step(s) should be placed first in + the list. + + * **the_only_non_null** + + For the first level of a list input, pick the single non-null element. The result is a scalar. + It is an error if there is not exactly one non-null element. Examples: + + * `[null, x, null] -> x` + * `[null, x, null, y] -> Runtime Error` + * `[null, [null], null] -> [null]` + * `[null, null, null] -> Runtime Error` + + *Intended use case*: Switch type patterns where the developer considers + more than one active code path as a workflow error + (possibly indicating an error in writing `when` condition expressions). + + * **all_non_null** + + For the first level of a list input, pick all non-null values. + The result is a list, which may be empty. Examples: + + * `[null, x, null] -> [x]` + * `[x, null, y] -> [x, y]` + * `[null, [x], [null]] -> [[x], [null]]` + * `[null, null, null] -> []` + + *Intended use case*: It is valid to have more than one source, but + sources are conditional, so null sources (from skipped steps) + should be filtered out.
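+
+    As a non-normative illustration (an editorial sketch, not part of the
+    schema text), the `linkMerge` and `pickValue` behaviours described
+    above can be modelled in plain Python:
+
+        def link_merge(values, method="merge_nested"):
+            # Model linkMerge over the per-link source values.
+            if method == "merge_nested":
+                return list(values)  # exactly one entry per inbound link
+            if method == "merge_flattened":
+                merged = []
+                for v in values:
+                    merged.extend(v if isinstance(v, list) else [v])
+                return merged
+            raise ValueError("unknown linkMerge method: %s" % method)
+
+        def pick_value(values, method):
+            # Model pickValue over the first level of the merged list.
+            non_null = [v for v in values if v is not None]
+            if method == "first_non_null":
+                if not non_null:
+                    raise RuntimeError("first_non_null: no non-null element")
+                return non_null[0]  # scalar result
+            if method == "the_only_non_null":
+                if len(non_null) != 1:
+                    raise RuntimeError("the_only_non_null: need exactly one")
+                return non_null[0]  # scalar result
+            if method == "all_non_null":
+                return non_null  # list result, possibly empty
+            raise ValueError("unknown pickValue method: %s" % method)
+
+        # e.g. pick_value([None, "x", None, "y"], "first_non_null") == "x"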
+ + """ + + def __init__( + self, + id: Optional[Any] = None, + source: Optional[Any] = None, + linkMerge: Optional[Any] = None, + pickValue: Optional[Any] = None, + loadContents: Optional[Any] = None, + loadListing: Optional[Any] = None, + label: Optional[Any] = None, + default: Optional[Any] = None, + valueFrom: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.id = id + self.source = source + self.linkMerge = linkMerge + self.pickValue = pickValue + self.loadContents = loadContents + self.loadListing = loadListing + self.label = label + self.default = default + self.valueFrom = valueFrom + + def __eq__(self, other: Any) -> bool: + if isinstance(other, WorkflowStepInput): + return bool( + self.id == other.id + and self.source == other.source + and self.linkMerge == other.linkMerge + and self.pickValue == other.pickValue + and self.loadContents == other.loadContents + and self.loadListing == other.loadListing + and self.label == other.label + and self.default == other.default + and self.valueFrom == other.valueFrom + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.id, + self.source, + self.linkMerge, + self.pickValue, + self.loadContents, + self.loadListing, + self.label, + self.default, + self.valueFrom, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "WorkflowStepInput": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "id" in _doc: + try: + id = load_field( + _doc.get("id"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'id' field is not valid because:", + SourceLine(_doc, "id", str), + [e], + ) + ) + else: + id = None + + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot + else: + id = "_:" + str(_uuid__.uuid4()) + if not __original_id_is_none: + baseuri = id + if "source" in _doc: + try: + source = load_field( + _doc.get("source"), + uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'source' field is not valid because:", + SourceLine(_doc, "source", str), + [e], + ) + ) + else: + source = None + if "linkMerge" in _doc: + try: + linkMerge = load_field( + _doc.get("linkMerge"), + union_of_None_type_or_LinkMergeMethodLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'linkMerge' field is not valid because:", + SourceLine(_doc, "linkMerge", str), + [e], + ) + ) + else: + linkMerge = None + if "pickValue" in _doc: + try: + pickValue = load_field( + _doc.get("pickValue"), + union_of_None_type_or_PickValueMethodLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'pickValue' field is not valid because:", + SourceLine(_doc, "pickValue", str), + [e], + ) + ) + else: + pickValue = None + if "loadContents" in _doc: + try: + 
loadContents = load_field( + _doc.get("loadContents"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'loadContents' field is not valid because:", + SourceLine(_doc, "loadContents", str), + [e], + ) + ) + else: + loadContents = None + if "loadListing" in _doc: + try: + loadListing = load_field( + _doc.get("loadListing"), + union_of_None_type_or_LoadListingEnumLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'loadListing' field is not valid because:", + SourceLine(_doc, "loadListing", str), + [e], + ) + ) + else: + loadListing = None + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'label' field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "default" in _doc: + try: + default = load_field( + _doc.get("default"), + union_of_None_type_or_FileLoader_or_DirectoryLoader_or_Any_type, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'default' field is not valid because:", + SourceLine(_doc, "default", str), + [e], + ) + ) + else: + default = None + if "valueFrom" in _doc: + try: + valueFrom = load_field( + _doc.get("valueFrom"), + union_of_None_type_or_strtype_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'valueFrom' field is not valid because:", + SourceLine(_doc, "valueFrom", str), + [e], + ) + ) + else: + valueFrom = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `id`, `source`, `linkMerge`, `pickValue`, `loadContents`, `loadListing`, `label`, `default`, `valueFrom`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'WorkflowStepInput'", None, _errors__) + _constructed = cls( + id=id, + source=source, + linkMerge=linkMerge, + pickValue=pickValue, + loadContents=loadContents, + loadListing=loadListing, + label=label, + default=default, + valueFrom=valueFrom, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[id] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc: + if self.id: + temp_id = self.id + if len(temp_id.split('#')) > 1: + temp_id = self.id.split("#")[1] + if temp_id in doc: + keys.append(temp_id) + temp_doc = doc.get(temp_id) + if isinstance(temp_doc, CommentedMap): + doc = temp_doc + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = 
get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + base_url_to_save = base_url + if self.id: + base_url_to_save = self.id + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url_to_save, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.id is not None and "id" not in r: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="id", + val=r.get("id"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.source is not None and "source" not in r: + u = save_relative_uri(self.source, str(self.id), False, 2, relative_uris) + r["source"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="source", + val=r.get("source"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.linkMerge is not None and "linkMerge" not in r: + r["linkMerge"] = save( + self.linkMerge, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="linkMerge", + val=r.get("linkMerge"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.pickValue is not None and "pickValue" not in r: + r["pickValue"] = save( + self.pickValue, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="pickValue", + val=r.get("pickValue"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.loadContents is not None and "loadContents" not in r: + r["loadContents"] = save( + self.loadContents, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="loadContents", + val=r.get("loadContents"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.loadListing is not None and "loadListing" not in r: + r["loadListing"] = 
save( + self.loadListing, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="loadListing", + val=r.get("loadListing"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.default is not None and "default" not in r: + r["default"] = save( + self.default, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="default", + val=r.get("default"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.valueFrom is not None and "valueFrom" not in r: + r["valueFrom"] = save( + self.valueFrom, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="valueFrom", + val=r.get("valueFrom"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "id", + "source", + "linkMerge", + "pickValue", + "loadContents", + "loadListing", + "label", + "default", + "valueFrom", + ] + ) + + +class WorkflowStepOutput(Identified): + """ + Associate an output parameter of the underlying process with a workflow + parameter. The workflow parameter (given in the `id` field) may be used + as a `source` to connect with input parameters of other workflow steps, or + with an output parameter of the process. + + A unique identifier for this workflow output parameter. This is + the identifier to use in the `source` field of `WorkflowStepInput` + to connect the output value to downstream parameters.
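+
+    A minimal, non-normative sketch of loading a step output record with
+    this class (it relies on the module-level loaders defined earlier in
+    this file; the ids shown are illustrative only):
+
+        opts = LoadingOptions()
+        step_out = WorkflowStepOutput.fromDoc(
+            {"id": "processed"}, "file:///demo.cwl#step1", opts
+        )
+        # step_out.id is resolved against the step's base URI and is the
+        # identifier a downstream WorkflowStepInput `source` refers to.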
+ + """ + + def __init__( + self, + id: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.id = id + + def __eq__(self, other: Any) -> bool: + if isinstance(other, WorkflowStepOutput): + return bool(self.id == other.id) + return False + + def __hash__(self) -> int: + return hash((self.id)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "WorkflowStepOutput": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "id" in _doc: + try: + id = load_field( + _doc.get("id"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'id' field is not valid because:", + SourceLine(_doc, "id", str), + [e], + ) + ) + else: + id = None + + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot + else: + id = "_:" + str(_uuid__.uuid4()) + if not __original_id_is_none: + baseuri = id + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `id`".format(k), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'WorkflowStepOutput'", None, _errors__) + _constructed = cls( + id=id, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[id] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc: + if self.id: + temp_id = self.id + if len(temp_id.split('#')) > 1: + temp_id = self.id.split("#")[1] + if temp_id in doc: + keys.append(temp_id) + temp_doc = doc.get(temp_id) + if isinstance(temp_doc, CommentedMap): + doc = temp_doc + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + base_url_to_save = base_url + if self.id: + base_url_to_save = self.id + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + 
base_url=base_url_to_save, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.id is not None and "id" not in r: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="id", + val=r.get("id"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["id"]) + + +class WorkflowStep(Identified, Labeled, Documented): + """ + A workflow step is an executable element of a workflow. It specifies the + underlying process implementation (such as `CommandLineTool` or another + `Workflow`) in the `run` field and connects the input and output parameters + of the underlying process to workflow parameters. + + # Scatter/gather + + To use scatter/gather, + [ScatterFeatureRequirement](#ScatterFeatureRequirement) must be specified + in the workflow or workflow step requirements. + + A "scatter" operation specifies that the associated workflow step or + subworkflow should execute separately over a list of input elements. Each + job making up a scatter operation is independent and may be executed + concurrently. + + The `scatter` field specifies one or more input parameters which will be + scattered. An input parameter may be listed more than once. The declared + type of each input parameter implicitly becomes an array of items of the + input parameter type. If a parameter is listed more than once, it becomes + a nested array. As a result, upstream parameters which are connected to + scattered parameters must be arrays. + + All output parameter types are also implicitly wrapped in arrays. Each job + in the scatter results in an entry in the output array. + + If any scattered parameter runtime value is an empty array, all outputs are + set to empty arrays and no work is done for the step, according to + applicable scattering rules. + + If `scatter` declares more than one input parameter, `scatterMethod` + describes how to decompose the input into a discrete set of jobs. + + * **dotproduct** specifies that each of the input arrays are aligned and one + element taken from each array to construct each job. It is an error + if all input arrays are not the same length. + + * **nested_crossproduct** specifies the Cartesian product of the inputs, + producing a job for every combination of the scattered inputs. The + output must be nested arrays for each level of scattering, in the + order that the input arrays are listed in the `scatter` field. + + * **flat_crossproduct** specifies the Cartesian product of the inputs, + producing a job for every combination of the scattered inputs. 
The + output arrays must be flattened to a single level, but otherwise listed in the + order that the input arrays are listed in the `scatter` field. + + # Conditional execution (Optional) + + Conditional execution makes execution of a step conditional on an + expression. A step that is not executed is "skipped". A skipped + step produces `null` for all output parameters. + + The condition is evaluated after `scatter`, using the input object + of each individual scatter job. This means over a set of scatter + jobs, some may be executed and some may be skipped. When the + results are gathered, skipped steps must be `null` in the output + arrays. + + The `when` field controls conditional execution. This is an + expression that must be evaluated with `inputs` bound to the step + input object (or individual scatter job), and returns a boolean + value. It is an error if this expression returns a value other + than `true` or `false`. + + Conditionals in CWL are an optional feature and are not required + to be implemented by all consumers of CWL documents. An + implementation that does not support conditionals must return a + fatal error when attempting to execute a workflow that uses + conditional constructs the implementation does not support. + + # Subworkflows + + To specify a nested workflow as part of a workflow step, + [SubworkflowFeatureRequirement](#SubworkflowFeatureRequirement) must be + specified in the workflow or workflow step requirements. + + It is a fatal error if a workflow directly or indirectly invokes itself as + a subworkflow (recursive workflows are not allowed). + + """ + + def __init__( + self, + in_: Any, + out: Any, + run: Any, + id: Optional[Any] = None, + label: Optional[Any] = None, + doc: Optional[Any] = None, + requirements: Optional[Any] = None, + hints: Optional[Any] = None, + when: Optional[Any] = None, + scatter: Optional[Any] = None, + scatterMethod: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.id = id + self.label = label + self.doc = doc + self.in_ = in_ + self.out = out + self.requirements = requirements + self.hints = hints + self.run = run + self.when = when + self.scatter = scatter + self.scatterMethod = scatterMethod + + def __eq__(self, other: Any) -> bool: + if isinstance(other, WorkflowStep): + return bool( + self.id == other.id + and self.label == other.label + and self.doc == other.doc + and self.in_ == other.in_ + and self.out == other.out + and self.requirements == other.requirements + and self.hints == other.hints + and self.run == other.run + and self.when == other.when + and self.scatter == other.scatter + and self.scatterMethod == other.scatterMethod + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.id, + self.label, + self.doc, + self.in_, + self.out, + self.requirements, + self.hints, + self.run, + self.when, + self.scatter, + self.scatterMethod, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "WorkflowStep": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "id" in _doc: + try: + id = load_field( + 
_doc.get("id"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'id' field is not valid because:", + SourceLine(_doc, "id", str), + [e], + ) + ) + else: + id = None + + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot + else: + id = "_:" + str(_uuid__.uuid4()) + if not __original_id_is_none: + baseuri = id + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'label' field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'doc' field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + try: + in_ = load_field( + _doc.get("in"), + idmap_in__array_of_WorkflowStepInputLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'in' field is not valid because:", + SourceLine(_doc, "in", str), + [e], + ) + ) + try: + out = load_field( + _doc.get("out"), + uri_union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'out' field is not valid because:", + SourceLine(_doc, "out", str), + [e], + ) + ) + if "requirements" in _doc: + try: + requirements = load_field( + _doc.get("requirements"), + idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'requirements' field is not valid because:", + SourceLine(_doc, "requirements", str), + [e], + ) + ) + else: + requirements = None + if "hints" in _doc: + try: + hints = load_field( + _doc.get("hints"), + idmap_hints_union_of_None_type_or_array_of_Any_type, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'hints' field is not valid because:", + SourceLine(_doc, "hints", str), + [e], + ) + ) + else: + hints = None + + subscope_baseuri = expand_url('run', baseuri, loadingOptions, True) + try: + run = load_field( + _doc.get("run"), + uri_union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader_False_False_None, + subscope_baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'run' field is not valid because:", + SourceLine(_doc, "run", str), + [e], + ) + ) + if "when" in _doc: + try: + when = 
load_field( + _doc.get("when"), + union_of_None_type_or_ExpressionLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'when' field is not valid because:", + SourceLine(_doc, "when", str), + [e], + ) + ) + else: + when = None + if "scatter" in _doc: + try: + scatter = load_field( + _doc.get("scatter"), + uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_0, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'scatter' field is not valid because:", + SourceLine(_doc, "scatter", str), + [e], + ) + ) + else: + scatter = None + if "scatterMethod" in _doc: + try: + scatterMethod = load_field( + _doc.get("scatterMethod"), + uri_union_of_None_type_or_ScatterMethodLoader_False_True_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'scatterMethod' field is not valid because:", + SourceLine(_doc, "scatterMethod", str), + [e], + ) + ) + else: + scatterMethod = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `id`, `label`, `doc`, `in`, `out`, `requirements`, `hints`, `run`, `when`, `scatter`, `scatterMethod`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'WorkflowStep'", None, _errors__) + _constructed = cls( + id=id, + label=label, + doc=doc, + in_=in_, + out=out, + requirements=requirements, + hints=hints, + run=run, + when=when, + scatter=scatter, + scatterMethod=scatterMethod, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[id] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc: + if self.id: + temp_id = self.id + if len(temp_id.split('#')) > 1: + temp_id = self.id.split("#")[1] + if temp_id in doc: + keys.append(temp_id) + temp_doc = doc.get(temp_id) + if isinstance(temp_doc, CommentedMap): + doc = temp_doc + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + base_url_to_save = base_url + if self.id: + base_url_to_save = self.id + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url_to_save, + relative_uris=relative_uris, + keys=keys 
+ [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.id is not None and "id" not in r: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="id", + val=r.get("id"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.in_ is not None and "in" not in r: + r["in"] = save( + self.in_, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="in", + val=r.get("in"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.out is not None and "out" not in r: + u = save_relative_uri(self.out, str(self.id), True, None, relative_uris) + r["out"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="out", + val=r.get("out"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.requirements is not None and "requirements" not in r: + r["requirements"] = save( + self.requirements, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="requirements", + val=r.get("requirements"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.hints is not None and "hints" not in r: + r["hints"] = save( + self.hints, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="hints", + val=r.get("hints"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) 
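+        # Fields that were absent from the original document's key order
+        # (doc.lc.data) are appended below; each block is guarded by a
+        # `"<key>" not in r` check so already-emitted keys are not
+        # duplicated.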
+ if self.run is not None and "run" not in r: + u = save_relative_uri(self.run, str(self.id), False, None, relative_uris) + r["run"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="run", + val=r.get("run"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.when is not None and "when" not in r: + r["when"] = save( + self.when, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="when", + val=r.get("when"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.scatter is not None and "scatter" not in r: + u = save_relative_uri(self.scatter, str(self.id), False, 0, relative_uris) + r["scatter"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="scatter", + val=r.get("scatter"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.scatterMethod is not None and "scatterMethod" not in r: + u = save_relative_uri( + self.scatterMethod, str(self.id), False, None, relative_uris + ) + r["scatterMethod"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="scatterMethod", + val=r.get("scatterMethod"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "id", + "label", + "doc", + "in", + "out", + "requirements", + "hints", + "run", + "when", + "scatter", + "scatterMethod", + ] + ) + + +class Workflow(Process): + """ + A workflow describes a set of **steps** and the **dependencies** between + those steps. When a step produces output that will be consumed by a + second step, the first step is a dependency of the second step. + + When there is a dependency, the workflow engine must execute the preceding + step and wait for it to successfully produce output before executing the + dependent step. If two steps are defined in the workflow graph that + are not directly or indirectly dependent, these steps are **independent**, + and may execute in any order or execute concurrently. A workflow is + complete when all steps have been executed. + + Dependencies between parameters are expressed using the `source` + field on [workflow step input parameters](#WorkflowStepInput) and + `outputSource` field on [workflow output + parameters](#WorkflowOutputParameter). + + The `source` field on each workflow step input parameter expresses + the data links that contribute to the value of the step input + parameter (the "sink"). A workflow step can only begin execution + when every data link connected to a step has been fulfilled. + + The `outputSource` field on each workflow output parameter + expresses the data links that contribute to the value of the + workflow output parameter (the "sink"). Workflow execution cannot + complete successfully until every data link connected to an output + parameter has been fulfilled.
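+
+    As a non-normative sketch, these dependency edges can be read directly
+    off the objects parsed by this module (the helper name below is
+    illustrative, not part of the schema):
+
+        def step_dependencies(workflow):
+            # Map each step id to the source links its inputs consume.
+            deps = {}
+            for step in workflow.steps:
+                sources = []
+                for inp in step.in_:
+                    if inp.source is None:
+                        continue
+                    # `source` may hold one link or a list of links
+                    if isinstance(inp.source, list):
+                        sources.extend(inp.source)
+                    else:
+                        sources.append(inp.source)
+                deps[step.id] = sources
+            return deps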
+ + ## Workflow success and failure + + A completed step must result in one of `success`, `temporaryFailure` or + `permanentFailure` states. An implementation may choose to retry a step + execution which resulted in `temporaryFailure`. An implementation may + choose to either continue running other steps of a workflow, or terminate + immediately upon `permanentFailure`. + + * If any step of a workflow execution results in `permanentFailure`, then + the workflow status is `permanentFailure`. + + * If one or more steps result in `temporaryFailure` and all other steps + complete `success` or are not executed, then the workflow status is + `temporaryFailure`. + + * If all workflow steps are executed and complete with `success`, then the + workflow status is `success`. + + # Extensions + + [ScatterFeatureRequirement](#ScatterFeatureRequirement) and + [SubworkflowFeatureRequirement](#SubworkflowFeatureRequirement) are + available as standard [extensions](#Extensions_and_Metadata) to core + workflow semantics. + + """ + + def __init__( + self, + inputs: Any, + outputs: Any, + steps: Any, + id: Optional[Any] = None, + label: Optional[Any] = None, + doc: Optional[Any] = None, + requirements: Optional[Any] = None, + hints: Optional[Any] = None, + cwlVersion: Optional[Any] = None, + intent: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.id = id + self.label = label + self.doc = doc + self.inputs = inputs + self.outputs = outputs + self.requirements = requirements + self.hints = hints + self.cwlVersion = cwlVersion + self.intent = intent + self.class_ = "Workflow" + self.steps = steps + + def __eq__(self, other: Any) -> bool: + if isinstance(other, Workflow): + return bool( + self.id == other.id + and self.label == other.label + and self.doc == other.doc + and self.inputs == other.inputs + and self.outputs == other.outputs + and self.requirements == other.requirements + and self.hints == other.hints + and self.cwlVersion == other.cwlVersion + and self.intent == other.intent + and self.class_ == other.class_ + and self.steps == other.steps + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.id, + self.label, + self.doc, + self.inputs, + self.outputs, + self.requirements, + self.hints, + self.cwlVersion, + self.intent, + self.class_, + self.steps, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "Workflow": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "Workflow": + raise ValidationException("Not a Workflow") + + if "id" in _doc: + try: + id = load_field( + _doc.get("id"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'id' field is not valid because:", + SourceLine(_doc, "id", str), + [e], + ) + ) + else: + id = None + + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot + else: + id = "_:" + str(_uuid__.uuid4()) + if not __original_id_is_none: + baseuri = id + if "label" in _doc: + try: 
+ label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'label' field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'doc' field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + try: + inputs = load_field( + _doc.get("inputs"), + idmap_inputs_array_of_WorkflowInputParameterLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'inputs' field is not valid because:", + SourceLine(_doc, "inputs", str), + [e], + ) + ) + try: + outputs = load_field( + _doc.get("outputs"), + idmap_outputs_array_of_WorkflowOutputParameterLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'outputs' field is not valid because:", + SourceLine(_doc, "outputs", str), + [e], + ) + ) + if "requirements" in _doc: + try: + requirements = load_field( + _doc.get("requirements"), + idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'requirements' field is not valid because:", + SourceLine(_doc, "requirements", str), + [e], + ) + ) + else: + requirements = None + if "hints" in _doc: + try: + hints = load_field( + _doc.get("hints"), + idmap_hints_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'hints' field is not valid because:", + SourceLine(_doc, "hints", str), + [e], + ) + ) + else: + hints = None + if "cwlVersion" in _doc: + try: + cwlVersion = load_field( + _doc.get("cwlVersion"), + uri_union_of_None_type_or_CWLVersionLoader_False_True_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'cwlVersion' field is not valid because:", + SourceLine(_doc, "cwlVersion", str), + [e], + ) + ) + else: + cwlVersion = None + if "intent" in _doc: + try: + intent = load_field( + 
_doc.get("intent"), + uri_union_of_None_type_or_array_of_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'intent' field is not valid because:", + SourceLine(_doc, "intent", str), + [e], + ) + ) + else: + intent = None + try: + steps = load_field( + _doc.get("steps"), + idmap_steps_union_of_array_of_WorkflowStepLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'steps' field is not valid because:", + SourceLine(_doc, "steps", str), + [e], + ) + ) + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `id`, `label`, `doc`, `inputs`, `outputs`, `requirements`, `hints`, `cwlVersion`, `intent`, `class`, `steps`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'Workflow'", None, _errors__) + _constructed = cls( + id=id, + label=label, + doc=doc, + inputs=inputs, + outputs=outputs, + requirements=requirements, + hints=hints, + cwlVersion=cwlVersion, + intent=intent, + steps=steps, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[id] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc: + if self.id: + temp_id = self.id + if len(temp_id.split('#')) > 1: + temp_id = self.id.split("#")[1] + if temp_id in doc: + keys.append(temp_id) + temp_doc = doc.get(temp_id) + if isinstance(temp_doc, CommentedMap): + doc = temp_doc + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "Workflow" + + if doc: + base_url_to_save = base_url + if self.id: + base_url_to_save = self.id + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url_to_save, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + 
inserted_line_info=inserted_line_info, + shift=shift + ) + if self.id is not None and "id" not in r: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="id", + val=r.get("id"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.inputs is not None and "inputs" not in r: + r["inputs"] = save( + self.inputs, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="inputs", + val=r.get("inputs"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.outputs is not None and "outputs" not in r: + r["outputs"] = save( + self.outputs, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="outputs", + val=r.get("outputs"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.requirements is not None and "requirements" not in r: + r["requirements"] = save( + self.requirements, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="requirements", + val=r.get("requirements"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.hints is not None and "hints" not in r: + r["hints"] = save( + self.hints, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="hints", + val=r.get("hints"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.cwlVersion is not None and "cwlVersion" not in r: + u = save_relative_uri( + self.cwlVersion, str(self.id), False, None, relative_uris + ) + r["cwlVersion"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="cwlVersion", + val=r.get("cwlVersion"), + cols=cols, 
+ min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.intent is not None and "intent" not in r: + u = save_relative_uri(self.intent, str(self.id), True, None, relative_uris) + r["intent"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="intent", + val=r.get("intent"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.steps is not None and "steps" not in r: + r["steps"] = save( + self.steps, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="steps", + val=r.get("steps"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "id", + "label", + "doc", + "inputs", + "outputs", + "requirements", + "hints", + "cwlVersion", + "intent", + "class", + "steps", + ] + ) + + +class SubworkflowFeatureRequirement(ProcessRequirement): + """ + Indicates that the workflow platform must support nested workflows in + the `run` field of [WorkflowStep](#WorkflowStep). + + """ + + def __init__( + self, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "SubworkflowFeatureRequirement" + + def __eq__(self, other: Any) -> bool: + if isinstance(other, SubworkflowFeatureRequirement): + return bool(self.class_ == other.class_) + return False + + def __hash__(self) -> int: + return hash((self.class_)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "SubworkflowFeatureRequirement": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "SubworkflowFeatureRequirement": + raise ValidationException("Not a SubworkflowFeatureRequirement") + + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`".format(k), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'SubworkflowFeatureRequirement'", None, _errors__ + ) + _constructed = cls( + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = 
iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "SubworkflowFeatureRequirement" + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["class"]) + + +class ScatterFeatureRequirement(ProcessRequirement): + """ + Indicates that the workflow platform must support the `scatter` and + `scatterMethod` fields of [WorkflowStep](#WorkflowStep). 
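+ For example (an illustrative note, not part of the original schema text): a step that scatters over an array input runs once per array element, and `scatterMethod` chooses how multiple scattered inputs combine (dotproduct, nested_crossproduct, or flat_crossproduct).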
+ + """ + + def __init__( + self, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "ScatterFeatureRequirement" + + def __eq__(self, other: Any) -> bool: + if isinstance(other, ScatterFeatureRequirement): + return bool(self.class_ == other.class_) + return False + + def __hash__(self) -> int: + return hash((self.class_)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "ScatterFeatureRequirement": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "ScatterFeatureRequirement": + raise ValidationException("Not a ScatterFeatureRequirement") + + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`".format(k), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'ScatterFeatureRequirement'", None, _errors__ + ) + _constructed = cls( + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "ScatterFeatureRequirement" + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = 
self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["class"]) + + +class MultipleInputFeatureRequirement(ProcessRequirement): + """ + Indicates that the workflow platform must support multiple inbound data links + listed in the `source` field of [WorkflowStepInput](#WorkflowStepInput). + + """ + + def __init__( + self, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "MultipleInputFeatureRequirement" + + def __eq__(self, other: Any) -> bool: + if isinstance(other, MultipleInputFeatureRequirement): + return bool(self.class_ == other.class_) + return False + + def __hash__(self) -> int: + return hash((self.class_)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "MultipleInputFeatureRequirement": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "MultipleInputFeatureRequirement": + raise ValidationException("Not a MultipleInputFeatureRequirement") + + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`".format(k), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'MultipleInputFeatureRequirement'", None, _errors__ + ) + _constructed = cls( + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "MultipleInputFeatureRequirement" + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + 
saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["class"]) + + +class StepInputExpressionRequirement(ProcessRequirement): + """ + Indicate that the workflow platform must support the `valueFrom` field + of [WorkflowStepInput](#WorkflowStepInput). + + """ + + def __init__( + self, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.class_ = "StepInputExpressionRequirement" + + def __eq__(self, other: Any) -> bool: + if isinstance(other, StepInputExpressionRequirement): + return bool(self.class_ == other.class_) + return False + + def __hash__(self) -> int: + return hash((self.class_)) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "StepInputExpressionRequirement": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "StepInputExpressionRequirement": + raise ValidationException("Not a StepInputExpressionRequirement") + + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `class`".format(k), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'StepInputExpressionRequirement'", None, _errors__ + ) + _constructed = cls( + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "StepInputExpressionRequirement" + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 
1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset(["class"]) + + +class OperationInputParameter(InputParameter): + """ + Describe an input parameter of an operation. + + """ + + def __init__( + self, + type: Any, + label: Optional[Any] = None, + secondaryFiles: Optional[Any] = None, + streamable: Optional[Any] = None, + doc: Optional[Any] = None, + id: Optional[Any] = None, + format: Optional[Any] = None, + loadContents: Optional[Any] = None, + loadListing: Optional[Any] = None, + default: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.label = label + self.secondaryFiles = secondaryFiles + self.streamable = streamable + self.doc = doc + self.id = id + self.format = format + self.loadContents = loadContents + self.loadListing = loadListing + self.default = default + self.type = type + + def __eq__(self, other: Any) -> bool: + if isinstance(other, OperationInputParameter): + return bool( + self.label == other.label + and self.secondaryFiles == other.secondaryFiles + and self.streamable == other.streamable + and self.doc == other.doc + and self.id == other.id + and self.format == other.format + and self.loadContents == other.loadContents + and self.loadListing == other.loadListing + and self.default == other.default + and self.type == other.type + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.label, + self.secondaryFiles, + self.streamable, + self.doc, + self.id, + self.format, + self.loadContents, + self.loadListing, + self.default, + self.type, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "OperationInputParameter": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "id" in _doc: + try: + id = load_field( + _doc.get("id"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'id' field is not valid because:", + SourceLine(_doc, "id", str), + [e], + ) + ) + else: + id = None + + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot + else: + id = "_:" + str(_uuid__.uuid4()) + if not __original_id_is_none: + baseuri = id + if "label" in _doc: + try: + label = load_field( + 
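# generated loader names encode the schema type they validate, e.g. union_of_None_type_or_strtype below reads as Optional[str]; each load_field call checks one field and converts it to its runtime form +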
_doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'label' field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "secondaryFiles" in _doc: + try: + secondaryFiles = load_field( + _doc.get("secondaryFiles"), + secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'secondaryFiles' field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [e], + ) + ) + else: + secondaryFiles = None + if "streamable" in _doc: + try: + streamable = load_field( + _doc.get("streamable"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'streamable' field is not valid because:", + SourceLine(_doc, "streamable", str), + [e], + ) + ) + else: + streamable = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'doc' field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + if "format" in _doc: + try: + format = load_field( + _doc.get("format"), + uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'format' field is not valid because:", + SourceLine(_doc, "format", str), + [e], + ) + ) + else: + format = None + if "loadContents" in _doc: + try: + loadContents = load_field( + _doc.get("loadContents"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'loadContents' field is not valid because:", + SourceLine(_doc, "loadContents", str), + [e], + ) + ) + else: + loadContents = None + if "loadListing" in _doc: + try: + loadListing = load_field( + _doc.get("loadListing"), + union_of_None_type_or_LoadListingEnumLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'loadListing' field is not valid because:", + SourceLine(_doc, "loadListing", str), + [e], + ) + ) + else: + loadListing = None + if "default" in _doc: + try: + default = load_field( + _doc.get("default"), + union_of_None_type_or_FileLoader_or_DirectoryLoader_or_Any_type, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'default' field is not valid because:", + SourceLine(_doc, "default", str), + [e], + ) + ) + else: + default = None + try: + type = load_field( + _doc.get("type"), + typedsl_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'type' field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k 
not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `format`, `loadContents`, `loadListing`, `default`, `type`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'OperationInputParameter'", None, _errors__ + ) + _constructed = cls( + label=label, + secondaryFiles=secondaryFiles, + streamable=streamable, + doc=doc, + id=id, + format=format, + loadContents=loadContents, + loadListing=loadListing, + default=default, + type=type, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[id] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc: + if self.id: + temp_id = self.id + if len(temp_id.split('#')) > 1: + temp_id = self.id.split("#")[1] + if temp_id in doc: + keys.append(temp_id) + temp_doc = doc.get(temp_id) + if isinstance(temp_doc, CommentedMap): + doc = temp_doc + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + base_url_to_save = base_url + if self.id: + base_url_to_save = self.id + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url_to_save, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.id is not None and "id" not in r: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="id", + val=r.get("id"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = 
add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.secondaryFiles is not None and "secondaryFiles" not in r: + r["secondaryFiles"] = save( + self.secondaryFiles, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="secondaryFiles", + val=r.get("secondaryFiles"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.streamable is not None and "streamable" not in r: + r["streamable"] = save( + self.streamable, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="streamable", + val=r.get("streamable"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.format is not None and "format" not in r: + u = save_relative_uri(self.format, str(self.id), True, None, relative_uris) + r["format"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="format", + val=r.get("format"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.loadContents is not None and "loadContents" not in r: + r["loadContents"] = save( + self.loadContents, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="loadContents", + val=r.get("loadContents"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.loadListing is not None and "loadListing" not in r: + r["loadListing"] = save( + self.loadListing, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="loadListing", + val=r.get("loadListing"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.default is not None and "default" not in r: + r["default"] = save( + self.default, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="default", + val=r.get("default"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.type is not None and 
"type" not in r: + r["type"] = save( + self.type, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "label", + "secondaryFiles", + "streamable", + "doc", + "id", + "format", + "loadContents", + "loadListing", + "default", + "type", + ] + ) + + +class OperationOutputParameter(OutputParameter): + """ + Describe an output parameter of an operation. + + """ + + def __init__( + self, + type: Any, + label: Optional[Any] = None, + secondaryFiles: Optional[Any] = None, + streamable: Optional[Any] = None, + doc: Optional[Any] = None, + id: Optional[Any] = None, + format: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.label = label + self.secondaryFiles = secondaryFiles + self.streamable = streamable + self.doc = doc + self.id = id + self.format = format + self.type = type + + def __eq__(self, other: Any) -> bool: + if isinstance(other, OperationOutputParameter): + return bool( + self.label == other.label + and self.secondaryFiles == other.secondaryFiles + and self.streamable == other.streamable + and self.doc == other.doc + and self.id == other.id + and self.format == other.format + and self.type == other.type + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.label, + self.secondaryFiles, + self.streamable, + self.doc, + self.id, + self.format, + self.type, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "OperationOutputParameter": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + if "id" in _doc: + try: + id = load_field( + _doc.get("id"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'id' field is not valid because:", + SourceLine(_doc, "id", str), + [e], + ) + ) + else: + id = None + + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot + else: + id = "_:" + str(_uuid__.uuid4()) + if not __original_id_is_none: + baseuri = id + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'label' field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "secondaryFiles" in _doc: + try: + secondaryFiles = load_field( + _doc.get("secondaryFiles"), + 
secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'secondaryFiles' field is not valid because:", + SourceLine(_doc, "secondaryFiles", str), + [e], + ) + ) + else: + secondaryFiles = None + if "streamable" in _doc: + try: + streamable = load_field( + _doc.get("streamable"), + union_of_None_type_or_booltype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'streamable' field is not valid because:", + SourceLine(_doc, "streamable", str), + [e], + ) + ) + else: + streamable = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'doc' field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + if "format" in _doc: + try: + format = load_field( + _doc.get("format"), + uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'format' field is not valid because:", + SourceLine(_doc, "format", str), + [e], + ) + ) + else: + format = None + try: + type = load_field( + _doc.get("type"), + typedsl_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_2, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'type' field is not valid because:", + SourceLine(_doc, "type", str), + [e], + ) + ) + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `label`, `secondaryFiles`, `streamable`, `doc`, `id`, `format`, `type`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException( + "Trying 'OperationOutputParameter'", None, _errors__ + ) + _constructed = cls( + label=label, + secondaryFiles=secondaryFiles, + streamable=streamable, + doc=doc, + id=id, + format=format, + type=type, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[id] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc: + if self.id: + temp_id = self.id + if len(temp_id.split('#')) > 1: + temp_id = self.id.split("#")[1] + if temp_id in doc: + keys.append(temp_id) + temp_doc = doc.get(temp_id) + if isinstance(temp_doc, CommentedMap): + doc = temp_doc + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) 
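+ # best-effort reading of the round-trip machinery: these helpers collect the original line/column positions from the source CommentedMap so values written into the new map can be placed at or after their original lines, preserving the input formatting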
+ max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + if doc: + base_url_to_save = base_url + if self.id: + base_url_to_save = self.id + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url_to_save, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.id is not None and "id" not in r: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="id", + val=r.get("id"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.secondaryFiles is not None and "secondaryFiles" not in r: + r["secondaryFiles"] = save( + self.secondaryFiles, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="secondaryFiles", + val=r.get("secondaryFiles"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.streamable is not None and "streamable" not in r: + r["streamable"] = save( + self.streamable, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="streamable", + val=r.get("streamable"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + 
shift=shift, + ) + if self.format is not None and "format" not in r: + u = save_relative_uri(self.format, str(self.id), True, None, relative_uris) + r["format"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="format", + val=r.get("format"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.type is not None and "type" not in r: + r["type"] = save( + self.type, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + ["label", "secondaryFiles", "streamable", "doc", "id", "format", "type"] + ) + + +class Operation(Process): + """ + This record describes an abstract operation. It is a potential + step of a workflow that has not yet been bound to a concrete + implementation. It specifies an input and output signature, but + does not provide enough information to be executed. An + implementation (or other tooling) may provide a means of binding + an Operation to a concrete process (such as Workflow, + CommandLineTool, or ExpressionTool) with a compatible signature. + + """ + + def __init__( + self, + inputs: Any, + outputs: Any, + id: Optional[Any] = None, + label: Optional[Any] = None, + doc: Optional[Any] = None, + requirements: Optional[Any] = None, + hints: Optional[Any] = None, + cwlVersion: Optional[Any] = None, + intent: Optional[Any] = None, + extension_fields: Optional[Dict[str, Any]] = None, + loadingOptions: Optional[LoadingOptions] = None, + ) -> None: + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.id = id + self.label = label + self.doc = doc + self.inputs = inputs + self.outputs = outputs + self.requirements = requirements + self.hints = hints + self.cwlVersion = cwlVersion + self.intent = intent + self.class_ = "Operation" + + def __eq__(self, other: Any) -> bool: + if isinstance(other, Operation): + return bool( + self.id == other.id + and self.label == other.label + and self.doc == other.doc + and self.inputs == other.inputs + and self.outputs == other.outputs + and self.requirements == other.requirements + and self.hints == other.hints + and self.cwlVersion == other.cwlVersion + and self.intent == other.intent + and self.class_ == other.class_ + ) + return False + + def __hash__(self) -> int: + return hash( + ( + self.id, + self.label, + self.doc, + self.inputs, + self.outputs, + self.requirements, + self.hints, + self.cwlVersion, + self.intent, + self.class_, + ) + ) + + @classmethod + def fromDoc( + cls, + doc: Any, + baseuri: str, + loadingOptions: LoadingOptions, + docRoot: Optional[str] = None, + ) -> "Operation": + _doc = copy.copy(doc) + if hasattr(doc, "lc"): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + _errors__ = [] + + if _doc.get("class") != "Operation": + raise 
ValidationException("Not a Operation") + + if "id" in _doc: + try: + id = load_field( + _doc.get("id"), + uri_union_of_None_type_or_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'id' field is not valid because:", + SourceLine(_doc, "id", str), + [e], + ) + ) + else: + id = None + + __original_id_is_none = id is None + if id is None: + if docRoot is not None: + id = docRoot + else: + id = "_:" + str(_uuid__.uuid4()) + if not __original_id_is_none: + baseuri = id + if "label" in _doc: + try: + label = load_field( + _doc.get("label"), + union_of_None_type_or_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'label' field is not valid because:", + SourceLine(_doc, "label", str), + [e], + ) + ) + else: + label = None + if "doc" in _doc: + try: + doc = load_field( + _doc.get("doc"), + union_of_None_type_or_strtype_or_array_of_strtype, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'doc' field is not valid because:", + SourceLine(_doc, "doc", str), + [e], + ) + ) + else: + doc = None + try: + inputs = load_field( + _doc.get("inputs"), + idmap_inputs_array_of_OperationInputParameterLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'inputs' field is not valid because:", + SourceLine(_doc, "inputs", str), + [e], + ) + ) + try: + outputs = load_field( + _doc.get("outputs"), + idmap_outputs_array_of_OperationOutputParameterLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'outputs' field is not valid because:", + SourceLine(_doc, "outputs", str), + [e], + ) + ) + if "requirements" in _doc: + try: + requirements = load_field( + _doc.get("requirements"), + idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'requirements' field is not valid because:", + SourceLine(_doc, "requirements", str), + [e], + ) + ) + else: + requirements = None + if "hints" in _doc: + try: + hints = load_field( + _doc.get("hints"), + idmap_hints_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type, + baseuri, + loadingOptions, + ) + except ValidationException as e: + 
_errors__.append( + ValidationException( + "the 'hints' field is not valid because:", + SourceLine(_doc, "hints", str), + [e], + ) + ) + else: + hints = None + if "cwlVersion" in _doc: + try: + cwlVersion = load_field( + _doc.get("cwlVersion"), + uri_union_of_None_type_or_CWLVersionLoader_False_True_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'cwlVersion' field is not valid because:", + SourceLine(_doc, "cwlVersion", str), + [e], + ) + ) + else: + cwlVersion = None + if "intent" in _doc: + try: + intent = load_field( + _doc.get("intent"), + uri_union_of_None_type_or_array_of_strtype_True_False_None, + baseuri, + loadingOptions, + ) + except ValidationException as e: + _errors__.append( + ValidationException( + "the 'intent' field is not valid because:", + SourceLine(_doc, "intent", str), + [e], + ) + ) + else: + intent = None + extension_fields: Dict[str, Any] = {} + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url( + k, "", loadingOptions, scoped_id=False, vocab_term=False + ) + extension_fields[ex] = _doc[k] + else: + _errors__.append( + ValidationException( + "invalid field `{}`, expected one of: `id`, `label`, `doc`, `inputs`, `outputs`, `requirements`, `hints`, `cwlVersion`, `intent`, `class`".format( + k + ), + SourceLine(_doc, k, str), + ) + ) + break + + if _errors__: + raise ValidationException("Trying 'Operation'", None, _errors__) + _constructed = cls( + id=id, + label=label, + doc=doc, + inputs=inputs, + outputs=outputs, + requirements=requirements, + hints=hints, + cwlVersion=cwlVersion, + intent=intent, + extension_fields=extension_fields, + loadingOptions=loadingOptions, + ) + loadingOptions.idx[id] = (_constructed, loadingOptions) + return _constructed + + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc: + if self.id: + temp_id = self.id + if len(temp_id.split('#')) > 1: + temp_id = self.id.split("#")[1] + if temp_id in doc: + keys.append(temp_id) + temp_doc = doc.get(temp_id) + if isinstance(temp_doc, CommentedMap): + doc = temp_doc + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] + + r["class"] = "Operation" + + if doc: + base_url_to_save = base_url + if self.id: + base_url_to_save = self.id + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url_to_save, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) 
== list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.id is not None and "id" not in r: + u = save_relative_uri(self.id, base_url, True, None, relative_uris) + r["id"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="id", + val=r.get("id"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.label is not None and "label" not in r: + r["label"] = save( + self.label, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="label", + val=r.get("label"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.doc is not None and "doc" not in r: + r["doc"] = save( + self.doc, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.inputs is not None and "inputs" not in r: + r["inputs"] = save( + self.inputs, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="inputs", + val=r.get("inputs"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.outputs is not None and "outputs" not in r: + r["outputs"] = save( + self.outputs, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="outputs", + val=r.get("outputs"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.requirements is not None and "requirements" not in r: + r["requirements"] = save( + self.requirements, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="requirements", + val=r.get("requirements"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.hints is not None and "hints" not in r: + r["hints"] = save( + self.hints, + top=False, + base_url=str(self.id), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="hints", + val=r.get("hints"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.cwlVersion is not None and "cwlVersion" not 
in r: + u = save_relative_uri( + self.cwlVersion, str(self.id), False, None, relative_uris + ) + r["cwlVersion"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="cwlVersion", + val=r.get("cwlVersion"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.intent is not None and "intent" not in r: + u = save_relative_uri(self.intent, str(self.id), True, None, relative_uris) + r["intent"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="intent", + val=r.get("intent"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + + # top refers to the directory level + if top: + if self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + if self.loadingOptions.schemas: + r["$schemas"] = self.loadingOptions.schemas + return r + + attrs = frozenset( + [ + "id", + "label", + "doc", + "inputs", + "outputs", + "requirements", + "hints", + "cwlVersion", + "intent", + "class", + ] + ) + + +_vocab = { + "Any": "https://w3id.org/cwl/salad#Any", + "ArraySchema": "https://w3id.org/cwl/salad#ArraySchema", + "CWLType": "https://w3id.org/cwl/cwl#CWLType", + "CWLVersion": "https://w3id.org/cwl/cwl#CWLVersion", + "CommandInputArraySchema": "https://w3id.org/cwl/cwl#CommandInputArraySchema", + "CommandInputEnumSchema": "https://w3id.org/cwl/cwl#CommandInputEnumSchema", + "CommandInputParameter": "https://w3id.org/cwl/cwl#CommandInputParameter", + "CommandInputRecordField": "https://w3id.org/cwl/cwl#CommandInputRecordField", + "CommandInputRecordSchema": "https://w3id.org/cwl/cwl#CommandInputRecordSchema", + "CommandInputSchema": "https://w3id.org/cwl/cwl#CommandInputSchema", + "CommandLineBindable": "https://w3id.org/cwl/cwl#CommandLineBindable", + "CommandLineBinding": "https://w3id.org/cwl/cwl#CommandLineBinding", + "CommandLineTool": "https://w3id.org/cwl/cwl#CommandLineTool", + "CommandOutputArraySchema": "https://w3id.org/cwl/cwl#CommandOutputArraySchema", + "CommandOutputBinding": "https://w3id.org/cwl/cwl#CommandOutputBinding", + "CommandOutputEnumSchema": "https://w3id.org/cwl/cwl#CommandOutputEnumSchema", + "CommandOutputParameter": "https://w3id.org/cwl/cwl#CommandOutputParameter", + "CommandOutputRecordField": "https://w3id.org/cwl/cwl#CommandOutputRecordField", + "CommandOutputRecordSchema": "https://w3id.org/cwl/cwl#CommandOutputRecordSchema", + "Directory": "https://w3id.org/cwl/cwl#Directory", + "Dirent": "https://w3id.org/cwl/cwl#Dirent", + "DockerRequirement": "https://w3id.org/cwl/cwl#DockerRequirement", + "Documented": "https://w3id.org/cwl/salad#Documented", + "EnumSchema": "https://w3id.org/cwl/salad#EnumSchema", + "EnvVarRequirement": "https://w3id.org/cwl/cwl#EnvVarRequirement", + "EnvironmentDef": "https://w3id.org/cwl/cwl#EnvironmentDef", + "Expression": "https://w3id.org/cwl/cwl#Expression", + "ExpressionPlaceholder": "https://w3id.org/cwl/cwl#ExpressionPlaceholder", + "ExpressionTool": "https://w3id.org/cwl/cwl#ExpressionTool", + "ExpressionToolOutputParameter": "https://w3id.org/cwl/cwl#ExpressionToolOutputParameter", + "FieldBase": "https://w3id.org/cwl/cwl#FieldBase", + "File": "https://w3id.org/cwl/cwl#File", + "IOSchema": "https://w3id.org/cwl/cwl#IOSchema", + "Identified": "https://w3id.org/cwl/cwl#Identified", + "InitialWorkDirRequirement": "https://w3id.org/cwl/cwl#InitialWorkDirRequirement", + 
"InlineJavascriptRequirement": "https://w3id.org/cwl/cwl#InlineJavascriptRequirement", + "InplaceUpdateRequirement": "https://w3id.org/cwl/cwl#InplaceUpdateRequirement", + "InputArraySchema": "https://w3id.org/cwl/cwl#InputArraySchema", + "InputBinding": "https://w3id.org/cwl/cwl#InputBinding", + "InputEnumSchema": "https://w3id.org/cwl/cwl#InputEnumSchema", + "InputFormat": "https://w3id.org/cwl/cwl#InputFormat", + "InputParameter": "https://w3id.org/cwl/cwl#InputParameter", + "InputRecordField": "https://w3id.org/cwl/cwl#InputRecordField", + "InputRecordSchema": "https://w3id.org/cwl/cwl#InputRecordSchema", + "InputSchema": "https://w3id.org/cwl/cwl#InputSchema", + "Labeled": "https://w3id.org/cwl/cwl#Labeled", + "LinkMergeMethod": "https://w3id.org/cwl/cwl#LinkMergeMethod", + "LoadContents": "https://w3id.org/cwl/cwl#LoadContents", + "LoadListingEnum": "https://w3id.org/cwl/cwl#LoadListingEnum", + "LoadListingRequirement": "https://w3id.org/cwl/cwl#LoadListingRequirement", + "MultipleInputFeatureRequirement": "https://w3id.org/cwl/cwl#MultipleInputFeatureRequirement", + "NetworkAccess": "https://w3id.org/cwl/cwl#NetworkAccess", + "Operation": "https://w3id.org/cwl/cwl#Operation", + "OperationInputParameter": "https://w3id.org/cwl/cwl#OperationInputParameter", + "OperationOutputParameter": "https://w3id.org/cwl/cwl#OperationOutputParameter", + "OutputArraySchema": "https://w3id.org/cwl/cwl#OutputArraySchema", + "OutputEnumSchema": "https://w3id.org/cwl/cwl#OutputEnumSchema", + "OutputFormat": "https://w3id.org/cwl/cwl#OutputFormat", + "OutputParameter": "https://w3id.org/cwl/cwl#OutputParameter", + "OutputRecordField": "https://w3id.org/cwl/cwl#OutputRecordField", + "OutputRecordSchema": "https://w3id.org/cwl/cwl#OutputRecordSchema", + "OutputSchema": "https://w3id.org/cwl/cwl#OutputSchema", + "Parameter": "https://w3id.org/cwl/cwl#Parameter", + "PickValueMethod": "https://w3id.org/cwl/cwl#PickValueMethod", + "PrimitiveType": "https://w3id.org/cwl/salad#PrimitiveType", + "Process": "https://w3id.org/cwl/cwl#Process", + "ProcessRequirement": "https://w3id.org/cwl/cwl#ProcessRequirement", + "RecordField": "https://w3id.org/cwl/salad#RecordField", + "RecordSchema": "https://w3id.org/cwl/salad#RecordSchema", + "ResourceRequirement": "https://w3id.org/cwl/cwl#ResourceRequirement", + "ScatterFeatureRequirement": "https://w3id.org/cwl/cwl#ScatterFeatureRequirement", + "ScatterMethod": "https://w3id.org/cwl/cwl#ScatterMethod", + "SchemaDefRequirement": "https://w3id.org/cwl/cwl#SchemaDefRequirement", + "SecondaryFileSchema": "https://w3id.org/cwl/cwl#SecondaryFileSchema", + "ShellCommandRequirement": "https://w3id.org/cwl/cwl#ShellCommandRequirement", + "Sink": "https://w3id.org/cwl/cwl#Sink", + "SoftwarePackage": "https://w3id.org/cwl/cwl#SoftwarePackage", + "SoftwareRequirement": "https://w3id.org/cwl/cwl#SoftwareRequirement", + "StepInputExpressionRequirement": "https://w3id.org/cwl/cwl#StepInputExpressionRequirement", + "SubworkflowFeatureRequirement": "https://w3id.org/cwl/cwl#SubworkflowFeatureRequirement", + "ToolTimeLimit": "https://w3id.org/cwl/cwl#ToolTimeLimit", + "WorkReuse": "https://w3id.org/cwl/cwl#WorkReuse", + "Workflow": "https://w3id.org/cwl/cwl#Workflow", + "WorkflowInputParameter": "https://w3id.org/cwl/cwl#WorkflowInputParameter", + "WorkflowOutputParameter": "https://w3id.org/cwl/cwl#WorkflowOutputParameter", + "WorkflowStep": "https://w3id.org/cwl/cwl#WorkflowStep", + "WorkflowStepInput": "https://w3id.org/cwl/cwl#WorkflowStepInput", + "WorkflowStepOutput": 
"https://w3id.org/cwl/cwl#WorkflowStepOutput", + "all_non_null": "https://w3id.org/cwl/cwl#PickValueMethod/all_non_null", + "array": "https://w3id.org/cwl/salad#array", + "boolean": "http://www.w3.org/2001/XMLSchema#boolean", + "deep_listing": "https://w3id.org/cwl/cwl#LoadListingEnum/deep_listing", + "dotproduct": "https://w3id.org/cwl/cwl#ScatterMethod/dotproduct", + "double": "http://www.w3.org/2001/XMLSchema#double", + "draft-2": "https://w3id.org/cwl/cwl#draft-2", + "draft-3": "https://w3id.org/cwl/cwl#draft-3", + "draft-3.dev1": "https://w3id.org/cwl/cwl#draft-3.dev1", + "draft-3.dev2": "https://w3id.org/cwl/cwl#draft-3.dev2", + "draft-3.dev3": "https://w3id.org/cwl/cwl#draft-3.dev3", + "draft-3.dev4": "https://w3id.org/cwl/cwl#draft-3.dev4", + "draft-3.dev5": "https://w3id.org/cwl/cwl#draft-3.dev5", + "draft-4.dev1": "https://w3id.org/cwl/cwl#draft-4.dev1", + "draft-4.dev2": "https://w3id.org/cwl/cwl#draft-4.dev2", + "draft-4.dev3": "https://w3id.org/cwl/cwl#draft-4.dev3", + "enum": "https://w3id.org/cwl/salad#enum", + "first_non_null": "https://w3id.org/cwl/cwl#PickValueMethod/first_non_null", + "flat_crossproduct": "https://w3id.org/cwl/cwl#ScatterMethod/flat_crossproduct", + "float": "http://www.w3.org/2001/XMLSchema#float", + "int": "http://www.w3.org/2001/XMLSchema#int", + "long": "http://www.w3.org/2001/XMLSchema#long", + "merge_flattened": "https://w3id.org/cwl/cwl#LinkMergeMethod/merge_flattened", + "merge_nested": "https://w3id.org/cwl/cwl#LinkMergeMethod/merge_nested", + "nested_crossproduct": "https://w3id.org/cwl/cwl#ScatterMethod/nested_crossproduct", + "no_listing": "https://w3id.org/cwl/cwl#LoadListingEnum/no_listing", + "null": "https://w3id.org/cwl/salad#null", + "record": "https://w3id.org/cwl/salad#record", + "shallow_listing": "https://w3id.org/cwl/cwl#LoadListingEnum/shallow_listing", + "stderr": "https://w3id.org/cwl/cwl#stderr", + "stdin": "https://w3id.org/cwl/cwl#stdin", + "stdout": "https://w3id.org/cwl/cwl#stdout", + "string": "http://www.w3.org/2001/XMLSchema#string", + "the_only_non_null": "https://w3id.org/cwl/cwl#PickValueMethod/the_only_non_null", + "v1.0": "https://w3id.org/cwl/cwl#v1.0", + "v1.0.dev4": "https://w3id.org/cwl/cwl#v1.0.dev4", + "v1.1": "https://w3id.org/cwl/cwl#v1.1", + "v1.1.0-dev1": "https://w3id.org/cwl/cwl#v1.1.0-dev1", + "v1.2": "https://w3id.org/cwl/cwl#v1.2", + "v1.2.0-dev1": "https://w3id.org/cwl/cwl#v1.2.0-dev1", + "v1.2.0-dev2": "https://w3id.org/cwl/cwl#v1.2.0-dev2", + "v1.2.0-dev3": "https://w3id.org/cwl/cwl#v1.2.0-dev3", + "v1.2.0-dev4": "https://w3id.org/cwl/cwl#v1.2.0-dev4", + "v1.2.0-dev5": "https://w3id.org/cwl/cwl#v1.2.0-dev5", +} +_rvocab = { + "https://w3id.org/cwl/salad#Any": "Any", + "https://w3id.org/cwl/salad#ArraySchema": "ArraySchema", + "https://w3id.org/cwl/cwl#CWLType": "CWLType", + "https://w3id.org/cwl/cwl#CWLVersion": "CWLVersion", + "https://w3id.org/cwl/cwl#CommandInputArraySchema": "CommandInputArraySchema", + "https://w3id.org/cwl/cwl#CommandInputEnumSchema": "CommandInputEnumSchema", + "https://w3id.org/cwl/cwl#CommandInputParameter": "CommandInputParameter", + "https://w3id.org/cwl/cwl#CommandInputRecordField": "CommandInputRecordField", + "https://w3id.org/cwl/cwl#CommandInputRecordSchema": "CommandInputRecordSchema", + "https://w3id.org/cwl/cwl#CommandInputSchema": "CommandInputSchema", + "https://w3id.org/cwl/cwl#CommandLineBindable": "CommandLineBindable", + "https://w3id.org/cwl/cwl#CommandLineBinding": "CommandLineBinding", + "https://w3id.org/cwl/cwl#CommandLineTool": "CommandLineTool", + 
"https://w3id.org/cwl/cwl#CommandOutputArraySchema": "CommandOutputArraySchema", + "https://w3id.org/cwl/cwl#CommandOutputBinding": "CommandOutputBinding", + "https://w3id.org/cwl/cwl#CommandOutputEnumSchema": "CommandOutputEnumSchema", + "https://w3id.org/cwl/cwl#CommandOutputParameter": "CommandOutputParameter", + "https://w3id.org/cwl/cwl#CommandOutputRecordField": "CommandOutputRecordField", + "https://w3id.org/cwl/cwl#CommandOutputRecordSchema": "CommandOutputRecordSchema", + "https://w3id.org/cwl/cwl#Directory": "Directory", + "https://w3id.org/cwl/cwl#Dirent": "Dirent", + "https://w3id.org/cwl/cwl#DockerRequirement": "DockerRequirement", + "https://w3id.org/cwl/salad#Documented": "Documented", + "https://w3id.org/cwl/salad#EnumSchema": "EnumSchema", + "https://w3id.org/cwl/cwl#EnvVarRequirement": "EnvVarRequirement", + "https://w3id.org/cwl/cwl#EnvironmentDef": "EnvironmentDef", + "https://w3id.org/cwl/cwl#Expression": "Expression", + "https://w3id.org/cwl/cwl#ExpressionPlaceholder": "ExpressionPlaceholder", + "https://w3id.org/cwl/cwl#ExpressionTool": "ExpressionTool", + "https://w3id.org/cwl/cwl#ExpressionToolOutputParameter": "ExpressionToolOutputParameter", + "https://w3id.org/cwl/cwl#FieldBase": "FieldBase", + "https://w3id.org/cwl/cwl#File": "File", + "https://w3id.org/cwl/cwl#IOSchema": "IOSchema", + "https://w3id.org/cwl/cwl#Identified": "Identified", + "https://w3id.org/cwl/cwl#InitialWorkDirRequirement": "InitialWorkDirRequirement", + "https://w3id.org/cwl/cwl#InlineJavascriptRequirement": "InlineJavascriptRequirement", + "https://w3id.org/cwl/cwl#InplaceUpdateRequirement": "InplaceUpdateRequirement", + "https://w3id.org/cwl/cwl#InputArraySchema": "InputArraySchema", + "https://w3id.org/cwl/cwl#InputBinding": "InputBinding", + "https://w3id.org/cwl/cwl#InputEnumSchema": "InputEnumSchema", + "https://w3id.org/cwl/cwl#InputFormat": "InputFormat", + "https://w3id.org/cwl/cwl#InputParameter": "InputParameter", + "https://w3id.org/cwl/cwl#InputRecordField": "InputRecordField", + "https://w3id.org/cwl/cwl#InputRecordSchema": "InputRecordSchema", + "https://w3id.org/cwl/cwl#InputSchema": "InputSchema", + "https://w3id.org/cwl/cwl#Labeled": "Labeled", + "https://w3id.org/cwl/cwl#LinkMergeMethod": "LinkMergeMethod", + "https://w3id.org/cwl/cwl#LoadContents": "LoadContents", + "https://w3id.org/cwl/cwl#LoadListingEnum": "LoadListingEnum", + "https://w3id.org/cwl/cwl#LoadListingRequirement": "LoadListingRequirement", + "https://w3id.org/cwl/cwl#MultipleInputFeatureRequirement": "MultipleInputFeatureRequirement", + "https://w3id.org/cwl/cwl#NetworkAccess": "NetworkAccess", + "https://w3id.org/cwl/cwl#Operation": "Operation", + "https://w3id.org/cwl/cwl#OperationInputParameter": "OperationInputParameter", + "https://w3id.org/cwl/cwl#OperationOutputParameter": "OperationOutputParameter", + "https://w3id.org/cwl/cwl#OutputArraySchema": "OutputArraySchema", + "https://w3id.org/cwl/cwl#OutputEnumSchema": "OutputEnumSchema", + "https://w3id.org/cwl/cwl#OutputFormat": "OutputFormat", + "https://w3id.org/cwl/cwl#OutputParameter": "OutputParameter", + "https://w3id.org/cwl/cwl#OutputRecordField": "OutputRecordField", + "https://w3id.org/cwl/cwl#OutputRecordSchema": "OutputRecordSchema", + "https://w3id.org/cwl/cwl#OutputSchema": "OutputSchema", + "https://w3id.org/cwl/cwl#Parameter": "Parameter", + "https://w3id.org/cwl/cwl#PickValueMethod": "PickValueMethod", + "https://w3id.org/cwl/salad#PrimitiveType": "PrimitiveType", + "https://w3id.org/cwl/cwl#Process": "Process", + 
"https://w3id.org/cwl/cwl#ProcessRequirement": "ProcessRequirement", + "https://w3id.org/cwl/salad#RecordField": "RecordField", + "https://w3id.org/cwl/salad#RecordSchema": "RecordSchema", + "https://w3id.org/cwl/cwl#ResourceRequirement": "ResourceRequirement", + "https://w3id.org/cwl/cwl#ScatterFeatureRequirement": "ScatterFeatureRequirement", + "https://w3id.org/cwl/cwl#ScatterMethod": "ScatterMethod", + "https://w3id.org/cwl/cwl#SchemaDefRequirement": "SchemaDefRequirement", + "https://w3id.org/cwl/cwl#SecondaryFileSchema": "SecondaryFileSchema", + "https://w3id.org/cwl/cwl#ShellCommandRequirement": "ShellCommandRequirement", + "https://w3id.org/cwl/cwl#Sink": "Sink", + "https://w3id.org/cwl/cwl#SoftwarePackage": "SoftwarePackage", + "https://w3id.org/cwl/cwl#SoftwareRequirement": "SoftwareRequirement", + "https://w3id.org/cwl/cwl#StepInputExpressionRequirement": "StepInputExpressionRequirement", + "https://w3id.org/cwl/cwl#SubworkflowFeatureRequirement": "SubworkflowFeatureRequirement", + "https://w3id.org/cwl/cwl#ToolTimeLimit": "ToolTimeLimit", + "https://w3id.org/cwl/cwl#WorkReuse": "WorkReuse", + "https://w3id.org/cwl/cwl#Workflow": "Workflow", + "https://w3id.org/cwl/cwl#WorkflowInputParameter": "WorkflowInputParameter", + "https://w3id.org/cwl/cwl#WorkflowOutputParameter": "WorkflowOutputParameter", + "https://w3id.org/cwl/cwl#WorkflowStep": "WorkflowStep", + "https://w3id.org/cwl/cwl#WorkflowStepInput": "WorkflowStepInput", + "https://w3id.org/cwl/cwl#WorkflowStepOutput": "WorkflowStepOutput", + "https://w3id.org/cwl/cwl#PickValueMethod/all_non_null": "all_non_null", + "https://w3id.org/cwl/salad#array": "array", + "http://www.w3.org/2001/XMLSchema#boolean": "boolean", + "https://w3id.org/cwl/cwl#LoadListingEnum/deep_listing": "deep_listing", + "https://w3id.org/cwl/cwl#ScatterMethod/dotproduct": "dotproduct", + "http://www.w3.org/2001/XMLSchema#double": "double", + "https://w3id.org/cwl/cwl#draft-2": "draft-2", + "https://w3id.org/cwl/cwl#draft-3": "draft-3", + "https://w3id.org/cwl/cwl#draft-3.dev1": "draft-3.dev1", + "https://w3id.org/cwl/cwl#draft-3.dev2": "draft-3.dev2", + "https://w3id.org/cwl/cwl#draft-3.dev3": "draft-3.dev3", + "https://w3id.org/cwl/cwl#draft-3.dev4": "draft-3.dev4", + "https://w3id.org/cwl/cwl#draft-3.dev5": "draft-3.dev5", + "https://w3id.org/cwl/cwl#draft-4.dev1": "draft-4.dev1", + "https://w3id.org/cwl/cwl#draft-4.dev2": "draft-4.dev2", + "https://w3id.org/cwl/cwl#draft-4.dev3": "draft-4.dev3", + "https://w3id.org/cwl/salad#enum": "enum", + "https://w3id.org/cwl/cwl#PickValueMethod/first_non_null": "first_non_null", + "https://w3id.org/cwl/cwl#ScatterMethod/flat_crossproduct": "flat_crossproduct", + "http://www.w3.org/2001/XMLSchema#float": "float", + "http://www.w3.org/2001/XMLSchema#int": "int", + "http://www.w3.org/2001/XMLSchema#long": "long", + "https://w3id.org/cwl/cwl#LinkMergeMethod/merge_flattened": "merge_flattened", + "https://w3id.org/cwl/cwl#LinkMergeMethod/merge_nested": "merge_nested", + "https://w3id.org/cwl/cwl#ScatterMethod/nested_crossproduct": "nested_crossproduct", + "https://w3id.org/cwl/cwl#LoadListingEnum/no_listing": "no_listing", + "https://w3id.org/cwl/salad#null": "null", + "https://w3id.org/cwl/salad#record": "record", + "https://w3id.org/cwl/cwl#LoadListingEnum/shallow_listing": "shallow_listing", + "https://w3id.org/cwl/cwl#stderr": "stderr", + "https://w3id.org/cwl/cwl#stdin": "stdin", + "https://w3id.org/cwl/cwl#stdout": "stdout", + "http://www.w3.org/2001/XMLSchema#string": "string", + 
"https://w3id.org/cwl/cwl#PickValueMethod/the_only_non_null": "the_only_non_null", + "https://w3id.org/cwl/cwl#v1.0": "v1.0", + "https://w3id.org/cwl/cwl#v1.0.dev4": "v1.0.dev4", + "https://w3id.org/cwl/cwl#v1.1": "v1.1", + "https://w3id.org/cwl/cwl#v1.1.0-dev1": "v1.1.0-dev1", + "https://w3id.org/cwl/cwl#v1.2": "v1.2", + "https://w3id.org/cwl/cwl#v1.2.0-dev1": "v1.2.0-dev1", + "https://w3id.org/cwl/cwl#v1.2.0-dev2": "v1.2.0-dev2", + "https://w3id.org/cwl/cwl#v1.2.0-dev3": "v1.2.0-dev3", + "https://w3id.org/cwl/cwl#v1.2.0-dev4": "v1.2.0-dev4", + "https://w3id.org/cwl/cwl#v1.2.0-dev5": "v1.2.0-dev5", +} + +strtype = _PrimitiveLoader(str) +inttype = _PrimitiveLoader(int) +floattype = _PrimitiveLoader(float) +booltype = _PrimitiveLoader(bool) +None_type = _PrimitiveLoader(type(None)) +Any_type = _AnyLoader() +PrimitiveTypeLoader = _EnumLoader( + ( + "null", + "boolean", + "int", + "long", + "float", + "double", + "string", + ), + "PrimitiveType", +) +""" +Names of salad data types (based on Avro schema declarations). + +Refer to the [Avro schema declaration documentation](https://avro.apache.org/docs/current/spec.html#schemas) for +detailed information. + +null: no value +boolean: a binary value +int: 32-bit signed integer +long: 64-bit signed integer +float: single precision (32-bit) IEEE 754 floating-point number +double: double precision (64-bit) IEEE 754 floating-point number +string: Unicode character sequence +""" +AnyLoader = _EnumLoader(("Any",), "Any") +""" +The **Any** type validates for any non-null value. +""" +RecordFieldLoader = _RecordLoader(RecordField) +RecordSchemaLoader = _RecordLoader(RecordSchema) +EnumSchemaLoader = _RecordLoader(EnumSchema) +ArraySchemaLoader = _RecordLoader(ArraySchema) +CWLVersionLoader = _EnumLoader( + ( + "draft-2", + "draft-3.dev1", + "draft-3.dev2", + "draft-3.dev3", + "draft-3.dev4", + "draft-3.dev5", + "draft-3", + "draft-4.dev1", + "draft-4.dev2", + "draft-4.dev3", + "v1.0.dev4", + "v1.0", + "v1.1.0-dev1", + "v1.1", + "v1.2.0-dev1", + "v1.2.0-dev2", + "v1.2.0-dev3", + "v1.2.0-dev4", + "v1.2.0-dev5", + "v1.2", + ), + "CWLVersion", +) +""" +Version symbols for published CWL document versions. +""" +CWLTypeLoader = _EnumLoader( + ( + "null", + "boolean", + "int", + "long", + "float", + "double", + "string", + "File", + "Directory", + ), + "CWLType", +) +""" +Extends primitive types with the concept of a file and directory as a builtin type. +File: A File object +Directory: A Directory object +""" +FileLoader = _RecordLoader(File) +DirectoryLoader = _RecordLoader(Directory) +LoadListingEnumLoader = _EnumLoader( + ( + "no_listing", + "shallow_listing", + "deep_listing", + ), + "LoadListingEnum", +) +""" +Specify the desired behavior for loading the `listing` field of +a Directory object for use by expressions. + +no_listing: Do not load the directory listing. +shallow_listing: Only load the top level listing, do not recurse into subdirectories. +deep_listing: Load the directory listing and recursively load all subdirectories as well. 
+""" +ExpressionLoader = _ExpressionLoader(str) +InputBindingLoader = _RecordLoader(InputBinding) +InputRecordFieldLoader = _RecordLoader(InputRecordField) +InputRecordSchemaLoader = _RecordLoader(InputRecordSchema) +InputEnumSchemaLoader = _RecordLoader(InputEnumSchema) +InputArraySchemaLoader = _RecordLoader(InputArraySchema) +OutputRecordFieldLoader = _RecordLoader(OutputRecordField) +OutputRecordSchemaLoader = _RecordLoader(OutputRecordSchema) +OutputEnumSchemaLoader = _RecordLoader(OutputEnumSchema) +OutputArraySchemaLoader = _RecordLoader(OutputArraySchema) +InlineJavascriptRequirementLoader = _RecordLoader(InlineJavascriptRequirement) +SchemaDefRequirementLoader = _RecordLoader(SchemaDefRequirement) +SecondaryFileSchemaLoader = _RecordLoader(SecondaryFileSchema) +LoadListingRequirementLoader = _RecordLoader(LoadListingRequirement) +EnvironmentDefLoader = _RecordLoader(EnvironmentDef) +CommandLineBindingLoader = _RecordLoader(CommandLineBinding) +CommandOutputBindingLoader = _RecordLoader(CommandOutputBinding) +CommandLineBindableLoader = _RecordLoader(CommandLineBindable) +CommandInputRecordFieldLoader = _RecordLoader(CommandInputRecordField) +CommandInputRecordSchemaLoader = _RecordLoader(CommandInputRecordSchema) +CommandInputEnumSchemaLoader = _RecordLoader(CommandInputEnumSchema) +CommandInputArraySchemaLoader = _RecordLoader(CommandInputArraySchema) +CommandOutputRecordFieldLoader = _RecordLoader(CommandOutputRecordField) +CommandOutputRecordSchemaLoader = _RecordLoader(CommandOutputRecordSchema) +CommandOutputEnumSchemaLoader = _RecordLoader(CommandOutputEnumSchema) +CommandOutputArraySchemaLoader = _RecordLoader(CommandOutputArraySchema) +CommandInputParameterLoader = _RecordLoader(CommandInputParameter) +CommandOutputParameterLoader = _RecordLoader(CommandOutputParameter) +stdinLoader = _EnumLoader(("stdin",), "stdin") +""" +Only valid as a `type` for a `CommandLineTool` input with no +`inputBinding` set. `stdin` must not be specified at the `CommandLineTool` +level. + +The following +``` +inputs: + an_input_name: + type: stdin +``` +is equivalent to +``` +inputs: + an_input_name: + type: File + streamable: true + +stdin: $(inputs.an_input_name.path) +``` +""" +stdoutLoader = _EnumLoader(("stdout",), "stdout") +""" +Only valid as a `type` for a `CommandLineTool` output with no +`outputBinding` set. + +The following +``` +outputs: + an_output_name: + type: stdout + +stdout: a_stdout_file +``` +is equivalent to +``` +outputs: + an_output_name: + type: File + streamable: true + outputBinding: + glob: a_stdout_file + +stdout: a_stdout_file +``` + +If there is no `stdout` name provided, a random filename will be created. +For example, the following +``` +outputs: + an_output_name: + type: stdout +``` +is equivalent to +``` +outputs: + an_output_name: + type: File + streamable: true + outputBinding: + glob: random_stdout_filenameABCDEFG + +stdout: random_stdout_filenameABCDEFG +``` + +If the `CommandLineTool` contains logically chained commands +(e.g. `echo a && echo b`) `stdout` must include the output of +every command. +""" +stderrLoader = _EnumLoader(("stderr",), "stderr") +""" +Only valid as a `type` for a `CommandLineTool` output with no +`outputBinding` set. 
+ +The following +``` +outputs: + an_output_name: + type: stderr + +stderr: a_stderr_file +``` +is equivalent to +``` +outputs: + an_output_name: + type: File + streamable: true + outputBinding: + glob: a_stderr_file + +stderr: a_stderr_file +``` + +If there is no `stderr` name provided, a random filename will be created. +For example, the following +``` +outputs: + an_output_name: + type: stderr +``` +is equivalent to +``` +outputs: + an_output_name: + type: File + streamable: true + outputBinding: + glob: random_stderr_filenameABCDEFG + +stderr: random_stderr_filenameABCDEFG +``` +""" +CommandLineToolLoader = _RecordLoader(CommandLineTool) +DockerRequirementLoader = _RecordLoader(DockerRequirement) +SoftwareRequirementLoader = _RecordLoader(SoftwareRequirement) +SoftwarePackageLoader = _RecordLoader(SoftwarePackage) +DirentLoader = _RecordLoader(Dirent) +InitialWorkDirRequirementLoader = _RecordLoader(InitialWorkDirRequirement) +EnvVarRequirementLoader = _RecordLoader(EnvVarRequirement) +ShellCommandRequirementLoader = _RecordLoader(ShellCommandRequirement) +ResourceRequirementLoader = _RecordLoader(ResourceRequirement) +WorkReuseLoader = _RecordLoader(WorkReuse) +NetworkAccessLoader = _RecordLoader(NetworkAccess) +InplaceUpdateRequirementLoader = _RecordLoader(InplaceUpdateRequirement) +ToolTimeLimitLoader = _RecordLoader(ToolTimeLimit) +ExpressionToolOutputParameterLoader = _RecordLoader(ExpressionToolOutputParameter) +WorkflowInputParameterLoader = _RecordLoader(WorkflowInputParameter) +ExpressionToolLoader = _RecordLoader(ExpressionTool) +LinkMergeMethodLoader = _EnumLoader( + ( + "merge_nested", + "merge_flattened", + ), + "LinkMergeMethod", +) +""" +The input link merge method, described in [WorkflowStepInput](#WorkflowStepInput). +""" +PickValueMethodLoader = _EnumLoader( + ( + "first_non_null", + "the_only_non_null", + "all_non_null", + ), + "PickValueMethod", +) +""" +Picking non-null values among inbound data links, described in [WorkflowStepInput](#WorkflowStepInput). +""" +WorkflowOutputParameterLoader = _RecordLoader(WorkflowOutputParameter) +WorkflowStepInputLoader = _RecordLoader(WorkflowStepInput) +WorkflowStepOutputLoader = _RecordLoader(WorkflowStepOutput) +ScatterMethodLoader = _EnumLoader( + ( + "dotproduct", + "nested_crossproduct", + "flat_crossproduct", + ), + "ScatterMethod", +) +""" +The scatter method, as described in [workflow step scatter](#WorkflowStep). 
+""" +WorkflowStepLoader = _RecordLoader(WorkflowStep) +WorkflowLoader = _RecordLoader(Workflow) +SubworkflowFeatureRequirementLoader = _RecordLoader(SubworkflowFeatureRequirement) +ScatterFeatureRequirementLoader = _RecordLoader(ScatterFeatureRequirement) +MultipleInputFeatureRequirementLoader = _RecordLoader(MultipleInputFeatureRequirement) +StepInputExpressionRequirementLoader = _RecordLoader(StepInputExpressionRequirement) +OperationInputParameterLoader = _RecordLoader(OperationInputParameter) +OperationOutputParameterLoader = _RecordLoader(OperationOutputParameter) +OperationLoader = _RecordLoader(Operation) +array_of_strtype = _ArrayLoader(strtype) +union_of_None_type_or_strtype_or_array_of_strtype = _UnionLoader( + ( + None_type, + strtype, + array_of_strtype, + ) +) +uri_strtype_True_False_None = _URILoader(strtype, True, False, None) +union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype = _UnionLoader( + ( + PrimitiveTypeLoader, + RecordSchemaLoader, + EnumSchemaLoader, + ArraySchemaLoader, + strtype, + ) +) +array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype = _ArrayLoader( + union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype +) +union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype = _UnionLoader( + ( + PrimitiveTypeLoader, + RecordSchemaLoader, + EnumSchemaLoader, + ArraySchemaLoader, + strtype, + array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype, + ) +) +typedsl_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_2 = _TypeDSLLoader( + union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype, + 2, +) +array_of_RecordFieldLoader = _ArrayLoader(RecordFieldLoader) +union_of_None_type_or_array_of_RecordFieldLoader = _UnionLoader( + ( + None_type, + array_of_RecordFieldLoader, + ) +) +idmap_fields_union_of_None_type_or_array_of_RecordFieldLoader = _IdMapLoader( + union_of_None_type_or_array_of_RecordFieldLoader, "name", "type" +) +Record_nameLoader = _EnumLoader(("record",), "Record_name") +typedsl_Record_nameLoader_2 = _TypeDSLLoader(Record_nameLoader, 2) +union_of_None_type_or_strtype = _UnionLoader( + ( + None_type, + strtype, + ) +) +uri_union_of_None_type_or_strtype_True_False_None = _URILoader( + union_of_None_type_or_strtype, True, False, None +) +uri_array_of_strtype_True_False_None = _URILoader(array_of_strtype, True, False, None) +Enum_nameLoader = _EnumLoader(("enum",), "Enum_name") +typedsl_Enum_nameLoader_2 = _TypeDSLLoader(Enum_nameLoader, 2) +uri_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_False_True_2 = _URILoader( + 
union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype, + False, + True, + 2, +) +Array_nameLoader = _EnumLoader(("array",), "Array_name") +typedsl_Array_nameLoader_2 = _TypeDSLLoader(Array_nameLoader, 2) +File_classLoader = _EnumLoader(("File",), "File_class") +uri_File_classLoader_False_True_None = _URILoader(File_classLoader, False, True, None) +uri_union_of_None_type_or_strtype_False_False_None = _URILoader( + union_of_None_type_or_strtype, False, False, None +) +union_of_None_type_or_inttype = _UnionLoader( + ( + None_type, + inttype, + ) +) +union_of_FileLoader_or_DirectoryLoader = _UnionLoader( + ( + FileLoader, + DirectoryLoader, + ) +) +array_of_union_of_FileLoader_or_DirectoryLoader = _ArrayLoader( + union_of_FileLoader_or_DirectoryLoader +) +union_of_None_type_or_array_of_union_of_FileLoader_or_DirectoryLoader = _UnionLoader( + ( + None_type, + array_of_union_of_FileLoader_or_DirectoryLoader, + ) +) +secondaryfilesdsl_union_of_None_type_or_array_of_union_of_FileLoader_or_DirectoryLoader = _SecondaryDSLLoader( + union_of_None_type_or_array_of_union_of_FileLoader_or_DirectoryLoader +) +Directory_classLoader = _EnumLoader(("Directory",), "Directory_class") +uri_Directory_classLoader_False_True_None = _URILoader( + Directory_classLoader, False, True, None +) +union_of_None_type_or_booltype = _UnionLoader( + ( + None_type, + booltype, + ) +) +union_of_None_type_or_LoadListingEnumLoader = _UnionLoader( + ( + None_type, + LoadListingEnumLoader, + ) +) +array_of_SecondaryFileSchemaLoader = _ArrayLoader(SecondaryFileSchemaLoader) +union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader = _UnionLoader( + ( + None_type, + SecondaryFileSchemaLoader, + array_of_SecondaryFileSchemaLoader, + ) +) +secondaryfilesdsl_union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader = _SecondaryDSLLoader( + union_of_None_type_or_SecondaryFileSchemaLoader_or_array_of_SecondaryFileSchemaLoader +) +union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader = _UnionLoader( + ( + None_type, + strtype, + array_of_strtype, + ExpressionLoader, + ) +) +uri_union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader_True_False_None = _URILoader( + union_of_None_type_or_strtype_or_array_of_strtype_or_ExpressionLoader, + True, + False, + None, +) +union_of_None_type_or_strtype_or_ExpressionLoader = _UnionLoader( + ( + None_type, + strtype, + ExpressionLoader, + ) +) +uri_union_of_None_type_or_strtype_or_ExpressionLoader_True_False_None = _URILoader( + union_of_None_type_or_strtype_or_ExpressionLoader, True, False, None +) +union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype = _UnionLoader( + ( + CWLTypeLoader, + InputRecordSchemaLoader, + InputEnumSchemaLoader, + InputArraySchemaLoader, + strtype, + ) +) +array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype = _ArrayLoader( + union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype +) +union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype = _UnionLoader( + ( + 
CWLTypeLoader, + InputRecordSchemaLoader, + InputEnumSchemaLoader, + InputArraySchemaLoader, + strtype, + array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype, + ) +) +typedsl_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_2 = _TypeDSLLoader( + union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype, + 2, +) +array_of_InputRecordFieldLoader = _ArrayLoader(InputRecordFieldLoader) +union_of_None_type_or_array_of_InputRecordFieldLoader = _UnionLoader( + ( + None_type, + array_of_InputRecordFieldLoader, + ) +) +idmap_fields_union_of_None_type_or_array_of_InputRecordFieldLoader = _IdMapLoader( + union_of_None_type_or_array_of_InputRecordFieldLoader, "name", "type" +) +uri_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_False_True_2 = _URILoader( + union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_InputRecordSchemaLoader_or_InputEnumSchemaLoader_or_InputArraySchemaLoader_or_strtype, + False, + True, + 2, +) +union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype = _UnionLoader( + ( + CWLTypeLoader, + OutputRecordSchemaLoader, + OutputEnumSchemaLoader, + OutputArraySchemaLoader, + strtype, + ) +) +array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype = _ArrayLoader( + union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype +) +union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype = _UnionLoader( + ( + CWLTypeLoader, + OutputRecordSchemaLoader, + OutputEnumSchemaLoader, + OutputArraySchemaLoader, + strtype, + array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype, + ) +) +typedsl_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_2 = _TypeDSLLoader( + union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype, + 2, +) +array_of_OutputRecordFieldLoader = _ArrayLoader(OutputRecordFieldLoader) +union_of_None_type_or_array_of_OutputRecordFieldLoader = _UnionLoader( + ( + None_type, + array_of_OutputRecordFieldLoader, + ) +) +idmap_fields_union_of_None_type_or_array_of_OutputRecordFieldLoader = _IdMapLoader( + union_of_None_type_or_array_of_OutputRecordFieldLoader, "name", "type" +) 
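+# --- editorial sketch, not produced by the code generator ---
+# _IdMapLoader implements the Salad "identifier map" shortcut: a list of
+# records may instead be written as a mapping keyed by the id field ("name"
+# here), with the value shortcut ("type" here) covering the common
+# one-field case. Assuming the generated load(doc, baseuri, loadingOptions)
+# signature, both spellings below should load to the same result:
+#
+#   loader = idmap_fields_union_of_None_type_or_array_of_OutputRecordFieldLoader
+#   loader.load([{"name": "x", "type": "int"}], baseuri, loadingOptions)
+#   loader.load({"x": "int"}, baseuri, loadingOptions)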
+uri_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_False_True_2 = _URILoader( + union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_OutputRecordSchemaLoader_or_OutputEnumSchemaLoader_or_OutputArraySchemaLoader_or_strtype, + False, + True, + 2, +) +union_of_None_type_or_FileLoader_or_DirectoryLoader_or_Any_type = _UnionLoader( + ( + None_type, + FileLoader, + DirectoryLoader, + Any_type, + ) +) +union_of_CommandInputParameterLoader_or_WorkflowInputParameterLoader_or_OperationInputParameterLoader = _UnionLoader( + ( + CommandInputParameterLoader, + WorkflowInputParameterLoader, + OperationInputParameterLoader, + ) +) +array_of_union_of_CommandInputParameterLoader_or_WorkflowInputParameterLoader_or_OperationInputParameterLoader = _ArrayLoader( + union_of_CommandInputParameterLoader_or_WorkflowInputParameterLoader_or_OperationInputParameterLoader +) +idmap_inputs_array_of_union_of_CommandInputParameterLoader_or_WorkflowInputParameterLoader_or_OperationInputParameterLoader = _IdMapLoader( + array_of_union_of_CommandInputParameterLoader_or_WorkflowInputParameterLoader_or_OperationInputParameterLoader, + "id", + "type", +) +union_of_CommandOutputParameterLoader_or_ExpressionToolOutputParameterLoader_or_WorkflowOutputParameterLoader_or_OperationOutputParameterLoader = _UnionLoader( + ( + CommandOutputParameterLoader, + ExpressionToolOutputParameterLoader, + WorkflowOutputParameterLoader, + OperationOutputParameterLoader, + ) +) +array_of_union_of_CommandOutputParameterLoader_or_ExpressionToolOutputParameterLoader_or_WorkflowOutputParameterLoader_or_OperationOutputParameterLoader = _ArrayLoader( + union_of_CommandOutputParameterLoader_or_ExpressionToolOutputParameterLoader_or_WorkflowOutputParameterLoader_or_OperationOutputParameterLoader +) +idmap_outputs_array_of_union_of_CommandOutputParameterLoader_or_ExpressionToolOutputParameterLoader_or_WorkflowOutputParameterLoader_or_OperationOutputParameterLoader = _IdMapLoader( + array_of_union_of_CommandOutputParameterLoader_or_ExpressionToolOutputParameterLoader_or_WorkflowOutputParameterLoader_or_OperationOutputParameterLoader, + "id", + "type", +) +union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader = _UnionLoader( + ( + InlineJavascriptRequirementLoader, + SchemaDefRequirementLoader, + LoadListingRequirementLoader, + DockerRequirementLoader, + SoftwareRequirementLoader, + InitialWorkDirRequirementLoader, + EnvVarRequirementLoader, + ShellCommandRequirementLoader, + ResourceRequirementLoader, + WorkReuseLoader, + NetworkAccessLoader, + InplaceUpdateRequirementLoader, + ToolTimeLimitLoader, + SubworkflowFeatureRequirementLoader, + ScatterFeatureRequirementLoader, + MultipleInputFeatureRequirementLoader, + StepInputExpressionRequirementLoader, + ) +) 
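+# --- editorial note, not produced by the code generator ---
+# The 17-way union just above enumerates every concrete ProcessRequirement
+# record, so each `requirements` entry is dispatched to the matching class
+# via its `class` field; a value matching none of the branches is a
+# validation error (contrast with the hints union further down, which also
+# admits Any).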
+array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader = _ArrayLoader( + union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader +) +union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader = _UnionLoader( + ( + None_type, + array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader, + ) +) +idmap_requirements_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader = _IdMapLoader( + union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader, + "class", + "None", +) 
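+# --- editorial sketch, not produced by the code generator ---
+# With ("class", "None") as its arguments, the idmap loader just above lets
+# `requirements` be written as a mapping keyed by requirement class; the
+# literal string "None" signals that there is no single-value shortcut, so
+# each map value must itself be a mapping. Roughly, assuming the generated
+# load() signature:
+#
+#   idmap_requirements_....load(
+#       {"DockerRequirement": {"dockerPull": "debian:stable"}},
+#       baseuri,
+#       loadingOptions,
+#   )
+#   # loads the same as
+#   # [{"class": "DockerRequirement", "dockerPull": "debian:stable"}]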
+union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type = _UnionLoader( + ( + InlineJavascriptRequirementLoader, + SchemaDefRequirementLoader, + LoadListingRequirementLoader, + DockerRequirementLoader, + SoftwareRequirementLoader, + InitialWorkDirRequirementLoader, + EnvVarRequirementLoader, + ShellCommandRequirementLoader, + ResourceRequirementLoader, + WorkReuseLoader, + NetworkAccessLoader, + InplaceUpdateRequirementLoader, + ToolTimeLimitLoader, + SubworkflowFeatureRequirementLoader, + ScatterFeatureRequirementLoader, + MultipleInputFeatureRequirementLoader, + StepInputExpressionRequirementLoader, + Any_type, + ) +) +array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type = _ArrayLoader( + union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type +) +union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type = _UnionLoader( + ( + None_type, + array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type, + ) +) 
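+# --- editorial note, not produced by the code generator ---
+# Unlike the requirements union, the hint unions just above include
+# Any_type as a final branch: `hints` are advisory, so an unrecognized hint
+# (for example a vendor extension) still validates instead of failing the
+# whole document.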
+idmap_hints_union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type = _IdMapLoader( + union_of_None_type_or_array_of_union_of_InlineJavascriptRequirementLoader_or_SchemaDefRequirementLoader_or_LoadListingRequirementLoader_or_DockerRequirementLoader_or_SoftwareRequirementLoader_or_InitialWorkDirRequirementLoader_or_EnvVarRequirementLoader_or_ShellCommandRequirementLoader_or_ResourceRequirementLoader_or_WorkReuseLoader_or_NetworkAccessLoader_or_InplaceUpdateRequirementLoader_or_ToolTimeLimitLoader_or_SubworkflowFeatureRequirementLoader_or_ScatterFeatureRequirementLoader_or_MultipleInputFeatureRequirementLoader_or_StepInputExpressionRequirementLoader_or_Any_type, + "class", + "None", +) +union_of_None_type_or_CWLVersionLoader = _UnionLoader( + ( + None_type, + CWLVersionLoader, + ) +) +uri_union_of_None_type_or_CWLVersionLoader_False_True_None = _URILoader( + union_of_None_type_or_CWLVersionLoader, False, True, None +) +union_of_None_type_or_array_of_strtype = _UnionLoader( + ( + None_type, + array_of_strtype, + ) +) +uri_union_of_None_type_or_array_of_strtype_True_False_None = _URILoader( + union_of_None_type_or_array_of_strtype, True, False, None +) +InlineJavascriptRequirement_classLoader = _EnumLoader( + ("InlineJavascriptRequirement",), "InlineJavascriptRequirement_class" +) +uri_InlineJavascriptRequirement_classLoader_False_True_None = _URILoader( + InlineJavascriptRequirement_classLoader, False, True, None +) +SchemaDefRequirement_classLoader = _EnumLoader( + ("SchemaDefRequirement",), "SchemaDefRequirement_class" +) +uri_SchemaDefRequirement_classLoader_False_True_None = _URILoader( + SchemaDefRequirement_classLoader, False, True, None +) +union_of_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader = _UnionLoader( + ( + CommandInputRecordSchemaLoader, + CommandInputEnumSchemaLoader, + CommandInputArraySchemaLoader, + ) +) +array_of_union_of_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader = _ArrayLoader( + union_of_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader +) +union_of_strtype_or_ExpressionLoader = _UnionLoader( + ( + strtype, + ExpressionLoader, + ) +) +union_of_None_type_or_booltype_or_ExpressionLoader = _UnionLoader( + ( + None_type, + booltype, + ExpressionLoader, + ) +) +LoadListingRequirement_classLoader = _EnumLoader( + ("LoadListingRequirement",), "LoadListingRequirement_class" +) +uri_LoadListingRequirement_classLoader_False_True_None = _URILoader( + LoadListingRequirement_classLoader, False, True, None +) +union_of_None_type_or_inttype_or_ExpressionLoader = _UnionLoader( + ( + None_type, + inttype, + ExpressionLoader, + ) +) +union_of_None_type_or_strtype_or_ExpressionLoader_or_array_of_strtype = _UnionLoader( + ( + None_type, + strtype, + ExpressionLoader, + array_of_strtype, + ) +) +union_of_None_type_or_ExpressionLoader = _UnionLoader( + ( + None_type, + ExpressionLoader, + ) +) 
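+# --- editorial note, not produced by the code generator ---
+# Two recurring patterns above: single-symbol _EnumLoader/_URILoader pairs
+# (e.g. LoadListingRequirement_classLoader) pin a record's `class`
+# discriminator to exactly one literal value, and unions that include
+# ExpressionLoader mark fields that may hold a CWL parameter reference or
+# expression such as "$(inputs.threads)", which is kept as a string here
+# for the runtime to evaluate rather than being interpreted at load time.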
+union_of_None_type_or_CommandLineBindingLoader = _UnionLoader( + ( + None_type, + CommandLineBindingLoader, + ) +) +union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype = _UnionLoader( + ( + CWLTypeLoader, + CommandInputRecordSchemaLoader, + CommandInputEnumSchemaLoader, + CommandInputArraySchemaLoader, + strtype, + ) +) +array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype = _ArrayLoader( + union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype +) +union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype = _UnionLoader( + ( + CWLTypeLoader, + CommandInputRecordSchemaLoader, + CommandInputEnumSchemaLoader, + CommandInputArraySchemaLoader, + strtype, + array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype, + ) +) +typedsl_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_2 = _TypeDSLLoader( + union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype, + 2, +) +array_of_CommandInputRecordFieldLoader = _ArrayLoader(CommandInputRecordFieldLoader) +union_of_None_type_or_array_of_CommandInputRecordFieldLoader = _UnionLoader( + ( + None_type, + array_of_CommandInputRecordFieldLoader, + ) +) +idmap_fields_union_of_None_type_or_array_of_CommandInputRecordFieldLoader = ( + _IdMapLoader( + union_of_None_type_or_array_of_CommandInputRecordFieldLoader, "name", "type" + ) +) +uri_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_False_True_2 = _URILoader( + union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype, + False, + True, + 2, +) +union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype = _UnionLoader( + ( + CWLTypeLoader, + CommandOutputRecordSchemaLoader, + CommandOutputEnumSchemaLoader, + CommandOutputArraySchemaLoader, + strtype, + ) +) +array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype = _ArrayLoader( + union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype +) 
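+# --- editorial sketch, not produced by the code generator ---
+# _TypeDSLLoader implements the Salad type DSL, expanding the shorthand
+# before delegating to the wrapped union. Illustratively:
+#
+#   "string?"    ->  ["null", "string"]
+#   "string[]"   ->  {"type": "array", "items": "string"}
+#   "string[]?"  ->  ["null", {"type": "array", "items": "string"}]
+#
+# The trailing 2 in the typedsl_* names and constructor calls appears to be
+# the reference-scope argument carried over from the schema (an assumption
+# here; consult _TypeDSLLoader's definition earlier in this file).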
+union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype = _UnionLoader( + ( + CWLTypeLoader, + CommandOutputRecordSchemaLoader, + CommandOutputEnumSchemaLoader, + CommandOutputArraySchemaLoader, + strtype, + array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype, + ) +) +typedsl_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_2 = _TypeDSLLoader( + union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype, + 2, +) +union_of_None_type_or_CommandOutputBindingLoader = _UnionLoader( + ( + None_type, + CommandOutputBindingLoader, + ) +) +array_of_CommandOutputRecordFieldLoader = _ArrayLoader(CommandOutputRecordFieldLoader) +union_of_None_type_or_array_of_CommandOutputRecordFieldLoader = _UnionLoader( + ( + None_type, + array_of_CommandOutputRecordFieldLoader, + ) +) +idmap_fields_union_of_None_type_or_array_of_CommandOutputRecordFieldLoader = ( + _IdMapLoader( + union_of_None_type_or_array_of_CommandOutputRecordFieldLoader, "name", "type" + ) +) +uri_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_False_True_2 = _URILoader( + union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype, + False, + True, + 2, +) +union_of_CWLTypeLoader_or_stdinLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype = _UnionLoader( + ( + CWLTypeLoader, + stdinLoader, + CommandInputRecordSchemaLoader, + CommandInputEnumSchemaLoader, + CommandInputArraySchemaLoader, + strtype, + array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype, + ) +) +typedsl_union_of_CWLTypeLoader_or_stdinLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_2 = _TypeDSLLoader( + 
union_of_CWLTypeLoader_or_stdinLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandInputRecordSchemaLoader_or_CommandInputEnumSchemaLoader_or_CommandInputArraySchemaLoader_or_strtype, + 2, +) +union_of_CWLTypeLoader_or_stdoutLoader_or_stderrLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype = _UnionLoader( + ( + CWLTypeLoader, + stdoutLoader, + stderrLoader, + CommandOutputRecordSchemaLoader, + CommandOutputEnumSchemaLoader, + CommandOutputArraySchemaLoader, + strtype, + array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype, + ) +) +typedsl_union_of_CWLTypeLoader_or_stdoutLoader_or_stderrLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_2 = _TypeDSLLoader( + union_of_CWLTypeLoader_or_stdoutLoader_or_stderrLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype_or_array_of_union_of_CWLTypeLoader_or_CommandOutputRecordSchemaLoader_or_CommandOutputEnumSchemaLoader_or_CommandOutputArraySchemaLoader_or_strtype, + 2, +) +CommandLineTool_classLoader = _EnumLoader(("CommandLineTool",), "CommandLineTool_class") +uri_CommandLineTool_classLoader_False_True_None = _URILoader( + CommandLineTool_classLoader, False, True, None +) +array_of_CommandInputParameterLoader = _ArrayLoader(CommandInputParameterLoader) +idmap_inputs_array_of_CommandInputParameterLoader = _IdMapLoader( + array_of_CommandInputParameterLoader, "id", "type" +) +array_of_CommandOutputParameterLoader = _ArrayLoader(CommandOutputParameterLoader) +idmap_outputs_array_of_CommandOutputParameterLoader = _IdMapLoader( + array_of_CommandOutputParameterLoader, "id", "type" +) +union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader = _UnionLoader( + ( + strtype, + ExpressionLoader, + CommandLineBindingLoader, + ) +) +array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader = ( + _ArrayLoader(union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader) +) +union_of_None_type_or_array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader = _UnionLoader( + ( + None_type, + array_of_union_of_strtype_or_ExpressionLoader_or_CommandLineBindingLoader, + ) +) +array_of_inttype = _ArrayLoader(inttype) +union_of_None_type_or_array_of_inttype = _UnionLoader( + ( + None_type, + array_of_inttype, + ) +) +DockerRequirement_classLoader = _EnumLoader( + ("DockerRequirement",), "DockerRequirement_class" +) +uri_DockerRequirement_classLoader_False_True_None = _URILoader( + DockerRequirement_classLoader, False, True, None +) +SoftwareRequirement_classLoader = _EnumLoader( + ("SoftwareRequirement",), "SoftwareRequirement_class" +) +uri_SoftwareRequirement_classLoader_False_True_None = _URILoader( + SoftwareRequirement_classLoader, False, True, None +) +array_of_SoftwarePackageLoader = _ArrayLoader(SoftwarePackageLoader) +idmap_packages_array_of_SoftwarePackageLoader = _IdMapLoader( + array_of_SoftwarePackageLoader, "package", "specs" 
+) +uri_union_of_None_type_or_array_of_strtype_False_False_None = _URILoader( + union_of_None_type_or_array_of_strtype, False, False, None +) +InitialWorkDirRequirement_classLoader = _EnumLoader( + ("InitialWorkDirRequirement",), "InitialWorkDirRequirement_class" +) +uri_InitialWorkDirRequirement_classLoader_False_True_None = _URILoader( + InitialWorkDirRequirement_classLoader, False, True, None +) +union_of_None_type_or_DirentLoader_or_ExpressionLoader_or_FileLoader_or_DirectoryLoader_or_array_of_union_of_FileLoader_or_DirectoryLoader = _UnionLoader( + ( + None_type, + DirentLoader, + ExpressionLoader, + FileLoader, + DirectoryLoader, + array_of_union_of_FileLoader_or_DirectoryLoader, + ) +) +array_of_union_of_None_type_or_DirentLoader_or_ExpressionLoader_or_FileLoader_or_DirectoryLoader_or_array_of_union_of_FileLoader_or_DirectoryLoader = _ArrayLoader( + union_of_None_type_or_DirentLoader_or_ExpressionLoader_or_FileLoader_or_DirectoryLoader_or_array_of_union_of_FileLoader_or_DirectoryLoader +) +union_of_ExpressionLoader_or_array_of_union_of_None_type_or_DirentLoader_or_ExpressionLoader_or_FileLoader_or_DirectoryLoader_or_array_of_union_of_FileLoader_or_DirectoryLoader = _UnionLoader( + ( + ExpressionLoader, + array_of_union_of_None_type_or_DirentLoader_or_ExpressionLoader_or_FileLoader_or_DirectoryLoader_or_array_of_union_of_FileLoader_or_DirectoryLoader, + ) +) +EnvVarRequirement_classLoader = _EnumLoader( + ("EnvVarRequirement",), "EnvVarRequirement_class" +) +uri_EnvVarRequirement_classLoader_False_True_None = _URILoader( + EnvVarRequirement_classLoader, False, True, None +) +array_of_EnvironmentDefLoader = _ArrayLoader(EnvironmentDefLoader) +idmap_envDef_array_of_EnvironmentDefLoader = _IdMapLoader( + array_of_EnvironmentDefLoader, "envName", "envValue" +) +ShellCommandRequirement_classLoader = _EnumLoader( + ("ShellCommandRequirement",), "ShellCommandRequirement_class" +) +uri_ShellCommandRequirement_classLoader_False_True_None = _URILoader( + ShellCommandRequirement_classLoader, False, True, None +) +ResourceRequirement_classLoader = _EnumLoader( + ("ResourceRequirement",), "ResourceRequirement_class" +) +uri_ResourceRequirement_classLoader_False_True_None = _URILoader( + ResourceRequirement_classLoader, False, True, None +) +union_of_None_type_or_inttype_or_floattype_or_ExpressionLoader = _UnionLoader( + ( + None_type, + inttype, + floattype, + ExpressionLoader, + ) +) +WorkReuse_classLoader = _EnumLoader(("WorkReuse",), "WorkReuse_class") +uri_WorkReuse_classLoader_False_True_None = _URILoader( + WorkReuse_classLoader, False, True, None +) +union_of_booltype_or_ExpressionLoader = _UnionLoader( + ( + booltype, + ExpressionLoader, + ) +) +NetworkAccess_classLoader = _EnumLoader(("NetworkAccess",), "NetworkAccess_class") +uri_NetworkAccess_classLoader_False_True_None = _URILoader( + NetworkAccess_classLoader, False, True, None +) +InplaceUpdateRequirement_classLoader = _EnumLoader( + ("InplaceUpdateRequirement",), "InplaceUpdateRequirement_class" +) +uri_InplaceUpdateRequirement_classLoader_False_True_None = _URILoader( + InplaceUpdateRequirement_classLoader, False, True, None +) +ToolTimeLimit_classLoader = _EnumLoader(("ToolTimeLimit",), "ToolTimeLimit_class") +uri_ToolTimeLimit_classLoader_False_True_None = _URILoader( + ToolTimeLimit_classLoader, False, True, None +) +union_of_inttype_or_ExpressionLoader = _UnionLoader( + ( + inttype, + ExpressionLoader, + ) +) +union_of_None_type_or_InputBindingLoader = _UnionLoader( + ( + None_type, + InputBindingLoader, + ) +) 
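+# Editor's note (illustrative, not generated output): the _URILoader wrappers appear
+# to be named after their constructor arguments; assuming the signature is
+# _URILoader(inner, scoped_id, vocab_term, scoped_ref), a name ending in
+# "_False_True_None" records scoped_id=False, vocab_term=True, scoped_ref=None:
+#
+#     uri_WorkReuse_classLoader_False_True_None = _URILoader(
+#         WorkReuse_classLoader, False, True, None
+#     )
+#
+# i.e. a "class: WorkReuse" field is resolved as a vocabulary term rather than as a
+# scoped identifier.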
+ExpressionTool_classLoader = _EnumLoader(("ExpressionTool",), "ExpressionTool_class") +uri_ExpressionTool_classLoader_False_True_None = _URILoader( + ExpressionTool_classLoader, False, True, None +) +array_of_WorkflowInputParameterLoader = _ArrayLoader(WorkflowInputParameterLoader) +idmap_inputs_array_of_WorkflowInputParameterLoader = _IdMapLoader( + array_of_WorkflowInputParameterLoader, "id", "type" +) +array_of_ExpressionToolOutputParameterLoader = _ArrayLoader( + ExpressionToolOutputParameterLoader +) +idmap_outputs_array_of_ExpressionToolOutputParameterLoader = _IdMapLoader( + array_of_ExpressionToolOutputParameterLoader, "id", "type" +) +uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_1 = _URILoader( + union_of_None_type_or_strtype_or_array_of_strtype, False, False, 1 +) +union_of_None_type_or_LinkMergeMethodLoader = _UnionLoader( + ( + None_type, + LinkMergeMethodLoader, + ) +) +union_of_None_type_or_PickValueMethodLoader = _UnionLoader( + ( + None_type, + PickValueMethodLoader, + ) +) +uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_2 = _URILoader( + union_of_None_type_or_strtype_or_array_of_strtype, False, False, 2 +) +array_of_WorkflowStepInputLoader = _ArrayLoader(WorkflowStepInputLoader) +idmap_in__array_of_WorkflowStepInputLoader = _IdMapLoader( + array_of_WorkflowStepInputLoader, "id", "source" +) +union_of_strtype_or_WorkflowStepOutputLoader = _UnionLoader( + ( + strtype, + WorkflowStepOutputLoader, + ) +) +array_of_union_of_strtype_or_WorkflowStepOutputLoader = _ArrayLoader( + union_of_strtype_or_WorkflowStepOutputLoader +) +union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader = _UnionLoader( + (array_of_union_of_strtype_or_WorkflowStepOutputLoader,) +) +uri_union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader_True_False_None = ( + _URILoader( + union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader, + True, + False, + None, + ) +) +array_of_Any_type = _ArrayLoader(Any_type) +union_of_None_type_or_array_of_Any_type = _UnionLoader( + ( + None_type, + array_of_Any_type, + ) +) +idmap_hints_union_of_None_type_or_array_of_Any_type = _IdMapLoader( + union_of_None_type_or_array_of_Any_type, "class", "None" +) +union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader = _UnionLoader( + ( + strtype, + CommandLineToolLoader, + ExpressionToolLoader, + WorkflowLoader, + OperationLoader, + ) +) +uri_union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader_False_False_None = _URILoader( + union_of_strtype_or_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader, + False, + False, + None, +) +uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_0 = _URILoader( + union_of_None_type_or_strtype_or_array_of_strtype, False, False, 0 +) +union_of_None_type_or_ScatterMethodLoader = _UnionLoader( + ( + None_type, + ScatterMethodLoader, + ) +) +uri_union_of_None_type_or_ScatterMethodLoader_False_True_None = _URILoader( + union_of_None_type_or_ScatterMethodLoader, False, True, None +) +Workflow_classLoader = _EnumLoader(("Workflow",), "Workflow_class") +uri_Workflow_classLoader_False_True_None = _URILoader( + Workflow_classLoader, False, True, None +) +array_of_WorkflowOutputParameterLoader = _ArrayLoader(WorkflowOutputParameterLoader) +idmap_outputs_array_of_WorkflowOutputParameterLoader = _IdMapLoader( + array_of_WorkflowOutputParameterLoader, "id", "type" +) +array_of_WorkflowStepLoader = 
_ArrayLoader(WorkflowStepLoader) +union_of_array_of_WorkflowStepLoader = _UnionLoader((array_of_WorkflowStepLoader,)) +idmap_steps_union_of_array_of_WorkflowStepLoader = _IdMapLoader( + union_of_array_of_WorkflowStepLoader, "id", "None" +) +SubworkflowFeatureRequirement_classLoader = _EnumLoader( + ("SubworkflowFeatureRequirement",), "SubworkflowFeatureRequirement_class" +) +uri_SubworkflowFeatureRequirement_classLoader_False_True_None = _URILoader( + SubworkflowFeatureRequirement_classLoader, False, True, None +) +ScatterFeatureRequirement_classLoader = _EnumLoader( + ("ScatterFeatureRequirement",), "ScatterFeatureRequirement_class" +) +uri_ScatterFeatureRequirement_classLoader_False_True_None = _URILoader( + ScatterFeatureRequirement_classLoader, False, True, None +) +MultipleInputFeatureRequirement_classLoader = _EnumLoader( + ("MultipleInputFeatureRequirement",), "MultipleInputFeatureRequirement_class" +) +uri_MultipleInputFeatureRequirement_classLoader_False_True_None = _URILoader( + MultipleInputFeatureRequirement_classLoader, False, True, None +) +StepInputExpressionRequirement_classLoader = _EnumLoader( + ("StepInputExpressionRequirement",), "StepInputExpressionRequirement_class" +) +uri_StepInputExpressionRequirement_classLoader_False_True_None = _URILoader( + StepInputExpressionRequirement_classLoader, False, True, None +) +Operation_classLoader = _EnumLoader(("Operation",), "Operation_class") +uri_Operation_classLoader_False_True_None = _URILoader( + Operation_classLoader, False, True, None +) +array_of_OperationInputParameterLoader = _ArrayLoader(OperationInputParameterLoader) +idmap_inputs_array_of_OperationInputParameterLoader = _IdMapLoader( + array_of_OperationInputParameterLoader, "id", "type" +) +array_of_OperationOutputParameterLoader = _ArrayLoader(OperationOutputParameterLoader) +idmap_outputs_array_of_OperationOutputParameterLoader = _IdMapLoader( + array_of_OperationOutputParameterLoader, "id", "type" +) +union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader = _UnionLoader( + ( + CommandLineToolLoader, + ExpressionToolLoader, + WorkflowLoader, + OperationLoader, + ) +) +array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader = _ArrayLoader( + union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader +) +union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader_or_array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader = _UnionLoader( + ( + CommandLineToolLoader, + ExpressionToolLoader, + WorkflowLoader, + OperationLoader, + array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader, + ) +) + + +def load_document( + doc: Any, + baseuri: Optional[str] = None, + loadingOptions: Optional[LoadingOptions] = None, +) -> Any: + if baseuri is None: + baseuri = file_uri(os.getcwd()) + "/" + if loadingOptions is None: + loadingOptions = LoadingOptions() + result, metadata = _document_load( + union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader_or_array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader, + doc, + baseuri, + loadingOptions, + ) + return result + + +def load_document_with_metadata( + doc: Any, + baseuri: Optional[str] = None, + loadingOptions: Optional[LoadingOptions] = None, + addl_metadata_fields: Optional[MutableSequence[str]] = None, +) -> Any: 
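+    # Editor's note: a hedged usage sketch for these load_document* entry points
+    # (the file name and URI below are hypothetical):
+    #
+    #     import cwl_v1_2
+    #
+    #     with open("echo.cwl") as handle:
+    #         tool = cwl_v1_2.load_document_by_string(
+    #             handle.read(), "file:///tmp/echo.cwl"
+    #         )
+    #
+    # load_document() and load_document_by_yaml() accept an already-parsed object
+    # instead of text; all of them resolve the document against the CWL v1.2
+    # loaders defined above and return the constructed Python objects.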
+ if baseuri is None: + baseuri = file_uri(os.getcwd()) + "/" + if loadingOptions is None: + loadingOptions = LoadingOptions(fileuri=baseuri) + return _document_load( + union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader_or_array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader, + doc, + baseuri, + loadingOptions, + addl_metadata_fields=addl_metadata_fields, + ) + + +def load_document_by_string( + string: Any, + uri: str, + loadingOptions: Optional[LoadingOptions] = None, +) -> Any: + yaml = yaml_no_ts() + result = yaml.load(string) + add_lc_filename(result, uri) + + if loadingOptions is None: + loadingOptions = LoadingOptions(fileuri=uri) + + result, metadata = _document_load( + union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader_or_array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader, + result, + uri, + loadingOptions, + ) + return result + + +def load_document_by_yaml( + yaml: Any, + uri: str, + loadingOptions: Optional[LoadingOptions] = None, +) -> Any: + """ + Shortcut to load via a YAML object. + yaml: must be from ruamel.yaml.main.YAML.load with preserve_quotes=True + """ + add_lc_filename(yaml, uri) + + if loadingOptions is None: + loadingOptions = LoadingOptions(fileuri=uri) + + result, metadata = _document_load( + union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader_or_array_of_union_of_CommandLineToolLoader_or_ExpressionToolLoader_or_WorkflowLoader_or_OperationLoader, + yaml, + uri, + loadingOptions, + ) + return result diff --git a/schema_salad/metaschema.py b/schema_salad/metaschema.py index e6eb44f3a..c659889b5 100644 --- a/schema_salad/metaschema.py +++ b/schema_salad/metaschema.py @@ -23,13 +23,14 @@ Type, Union, cast, + no_type_check, ) from urllib.parse import quote, urldefrag, urlparse, urlsplit, urlunsplit from urllib.request import pathname2url from rdflib import Graph from rdflib.plugins.parsers.notation3 import BadSyntax -from ruamel.yaml.comments import CommentedMap +from ruamel.yaml.comments import CommentedMap, CommentedSeq from schema_salad.exceptions import SchemaSaladException, ValidationException from schema_salad.fetcher import DefaultFetcher, Fetcher, MemoryCachingFetcher @@ -44,6 +45,8 @@ IdxType = MutableMapping[str, Tuple[Any, "LoadingOptions"]] +doc_line_info = CommentedMap() + class LoadingOptions: idx: IdxType @@ -202,8 +205,14 @@ def fromDoc( @abstractmethod def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> Dict[str, Any]: + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, + ) -> CommentedMap: """Convert this object to a JSON/YAML friendly dictionary.""" @@ -234,6 +243,196 @@ def load_field(val, fieldtype, baseuri, loadingOptions, lc=None): save_type = Optional[Union[MutableMapping[str, Any], MutableSequence[Any], int, float, bool, str]] +def add_kv( + old_doc: CommentedMap, + new_doc: CommentedMap, + line_numbers: Dict[Any, Dict[str, int]], + key: str, + val: Any, + max_len: int, + cols: Dict[int, int], + min_col: int = 0, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, +) -> Tuple[int, Optional[Dict[int, int]]]: + """Add key value pair into Commented Map. 
+
+    Insert a key/value pair into the new CommentedMap, given the old CommentedMap,
+    the line numbers for each key/value pair in the old CommentedMap, the key/value
+    pair to insert, the maximum line (max_len) of the old CommentedMap, and the
+    maximum column taken on each line.
+    """
+    if inserted_line_info is None:
+        inserted_line_info = {}
+
+    if len(inserted_line_info.keys()) >= 1:
+        max_line = max(inserted_line_info.keys()) + 1
+    else:
+        max_line = 0
+
+    if key in line_numbers:  # The key to insert is a key in the original CommentedMap
+        line_info = old_doc.lc.data[key]  # Get the line information for the key
+        if (
+            line_info[0] + shift not in inserted_line_info
+        ):  # If the line of the key + shift isn't taken, use it
+            new_doc.lc.add_kv_line_col(
+                key,
+                [
+                    old_doc.lc.data[key][0] + shift,
+                    old_doc.lc.data[key][1],
+                    old_doc.lc.data[key][2] + shift,
+                    old_doc.lc.data[key][3],
+                ],
+            )
+            inserted_line_info[old_doc.lc.data[key][0] + shift] = old_doc.lc.data[key][1]
+        else:  # If the line is already taken
+            line = line_info[0] + shift
+            while line in inserted_line_info.keys():  # Find the closest free line
+                line += 1
+            new_doc.lc.add_kv_line_col(
+                key,
+                [
+                    line,
+                    old_doc.lc.data[key][1],
+                    line + (line - old_doc.lc.data[key][2]),
+                    old_doc.lc.data[key][3],
+                ],
+            )
+            inserted_line_info[line] = old_doc.lc.data[key][1]
+        return max_len, inserted_line_info
+    elif isinstance(val, (int, float, str)) and not isinstance(
+        val, bool
+    ):  # The value can be keyed into line_numbers (int/float/str, but not bool)
+        if val in line_numbers:  # If the value is in the original CommentedMap
+            line = line_numbers[val]["line"] + shift  # Get the line info for the value
+            if line in inserted_line_info:  # If that line is taken, fall back to max_line
+                line = max_line
+
+            col = line_numbers[val]["col"]
+            new_doc.lc.add_kv_line_col(key, [line, col, line, col + len(key) + 2])
+            inserted_line_info[line] = col + len(key) + 2
+            return max_len, inserted_line_info
+        elif isinstance(val, str):  # Logic for DSL expansion with "?"
+            if val + "?" in line_numbers:
+                line = line_numbers[val + "?"]["line"] + shift
+                if line in inserted_line_info:
+                    line = max_line
+                col = line_numbers[val + "?"]["col"]
+                new_doc.lc.add_kv_line_col(key, [line, col, line, col + len(key) + 2])
+                inserted_line_info[line] = col + len(key) + 2
+                return max_len, inserted_line_info
+    elif old_doc:
+        if val in old_doc:
+            index = old_doc.lc.data.index(val)
+            line_info = old_doc.lc.data[index]
+            if line_info[0] + shift not in inserted_line_info:
+                new_doc.lc.add_kv_line_col(
+                    key,
+                    [
+                        old_doc.lc.data[index][0] + shift,
+                        old_doc.lc.data[index][1],
+                        old_doc.lc.data[index][2] + shift,
+                        old_doc.lc.data[index][3],
+                    ],
+                )
+                inserted_line_info[old_doc.lc.data[index][0] + shift] = old_doc.lc.data[index][
+                    1
+                ]
+            else:
+                new_doc.lc.add_kv_line_col(
+                    key,
+                    [
+                        max_line + shift,
+                        old_doc.lc.data[index][1],
+                        max_line + (max_line - old_doc.lc.data[index][2]) + shift,
+                        old_doc.lc.data[index][3],
+                    ],
+                )
+                inserted_line_info[max_line + shift] = old_doc.lc.data[index][1]
+    # Neither the key nor the value is in the original CommentedMap/old doc (or the
+    # value is not hashable): append the pair at max_line, starting at min_col
+    new_doc.lc.add_kv_line_col(key, [max_line, min_col, max_line, min_col + len(key) + 2])
+    inserted_line_info[max_line] = min_col + len(key) + 2
+    return max_len + 1, inserted_line_info
+
+
+@no_type_check
+def iterate_through_doc(keys: List[Any]) -> Optional[CommentedMap]:
+    """Take a list of keys/indexes and iterate through the global CommentedMap."""
+    doc = doc_line_info
+    for key in keys:
+        if isinstance(doc, CommentedMap):
+            doc = doc.get(key)
+        elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int):
+            if key < len(doc):
+                doc = doc[key]
+            else:
+                return None
+        else:
+            return None
+    if isinstance(doc, CommentedSeq):
+        # Represent a sequence as a map keyed by its items so callers can reuse
+        # the same lc lookup logic
+        to_return = CommentedMap()
+        for index, key in enumerate(doc):
+            to_return[key] = ""
+            to_return.lc.add_kv_line_col(
+                key,
+                [
+                    doc.lc.data[index][0],
+                    doc.lc.data[index][1],
+                    doc.lc.data[index][0],
+                    doc.lc.data[index][1],
+                ],
+            )
+        return to_return
+    return doc
+
+
+def get_line_numbers(doc: Optional[CommentedMap]) -> Dict[Any, Dict[str, int]]:
+    """Get line numbers for key/value pairs in a CommentedMap.
+
+    For each key/value pair in a CommentedMap, save the line/col info into a
+    dictionary; value info is only saved when the value is hashable.
+    """
+    line_numbers: Dict[Any, Dict[str, int]] = {}
+    if doc is None:
+        return {}
+    if doc.lc.data is None:
+        return {}
+    for key in doc.lc.data.keys():
+        line_numbers[key] = {}
+
+        line_numbers[key]["line"] = doc.lc.data[key][0]
+        line_numbers[key]["col"] = doc.lc.data[key][1]
+        value = doc[key]  # lc.data values are [line, col, ...] lists; look up the real value
+        if isinstance(value, (int, float, bool, str)):
+            line_numbers[value] = {}
+            line_numbers[value]["line"] = doc.lc.data[key][2]
+            line_numbers[value]["col"] = doc.lc.data[key][3]
+    return line_numbers
+
+
+def get_min_col(line_numbers: Dict[Any, Dict[str, int]]) -> int:
+    """Given line/column information, return the maximum column found (used as the base column for inserted keys)."""
+    min_col = 0
+    for line in line_numbers:
+        if line_numbers[line]["col"] > min_col:
+            min_col = line_numbers[line]["col"]
+    return min_col
+
+
+def get_max_line_num(doc: CommentedMap) -> int:
+    """Get the max line number for a CommentedMap.
+
+    Iterate through the key with the highest line number until you reach a non-CommentedMap value
+    or an empty CommentedMap.
+ """ + max_line = 0 + max_key = "" + cur = doc + while isinstance(cur, CommentedMap) and len(cur) > 0: + for key in cur.lc.data.keys(): + if cur.lc.data[key][2] >= max_line: + max_line = cur.lc.data[key][2] + max_key = key + cur = cur[max_key] + return max_line + 1 def extract_type(val_type: Type[Any]) -> str: """Take a type of value, and extracts the value as a string.""" val_str = str(val_type) @@ -296,15 +495,71 @@ def save( top: bool = True, base_url: str = "", relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> save_type: + """Save a val of any type. + + Recursively calls save method from class if val is of type Saveable. + Otherwise, saves val to CommentedMap or CommentedSeq. + """ + if keys is None: + keys = [] + + doc = iterate_through_doc(keys) + if isinstance(val, Saveable): - return val.save(top=top, base_url=base_url, relative_uris=relative_uris) + return val.save( + top=top, + base_url=base_url, + relative_uris=relative_uris, + keys=keys, + inserted_line_info=inserted_line_info, + shift=shift, + ) if isinstance(val, MutableSequence): - return [save(v, top=False, base_url=base_url, relative_uris=relative_uris) for v in val] + r = CommentedSeq() + r.lc.data = {} + for i in range(0, len(val)): + new_keys = keys + if doc: + if str(i) in doc: + r.lc.data[i] = doc.lc.data[i] + new_keys.append(i) + r.append( + save( + val[i], + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=new_keys, + inserted_line_info=inserted_line_info, + shift=shift, + ) + ) + return r + if isinstance(val, MutableMapping): - newdict = {} + newdict = CommentedMap() + new_keys = keys for key in val: - newdict[key] = save(val[key], top=False, base_url=base_url, relative_uris=relative_uris) + + if doc: + if key in doc: + newdict.lc.add_kv_line_col(key, doc.lc.data[key]) + new_keys.append(key) + + newdict[key] = save( + val[key], + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=new_keys, + inserted_line_info=inserted_line_info, + shift=shift, + ) + return newdict if val is None or isinstance(val, (int, float, bool, str)): return val @@ -842,7 +1097,7 @@ def load(self, doc, baseuri, loadingOptions, docRoot=None, lc=None): def _document_load( loader: _Loader, - doc: Union[str, MutableMapping[str, Any], MutableSequence[Any]], + doc: Union[CommentedMap, str, MutableMapping[str, Any], MutableSequence[Any]], baseuri: str, loadingOptions: LoadingOptions, addl_metadata_fields: Optional[MutableSequence[str]] = None, @@ -882,6 +1137,10 @@ def _document_load( if "$base" in doc: doc.pop("$base") + if isinstance(doc, CommentedMap): + global doc_line_info + doc_line_info = doc + if "$graph" in doc: loadingOptions.idx[baseuri] = ( loader.load(doc["$graph"], baseuri, loadingOptions), @@ -897,7 +1156,6 @@ def _document_load( loadingOptions.idx[docuri] = loadingOptions.idx[baseuri] return loadingOptions.idx[baseuri] - if isinstance(doc, MutableSequence): loadingOptions.idx[baseuri] = ( loader.load(doc, baseuri, loadingOptions), @@ -1250,9 +1508,30 @@ def fromDoc( return _constructed def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> Dict[str, Any]: - r: Dict[str, Any] = {} + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + 
doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} if relative_uris: for ef in self.extension_fields: @@ -1260,16 +1539,105 @@ def save( else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] - if self.name is not None: + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u - if self.doc is not None: + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="name", + val=r.get("name"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.doc is not None and "doc" not in r: r["doc"] = save( - self.doc, top=False, base_url=self.name, relative_uris=relative_uris + self.doc, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, ) - if self.type_ is not None: + if self.type_ is not None and "type" not in r: r["type"] = save( - self.type_, top=False, base_url=self.name, relative_uris=relative_uris + self.type_, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -1444,9 +1812,30 @@ def fromDoc( return _constructed def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> Dict[str, Any]: - r: Dict[str, Any] = {} + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + 
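+        # Editor's note (sketch of intent, not part of the diff): these helpers
+        # snapshot the original layout that add_kv() relies on: line_numbers maps
+        # each original key (and hashable value) to its line/column, max_len is one
+        # past the highest original line (where brand-new keys get appended), and
+        # get_min_col() yields the widest key column seen, the fallback column for
+        # inserted entries.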
line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} if relative_uris: for ef in self.extension_fields: @@ -1454,13 +1843,90 @@ def save( else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] - if self.fields is not None: + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.fields is not None and "fields" not in r: r["fields"] = save( - self.fields, top=False, base_url=base_url, relative_uris=relative_uris + self.fields, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="fields", + val=r.get("fields"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, ) - if self.type_ is not None: + if self.type_ is not None and "type" not in r: r["type"] = save( - self.type_, top=False, base_url=base_url, relative_uris=relative_uris + self.type_, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -1699,9 +2165,30 @@ def fromDoc( return _constructed def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> Dict[str, Any]: - r: Dict[str, Any] = {} + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} if relative_uris: for ef in self.extension_fields: @@ -1709,15 +2196,101 @@ def save( else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] - if self.name is not None: + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in 
inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u - if self.symbols is not None: - u = save_relative_uri(self.symbols, self.name, True, None, relative_uris) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="name", + val=r.get("name"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.symbols is not None and "symbols" not in r: + u = save_relative_uri( + self.symbols, str(self.name), True, None, relative_uris + ) r["symbols"] = u - if self.type_ is not None: + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="symbols", + val=r.get("symbols"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.type_ is not None and "type" not in r: r["type"] = save( - self.type_, top=False, base_url=self.name, relative_uris=relative_uris + self.type_, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -1889,9 +2462,30 @@ def fromDoc( return _constructed def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> Dict[str, Any]: - r: Dict[str, Any] = {} + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} if relative_uris: for ef in self.extension_fields: @@ -1899,12 +2493,84 @@ def save( else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] - if self.items is not None: + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + 
inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.items is not None and "items" not in r: u = save_relative_uri(self.items, base_url, False, 2, relative_uris) r["items"] = u - if self.type_ is not None: + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="items", + val=r.get("items"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.type_ is not None and "type" not in r: r["type"] = save( - self.type_, top=False, base_url=base_url, relative_uris=relative_uris + self.type_, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -2546,9 +3212,30 @@ def fromDoc( return _constructed def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> Dict[str, Any]: - r: Dict[str, Any] = {} + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} if relative_uris: for ef in self.extension_fields: @@ -2556,63 +3243,273 @@ def save( else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] - if self._id is not None: + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self._id is not None and "_id" not in r: u = save_relative_uri(self._id, base_url, True, None, relative_uris) r["_id"] = u - if self._type is not None: + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + 
line_numbers=line_numbers, + key="_id", + val=r.get("_id"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self._type is not None and "_type" not in r: r["_type"] = save( - self._type, top=False, base_url=base_url, relative_uris=relative_uris + self._type, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, ) - if self._container is not None: + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="_type", + val=r.get("_type"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self._container is not None and "_container" not in r: r["_container"] = save( self._container, top=False, base_url=base_url, relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="_container", + val=r.get("_container"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, ) - if self.identity is not None: + if self.identity is not None and "identity" not in r: r["identity"] = save( - self.identity, top=False, base_url=base_url, relative_uris=relative_uris + self.identity, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, ) - if self.noLinkCheck is not None: + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="identity", + val=r.get("identity"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.noLinkCheck is not None and "noLinkCheck" not in r: r["noLinkCheck"] = save( self.noLinkCheck, top=False, base_url=base_url, relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="noLinkCheck", + val=r.get("noLinkCheck"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, ) - if self.mapSubject is not None: + if self.mapSubject is not None and "mapSubject" not in r: r["mapSubject"] = save( self.mapSubject, top=False, base_url=base_url, relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, ) - if self.mapPredicate is not None: + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="mapSubject", + val=r.get("mapSubject"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.mapPredicate is not None and "mapPredicate" not in r: r["mapPredicate"] = save( self.mapPredicate, top=False, base_url=base_url, relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="mapPredicate", + val=r.get("mapPredicate"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, ) - if self.refScope is not None: + if self.refScope is not None and "refScope" not in r: r["refScope"] = save( - self.refScope, top=False, base_url=base_url, relative_uris=relative_uris + self.refScope, + top=False, + base_url=base_url, 
+ relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, ) - if self.typeDSL is not None: + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="refScope", + val=r.get("refScope"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.typeDSL is not None and "typeDSL" not in r: r["typeDSL"] = save( - self.typeDSL, top=False, base_url=base_url, relative_uris=relative_uris + self.typeDSL, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="typeDSL", + val=r.get("typeDSL"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, ) - if self.secondaryFilesDSL is not None: + if self.secondaryFilesDSL is not None and "secondaryFilesDSL" not in r: r["secondaryFilesDSL"] = save( self.secondaryFilesDSL, top=False, base_url=base_url, relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, ) - if self.subscope is not None: + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="secondaryFilesDSL", + val=r.get("secondaryFilesDSL"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.subscope is not None and "subscope" not in r: r["subscope"] = save( - self.subscope, top=False, base_url=base_url, relative_uris=relative_uris + self.subscope, + top=False, + base_url=base_url, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="subscope", + val=r.get("subscope"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -2801,9 +3698,30 @@ def fromDoc( return _constructed def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> Dict[str, Any]: - r: Dict[str, Any] = {} + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} if relative_uris: for ef in self.extension_fields: @@ -2811,14 +3729,81 @@ def save( else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] - if self.specializeFrom is not None: + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a 
list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.specializeFrom is not None and "specializeFrom" not in r: u = save_relative_uri( self.specializeFrom, base_url, False, 1, relative_uris ) r["specializeFrom"] = u - if self.specializeTo is not None: + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="specializeFrom", + val=r.get("specializeFrom"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.specializeTo is not None and "specializeTo" not in r: u = save_relative_uri(self.specializeTo, base_url, False, 1, relative_uris) r["specializeTo"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="specializeTo", + val=r.get("specializeTo"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) # top refers to the directory level if top: @@ -3175,9 +4160,30 @@ def fromDoc( return _constructed def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> Dict[str, Any]: - r: Dict[str, Any] = {} + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} if relative_uris: for ef in self.extension_fields: @@ -3185,27 +4191,147 @@ def save( else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] - if self.name is not None: + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u - if self.doc is not None: + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="name", + val=r.get("name"), + cols=cols, + min_col=min_col, + max_len=max_len, + 
inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.doc is not None and "doc" not in r: r["doc"] = save( - self.doc, top=False, base_url=self.name, relative_uris=relative_uris + self.doc, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, ) - if self.type_ is not None: + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.type_ is not None and "type" not in r: r["type"] = save( - self.type_, top=False, base_url=self.name, relative_uris=relative_uris + self.type_, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, ) - if self.jsonldPredicate is not None: + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.jsonldPredicate is not None and "jsonldPredicate" not in r: r["jsonldPredicate"] = save( self.jsonldPredicate, top=False, - base_url=self.name, + base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="jsonldPredicate", + val=r.get("jsonldPredicate"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, ) - if self.default is not None: + if self.default is not None and "default" not in r: r["default"] = save( - self.default, top=False, base_url=self.name, relative_uris=relative_uris + self.default, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="default", + val=r.get("default"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -3948,9 +5074,30 @@ def fromDoc( return _constructed def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> Dict[str, Any]: - r: Dict[str, Any] = {} + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} if relative_uris: for ef in self.extension_fields: @@ -3958,64 +5105,297 @@ def save( else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] - if self.name is not None: + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift 
+= 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u - if self.inVocab is not None: + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="name", + val=r.get("name"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.inVocab is not None and "inVocab" not in r: r["inVocab"] = save( - self.inVocab, top=False, base_url=self.name, relative_uris=relative_uris + self.inVocab, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="inVocab", + val=r.get("inVocab"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, ) - if self.fields is not None: + if self.fields is not None and "fields" not in r: r["fields"] = save( - self.fields, top=False, base_url=self.name, relative_uris=relative_uris + self.fields, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="fields", + val=r.get("fields"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, ) - if self.type_ is not None: + if self.type_ is not None and "type" not in r: r["type"] = save( - self.type_, top=False, base_url=self.name, relative_uris=relative_uris + self.type_, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, ) - if self.doc is not None: + if self.doc is not None and "doc" not in r: r["doc"] = save( - self.doc, top=False, base_url=self.name, relative_uris=relative_uris + self.doc, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.docParent is not None and "docParent" not in r: + u = save_relative_uri( + self.docParent, str(self.name), False, None, relative_uris ) - if self.docParent is not None: - u = save_relative_uri(self.docParent, self.name, False, None, relative_uris) r["docParent"] = u - if self.docChild is not None: - u 
= save_relative_uri(self.docChild, self.name, False, None, relative_uris) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="docParent", + val=r.get("docParent"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.docChild is not None and "docChild" not in r: + u = save_relative_uri( + self.docChild, str(self.name), False, None, relative_uris + ) r["docChild"] = u - if self.docAfter is not None: - u = save_relative_uri(self.docAfter, self.name, False, None, relative_uris) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="docChild", + val=r.get("docChild"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.docAfter is not None and "docAfter" not in r: + u = save_relative_uri( + self.docAfter, str(self.name), False, None, relative_uris + ) r["docAfter"] = u - if self.jsonldPredicate is not None: + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="docAfter", + val=r.get("docAfter"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.jsonldPredicate is not None and "jsonldPredicate" not in r: r["jsonldPredicate"] = save( self.jsonldPredicate, top=False, - base_url=self.name, + base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="jsonldPredicate", + val=r.get("jsonldPredicate"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, ) - if self.documentRoot is not None: + if self.documentRoot is not None and "documentRoot" not in r: r["documentRoot"] = save( self.documentRoot, top=False, - base_url=self.name, + base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, ) - if self.abstract is not None: + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="documentRoot", + val=r.get("documentRoot"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.abstract is not None and "abstract" not in r: r["abstract"] = save( self.abstract, top=False, - base_url=self.name, + base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="abstract", + val=r.get("abstract"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, ) - if self.extends is not None: - u = save_relative_uri(self.extends, self.name, False, 1, relative_uris) + if self.extends is not None and "extends" not in r: + u = save_relative_uri(self.extends, str(self.name), False, 1, relative_uris) r["extends"] = u - if self.specialize is not None: + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="extends", + val=r.get("extends"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.specialize is not None and "specialize" not in r: r["specialize"] = save( self.specialize, 
top=False, - base_url=self.name, + base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="specialize", + val=r.get("specialize"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level @@ -4676,9 +6056,30 @@ def fromDoc( return _constructed def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> Dict[str, Any]: - r: Dict[str, Any] = {} + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} if relative_uris: for ef in self.extension_fields: @@ -4686,50 +6087,252 @@ def save( else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] - if self.name is not None: + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u - if self.inVocab is not None: + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="name", + val=r.get("name"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.inVocab is not None and "inVocab" not in r: r["inVocab"] = save( - self.inVocab, top=False, base_url=self.name, relative_uris=relative_uris + self.inVocab, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="inVocab", + val=r.get("inVocab"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.symbols is not None and "symbols" not in r: + u = save_relative_uri( + self.symbols, str(self.name), True, None, relative_uris ) - if self.symbols is not None: - u = save_relative_uri(self.symbols, self.name, True, None, relative_uris) r["symbols"] = u - if self.type_ is not None: + 
max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="symbols", + val=r.get("symbols"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.type_ is not None and "type" not in r: r["type"] = save( - self.type_, top=False, base_url=self.name, relative_uris=relative_uris + self.type_, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, ) - if self.doc is not None: + if self.doc is not None and "doc" not in r: r["doc"] = save( - self.doc, top=False, base_url=self.name, relative_uris=relative_uris + self.doc, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.docParent is not None and "docParent" not in r: + u = save_relative_uri( + self.docParent, str(self.name), False, None, relative_uris ) - if self.docParent is not None: - u = save_relative_uri(self.docParent, self.name, False, None, relative_uris) r["docParent"] = u - if self.docChild is not None: - u = save_relative_uri(self.docChild, self.name, False, None, relative_uris) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="docParent", + val=r.get("docParent"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.docChild is not None and "docChild" not in r: + u = save_relative_uri( + self.docChild, str(self.name), False, None, relative_uris + ) r["docChild"] = u - if self.docAfter is not None: - u = save_relative_uri(self.docAfter, self.name, False, None, relative_uris) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="docChild", + val=r.get("docChild"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.docAfter is not None and "docAfter" not in r: + u = save_relative_uri( + self.docAfter, str(self.name), False, None, relative_uris + ) r["docAfter"] = u - if self.jsonldPredicate is not None: + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="docAfter", + val=r.get("docAfter"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.jsonldPredicate is not None and "jsonldPredicate" not in r: r["jsonldPredicate"] = save( self.jsonldPredicate, top=False, - base_url=self.name, + base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="jsonldPredicate", + val=r.get("jsonldPredicate"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, ) - if self.documentRoot is not None: + if self.documentRoot is not None and 
"documentRoot" not in r: r["documentRoot"] = save( self.documentRoot, top=False, - base_url=self.name, + base_url=str(self.name), relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="documentRoot", + val=r.get("documentRoot"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, ) - if self.extends is not None: - u = save_relative_uri(self.extends, self.name, False, 1, relative_uris) + if self.extends is not None and "extends" not in r: + u = save_relative_uri(self.extends, str(self.name), False, 1, relative_uris) r["extends"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="extends", + val=r.get("extends"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) # top refers to the directory level if top: @@ -5191,9 +6794,30 @@ def fromDoc( return _constructed def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> Dict[str, Any]: - r: Dict[str, Any] = {} + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} if relative_uris: for ef in self.extension_fields: @@ -5201,29 +6825,177 @@ def save( else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] - if self.name is not None: + + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) + if self.name is not None and "name" not in r: u = save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u - if self.inVocab is not None: + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="name", + val=r.get("name"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.inVocab is not None and "inVocab" not in r: r["inVocab"] = save( - self.inVocab, top=False, base_url=self.name, relative_uris=relative_uris + self.inVocab, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + 
inserted_line_info=inserted_line_info, + shift=shift, ) - if self.doc is not None: + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="inVocab", + val=r.get("inVocab"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.doc is not None and "doc" not in r: r["doc"] = save( - self.doc, top=False, base_url=self.name, relative_uris=relative_uris + self.doc, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="doc", + val=r.get("doc"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.docParent is not None and "docParent" not in r: + u = save_relative_uri( + self.docParent, str(self.name), False, None, relative_uris ) - if self.docParent is not None: - u = save_relative_uri(self.docParent, self.name, False, None, relative_uris) r["docParent"] = u - if self.docChild is not None: - u = save_relative_uri(self.docChild, self.name, False, None, relative_uris) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="docParent", + val=r.get("docParent"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.docChild is not None and "docChild" not in r: + u = save_relative_uri( + self.docChild, str(self.name), False, None, relative_uris + ) r["docChild"] = u - if self.docAfter is not None: - u = save_relative_uri(self.docAfter, self.name, False, None, relative_uris) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="docChild", + val=r.get("docChild"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.docAfter is not None and "docAfter" not in r: + u = save_relative_uri( + self.docAfter, str(self.name), False, None, relative_uris + ) r["docAfter"] = u - if self.type_ is not None: + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="docAfter", + val=r.get("docAfter"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, + ) + if self.type_ is not None and "type" not in r: r["type"] = save( - self.type_, top=False, base_url=self.name, relative_uris=relative_uris + self.type_, + top=False, + base_url=str(self.name), + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift, + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="type", + val=r.get("type"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift, ) # top refers to the directory level diff --git a/schema_salad/python_codegen.py b/schema_salad/python_codegen.py index 19cc69e7e..0e74d7a22 100644 --- a/schema_salad/python_codegen.py +++ b/schema_salad/python_codegen.py @@ -268,13 +268,83 @@ def fromDoc( ) self.idfield = idfield + if "id" in field_names: + self.serializer.write( + """ + def save( + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys 
is None: + keys = [] + r = CommentedMap() - self.serializer.write( - """ + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc: + if self.id: + temp_id = self.id + if len(temp_id.split('#')) > 1: + temp_id = self.id.split("#")[1] + if temp_id in doc: + keys.append(temp_id) + temp_doc = doc.get(temp_id) + if isinstance(temp_doc, CommentedMap): + doc = temp_doc + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} + + if relative_uris: + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + else: + for ef in self.extension_fields: + r[ef] = self.extension_fields[ef] +""" + ) + else: + self.serializer.write( + """ def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> Dict[str, Any]: - r: Dict[str, Any] = {} + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0 + ) -> CommentedMap: + if keys is None: + keys = [] + r = CommentedMap() + keys = copy.copy(keys) + + doc = iterate_through_doc(keys) + + if inserted_line_info is None: + inserted_line_info = {} + + if doc is not None: + r._yaml_set_line_col(doc.lc.line, doc.lc.col) + line_numbers = get_line_numbers(doc) + max_len = get_max_line_num(doc) + min_col = get_min_col(line_numbers) + cols: Dict[int, int] = {} if relative_uris: for ef in self.extension_fields: @@ -283,7 +353,7 @@ def save( for ef in self.extension_fields: r[ef] = self.extension_fields[ef] """ - ) + ) if "class" in field_names: self.out.write( @@ -305,6 +375,103 @@ def save( class_=classname ) ) + if "id" in field_names: + self.serializer.write( + """ + if doc: + base_url_to_save = base_url + if self.id: + base_url_to_save = self.id + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url_to_save, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if ( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) +""" + ) + else: + self.serializer.write( + """ + if doc: + for key in doc.lc.data.keys(): + if isinstance(key, str): + if hasattr(self, key): + if getattr(self, key) is not None: + if key != 'class': + line = doc.lc.data[key][0] + shift + if inserted_line_info: + while line in inserted_line_info: + line += 1 + shift += 1 + saved_val = save( + getattr(self, key), + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=keys + [key], + inserted_line_info=inserted_line_info, + shift=shift + ) + + # If the returned value is a list of size 1, just save the value in the list + if type(saved_val) == list: + if 
( + len(saved_val) == 1 + ): + saved_val = saved_val[0] + + r[key] = saved_val + + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key=key, + val=r.get(key), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) +""" + ) def end_class(self, classname: str, field_names: List[str]) -> None: """Signal that we are done with this class.""" @@ -365,6 +532,14 @@ def end_class(self, classname: str, field_names: List[str]) -> None: ) ) + # names = [] + # for name in field_names: + # names.append("('%s', 0)"%name) + + # self.serializer.write( + # fmt(f"""ordered_attrs = CommentedMap(["{', '.join(names)}])\n""", 4) + # ) + safe_init_fields = [ self.safe_name(f) for f in field_names if f != "class" ] # type: List[str] @@ -396,6 +571,7 @@ def type_loader(self, type_declaration: Union[List[Any], Dict[str, Any], str]) - sub_names: List[str] = list( dict.fromkeys([self.type_loader(i).name for i in type_declaration]) ) + return self.declare_type( TypeDef( "union_of_{}".format("_or_".join(sub_names)), @@ -598,15 +774,27 @@ def declare_field( if name == self.idfield or not self.idfield: baseurl = "base_url" else: - baseurl = f"self.{self.safe_name(self.idfield)}" + baseurl = f"str(self.{self.safe_name(self.idfield)})" if fieldtype.is_uri: self.serializer.write( fmt( """ -if self.{safename} is not None: +if self.{safename} is not None and "{fieldname}" not in r: u = save_relative_uri(self.{safename}, {baseurl}, {scoped_id}, {ref_scope}, relative_uris) r["{fieldname}"] = u + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="{fieldname}", + val=r.get("{fieldname}"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift + ) """.format( safename=self.safe_name(name), fieldname=shortname(name).strip(), @@ -621,9 +809,26 @@ def declare_field( self.serializer.write( fmt( """ -if self.{safename} is not None: +if self.{safename} is not None and "{fieldname}" not in r: r["{fieldname}"] = save( - self.{safename}, top=False, base_url={baseurl}, relative_uris=relative_uris + self.{safename}, + top=False, + base_url={baseurl}, + relative_uris=relative_uris, + inserted_line_info=inserted_line_info, + shift=shift + ) + max_len, inserted_line_info = add_kv( + old_doc=doc, + new_doc=r, + line_numbers=line_numbers, + key="{fieldname}", + val=r.get("{fieldname}"), + cols=cols, + min_col=min_col, + max_len=max_len, + inserted_line_info=inserted_line_info, + shift=shift ) """.format( safename=self.safe_name(name), diff --git a/schema_salad/python_codegen_support.py b/schema_salad/python_codegen_support.py index d086031f0..9c60c5a75 100644 --- a/schema_salad/python_codegen_support.py +++ b/schema_salad/python_codegen_support.py @@ -20,13 +20,14 @@ Type, Union, cast, + no_type_check, ) from urllib.parse import quote, urldefrag, urlparse, urlsplit, urlunsplit from urllib.request import pathname2url from rdflib import Graph from rdflib.plugins.parsers.notation3 import BadSyntax -from ruamel.yaml.comments import CommentedMap +from ruamel.yaml.comments import CommentedMap, CommentedSeq from schema_salad.exceptions import SchemaSaladException, ValidationException from schema_salad.fetcher import DefaultFetcher, Fetcher, MemoryCachingFetcher @@ -41,6 +42,8 @@ IdxType = MutableMapping[str, Tuple[Any, "LoadingOptions"]] +doc_line_info = CommentedMap() + class LoadingOptions: idx: IdxType @@ -199,8 +202,14 @@ def fromDoc( 
@abstractmethod def save( - self, top: bool = False, base_url: str = "", relative_uris: bool = True - ) -> Dict[str, Any]: + self, + top: bool = False, + base_url: str = "", + relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, + ) -> CommentedMap: """Convert this object to a JSON/YAML friendly dictionary.""" @@ -231,6 +240,196 @@ def load_field(val, fieldtype, baseuri, loadingOptions, lc=None): save_type = Optional[Union[MutableMapping[str, Any], MutableSequence[Any], int, float, bool, str]] +def add_kv( + old_doc: CommentedMap, + new_doc: CommentedMap, + line_numbers: Dict[Any, Dict[str, int]], + key: str, + val: Any, + max_len: int, + cols: Dict[int, int], + min_col: int = 0, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, +) -> Tuple[int, Optional[Dict[int, int]]]: + """Add a key/value pair into a CommentedMap. + + Add the key/value pair into the new CommentedMap, given the old CommentedMap, the line numbers + for each key/value pair in the old CommentedMap, the key/value pair to insert, the max line of + the old CommentedMap, and the max column taken for each line. + """ + if inserted_line_info is None: + inserted_line_info = {} + + if len(inserted_line_info.keys()) >= 1: + max_line = max(inserted_line_info.keys()) + 1 + else: + max_line = 0 + + if key in line_numbers: # If the passed key to insert is in the original CommentedMap as a key + line_info = old_doc.lc.data[key] # Get the line information for the key + if ( + line_info[0] + shift not in inserted_line_info + ): # If the line of the key + shift isn't taken, add it + new_doc.lc.add_kv_line_col( + key, + [ + old_doc.lc.data[key][0] + shift, + old_doc.lc.data[key][1], + old_doc.lc.data[key][2] + shift, + old_doc.lc.data[key][3], + ], + ) + inserted_line_info[old_doc.lc.data[key][0] + shift] = old_doc.lc.data[key][1] + else: # If the line is already taken + line = line_info[0] + shift + while line in inserted_line_info.keys(): # Find the closest free line + line += 1 + new_doc.lc.add_kv_line_col( + key, + [ + line, + old_doc.lc.data[key][1], + line + (line - old_doc.lc.data[key][2]), + old_doc.lc.data[key][3], + ], + ) + inserted_line_info[line] = old_doc.lc.data[key][1] + return max_len, inserted_line_info + elif isinstance(val, (int, float, str)) and not isinstance( + val, bool + ): # If the value is a hashable scalar (bools excluded) + if val in line_numbers: # If the value is in the original CommentedMap + line = line_numbers[val]["line"] + shift # Get the line info for the value + if line in inserted_line_info: # Get the appropriate line to place value on + line = max_line + + col = line_numbers[val]["col"] + new_doc.lc.add_kv_line_col(key, [line, col, line, col + len(key) + 2]) + inserted_line_info[line] = col + len(key) + 2 + return max_len, inserted_line_info + elif isinstance(val, str): # Logic for DSL expansion with "?" + if val + "?" 
in line_numbers: + line = line_numbers[val + "?"]["line"] + shift + if line in inserted_line_info: + line = max_line + col = line_numbers[val + "?"]["col"] + new_doc.lc.add_kv_line_col(key, [line, col, line, col + len(key) + 2]) + inserted_line_info[line] = col + len(key) + 2 + return max_len, inserted_line_info + elif old_doc: + if val in old_doc: + index = old_doc.lc.data.index(val) + line_info = old_doc.lc.data[index] + if line_info[0] + shift not in inserted_line_info: + new_doc.lc.add_kv_line_col( + key, + [ + old_doc.lc.data[index][0] + shift, + old_doc.lc.data[index][1], + old_doc.lc.data[index][2] + shift, + old_doc.lc.data[index][3], + ], + ) + inserted_line_info[old_doc.lc.data[index][0] + shift] = old_doc.lc.data[index][ + 1 + ] + else: + new_doc.lc.add_kv_line_col( + key, + [ + max_line + shift, + old_doc.lc.data[index][1], + max_line + (max_line - old_doc.lc.data[index][2]) + shift, + old_doc.lc.data[index][3], + ], + ) + inserted_line_info[max_line + shift] = old_doc.lc.data[index][1] + # If neither the key nor the value is in the original CommentedMap/old doc (or the value is not hashable) + new_doc.lc.add_kv_line_col(key, [max_line, min_col, max_line, min_col + len(key) + 2]) + inserted_line_info[max_line] = min_col + len(key) + 2 + return max_len + 1, inserted_line_info + + +@no_type_check +def iterate_through_doc(keys: List[Any]) -> Optional[CommentedMap]: + """Take a list of keys/indexes and iterate through the global CommentedMap.""" + doc = doc_line_info + for key in keys: + if isinstance(doc, CommentedMap): + doc = doc.get(key) + elif isinstance(doc, (CommentedSeq, list)) and isinstance(key, int): + if key < len(doc): + doc = doc[key] + else: + return None + else: + return None + if isinstance(doc, CommentedSeq): + to_return = CommentedMap() + for index, key in enumerate(doc): + to_return[key] = "" + to_return.lc.add_kv_line_col( + key, + [ + doc.lc.data[index][0], + doc.lc.data[index][1], + doc.lc.data[index][0], + doc.lc.data[index][1], + ], + ) + return to_return + return doc + + +def get_line_numbers(doc: Optional[CommentedMap]) -> Dict[Any, Dict[str, int]]: + """Get line numbers for key/value pairs in a CommentedMap. + + For each key/value pair in a CommentedMap, save the line/col info into a dictionary; + only save value info if the value is hashable. + """ + line_numbers: Dict[Any, Dict[str, int]] = {} + if doc is None: + return {} + if doc.lc.data is None: + return {} + for key, value in doc.lc.data.items(): + line_numbers[key] = {} + + line_numbers[key]["line"] = doc.lc.data[key][0] + line_numbers[key]["col"] = doc.lc.data[key][1] + if isinstance(value, (int, float, bool, str)): + line_numbers[value] = {} + line_numbers[value]["line"] = doc.lc.data[key][2] + line_numbers[value]["col"] = doc.lc.data[key][3] + return line_numbers + + +def get_min_col(line_numbers: Dict[Any, Dict[str, int]]) -> int: + """Given an array of line/column information, return the column for newly inserted entries (the largest column currently in use).""" + min_col = 0 + for line in line_numbers: + if line_numbers[line]["col"] > min_col: + min_col = line_numbers[line]["col"] + return min_col + + +def get_max_line_num(doc: CommentedMap) -> int: + """Get the max line number for a CommentedMap. + + Follow the key with the highest line number until you reach a non-CommentedMap value + or an empty CommentedMap. 
+ """ + max_line = 0 + max_key = "" + cur = doc + while isinstance(cur, CommentedMap) and len(cur) > 0: + for key in cur.lc.data.keys(): + if cur.lc.data[key][2] >= max_line: + max_line = cur.lc.data[key][2] + max_key = key + cur = cur[max_key] + return max_line + 1 def extract_type(val_type: Type[Any]) -> str: """Take a type of value, and extracts the value as a string.""" val_str = str(val_type) @@ -293,15 +492,71 @@ def save( top: bool = True, base_url: str = "", relative_uris: bool = True, + keys: Optional[List[Any]] = None, + inserted_line_info: Optional[Dict[int, int]] = None, + shift: int = 0, ) -> save_type: + """Save a val of any type. + + Recursively calls save method from class if val is of type Saveable. + Otherwise, saves val to CommentedMap or CommentedSeq. + """ + if keys is None: + keys = [] + + doc = iterate_through_doc(keys) + if isinstance(val, Saveable): - return val.save(top=top, base_url=base_url, relative_uris=relative_uris) + return val.save( + top=top, + base_url=base_url, + relative_uris=relative_uris, + keys=keys, + inserted_line_info=inserted_line_info, + shift=shift, + ) if isinstance(val, MutableSequence): - return [save(v, top=False, base_url=base_url, relative_uris=relative_uris) for v in val] + r = CommentedSeq() + r.lc.data = {} + for i in range(0, len(val)): + new_keys = keys + if doc: + if str(i) in doc: + r.lc.data[i] = doc.lc.data[i] + new_keys.append(i) + r.append( + save( + val[i], + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=new_keys, + inserted_line_info=inserted_line_info, + shift=shift, + ) + ) + return r + if isinstance(val, MutableMapping): - newdict = {} + newdict = CommentedMap() + new_keys = keys for key in val: - newdict[key] = save(val[key], top=False, base_url=base_url, relative_uris=relative_uris) + + if doc: + if key in doc: + newdict.lc.add_kv_line_col(key, doc.lc.data[key]) + new_keys.append(key) + + newdict[key] = save( + val[key], + top=False, + base_url=base_url, + relative_uris=relative_uris, + keys=new_keys, + inserted_line_info=inserted_line_info, + shift=shift, + ) + return newdict if val is None or isinstance(val, (int, float, bool, str)): return val @@ -839,7 +1094,7 @@ def load(self, doc, baseuri, loadingOptions, docRoot=None, lc=None): def _document_load( loader: _Loader, - doc: Union[str, MutableMapping[str, Any], MutableSequence[Any]], + doc: Union[CommentedMap, str, MutableMapping[str, Any], MutableSequence[Any]], baseuri: str, loadingOptions: LoadingOptions, addl_metadata_fields: Optional[MutableSequence[str]] = None, @@ -879,6 +1134,10 @@ def _document_load( if "$base" in doc: doc.pop("$base") + if isinstance(doc, CommentedMap): + global doc_line_info + doc_line_info = doc + if "$graph" in doc: loadingOptions.idx[baseuri] = ( loader.load(doc["$graph"], baseuri, loadingOptions), @@ -894,7 +1153,6 @@ def _document_load( loadingOptions.idx[docuri] = loadingOptions.idx[baseuri] return loadingOptions.idx[baseuri] - if isinstance(doc, MutableSequence): loadingOptions.idx[baseuri] = ( loader.load(doc, baseuri, loadingOptions), diff --git a/schema_salad/tests/count-lines6-wf_v1_0.cwl b/schema_salad/tests/count-lines6-wf_v1_0.cwl new file mode 100644 index 000000000..88db53f1b --- /dev/null +++ b/schema_salad/tests/count-lines6-wf_v1_0.cwl @@ -0,0 +1,26 @@ +#!/usr/bin/env cwl-runner +class: Workflow +cwlVersion: v1.0 + +requirements: + - class: ScatterFeatureRequirement + - class: MultipleInputFeatureRequirement + +inputs: + file1: File[] + file2: File[] + +outputs: + count_output: + type: int + 
outputSource: step1/output + +steps: + step1: + run: wc3-tool_v1_0.cwl + scatter: file1 + in: + file1: + source: [file1, file2] + linkMerge: merge_nested + out: [output] \ No newline at end of file diff --git a/schema_salad/tests/count-lines6-wf_v1_1.cwl b/schema_salad/tests/count-lines6-wf_v1_1.cwl new file mode 100644 index 000000000..b61cc4453 --- /dev/null +++ b/schema_salad/tests/count-lines6-wf_v1_1.cwl @@ -0,0 +1,26 @@ +#!/usr/bin/env cwl-runner +class: Workflow +cwlVersion: v1.1 + +requirements: + - class: ScatterFeatureRequirement + - class: MultipleInputFeatureRequirement + +inputs: + file1: File[] + file2: File[] + +outputs: + count_output: + type: int + outputSource: step1/output + +steps: + step1: + run: wc3-tool_v1_1.cwl + scatter: file1 + in: + file1: + source: [file1, file2] + linkMerge: merge_nested + out: [output] \ No newline at end of file diff --git a/schema_salad/tests/count-lines6-wf_v1_2.cwl b/schema_salad/tests/count-lines6-wf_v1_2.cwl new file mode 100644 index 000000000..398fe8902 --- /dev/null +++ b/schema_salad/tests/count-lines6-wf_v1_2.cwl @@ -0,0 +1,26 @@ +#!/usr/bin/env cwl-runner +class: Workflow +cwlVersion: v1.2 + +requirements: + - class: ScatterFeatureRequirement + - class: MultipleInputFeatureRequirement + +inputs: + file1: File[] + file2: File[] + +outputs: + count_output: + type: int + outputSource: step1/output + +steps: + step1: + run: wc3-tool_v1_2.cwl + scatter: file1 + in: + file1: + source: [file1, file2] + linkMerge: merge_nested + out: [output] \ No newline at end of file diff --git a/schema_salad/tests/test_line_numbers.py b/schema_salad/tests/test_line_numbers.py new file mode 100644 index 000000000..283acac84 --- /dev/null +++ b/schema_salad/tests/test_line_numbers.py @@ -0,0 +1,179 @@ +import importlib +from pathlib import Path +from typing import Any, Dict, List, MutableSequence, Optional, Union, cast +from urllib.parse import urlparse + +from ruamel.yaml.comments import CommentedMap + +from schema_salad.utils import yaml_no_ts +from schema_salad import codegen +from schema_salad.avro.schema import Names +from schema_salad.exceptions import ValidationException +from schema_salad.schema import load_schema + +from .util import get_data, cwl_file_uri + + +def test_secondary_files_dsl(tmp_path: Path) -> None: + """ + Check that the object saves correctly when the secondaryFiles DSL is used. + """ + t = "test_secondary_files_dsl.cwl" + path = get_data("tests/" + t) + obj = load_document_by_uri(tmp_path, str(path)) + saved_obj = obj.save() + assert isinstance(saved_obj, CommentedMap) + assert saved_obj.lc.data == { + "cwlVersion": [1, 0, 1, 12], + "baseCommand": [2, 0, 2, 13], + "inputs": [4, 0, 5, 2], + "outputs": [15, 0, 16, 2], + "stdout": [25, 0, 25, 8], + "id": [26, 0, 26, 4], + } + assert saved_obj["inputs"][0].lc.data == { + "type": [6, 3, 6, 9], + "secondaryFiles": [10, 3, 13, 19], + "default": [11, 3, 11, 12], + "id": [12, 3, 12, 7], + } + assert saved_obj["inputs"][0]["type"] == "File" + assert saved_obj["inputs"][1].lc.data == {"id": [13, 2, 13, 6], "type": [14, 2, 14, 8]} + assert saved_obj["outputs"][0].lc.data == { + "type": [17, 4, 17, 10], + "secondaryFiles": [21, 4, 28, 20], + "outputBinding": [22, 4, 23, 6], + "id": [24, 4, 24, 8], + } + assert saved_obj["outputs"][0]["secondaryFiles"][0].lc.data == {"pattern": [18, 21, 18, 30]} + assert saved_obj["outputs"][0]["secondaryFiles"][1].lc.data == { + "pattern": [19, 35, 19, 44], + "required": [20, 35, 20, 45], + } + + +def test_outputs_before_inputs(tmp_path: Path) -> None: + """ + Test saving when the 
outputs come before the inputs in the CWL file. + """ + t = "test_outputs_before_inputs.cwl" + path = get_data("tests/" + t) + obj = load_document_by_uri(tmp_path, str(path)) + saved_obj = obj.save() + assert isinstance(saved_obj, CommentedMap) + assert saved_obj.lc.data == { + "cwlVersion": [1, 0, 1, 12], + "baseCommand": [2, 0, 2, 13], + "outputs": [4, 0, 5, 2], + "inputs": [10, 0, 11, 2], + "stdout": [17, 0, 17, 8], + "id": [18, 0, 18, 4], + } + assert saved_obj["inputs"][0].lc.data == { + "type": [12, 3, 12, 9], + "default": [13, 3, 13, 12], + "id": [14, 3, 14, 7], + } + assert saved_obj["inputs"][0]["type"] == "File" + assert saved_obj["inputs"][1].lc.data == {"id": [15, 2, 15, 6], "type": [16, 2, 16, 8]} + assert saved_obj["outputs"][0].lc.data == { + "type": [6, 4, 6, 10], + "outputBinding": [7, 4, 8, 6], + "id": [9, 4, 9, 8], + } + + +def test_type_dsl(tmp_path: Path) -> None: + """ + Check that the object saves correctly when the type DSL is used. + In this example, the type of the input is File?, which should expand to + [null, File]. + """ + t = "test_type_dsl.cwl" + path = get_data("tests/" + t) + obj = load_document_by_uri(tmp_path, str(path)) + saved_obj = obj.save() + assert isinstance(saved_obj, CommentedMap) + assert saved_obj.lc.data == { + "cwlVersion": [1, 0, 1, 12], + "baseCommand": [2, 0, 2, 13], + "inputs": [4, 0, 5, 2], + "outputs": [11, 0, 12, 2], + "stdout": [17, 0, 17, 8], + "id": [18, 0, 18, 4], + } + assert saved_obj["inputs"][0].lc.data == { + "type": [6, 3, 6, 9], + "default": [7, 3, 7, 12], + "id": [8, 3, 8, 7], + } + assert saved_obj["inputs"][0]["type"] == ["null", "File"] + assert saved_obj["inputs"][1].lc.data == {"id": [9, 2, 9, 6], "type": [10, 2, 10, 8]} + assert saved_obj["outputs"][0].lc.data == { + "type": [13, 4, 13, 10], + "outputBinding": [14, 4, 15, 6], + "id": [16, 4, 16, 8], + } + assert saved_obj["outputs"][0]["outputBinding"].lc.data == {"glob": [15, 6, 15, 12]} + + +def load_document_by_uri(tmp_path: Path, path: Union[str, Path]) -> Any: + src_target = tmp_path / "cwl_v1_0.py" + python_codegen(cwl_file_uri, src_target) + spec = importlib.util.spec_from_file_location("cwl_v1_0", src_target) + assert isinstance(spec, importlib.machinery.ModuleSpec) + assert isinstance(spec.loader, importlib.abc.Loader) + temp_cwl_v1_0 = importlib.util.module_from_spec(spec) + spec.loader.exec_module(temp_cwl_v1_0) + cwl_v1_0: Any = temp_cwl_v1_0 + + if isinstance(path, str): + uri = urlparse(path) + if not uri.scheme or uri.scheme == "file": + real_path = Path(uri.path).resolve().as_uri() + else: + real_path = path + else: + real_path = path.resolve().as_uri() + + baseuri = str(real_path) + + loadingOptions = cwl_v1_0.LoadingOptions(fileuri=baseuri) + + with open(path, "r") as file: + doc = file.read() + # doc = loadingOptions.fetcher.fetch_text(urllib.parse.unquote(str(real_path))) + yaml = yaml_no_ts() + doc = yaml.load(doc) + + result = cwl_v1_0.load_document_by_yaml( + doc, baseuri, cast(Optional[cwl_v1_0.LoadingOptions], loadingOptions) + ) + + if isinstance(result, MutableSequence): + lst = [] + for r in result: + lst.append(r) + return lst + return result + + +def python_codegen( + file_uri: str, + target: Path, + parser_info: Optional[str] = None, + package: Optional[str] = None, +) -> None: + document_loader, avsc_names, schema_metadata, metaschema_loader = load_schema(file_uri) + assert isinstance(avsc_names, Names) + schema_raw_doc = metaschema_loader.fetch(file_uri) + schema_doc, schema_metadata = metaschema_loader.resolve_all(schema_raw_doc, file_uri) + codegen.codegen( + "python", + cast(List[Dict[str, Any]], 
schema_doc), + schema_metadata, + document_loader, + target=str(target), + parser_info=parser_info, + package=package, + ) diff --git a/schema_salad/tests/test_outputs_before_inputs.cwl b/schema_salad/tests/test_outputs_before_inputs.cwl new file mode 100644 index 000000000..063e29119 --- /dev/null +++ b/schema_salad/tests/test_outputs_before_inputs.cwl @@ -0,0 +1,17 @@ +class: CommandLineTool +cwlVersion: v1.0 +baseCommand: python3 + +outputs: + hello_output: + type: File + outputBinding: + glob: hello-out.txt + +inputs: + files: + type: File + default: "script.py" + other_file: File + +stdout: hello-out.txt \ No newline at end of file diff --git a/schema_salad/tests/test_schema/test_outputs_before_inputs.cwl b/schema_salad/tests/test_schema/test_outputs_before_inputs.cwl new file mode 100644 index 000000000..e1594c790 --- /dev/null +++ b/schema_salad/tests/test_schema/test_outputs_before_inputs.cwl @@ -0,0 +1,17 @@ +class: CommandLineTool +cwlVersion: v1.2 +baseCommand: python3 + +outputs: + hello_output: + type: File + outputBinding: + glob: hello-out.txt + +inputs: + files: + type: File + default: "script.py" + other_file: File + +stdout: hello-out.txt \ No newline at end of file diff --git a/schema_salad/tests/test_schema/test_secondary_files_dsl.cwl b/schema_salad/tests/test_schema/test_secondary_files_dsl.cwl new file mode 100644 index 000000000..1f6c712a4 --- /dev/null +++ b/schema_salad/tests/test_schema/test_secondary_files_dsl.cwl @@ -0,0 +1,18 @@ +class: CommandLineTool +cwlVersion: v1.2 +baseCommand: python3 + +inputs: + files: + type: File + default: "script.py" + other_file: File + +outputs: + hello_output: + type: File + secondaryFiles: ["inputB.txt", "inputC.txt?"] + outputBinding: + glob: hello-out.txt + +stdout: hello-out.txt diff --git a/schema_salad/tests/test_schema/test_type_dsl.cwl b/schema_salad/tests/test_schema/test_type_dsl.cwl new file mode 100644 index 000000000..5b822d812 --- /dev/null +++ b/schema_salad/tests/test_schema/test_type_dsl.cwl @@ -0,0 +1,17 @@ +class: CommandLineTool +cwlVersion: v1.2 +baseCommand: python3 + +inputs: + files: + type: File? + default: "script.py" + other_file: File + +outputs: + hello_output: + type: File + outputBinding: + glob: hello-out.txt + +stdout: hello-out.txt diff --git a/schema_salad/tests/test_secondary_files_dsl.cwl b/schema_salad/tests/test_secondary_files_dsl.cwl new file mode 100644 index 000000000..61d37ad0f --- /dev/null +++ b/schema_salad/tests/test_secondary_files_dsl.cwl @@ -0,0 +1,19 @@ +class: CommandLineTool +cwlVersion: v1.0 +baseCommand: python3 + +inputs: + files: + type: File + secondaryFiles: ["inputB.txt", "inputC.txt?"] + default: "script.py" + other_file: File + +outputs: + hello_output: + type: File + secondaryFiles: ["inputB.txt", "inputC.txt?"] + outputBinding: + glob: hello-out.txt + +stdout: hello-out.txt diff --git a/schema_salad/tests/test_type_dsl.cwl b/schema_salad/tests/test_type_dsl.cwl new file mode 100644 index 000000000..8735505b0 --- /dev/null +++ b/schema_salad/tests/test_type_dsl.cwl @@ -0,0 +1,17 @@ +class: CommandLineTool +cwlVersion: v1.0 +baseCommand: python3 + +inputs: + files: + type: File? 
+ default: "script.py" + other_file: File + +outputs: + hello_output: + type: File + outputBinding: + glob: hello-out.txt + +stdout: hello-out.txt diff --git a/schema_salad/tests/wc3-tool_v1_0.cwl b/schema_salad/tests/wc3-tool_v1_0.cwl new file mode 100644 index 000000000..a213192cb --- /dev/null +++ b/schema_salad/tests/wc3-tool_v1_0.cwl @@ -0,0 +1,25 @@ +class: CommandLineTool +cwlVersion: v1.0 + +requirements: + - class: InlineJavascriptRequirement +hints: + ResourceRequirement: + ramMin: 8 + +inputs: + file1: + type: File[] + inputBinding: {} +outputs: + output: + type: int + outputBinding: + glob: output.txt + loadContents: true + outputEval: | + ${ + var s = self[0].contents.split(/\r?\n/); + return parseInt(s[s.length-2]); + } +stdout: output.txt \ No newline at end of file diff --git a/schema_salad/tests/wc3-tool_v1_1.cwl b/schema_salad/tests/wc3-tool_v1_1.cwl new file mode 100644 index 000000000..30d632de4 --- /dev/null +++ b/schema_salad/tests/wc3-tool_v1_1.cwl @@ -0,0 +1,24 @@ +#!/usr/bin/env cwl-runner +class: CommandLineTool +cwlVersion: v1.1 + +requirements: + - class: InlineJavascriptRequirement + +inputs: + file1: + type: File[] + inputBinding: {} +outputs: + output: + type: int + outputBinding: + glob: output.txt + loadContents: true + outputEval: | + ${ + var s = self[0].contents.split(/\r?\n/); + return parseInt(s[s.length-2]); + } +stdout: output.txt +baseCommand: wc \ No newline at end of file diff --git a/schema_salad/tests/wc3-tool_v1_2.cwl b/schema_salad/tests/wc3-tool_v1_2.cwl new file mode 100644 index 000000000..23df81489 --- /dev/null +++ b/schema_salad/tests/wc3-tool_v1_2.cwl @@ -0,0 +1,24 @@ +#!/usr/bin/env cwl-runner +class: CommandLineTool +cwlVersion: v1.2 + +requirements: + - class: InlineJavascriptRequirement + +inputs: + file1: + type: File[] + inputBinding: {} +outputs: + output: + type: int + outputBinding: + glob: output.txt + loadContents: true + outputEval: | + ${ + var s = self[0].contents.split(/\r?\n/); + return parseInt(s[s.length-2]); + } +stdout: output.txt +baseCommand: wc \ No newline at end of file
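
Usage sketch (not part of the patch): the round trip this line-number plumbing is meant to preserve can be exercised as follows. This is a minimal, hypothetical example; it assumes a parser module generated with schema-salad-tool --codegen=python (as the load_document_by_uri test helper above does), and it relies on ruamel.yaml's lc.data convention of [key_line, key_col, value_line, value_col]. The commented-out lines reference load_document_by_yaml, which only exists inside such a generated module.

# Sketch: inspect the line/column info that the new save() is expected to carry over.
from pathlib import Path

from ruamel.yaml.comments import CommentedMap

from schema_salad.utils import yaml_no_ts

path = Path("schema_salad/tests/test_type_dsl.cwl")  # one of the new fixtures
yaml = yaml_no_ts()
doc = yaml.load(path.read_text())
assert isinstance(doc, CommentedMap)

# ruamel.yaml records [key_line, key_col, value_line, value_col] per key;
# for the fixture above, "cwlVersion" sits on line 1 with its value at column 12.
print(doc.lc.data["cwlVersion"])  # [1, 0, 1, 12]

# With a generated parser module (hypothetical import), the same entries
# should survive a load/save round trip:
# obj = cwl_v1_0.load_document_by_yaml(doc, path.resolve().as_uri(), None)
# saved = obj.save(top=True)
# assert saved.lc.data["cwlVersion"] == doc.lc.data["cwlVersion"]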