#
# This file was autogenerated using schema-salad-tool --codegen=python
# The code itself is released under the Apache 2.0 license and the help text is
# subject to the license of the original schema.
import copy
import logging
import os
import pathlib
import re
import tempfile
import uuid as _uuid__ # pylint: disable=unused-import # noqa: F401
import xml.sax # nosec
from abc import ABC, abstractmethod
from io import StringIO
from typing import (
Any,
Dict,
List,
MutableMapping,
MutableSequence,
Optional,
Sequence,
Tuple,
Type,
Union,
cast,
)
from urllib.parse import quote, urldefrag, urlparse, urlsplit, urlunsplit
from urllib.request import pathname2url
from rdflib import Graph
from rdflib.plugins.parsers.notation3 import BadSyntax
from ruamel.yaml.comments import CommentedMap
from schema_salad.exceptions import SchemaSaladException, ValidationException
from schema_salad.fetcher import DefaultFetcher, Fetcher, MemoryCachingFetcher
from schema_salad.sourceline import SourceLine, add_lc_filename
from schema_salad.utils import CacheType, yaml_no_ts # requires schema-salad v8.2+
# Vocabulary mappings populated by the generated loaders at import time:
# _vocab maps term -> full IRI, _rvocab maps full IRI -> term.
_vocab: Dict[str, str] = {}
_rvocab: Dict[str, str] = {}

_logger = logging.getLogger("salad")

# Shared document index: URI -> (loaded object, LoadingOptions used to load it).
IdxType = MutableMapping[str, Tuple[Any, "LoadingOptions"]]
class LoadingOptions:
    """Bundle the shared state used while loading a document tree.

    Carries the fetcher, declared namespaces and schemas, vocabulary
    mappings and the shared document index.  ``copyfrom`` derives a new
    instance from an existing one, overriding only the supplied fields.
    """

    idx: IdxType
    fileuri: Optional[str]
    baseuri: str
    namespaces: MutableMapping[str, str]
    schemas: MutableSequence[str]
    original_doc: Optional[Any]
    addl_metadata: MutableMapping[str, Any]
    fetcher: Fetcher
    vocab: Dict[str, str]
    rvocab: Dict[str, str]
    cache: CacheType

    def __init__(
        self,
        fetcher: Optional[Fetcher] = None,
        namespaces: Optional[Dict[str, str]] = None,
        schemas: Optional[List[str]] = None,
        fileuri: Optional[str] = None,
        copyfrom: Optional["LoadingOptions"] = None,
        original_doc: Optional[Any] = None,
        addl_metadata: Optional[Dict[str, str]] = None,
        baseuri: Optional[str] = None,
        idx: Optional[IdxType] = None,
    ) -> None:
        """Create a LoadingOptions object.

        Each explicit argument takes precedence over the corresponding
        attribute of ``copyfrom``; an empty default is used when neither
        is provided.
        """
        self.original_doc = original_doc
        if idx is not None:
            self.idx = idx
        else:
            self.idx = copyfrom.idx if copyfrom is not None else {}
        if fileuri is not None:
            self.fileuri = fileuri
        else:
            self.fileuri = copyfrom.fileuri if copyfrom is not None else None
        if baseuri is not None:
            self.baseuri = baseuri
        else:
            self.baseuri = copyfrom.baseuri if copyfrom is not None else ""
        if namespaces is not None:
            self.namespaces = namespaces
        else:
            self.namespaces = copyfrom.namespaces if copyfrom is not None else {}
        if schemas is not None:
            self.schemas = schemas
        else:
            self.schemas = copyfrom.schemas if copyfrom is not None else []
        if addl_metadata is not None:
            self.addl_metadata = addl_metadata
        else:
            self.addl_metadata = copyfrom.addl_metadata if copyfrom is not None else {}
        if fetcher is not None:
            self.fetcher = fetcher
        elif copyfrom is not None:
            self.fetcher = copyfrom.fetcher
        else:
            # No fetcher supplied anywhere: build a default one backed by a
            # file-based HTTP cache under $HOME/.cache/salad (or the system
            # temp directory when HOME is unset).
            import requests
            from cachecontrol.caches import FileCache
            from cachecontrol.wrapper import CacheControl

            root = pathlib.Path(os.environ.get("HOME", tempfile.gettempdir()))
            session = CacheControl(
                requests.Session(),
                cache=FileCache(root / ".cache" / "salad"),
            )
            self.fetcher: Fetcher = DefaultFetcher({}, session)
        self.cache = (
            self.fetcher.cache if isinstance(self.fetcher, MemoryCachingFetcher) else {}
        )
        self.vocab = _vocab
        self.rvocab = _rvocab
        if namespaces is not None:
            # Extend copies of the global vocabulary with the declared
            # namespace prefixes, so the module-level maps stay untouched.
            self.vocab = self.vocab.copy()
            self.rvocab = self.rvocab.copy()
            for k, v in namespaces.items():
                self.vocab[k] = v
                self.rvocab[v] = k

    @property
    def graph(self) -> Graph:
        """Generate a merged rdflib.Graph from all entries in self.schemas."""
        graph = Graph()
        if not self.schemas:
            return graph
        key = str(hash(tuple(self.schemas)))
        if key in self.cache:
            # Merged graph for this exact schema list was already built.
            return cast(Graph, self.cache[key])
        for schema in self.schemas:
            fetchurl = (
                self.fetcher.urljoin(self.fileuri, schema)
                if self.fileuri is not None
                else pathlib.Path(schema).resolve().as_uri()
            )
            if fetchurl not in self.cache or self.cache[fetchurl] is True:
                _logger.debug("Getting external schema %s", fetchurl)
                try:
                    content = self.fetcher.fetch_text(fetchurl)
                except Exception as e:
                    _logger.warning(
                        "Could not load extension schema %s: %s", fetchurl, str(e)
                    )
                    continue
                newGraph = Graph()
                err_msg = "unknown error"
                # Try RDF/XML first, then Turtle; remember the last parse error.
                for fmt in ["xml", "turtle"]:
                    try:
                        newGraph.parse(data=content, format=fmt, publicID=str(fetchurl))
                        self.cache[fetchurl] = newGraph
                        graph += newGraph
                        break
                    except (xml.sax.SAXParseException, TypeError, BadSyntax) as e:
                        err_msg = str(e)
                else:
                    # Neither format parsed; warn but keep going.
                    _logger.warning(
                        "Could not load extension schema %s: %s", fetchurl, err_msg
                    )
        self.cache[key] = graph
        return graph
class Saveable(ABC):
    """Mark classes that have a save() and fromDoc() function."""

    @classmethod
    @abstractmethod
    def fromDoc(
        cls,
        _doc: Any,
        baseuri: str,
        loadingOptions: LoadingOptions,
        docRoot: Optional[str] = None,
    ) -> "Saveable":
        """Construct this object from the result of yaml.load()."""

    @abstractmethod
    def save(
        self, top: bool = False, base_url: str = "", relative_uris: bool = True
    ) -> Dict[str, Any]:
        """Convert this object to a JSON/YAML friendly dictionary."""
def load_field(val, fieldtype, baseuri, loadingOptions):
    # type: (Union[str, Dict[str, str]], _Loader, str, LoadingOptions) -> Any
    """Load one field value, resolving any $import/$include directive first.

    A mapping containing ``$import`` is replaced by the loaded external
    document; a mapping containing ``$include`` is replaced by the fetched
    raw text before being handed to *fieldtype*.

    Raises SchemaSaladException when a directive is present but no fileuri
    is available to resolve it against.
    """
    if isinstance(val, MutableMapping):
        if "$import" in val:
            if loadingOptions.fileuri is None:
                raise SchemaSaladException("Cannot load $import without fileuri")
            result, metadata = _document_load_by_url(
                fieldtype,
                loadingOptions.fetcher.urljoin(loadingOptions.fileuri, val["$import"]),
                loadingOptions,
            )
            return result
        if "$include" in val:
            if loadingOptions.fileuri is None:
                # Fixed: this branch previously reported "$import" in its
                # error message even though it handles $include.
                raise SchemaSaladException("Cannot load $include without fileuri")
            val = loadingOptions.fetcher.fetch_text(
                loadingOptions.fetcher.urljoin(loadingOptions.fileuri, val["$include"])
            )
    return fieldtype.load(val, baseuri, loadingOptions)
# JSON/YAML-compatible result type produced by save().
save_type = Optional[
    Union[MutableMapping[str, Any], MutableSequence[Any], int, float, bool, str]
]
def save(
    val: Any,
    top: bool = True,
    base_url: str = "",
    relative_uris: bool = True,
) -> save_type:
    """Recursively convert *val* into plain JSON/YAML-friendly data.

    Saveable objects delegate to their own save(); lists and mappings are
    converted element-wise; scalars pass through unchanged.

    Raises Exception for any value that cannot be represented.
    """
    if isinstance(val, Saveable):
        return val.save(top=top, base_url=base_url, relative_uris=relative_uris)
    if isinstance(val, MutableSequence):
        return [
            save(item, top=False, base_url=base_url, relative_uris=relative_uris)
            for item in val
        ]
    if isinstance(val, MutableMapping):
        return {
            key: save(item, top=False, base_url=base_url, relative_uris=relative_uris)
            for key, item in val.items()
        }
    if val is None or isinstance(val, (int, float, bool, str)):
        return val
    raise Exception("Not Saveable: %s" % type(val))
def expand_url(
    url,  # type: str
    base_url,  # type: str
    loadingOptions,  # type: LoadingOptions
    scoped_id=False,  # type: bool
    vocab_term=False,  # type: bool
    scoped_ref=None,  # type: Optional[int]
):
    # type: (...) -> str
    """Resolve *url* against *base_url*, applying vocabulary and scoping rules.

    Raises ValidationException when *vocab_term* is set and the resulting
    absolute URL is not present in the reverse vocabulary.
    """
    # JSON-LD keywords pass through untouched.
    if url in ("@id", "@type"):
        return url
    if vocab_term and url in loadingOptions.vocab:
        return url
    if bool(loadingOptions.vocab) and ":" in url:
        # Expand a declared namespace prefix, e.g. "pre:rest".
        prefix = url.split(":")[0]
        if prefix in loadingOptions.vocab:
            url = loadingOptions.vocab[prefix] + url[len(prefix) + 1 :]
    split = urlsplit(url)
    if (
        (bool(split.scheme) and split.scheme in ["http", "https", "file"])
        or url.startswith("$(")
        or url.startswith("${")
    ):
        # Already absolute, or a parameter reference / expression: keep as-is.
        pass
    elif scoped_id and not bool(split.fragment):
        # Scoped identifier: nest under the base URL's fragment.
        splitbase = urlsplit(base_url)
        frg = ""
        if bool(splitbase.fragment):
            frg = splitbase.fragment + "/" + split.path
        else:
            frg = split.path
        pt = splitbase.path if splitbase.path != "" else "/"
        url = urlunsplit((splitbase.scheme, splitbase.netloc, pt, splitbase.query, frg))
    elif scoped_ref is not None and not bool(split.fragment):
        # Scoped reference: pop scoped_ref levels off the base fragment,
        # then append the reference.
        splitbase = urlsplit(base_url)
        sp = splitbase.fragment.split("/")
        n = scoped_ref
        while n > 0 and len(sp) > 0:
            sp.pop()
            n -= 1
        sp.append(url)
        url = urlunsplit(
            (
                splitbase.scheme,
                splitbase.netloc,
                splitbase.path,
                splitbase.query,
                "/".join(sp),
            )
        )
    else:
        url = loadingOptions.fetcher.urljoin(base_url, url)
    if vocab_term:
        # A vocabulary term must map back to its short name.
        split = urlsplit(url)
        if bool(split.scheme):
            if url in loadingOptions.rvocab:
                return loadingOptions.rvocab[url]
            else:
                raise ValidationException(f"Term '{url}' not in vocabulary")
    return url
class _Loader:
    """Base interface for field loaders; subclasses implement load()."""

    def load(self, doc, baseuri, loadingOptions, docRoot=None):
        # type: (Any, str, LoadingOptions, Optional[str]) -> Any
        """Load and validate *doc*; the base implementation is a no-op."""
        pass
class _AnyLoader(_Loader):
    """Accept any non-null value unchanged."""

    def load(self, doc, baseuri, loadingOptions, docRoot=None):
        # type: (Any, str, LoadingOptions, Optional[str]) -> Any
        if doc is None:
            raise ValidationException("Expected non-null")
        return doc
class _PrimitiveLoader(_Loader):
    """Validate that a document is an instance of a fixed primitive type."""

    def __init__(self, tp):
        # type: (Union[type, Tuple[Type[str], Type[str]]]) -> None
        self.tp = tp

    def load(self, doc, baseuri, loadingOptions, docRoot=None):
        # type: (Any, str, LoadingOptions, Optional[str]) -> Any
        if not isinstance(doc, self.tp):
            # Fixed: the message previously printed self.tp.__class__.__name__,
            # which is always just "type" (or "tuple") rather than the
            # expected type itself, making the error useless.
            raise ValidationException(
                "Expected a {} but got {}".format(
                    self.tp, doc.__class__.__name__
                )
            )
        return doc

    def __repr__(self):  # type: () -> str
        return str(self.tp)
class _ArrayLoader(_Loader):
    """Load a list of items, flattening nested lists into the result."""

    def __init__(self, items):
        # type: (_Loader) -> None
        self.items = items

    def load(self, doc, baseuri, loadingOptions, docRoot=None):
        # type: (Any, str, LoadingOptions, Optional[str]) -> Any
        if not isinstance(doc, MutableSequence):
            raise ValidationException(f"Expected a list, was {type(doc)}")
        result = []  # type: List[Any]
        failures = []  # type: List[SchemaSaladException]
        for idx, entry in enumerate(doc):
            try:
                loaded = load_field(
                    entry, _UnionLoader((self, self.items)), baseuri, loadingOptions
                )
            except ValidationException as err:
                # Attach the offending list index to the error.
                failures.append(err.with_sourceline(SourceLine(doc, idx, str)))
            else:
                if isinstance(loaded, MutableSequence):
                    result.extend(loaded)
                else:
                    result.append(loaded)
        if failures:
            raise ValidationException("", None, failures)
        return result

    def __repr__(self):  # type: () -> str
        return f"array<{self.items}>"
class _EnumLoader(_Loader):
    """Accept only values drawn from a fixed set of symbols."""

    def __init__(self, symbols, name):
        # type: (Sequence[str], str) -> None
        self.symbols = symbols
        self.name = name

    def load(self, doc, baseuri, loadingOptions, docRoot=None):
        # type: (Any, str, LoadingOptions, Optional[str]) -> Any
        if doc not in self.symbols:
            raise ValidationException(f"Expected one of {self.symbols}")
        return doc

    def __repr__(self):  # type: () -> str
        return self.name
class _SecondaryDSLLoader(_Loader):
    """Normalize the secondaryFiles DSL into a list of pattern/required dicts.

    The original code repeated the mapping-entry validation verbatim in two
    branches; it is factored into _entry_from_map here, with _entry_from_str
    covering the "pat" / "pat?" string shorthand.
    """

    def __init__(self, inner):
        # type: (_Loader) -> None
        self.inner = inner

    @staticmethod
    def _entry_from_str(s):
        # type: (str) -> Dict[str, Any]
        """Turn "pat" / "pat?" shorthand into a pattern dict."""
        if s.endswith("?"):
            return {"pattern": s[:-1], "required": False}
        return {"pattern": s}

    @staticmethod
    def _entry_from_map(d):
        # type: (Any) -> Dict[str, Any]
        """Validate a mapping entry: 'pattern' is required, 'required' optional."""
        new_dict: Dict[str, Any] = {}
        d_copy = copy.deepcopy(d)
        if "pattern" in d_copy:
            new_dict["pattern"] = d_copy.pop("pattern")
        else:
            raise ValidationException(
                "Missing pattern in secondaryFiles specification entry: {}".format(d)
            )
        new_dict["required"] = (
            d_copy.pop("required") if "required" in d_copy else None
        )
        if len(d_copy):
            raise ValidationException(
                "Unallowed values in secondaryFiles specification entry: {}".format(
                    d_copy
                )
            )
        return new_dict

    def load(self, doc, baseuri, loadingOptions, docRoot=None):
        # type: (Any, str, LoadingOptions, Optional[str]) -> Any
        r: List[Dict[str, Any]] = []
        if isinstance(doc, MutableSequence):
            for d in doc:
                if isinstance(d, str):
                    r.append(self._entry_from_str(d))
                elif isinstance(d, dict):
                    r.append(self._entry_from_map(d))
                else:
                    raise ValidationException(
                        "Expected a string or sequence of (strings or mappings)."
                    )
        elif isinstance(doc, MutableMapping):
            r.append(self._entry_from_map(doc))
        elif isinstance(doc, str):
            r.append(self._entry_from_str(doc))
        else:
            raise ValidationException("Expected str or sequence of str")
        return self.inner.load(r, baseuri, loadingOptions, docRoot)
class _RecordLoader(_Loader):
    """Delegate loading of a mapping to a Saveable class's fromDoc()."""

    def __init__(self, classtype):
        # type: (Type[Saveable]) -> None
        self.classtype = classtype

    def load(self, doc, baseuri, loadingOptions, docRoot=None):
        # type: (Any, str, LoadingOptions, Optional[str]) -> Any
        if isinstance(doc, MutableMapping):
            return self.classtype.fromDoc(
                doc, baseuri, loadingOptions, docRoot=docRoot
            )
        raise ValidationException(f"Expected a dict, was {type(doc)}")

    def __repr__(self):  # type: () -> str
        return str(self.classtype.__name__)
class _ExpressionLoader(_Loader):
    """Accept a string expression as-is."""

    def __init__(self, items: Type[str]) -> None:
        self.items = items

    def load(self, doc, baseuri, loadingOptions, docRoot=None):
        # type: (Any, str, LoadingOptions, Optional[str]) -> Any
        if isinstance(doc, str):
            return doc
        raise ValidationException(f"Expected a str, was {type(doc)}")
class _UnionLoader(_Loader):
    """Try each alternate loader in order; the first success wins."""

    def __init__(self, alternates):
        # type: (Sequence[_Loader]) -> None
        self.alternates = alternates

    def load(self, doc, baseuri, loadingOptions, docRoot=None):
        # type: (Any, str, LoadingOptions, Optional[str]) -> Any
        failures = []
        for alt in self.alternates:
            try:
                return alt.load(doc, baseuri, loadingOptions, docRoot=docRoot)
            except ValidationException as e:
                # Record which alternative failed and why.
                failures.append(ValidationException(f"tried {alt} but", None, [e]))
        raise ValidationException("", None, failures, "-")

    def __repr__(self):  # type: () -> str
        return " | ".join(str(a) for a in self.alternates)
class _URILoader(_Loader):
    """Expand URI fields (scalars or lists of strings) before delegating."""

    def __init__(self, inner, scoped_id, vocab_term, scoped_ref):
        # type: (_Loader, bool, bool, Union[int, None]) -> None
        self.inner = inner
        self.scoped_id = scoped_id
        self.vocab_term = vocab_term
        self.scoped_ref = scoped_ref

    def _expand(self, value, baseuri, loadingOptions):
        # type: (str, str, LoadingOptions) -> str
        """Apply expand_url with this loader's scoping configuration."""
        return expand_url(
            value,
            baseuri,
            loadingOptions,
            self.scoped_id,
            self.vocab_term,
            self.scoped_ref,
        )

    def load(self, doc, baseuri, loadingOptions, docRoot=None):
        # type: (Any, str, LoadingOptions, Optional[str]) -> Any
        if isinstance(doc, MutableSequence):
            # Expand string entries; non-strings pass through unchanged.
            doc = [
                self._expand(entry, baseuri, loadingOptions)
                if isinstance(entry, str)
                else entry
                for entry in doc
            ]
        elif isinstance(doc, str):
            doc = self._expand(doc, baseuri, loadingOptions)
        return self.inner.load(doc, baseuri, loadingOptions)
class _TypeDSLLoader(_Loader):
    """Expand the type DSL: "T[]" -> array of T, "T?" -> optional (union with null)."""

    # group 1: base type name, group 2: trailing "[]", group 3: trailing "?"
    typeDSLregex = re.compile(r"^([^[?]+)(\[\])?(\?)?$")

    def __init__(self, inner, refScope):
        # type: (_Loader, Union[int, None]) -> None
        self.inner = inner
        self.refScope = refScope

    def resolve(
        self,
        doc,  # type: str
        baseuri,  # type: str
        loadingOptions,  # type: LoadingOptions
    ):
        # type: (...) -> Union[List[Union[Dict[str, str], str]], Dict[str, str], str]
        """Expand one DSL string; returns *doc* unchanged when it doesn't match."""
        m = self.typeDSLregex.match(doc)
        if m:
            group1 = m.group(1)
            assert group1 is not None  # nosec
            first = expand_url(
                group1, baseuri, loadingOptions, False, True, self.refScope
            )
            second = third = None
            if bool(m.group(2)):
                # "T[]" becomes an array schema of T.
                second = {"type": "array", "items": first}
            if bool(m.group(3)):
                # "T?" becomes a union with "null".
                third = ["null", second or first]
            return third or second or first
        return doc

    def load(self, doc, baseuri, loadingOptions, docRoot=None):
        # type: (Any, str, LoadingOptions, Optional[str]) -> Any
        if isinstance(doc, MutableSequence):
            r = []  # type: List[Any]
            for d in doc:
                if isinstance(d, str):
                    resolved = self.resolve(d, baseuri, loadingOptions)
                    if isinstance(resolved, MutableSequence):
                        # Merge expanded unions into the result, dropping
                        # duplicate members.
                        for i in resolved:
                            if i not in r:
                                r.append(i)
                    else:
                        if resolved not in r:
                            r.append(resolved)
                else:
                    r.append(d)
            doc = r
        elif isinstance(doc, str):
            doc = self.resolve(doc, baseuri, loadingOptions)
        return self.inner.load(doc, baseuri, loadingOptions)
class _IdMapLoader(_Loader):
    """Convert the "idmap" mapping form into the equivalent list form.

    Each ``{key: value}`` entry becomes a list element carrying
    ``mapSubject: key``; non-mapping values are first wrapped as
    ``{mapPredicate: value}``.
    """

    def __init__(self, inner, mapSubject, mapPredicate):
        # type: (_Loader, str, Union[str, None]) -> None
        self.inner = inner
        self.mapSubject = mapSubject
        self.mapPredicate = mapPredicate

    def load(self, doc, baseuri, loadingOptions, docRoot=None):
        # type: (Any, str, LoadingOptions, Optional[str]) -> Any
        if isinstance(doc, MutableMapping):
            r = []  # type: List[Any]
            for k in sorted(doc.keys()):
                val = doc[k]
                if isinstance(val, CommentedMap):
                    v = copy.copy(val)
                    # Preserve ruamel.yaml line/column info for error reporting.
                    v.lc.data = val.lc.data
                    v.lc.filename = val.lc.filename
                    v[self.mapSubject] = k
                    r.append(v)
                elif isinstance(val, MutableMapping):
                    v2 = copy.copy(val)
                    v2[self.mapSubject] = k
                    r.append(v2)
                else:
                    if self.mapPredicate:
                        v3 = {self.mapPredicate: val}
                        v3[self.mapSubject] = k
                        r.append(v3)
                    else:
                        raise ValidationException("No mapPredicate")
            doc = r
        return self.inner.load(doc, baseuri, loadingOptions)
def _document_load(
    loader: _Loader,
    doc: Union[str, MutableMapping[str, Any], MutableSequence[Any]],
    baseuri: str,
    loadingOptions: LoadingOptions,
    addl_metadata_fields: Optional[MutableSequence[str]] = None,
) -> Tuple[Any, LoadingOptions]:
    """Load a document (URL string, mapping, or sequence) and index the result.

    Returns the ``(loaded object, LoadingOptions)`` tuple that is also stored
    in ``loadingOptions.idx``.

    Raises ValidationException for any other document type.
    """
    if isinstance(doc, str):
        # A bare string is a reference to another document.
        return _document_load_by_url(
            loader,
            loadingOptions.fetcher.urljoin(baseuri, doc),
            loadingOptions,
            addl_metadata_fields=addl_metadata_fields,
        )

    if isinstance(doc, MutableMapping):
        addl_metadata = {}
        if addl_metadata_fields is not None:
            for mf in addl_metadata_fields:
                if mf in doc:
                    addl_metadata[mf] = doc[mf]

        docuri = baseuri
        if "$base" in doc:
            # $base redirects the effective base URI for this document.
            baseuri = doc["$base"]

        loadingOptions = LoadingOptions(
            copyfrom=loadingOptions,
            namespaces=doc.get("$namespaces", None),
            schemas=doc.get("$schemas", None),
            baseuri=doc.get("$base", None),
            addl_metadata=addl_metadata,
        )

        # Strip the directives; they have been consumed by LoadingOptions.
        doc = {
            k: v
            for k, v in doc.items()
            if k not in ("$namespaces", "$schemas", "$base")
        }

        if "$graph" in doc:
            loadingOptions.idx[baseuri] = (
                loader.load(doc["$graph"], baseuri, loadingOptions),
                loadingOptions,
            )
        else:
            loadingOptions.idx[baseuri] = (
                loader.load(doc, baseuri, loadingOptions, docRoot=baseuri),
                loadingOptions,
            )

        if docuri != baseuri:
            # Index under the original URI too when $base redirected it.
            loadingOptions.idx[docuri] = loadingOptions.idx[baseuri]

        return loadingOptions.idx[baseuri]

    if isinstance(doc, MutableSequence):
        loadingOptions.idx[baseuri] = (
            loader.load(doc, baseuri, loadingOptions),
            loadingOptions,
        )
        return loadingOptions.idx[baseuri]

    raise ValidationException(
        "Expected URI string, MutableMapping or MutableSequence, got %s" % type(doc)
    )
def _document_load_by_url(
    loader: _Loader,
    url: str,
    loadingOptions: LoadingOptions,
    addl_metadata_fields: Optional[MutableSequence[str]] = None,
) -> Tuple[Any, LoadingOptions]:
    """Fetch, parse and load the document at *url*, memoizing via the index."""
    if url in loadingOptions.idx:
        return loadingOptions.idx[url]

    doc_url, frg = urldefrag(url)

    text = loadingOptions.fetcher.fetch_text(doc_url)
    if isinstance(text, bytes):
        textIO = StringIO(text.decode("utf-8"))
    else:
        textIO = StringIO(text)
    textIO.name = str(doc_url)
    yaml = yaml_no_ts()
    result = yaml.load(textIO)
    add_lc_filename(result, doc_url)

    loadingOptions = LoadingOptions(copyfrom=loadingOptions, fileuri=doc_url)

    # _document_load populates loadingOptions.idx as a side effect (keyed by
    # URI), which is why its return value is not used directly here.
    _document_load(
        loader,
        result,
        doc_url,
        loadingOptions,
        addl_metadata_fields=addl_metadata_fields,
    )

    return loadingOptions.idx[url]
def file_uri(path, split_frag=False):  # type: (str, bool) -> str
    """Convert a local filesystem path into a ``file://`` URI.

    A path that is already a file:// URI is returned unchanged.  When
    *split_frag* is true and the path contains a single "#", the part after
    it is percent-quoted and re-appended as a URI fragment.
    """
    if path.startswith("file://"):
        return path
    frag = ""
    if split_frag:
        pathsp = path.split("#", 2)
        if len(pathsp) == 2:
            frag = "#" + quote(str(pathsp[1]))
        urlpath = pathname2url(str(pathsp[0]))
    else:
        urlpath = pathname2url(path)
    # pathname2url may already yield a //-prefixed network path.
    scheme = "file:" if urlpath.startswith("//") else "file://"
    return f"{scheme}{urlpath}{frag}"
def prefix_url(url: str, namespaces: Dict[str, str]) -> str:
    """Compact *url* into "prefix:suffix" form using the namespace dictionary.

    The previous docstring described the opposite operation (expansion);
    this function *shortens* a full URL whose start matches one of the
    namespace values.  URLs matching no namespace are returned unchanged.
    """
    for k, v in namespaces.items():
        if url.startswith(v):
            return k + ":" + url[len(v) :]
    return url
def save_relative_uri(
    uri: Any,
    base_url: str,
    scoped_id: bool,
    ref_scope: Optional[int],
    relative_uris: bool,
) -> Any:
    """Convert any URI to a relative one, obeying the scoping rules."""
    if isinstance(uri, MutableSequence):
        return [
            save_relative_uri(u, base_url, scoped_id, ref_scope, relative_uris)
            for u in uri
        ]
    elif isinstance(uri, str):
        if not relative_uris or uri == base_url:
            return uri
        urisplit = urlsplit(uri)
        basesplit = urlsplit(base_url)
        if urisplit.scheme == basesplit.scheme and urisplit.netloc == basesplit.netloc:
            if urisplit.path != basesplit.path:
                # Different path on the same host: emit a filesystem-style
                # relative path, keeping any fragment.
                p = os.path.relpath(urisplit.path, os.path.dirname(basesplit.path))
                if urisplit.fragment:
                    p = p + "#" + urisplit.fragment
                return p

            basefrag = basesplit.fragment + "/"
            if ref_scope:
                # Pop ref_scope levels off the base fragment before comparing.
                sp = basefrag.split("/")
                i = 0
                while i < ref_scope:
                    sp.pop()
                    i += 1
                basefrag = "/".join(sp)
            if urisplit.fragment.startswith(basefrag):
                # Inside the base scope: strip the shared fragment prefix.
                return urisplit.fragment[len(basefrag) :]
            else:
                return urisplit.fragment
        return uri
    else:
        # Not a URI at all; serialize it like any other value.
        return save(uri, top=False, base_url=base_url, relative_uris=relative_uris)
def shortname(inputid: str) -> str:
    """
    Compute the shortname of a fully qualified identifier.

    See https://w3id.org/cwl/v1.2/SchemaSalad.html#Short_names.
    """
    parsed = urlparse(inputid)
    # Prefer the fragment when present; otherwise fall back to the path.
    component = parsed.fragment if parsed.fragment else parsed.path
    return component.split("/")[-1]
def parser_info() -> str:
    """Return the fully qualified identifier of this generated parser."""
    return "org.galaxyproject.gxformat2.v19_09"
class Documented(Saveable):
    """Abstract marker base class for schema types that carry a ``doc`` field."""

    pass
class RecordField(Documented):
    """
    A field of a record.
    """

    def __init__(
        self,
        name: Any,
        type: Any,
        doc: Optional[Any] = None,
        extension_fields: Optional[Dict[str, Any]] = None,
        loadingOptions: Optional[LoadingOptions] = None,
    ) -> None:
        if extension_fields:
            self.extension_fields = extension_fields
        else:
            self.extension_fields = CommentedMap()
        if loadingOptions:
            self.loadingOptions = loadingOptions
        else:
            self.loadingOptions = LoadingOptions()
        self.doc = doc
        self.name = name
        self.type = type

    def __eq__(self, other: Any) -> bool:
        if isinstance(other, RecordField):
            return bool(
                self.doc == other.doc
                and self.name == other.name
                and self.type == other.type
            )
        return False

    def __hash__(self) -> int:
        return hash((self.doc, self.name, self.type))

    @classmethod
    def fromDoc(
        cls,
        doc: Any,
        baseuri: str,
        loadingOptions: LoadingOptions,
        docRoot: Optional[str] = None,
    ) -> "RecordField":
        """Construct this object from the result of yaml.load()."""
        _doc = copy.copy(doc)
        if hasattr(doc, "lc"):
            # Preserve ruamel.yaml line/column info for error reporting.
            _doc.lc.data = doc.lc.data
            _doc.lc.filename = doc.lc.filename
        _errors__ = []
        if "name" in _doc:
            try:
                name = load_field(
                    _doc.get("name"),
                    uri_strtype_True_False_None,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `name` field is not valid because:",
                        SourceLine(_doc, "name", str),
                        [e],
                    )
                )
        else:
            name = None

        __original_name_is_none = name is None
        if name is None:
            if docRoot is not None:
                # Fall back to the document root URI as the identifier.
                name = docRoot
            else:
                raise ValidationException("Missing name")
        if not __original_name_is_none:
            # An explicit name becomes the base URI for nested fields.
            baseuri = name
        if "doc" in _doc:
            try:
                doc = load_field(
                    _doc.get("doc"),
                    union_of_None_type_or_strtype_or_array_of_strtype,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `doc` field is not valid because:",
                        SourceLine(_doc, "doc", str),
                        [e],
                    )
                )
        else:
            doc = None
        try:
            type = load_field(
                _doc.get("type"),
                typedsl_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_2,
                baseuri,
                loadingOptions,
            )
        except ValidationException as e:
            _errors__.append(
                ValidationException(
                    "the `type` field is not valid because:",
                    SourceLine(_doc, "type", str),
                    [e],
                )
            )

        extension_fields: Dict[str, Any] = {}
        for k in _doc.keys():
            if k not in cls.attrs:
                if ":" in k:
                    # Namespaced keys are preserved as extension fields.
                    ex = expand_url(
                        k, "", loadingOptions, scoped_id=False, vocab_term=False
                    )
                    extension_fields[ex] = _doc[k]
                else:
                    _errors__.append(
                        ValidationException(
                            "invalid field `{}`, expected one of: `doc`, `name`, `type`".format(
                                k
                            ),
                            SourceLine(_doc, k, str),
                        )
                    )
                    break

        if _errors__:
            raise ValidationException("Trying 'RecordField'", None, _errors__)
        _constructed = cls(
            doc=doc,
            name=name,
            type=type,
            extension_fields=extension_fields,
            loadingOptions=loadingOptions,
        )
        # Register the named object in the shared document index.
        loadingOptions.idx[name] = (_constructed, loadingOptions)
        return _constructed

    def save(
        self, top: bool = False, base_url: str = "", relative_uris: bool = True
    ) -> Dict[str, Any]:
        """Convert this object to a JSON/YAML friendly dictionary."""
        r: Dict[str, Any] = {}
        if relative_uris:
            for ef in self.extension_fields:
                r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]
        else:
            for ef in self.extension_fields:
                r[ef] = self.extension_fields[ef]
        if self.name is not None:
            u = save_relative_uri(self.name, base_url, True, None, relative_uris)
            r["name"] = u
        if self.doc is not None:
            r["doc"] = save(
                self.doc, top=False, base_url=self.name, relative_uris=relative_uris
            )
        if self.type is not None:
            r["type"] = save(
                self.type, top=False, base_url=self.name, relative_uris=relative_uris
            )

        # top refers to the directory level
        if top:
            if self.loadingOptions.namespaces:
                r["$namespaces"] = self.loadingOptions.namespaces
            if self.loadingOptions.schemas:
                r["$schemas"] = self.loadingOptions.schemas
        return r

    attrs = frozenset(["doc", "name", "type"])
class RecordSchema(Saveable):
    """Schema for a record type composed of named fields."""

    def __init__(
        self,
        type: Any,
        fields: Optional[Any] = None,
        extension_fields: Optional[Dict[str, Any]] = None,
        loadingOptions: Optional[LoadingOptions] = None,
    ) -> None:
        if extension_fields:
            self.extension_fields = extension_fields
        else:
            self.extension_fields = CommentedMap()
        if loadingOptions:
            self.loadingOptions = loadingOptions
        else:
            self.loadingOptions = LoadingOptions()
        self.fields = fields
        self.type = type

    def __eq__(self, other: Any) -> bool:
        if isinstance(other, RecordSchema):
            return bool(self.fields == other.fields and self.type == other.type)
        return False

    def __hash__(self) -> int:
        return hash((self.fields, self.type))

    @classmethod
    def fromDoc(
        cls,
        doc: Any,
        baseuri: str,
        loadingOptions: LoadingOptions,
        docRoot: Optional[str] = None,
    ) -> "RecordSchema":
        """Construct this object from the result of yaml.load()."""
        _doc = copy.copy(doc)
        if hasattr(doc, "lc"):
            # Preserve ruamel.yaml line/column info for error reporting.
            _doc.lc.data = doc.lc.data
            _doc.lc.filename = doc.lc.filename
        _errors__ = []
        if "fields" in _doc:
            try:
                fields = load_field(
                    _doc.get("fields"),
                    idmap_fields_union_of_None_type_or_array_of_RecordFieldLoader,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `fields` field is not valid because:",
                        SourceLine(_doc, "fields", str),
                        [e],
                    )
                )
        else:
            fields = None
        try:
            type = load_field(
                _doc.get("type"),
                typedsl_enum_d9cba076fca539106791a4f46d198c7fcfbdb779Loader_2,
                baseuri,
                loadingOptions,
            )
        except ValidationException as e:
            _errors__.append(
                ValidationException(
                    "the `type` field is not valid because:",
                    SourceLine(_doc, "type", str),
                    [e],
                )
            )

        extension_fields: Dict[str, Any] = {}
        for k in _doc.keys():
            if k not in cls.attrs:
                if ":" in k:
                    # Namespaced keys are preserved as extension fields.
                    ex = expand_url(
                        k, "", loadingOptions, scoped_id=False, vocab_term=False
                    )
                    extension_fields[ex] = _doc[k]
                else:
                    _errors__.append(
                        ValidationException(
                            "invalid field `{}`, expected one of: `fields`, `type`".format(
                                k
                            ),
                            SourceLine(_doc, k, str),
                        )
                    )
                    break

        if _errors__:
            raise ValidationException("Trying 'RecordSchema'", None, _errors__)
        _constructed = cls(
            fields=fields,
            type=type,
            extension_fields=extension_fields,
            loadingOptions=loadingOptions,
        )
        return _constructed

    def save(
        self, top: bool = False, base_url: str = "", relative_uris: bool = True
    ) -> Dict[str, Any]:
        """Convert this object to a JSON/YAML friendly dictionary."""
        r: Dict[str, Any] = {}
        if relative_uris:
            for ef in self.extension_fields:
                r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]
        else:
            for ef in self.extension_fields:
                r[ef] = self.extension_fields[ef]
        if self.fields is not None:
            r["fields"] = save(
                self.fields, top=False, base_url=base_url, relative_uris=relative_uris
            )
        if self.type is not None:
            r["type"] = save(
                self.type, top=False, base_url=base_url, relative_uris=relative_uris
            )

        # top refers to the directory level
        if top:
            if self.loadingOptions.namespaces:
                r["$namespaces"] = self.loadingOptions.namespaces
            if self.loadingOptions.schemas:
                r["$schemas"] = self.loadingOptions.schemas
        return r

    attrs = frozenset(["fields", "type"])
class EnumSchema(Saveable):
    """
    Define an enumerated type.
    """

    def __init__(
        self,
        symbols: Any,
        type: Any,
        extension_fields: Optional[Dict[str, Any]] = None,
        loadingOptions: Optional[LoadingOptions] = None,
    ) -> None:
        if extension_fields:
            self.extension_fields = extension_fields
        else:
            self.extension_fields = CommentedMap()
        if loadingOptions:
            self.loadingOptions = loadingOptions
        else:
            self.loadingOptions = LoadingOptions()
        self.symbols = symbols
        self.type = type

    def __eq__(self, other: Any) -> bool:
        if isinstance(other, EnumSchema):
            return bool(self.symbols == other.symbols and self.type == other.type)
        return False

    def __hash__(self) -> int:
        return hash((self.symbols, self.type))

    @classmethod
    def fromDoc(
        cls,
        doc: Any,
        baseuri: str,
        loadingOptions: LoadingOptions,
        docRoot: Optional[str] = None,
    ) -> "EnumSchema":
        """Construct this object from the result of yaml.load()."""
        _doc = copy.copy(doc)
        if hasattr(doc, "lc"):
            # Preserve ruamel.yaml line/column info for error reporting.
            _doc.lc.data = doc.lc.data
            _doc.lc.filename = doc.lc.filename
        _errors__ = []
        try:
            symbols = load_field(
                _doc.get("symbols"),
                uri_array_of_strtype_True_False_None,
                baseuri,
                loadingOptions,
            )
        except ValidationException as e:
            _errors__.append(
                ValidationException(
                    "the `symbols` field is not valid because:",
                    SourceLine(_doc, "symbols", str),
                    [e],
                )
            )
        try:
            type = load_field(
                _doc.get("type"),
                typedsl_enum_d961d79c225752b9fadb617367615ab176b47d77Loader_2,
                baseuri,
                loadingOptions,
            )
        except ValidationException as e:
            _errors__.append(
                ValidationException(
                    "the `type` field is not valid because:",
                    SourceLine(_doc, "type", str),
                    [e],
                )
            )

        extension_fields: Dict[str, Any] = {}
        for k in _doc.keys():
            if k not in cls.attrs:
                if ":" in k:
                    # Namespaced keys are preserved as extension fields.
                    ex = expand_url(
                        k, "", loadingOptions, scoped_id=False, vocab_term=False
                    )
                    extension_fields[ex] = _doc[k]
                else:
                    _errors__.append(
                        ValidationException(
                            "invalid field `{}`, expected one of: `symbols`, `type`".format(
                                k
                            ),
                            SourceLine(_doc, k, str),
                        )
                    )
                    break

        if _errors__:
            raise ValidationException("Trying 'EnumSchema'", None, _errors__)
        _constructed = cls(
            symbols=symbols,
            type=type,
            extension_fields=extension_fields,
            loadingOptions=loadingOptions,
        )
        return _constructed

    def save(
        self, top: bool = False, base_url: str = "", relative_uris: bool = True
    ) -> Dict[str, Any]:
        """Convert this object to a JSON/YAML friendly dictionary."""
        r: Dict[str, Any] = {}
        if relative_uris:
            for ef in self.extension_fields:
                r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]
        else:
            for ef in self.extension_fields:
                r[ef] = self.extension_fields[ef]
        if self.symbols is not None:
            u = save_relative_uri(self.symbols, base_url, True, None, relative_uris)
            r["symbols"] = u
        if self.type is not None:
            r["type"] = save(
                self.type, top=False, base_url=base_url, relative_uris=relative_uris
            )

        # top refers to the directory level
        if top:
            if self.loadingOptions.namespaces:
                r["$namespaces"] = self.loadingOptions.namespaces
            if self.loadingOptions.schemas:
                r["$schemas"] = self.loadingOptions.schemas
        return r

    attrs = frozenset(["symbols", "type"])
class ArraySchema(Saveable):
    """Schema describing an array type via its ``items`` and ``type`` fields."""

    def __init__(
        self,
        items: Any,
        type: Any,
        extension_fields: Optional[Dict[str, Any]] = None,
        loadingOptions: Optional[LoadingOptions] = None,
    ) -> None:
        if extension_fields:
            self.extension_fields = extension_fields
        else:
            self.extension_fields = CommentedMap()
        if loadingOptions:
            self.loadingOptions = loadingOptions
        else:
            self.loadingOptions = LoadingOptions()
        self.items = items
        self.type = type

    def __eq__(self, other: Any) -> bool:
        if isinstance(other, ArraySchema):
            return bool(self.items == other.items and self.type == other.type)
        return False

    def __hash__(self) -> int:
        return hash((self.items, self.type))

    @classmethod
    def fromDoc(
        cls,
        doc: Any,
        baseuri: str,
        loadingOptions: LoadingOptions,
        docRoot: Optional[str] = None,
    ) -> "ArraySchema":
        """Construct this object from the result of yaml.load()."""
        _doc = copy.copy(doc)
        if hasattr(doc, "lc"):
            # Preserve ruamel.yaml line/column info for error reporting.
            _doc.lc.data = doc.lc.data
            _doc.lc.filename = doc.lc.filename
        _errors__ = []
        try:
            items = load_field(
                _doc.get("items"),
                uri_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_False_True_2,
                baseuri,
                loadingOptions,
            )
        except ValidationException as e:
            _errors__.append(
                ValidationException(
                    "the `items` field is not valid because:",
                    SourceLine(_doc, "items", str),
                    [e],
                )
            )
        try:
            type = load_field(
                _doc.get("type"),
                typedsl_enum_d062602be0b4b8fd33e69e29a841317b6ab665bcLoader_2,
                baseuri,
                loadingOptions,
            )
        except ValidationException as e:
            _errors__.append(
                ValidationException(
                    "the `type` field is not valid because:",
                    SourceLine(_doc, "type", str),
                    [e],
                )
            )

        extension_fields: Dict[str, Any] = {}
        for k in _doc.keys():
            if k not in cls.attrs:
                if ":" in k:
                    # Namespaced keys are preserved as extension fields.
                    ex = expand_url(
                        k, "", loadingOptions, scoped_id=False, vocab_term=False
                    )
                    extension_fields[ex] = _doc[k]
                else:
                    _errors__.append(
                        ValidationException(
                            "invalid field `{}`, expected one of: `items`, `type`".format(
                                k
                            ),
                            SourceLine(_doc, k, str),
                        )
                    )
                    break

        if _errors__:
            raise ValidationException("Trying 'ArraySchema'", None, _errors__)
        _constructed = cls(
            items=items,
            type=type,
            extension_fields=extension_fields,
            loadingOptions=loadingOptions,
        )
        return _constructed

    def save(
        self, top: bool = False, base_url: str = "", relative_uris: bool = True
    ) -> Dict[str, Any]:
        """Convert this object to a JSON/YAML friendly dictionary."""
        r: Dict[str, Any] = {}
        if relative_uris:
            for ef in self.extension_fields:
                r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]
        else:
            for ef in self.extension_fields:
                r[ef] = self.extension_fields[ef]
        if self.items is not None:
            u = save_relative_uri(self.items, base_url, False, 2, relative_uris)
            r["items"] = u
        if self.type is not None:
            r["type"] = save(
                self.type, top=False, base_url=base_url, relative_uris=relative_uris
            )

        # top refers to the directory level
        if top:
            if self.loadingOptions.namespaces:
                r["$namespaces"] = self.loadingOptions.namespaces
            if self.loadingOptions.schemas:
                r["$schemas"] = self.loadingOptions.schemas
        return r

    attrs = frozenset(["items", "type"])
class Labeled(Saveable):
    """Abstract marker base for records that carry a `label` field (see Parameter)."""

    pass
class Identified(Saveable):
    """Abstract marker base for records that carry an `id` field (see WorkflowStepOutput)."""

    pass
class Parameter(Labeled, Documented, Identified):
    """
    Define an input or output parameter to a process.

    Abstract: concrete parameter records (e.g. OutputParameter subclasses)
    supply the actual fields via the mixin bases.
    """

    pass
class OutputParameter(Parameter):
    """Abstract base for output parameters (see WorkflowOutputParameter)."""

    pass
class Process(Identified, Labeled, Documented):
    """
    The base executable type in CWL is the `Process` object defined by the
    document. Note that the `Process` object is abstract and cannot be
    directly executed.
    """

    pass
class HasUUID(Saveable):
    """Abstract marker base for records that carry a `uuid` field (see WorkflowStep)."""

    pass
class HasStepErrors(Saveable):
    """Abstract marker base for records that carry an `errors` field (see WorkflowStep)."""

    pass
class HasStepPosition(Saveable):
    """Abstract marker base for records that carry a `position` field (see WorkflowStep)."""

    pass
class StepPosition(Saveable):
    """
    This field specifies the location of the step's node when rendered in the workflow editor.
    """

    def __init__(
        self,
        top: Any,
        left: Any,
        extension_fields: Optional[Dict[str, Any]] = None,
        loadingOptions: Optional[LoadingOptions] = None,
    ) -> None:
        # Fall back to fresh containers when the caller supplies nothing.
        self.extension_fields = extension_fields if extension_fields else CommentedMap()
        self.loadingOptions = loadingOptions if loadingOptions else LoadingOptions()
        self.top = top
        self.left = left

    def __eq__(self, other: Any) -> bool:
        # Structural equality over the declared coordinates only.
        return bool(
            isinstance(other, StepPosition)
            and self.top == other.top
            and self.left == other.left
        )

    def __hash__(self) -> int:
        return hash((self.top, self.left))

    @classmethod
    def fromDoc(
        cls,
        doc: Any,
        baseuri: str,
        loadingOptions: LoadingOptions,
        docRoot: Optional[str] = None,
    ) -> "StepPosition":
        """Build a StepPosition from a parsed document node, aggregating errors."""
        _doc = copy.copy(doc)
        if hasattr(doc, "lc"):
            # Carry over ruamel.yaml line/column metadata for error reporting.
            _doc.lc.data = doc.lc.data
            _doc.lc.filename = doc.lc.filename
        problems = []

        def _coordinate(field_name: str) -> Any:
            # Both coordinates share the same float-or-int loader.
            return load_field(
                _doc.get(field_name),
                union_of_floattype_or_inttype,
                baseuri,
                loadingOptions,
            )

        try:
            top = _coordinate("top")
        except ValidationException as err:
            problems.append(
                ValidationException(
                    "the `top` field is not valid because:",
                    SourceLine(_doc, "top", str),
                    [err],
                )
            )
        try:
            left = _coordinate("left")
        except ValidationException as err:
            problems.append(
                ValidationException(
                    "the `left` field is not valid because:",
                    SourceLine(_doc, "left", str),
                    [err],
                )
            )
        extension_fields: Dict[str, Any] = {}
        for key in _doc.keys():
            if key in cls.attrs:
                continue
            if ":" not in key:
                # Plain unknown key: record the error and stop scanning.
                problems.append(
                    ValidationException(
                        "invalid field `{}`, expected one of: `top`, `left`".format(
                            key
                        ),
                        SourceLine(_doc, key, str),
                    )
                )
                break
            # Namespaced keys become expanded-URI extension fields.
            expanded = expand_url(
                key, "", loadingOptions, scoped_id=False, vocab_term=False
            )
            extension_fields[expanded] = _doc[key]
        if problems:
            raise ValidationException("Trying 'StepPosition'", None, problems)
        return cls(
            top=top,
            left=left,
            extension_fields=extension_fields,
            loadingOptions=loadingOptions,
        )

    def save(
        self, top: bool = False, base_url: str = "", relative_uris: bool = True
    ) -> Dict[str, Any]:
        """Serialize back to a plain dict of the document representation."""
        r: Dict[str, Any] = {}
        if relative_uris:
            for ef in self.extension_fields:
                r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]
        else:
            r.update(self.extension_fields)
        # Emit the coordinates in declaration order.
        for field_name in ("top", "left"):
            value = getattr(self, field_name)
            if value is not None:
                r[field_name] = save(
                    value, top=False, base_url=base_url, relative_uris=relative_uris
                )
        # top refers to the directory level
        if top:
            if self.loadingOptions.namespaces:
                r["$namespaces"] = self.loadingOptions.namespaces
            if self.loadingOptions.schemas:
                r["$schemas"] = self.loadingOptions.schemas
        return r

    # Recognized (non-extension) field names for this record.
    attrs = frozenset(["top", "left"])
class WorkflowOutputParameter(OutputParameter):
    """
    Describe an output parameter of a workflow. The parameter must be
    connected to one parameter defined in the workflow that
    will provide the value of the output parameter. It is legal to
    connect a WorkflowInputParameter to a WorkflowOutputParameter.
    """

    def __init__(
        self,
        label: Optional[Any] = None,
        doc: Optional[Any] = None,
        id: Optional[Any] = None,
        outputSource: Optional[Any] = None,
        type: Optional[Any] = None,
        extension_fields: Optional[Dict[str, Any]] = None,
        loadingOptions: Optional[LoadingOptions] = None,
    ) -> None:
        if extension_fields:
            self.extension_fields = extension_fields
        else:
            # CommentedMap keeps insertion order and ruamel.yaml metadata.
            self.extension_fields = CommentedMap()
        if loadingOptions:
            self.loadingOptions = loadingOptions
        else:
            self.loadingOptions = LoadingOptions()
        self.label = label
        self.doc = doc
        self.id = id
        self.outputSource = outputSource
        self.type = type

    def __eq__(self, other: Any) -> bool:
        # Structural equality over the declared fields only.
        if isinstance(other, WorkflowOutputParameter):
            return bool(
                self.label == other.label
                and self.doc == other.doc
                and self.id == other.id
                and self.outputSource == other.outputSource
                and self.type == other.type
            )
        return False

    def __hash__(self) -> int:
        return hash((self.label, self.doc, self.id, self.outputSource, self.type))

    @classmethod
    def fromDoc(
        cls,
        doc: Any,
        baseuri: str,
        loadingOptions: LoadingOptions,
        docRoot: Optional[str] = None,
    ) -> "WorkflowOutputParameter":
        """Parse *doc* into a WorkflowOutputParameter.

        Each field is loaded independently; failures accumulate in
        ``_errors__`` and are raised together at the end.
        """
        _doc = copy.copy(doc)
        if hasattr(doc, "lc"):
            # Preserve ruamel.yaml line/column info for error reporting.
            _doc.lc.data = doc.lc.data
            _doc.lc.filename = doc.lc.filename
        _errors__ = []
        if "id" in _doc:
            try:
                id = load_field(
                    _doc.get("id"),
                    uri_union_of_None_type_or_strtype_True_False_None,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `id` field is not valid because:",
                        SourceLine(_doc, "id", str),
                        [e],
                    )
                )
        else:
            id = None

        # A missing id gets docRoot or a blank-node placeholder; baseuri is
        # rebased onto the id only when the document actually supplied one.
        __original_id_is_none = id is None
        if id is None:
            if docRoot is not None:
                id = docRoot
            else:
                id = "_:" + str(_uuid__.uuid4())
        if not __original_id_is_none:
            baseuri = id
        if "label" in _doc:
            try:
                label = load_field(
                    _doc.get("label"),
                    union_of_None_type_or_strtype,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `label` field is not valid because:",
                        SourceLine(_doc, "label", str),
                        [e],
                    )
                )
        else:
            label = None
        if "doc" in _doc:
            try:
                # NOTE: rebinds the `doc` parameter; safe because the original
                # mapping was already copied into `_doc` above.
                doc = load_field(
                    _doc.get("doc"),
                    union_of_None_type_or_strtype_or_array_of_strtype,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `doc` field is not valid because:",
                        SourceLine(_doc, "doc", str),
                        [e],
                    )
                )
        else:
            doc = None
        if "outputSource" in _doc:
            try:
                outputSource = load_field(
                    _doc.get("outputSource"),
                    union_of_None_type_or_strtype,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `outputSource` field is not valid because:",
                        SourceLine(_doc, "outputSource", str),
                        [e],
                    )
                )
        else:
            outputSource = None
        if "type" in _doc:
            try:
                type = load_field(
                    _doc.get("type"),
                    typedsl_union_of_None_type_or_GalaxyTypeLoader_2,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `type` field is not valid because:",
                        SourceLine(_doc, "type", str),
                        [e],
                    )
                )
        else:
            type = None
        extension_fields: Dict[str, Any] = {}
        for k in _doc.keys():
            if k not in cls.attrs:
                if ":" in k:
                    # Namespaced keys become expanded-URI extension fields.
                    ex = expand_url(
                        k, "", loadingOptions, scoped_id=False, vocab_term=False
                    )
                    extension_fields[ex] = _doc[k]
                else:
                    # Plain unknown key: record the error and stop scanning.
                    _errors__.append(
                        ValidationException(
                            "invalid field `{}`, expected one of: `label`, `doc`, `id`, `outputSource`, `type`".format(
                                k
                            ),
                            SourceLine(_doc, k, str),
                        )
                    )
                    break
        if _errors__:
            raise ValidationException(
                "Trying 'WorkflowOutputParameter'", None, _errors__
            )
        _constructed = cls(
            label=label,
            doc=doc,
            id=id,
            outputSource=outputSource,
            type=type,
            extension_fields=extension_fields,
            loadingOptions=loadingOptions,
        )
        # Register the constructed object in the shared document index by id.
        loadingOptions.idx[id] = (_constructed, loadingOptions)
        return _constructed

    def save(
        self, top: bool = False, base_url: str = "", relative_uris: bool = True
    ) -> Dict[str, Any]:
        """Serialize back to a plain dict; child fields are relative to `id`."""
        r: Dict[str, Any] = {}
        if relative_uris:
            for ef in self.extension_fields:
                r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]
        else:
            for ef in self.extension_fields:
                r[ef] = self.extension_fields[ef]
        if self.id is not None:
            u = save_relative_uri(self.id, base_url, True, None, relative_uris)
            r["id"] = u
        if self.label is not None:
            r["label"] = save(
                self.label, top=False, base_url=self.id, relative_uris=relative_uris
            )
        if self.doc is not None:
            r["doc"] = save(
                self.doc, top=False, base_url=self.id, relative_uris=relative_uris
            )
        if self.outputSource is not None:
            r["outputSource"] = save(
                self.outputSource,
                top=False,
                base_url=self.id,
                relative_uris=relative_uris,
            )
        if self.type is not None:
            r["type"] = save(
                self.type, top=False, base_url=self.id, relative_uris=relative_uris
            )
        # top refers to the directory level
        if top:
            if self.loadingOptions.namespaces:
                r["$namespaces"] = self.loadingOptions.namespaces
            if self.loadingOptions.schemas:
                r["$schemas"] = self.loadingOptions.schemas
        return r

    # Recognized (non-extension) field names for this record.
    attrs = frozenset(["label", "doc", "id", "outputSource", "type"])
class WorkflowStep(
    Identified,
    Labeled,
    Documented,
    HasStepPosition,
    ReferencesTool,
    HasStepErrors,
    HasUUID,
):
    """
    This represents a non-input step a Galaxy Workflow.

    # A note about `state` and `tool_state` fields.

    Only one or the other should be specified. These are two ways to represent the "state"
    of a tool at this workflow step. Both are essentially maps from parameter names to
    parameter values.

    `tool_state` is much more low-level and expects a flat dictionary with each value a JSON
    dump. Nested tool structures such as conditionals and repeats should have all their values
    in the JSON dumped string. In general `tool_state` may be present in workflows exported from
    Galaxy but shouldn't be written by humans.

    `state` can contained a typed map. Repeat values can be represented as YAML arrays. An alternative
    to representing `state` this way is defining inputs with default values.
    """

    def __init__(
        self,
        out: Any,
        id: Optional[Any] = None,
        label: Optional[Any] = None,
        doc: Optional[Any] = None,
        position: Optional[Any] = None,
        tool_id: Optional[Any] = None,
        tool_shed_repository: Optional[Any] = None,
        tool_version: Optional[Any] = None,
        errors: Optional[Any] = None,
        uuid: Optional[Any] = None,
        in_: Optional[Any] = None,
        state: Optional[Any] = None,
        tool_state: Optional[Any] = None,
        type: Optional[Any] = None,
        run: Optional[Any] = None,
        runtime_inputs: Optional[Any] = None,
        when: Optional[Any] = None,
        extension_fields: Optional[Dict[str, Any]] = None,
        loadingOptions: Optional[LoadingOptions] = None,
    ) -> None:
        if extension_fields:
            self.extension_fields = extension_fields
        else:
            # CommentedMap keeps insertion order and ruamel.yaml metadata.
            self.extension_fields = CommentedMap()
        if loadingOptions:
            self.loadingOptions = loadingOptions
        else:
            self.loadingOptions = LoadingOptions()
        self.id = id
        self.label = label
        self.doc = doc
        self.position = position
        self.tool_id = tool_id
        self.tool_shed_repository = tool_shed_repository
        self.tool_version = tool_version
        self.errors = errors
        self.uuid = uuid
        # `in_` maps to the document field `in` (a Python keyword).
        self.in_ = in_
        self.out = out
        self.state = state
        self.tool_state = tool_state
        self.type = type
        self.run = run
        self.runtime_inputs = runtime_inputs
        self.when = when

    def __eq__(self, other: Any) -> bool:
        # Structural equality over the declared fields only.
        if isinstance(other, WorkflowStep):
            return bool(
                self.id == other.id
                and self.label == other.label
                and self.doc == other.doc
                and self.position == other.position
                and self.tool_id == other.tool_id
                and self.tool_shed_repository == other.tool_shed_repository
                and self.tool_version == other.tool_version
                and self.errors == other.errors
                and self.uuid == other.uuid
                and self.in_ == other.in_
                and self.out == other.out
                and self.state == other.state
                and self.tool_state == other.tool_state
                and self.type == other.type
                and self.run == other.run
                and self.runtime_inputs == other.runtime_inputs
                and self.when == other.when
            )
        return False

    def __hash__(self) -> int:
        return hash(
            (
                self.id,
                self.label,
                self.doc,
                self.position,
                self.tool_id,
                self.tool_shed_repository,
                self.tool_version,
                self.errors,
                self.uuid,
                self.in_,
                self.out,
                self.state,
                self.tool_state,
                self.type,
                self.run,
                self.runtime_inputs,
                self.when,
            )
        )

    @classmethod
    def fromDoc(
        cls,
        doc: Any,
        baseuri: str,
        loadingOptions: LoadingOptions,
        docRoot: Optional[str] = None,
    ) -> "WorkflowStep":
        """Parse *doc* into a WorkflowStep.

        Each field is loaded independently; failures accumulate in
        ``_errors__`` and are raised together at the end. The `id` field is
        handled first because it may rebase `baseuri` for later fields.
        """
        _doc = copy.copy(doc)
        if hasattr(doc, "lc"):
            # Preserve ruamel.yaml line/column info for error reporting.
            _doc.lc.data = doc.lc.data
            _doc.lc.filename = doc.lc.filename
        _errors__ = []
        if "id" in _doc:
            try:
                id = load_field(
                    _doc.get("id"),
                    uri_union_of_None_type_or_strtype_True_False_None,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `id` field is not valid because:",
                        SourceLine(_doc, "id", str),
                        [e],
                    )
                )
        else:
            id = None

        # A missing id gets docRoot or a blank-node placeholder; baseuri is
        # rebased onto the id only when the document actually supplied one.
        __original_id_is_none = id is None
        if id is None:
            if docRoot is not None:
                id = docRoot
            else:
                id = "_:" + str(_uuid__.uuid4())
        if not __original_id_is_none:
            baseuri = id
        if "label" in _doc:
            try:
                label = load_field(
                    _doc.get("label"),
                    union_of_None_type_or_strtype,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `label` field is not valid because:",
                        SourceLine(_doc, "label", str),
                        [e],
                    )
                )
        else:
            label = None
        if "doc" in _doc:
            try:
                # NOTE: rebinds the `doc` parameter; safe because the original
                # mapping was already copied into `_doc` above.
                doc = load_field(
                    _doc.get("doc"),
                    union_of_None_type_or_strtype_or_array_of_strtype,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `doc` field is not valid because:",
                        SourceLine(_doc, "doc", str),
                        [e],
                    )
                )
        else:
            doc = None
        if "position" in _doc:
            try:
                position = load_field(
                    _doc.get("position"),
                    union_of_None_type_or_StepPositionLoader,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `position` field is not valid because:",
                        SourceLine(_doc, "position", str),
                        [e],
                    )
                )
        else:
            position = None
        if "tool_id" in _doc:
            try:
                tool_id = load_field(
                    _doc.get("tool_id"),
                    union_of_None_type_or_strtype,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `tool_id` field is not valid because:",
                        SourceLine(_doc, "tool_id", str),
                        [e],
                    )
                )
        else:
            tool_id = None
        if "tool_shed_repository" in _doc:
            try:
                tool_shed_repository = load_field(
                    _doc.get("tool_shed_repository"),
                    union_of_None_type_or_ToolShedRepositoryLoader,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `tool_shed_repository` field is not valid because:",
                        SourceLine(_doc, "tool_shed_repository", str),
                        [e],
                    )
                )
        else:
            tool_shed_repository = None
        if "tool_version" in _doc:
            try:
                tool_version = load_field(
                    _doc.get("tool_version"),
                    union_of_None_type_or_strtype,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `tool_version` field is not valid because:",
                        SourceLine(_doc, "tool_version", str),
                        [e],
                    )
                )
        else:
            tool_version = None
        if "errors" in _doc:
            try:
                errors = load_field(
                    _doc.get("errors"),
                    union_of_None_type_or_strtype,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `errors` field is not valid because:",
                        SourceLine(_doc, "errors", str),
                        [e],
                    )
                )
        else:
            errors = None
        if "uuid" in _doc:
            try:
                uuid = load_field(
                    _doc.get("uuid"),
                    union_of_None_type_or_strtype,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `uuid` field is not valid because:",
                        SourceLine(_doc, "uuid", str),
                        [e],
                    )
                )
        else:
            uuid = None
        if "in" in _doc:
            try:
                # The document key is `in`; stored as `in_` (Python keyword).
                in_ = load_field(
                    _doc.get("in"),
                    idmap_in__union_of_None_type_or_array_of_WorkflowStepInputLoader,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `in` field is not valid because:",
                        SourceLine(_doc, "in", str),
                        [e],
                    )
                )
        else:
            in_ = None
        if "out" in _doc:
            try:
                out = load_field(
                    _doc.get("out"),
                    idmap_out_union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader_or_None_type,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `out` field is not valid because:",
                        SourceLine(_doc, "out", str),
                        [e],
                    )
                )
        else:
            out = None
        if "state" in _doc:
            try:
                state = load_field(
                    _doc.get("state"),
                    union_of_None_type_or_Any_type,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `state` field is not valid because:",
                        SourceLine(_doc, "state", str),
                        [e],
                    )
                )
        else:
            state = None
        if "tool_state" in _doc:
            try:
                tool_state = load_field(
                    _doc.get("tool_state"),
                    union_of_None_type_or_Any_type,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `tool_state` field is not valid because:",
                        SourceLine(_doc, "tool_state", str),
                        [e],
                    )
                )
        else:
            tool_state = None
        if "type" in _doc:
            try:
                type = load_field(
                    _doc.get("type"),
                    typedsl_union_of_None_type_or_WorkflowStepTypeLoader_2,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `type` field is not valid because:",
                        SourceLine(_doc, "type", str),
                        [e],
                    )
                )
        else:
            type = None
        if "run" in _doc:
            # `run` opens a nested identifier scope under the current baseuri.
            subscope_baseuri = expand_url('run', baseuri, loadingOptions, True)
            try:
                run = load_field(
                    _doc.get("run"),
                    uri_union_of_None_type_or_GalaxyWorkflowLoader_False_False_None,
                    subscope_baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `run` field is not valid because:",
                        SourceLine(_doc, "run", str),
                        [e],
                    )
                )
        else:
            run = None
        if "runtime_inputs" in _doc:
            try:
                runtime_inputs = load_field(
                    _doc.get("runtime_inputs"),
                    union_of_None_type_or_array_of_strtype,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `runtime_inputs` field is not valid because:",
                        SourceLine(_doc, "runtime_inputs", str),
                        [e],
                    )
                )
        else:
            runtime_inputs = None
        if "when" in _doc:
            try:
                when = load_field(
                    _doc.get("when"),
                    union_of_None_type_or_strtype,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `when` field is not valid because:",
                        SourceLine(_doc, "when", str),
                        [e],
                    )
                )
        else:
            when = None
        extension_fields: Dict[str, Any] = {}
        for k in _doc.keys():
            if k not in cls.attrs:
                if ":" in k:
                    # Namespaced keys become expanded-URI extension fields.
                    ex = expand_url(
                        k, "", loadingOptions, scoped_id=False, vocab_term=False
                    )
                    extension_fields[ex] = _doc[k]
                else:
                    # Plain unknown key: record the error and stop scanning.
                    _errors__.append(
                        ValidationException(
                            "invalid field `{}`, expected one of: `id`, `label`, `doc`, `position`, `tool_id`, `tool_shed_repository`, `tool_version`, `errors`, `uuid`, `in`, `out`, `state`, `tool_state`, `type`, `run`, `runtime_inputs`, `when`".format(
                                k
                            ),
                            SourceLine(_doc, k, str),
                        )
                    )
                    break
        if _errors__:
            raise ValidationException("Trying 'WorkflowStep'", None, _errors__)
        _constructed = cls(
            id=id,
            label=label,
            doc=doc,
            position=position,
            tool_id=tool_id,
            tool_shed_repository=tool_shed_repository,
            tool_version=tool_version,
            errors=errors,
            uuid=uuid,
            in_=in_,
            out=out,
            state=state,
            tool_state=tool_state,
            type=type,
            run=run,
            runtime_inputs=runtime_inputs,
            when=when,
            extension_fields=extension_fields,
            loadingOptions=loadingOptions,
        )
        # Register the constructed object in the shared document index by id.
        loadingOptions.idx[id] = (_constructed, loadingOptions)
        return _constructed

    def save(
        self, top: bool = False, base_url: str = "", relative_uris: bool = True
    ) -> Dict[str, Any]:
        """Serialize back to a plain dict; child fields are relative to `id`."""
        r: Dict[str, Any] = {}
        if relative_uris:
            for ef in self.extension_fields:
                r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]
        else:
            for ef in self.extension_fields:
                r[ef] = self.extension_fields[ef]
        if self.id is not None:
            u = save_relative_uri(self.id, base_url, True, None, relative_uris)
            r["id"] = u
        if self.label is not None:
            r["label"] = save(
                self.label, top=False, base_url=self.id, relative_uris=relative_uris
            )
        if self.doc is not None:
            r["doc"] = save(
                self.doc, top=False, base_url=self.id, relative_uris=relative_uris
            )
        if self.position is not None:
            r["position"] = save(
                self.position, top=False, base_url=self.id, relative_uris=relative_uris
            )
        if self.tool_id is not None:
            r["tool_id"] = save(
                self.tool_id, top=False, base_url=self.id, relative_uris=relative_uris
            )
        if self.tool_shed_repository is not None:
            r["tool_shed_repository"] = save(
                self.tool_shed_repository,
                top=False,
                base_url=self.id,
                relative_uris=relative_uris,
            )
        if self.tool_version is not None:
            r["tool_version"] = save(
                self.tool_version,
                top=False,
                base_url=self.id,
                relative_uris=relative_uris,
            )
        if self.errors is not None:
            r["errors"] = save(
                self.errors, top=False, base_url=self.id, relative_uris=relative_uris
            )
        if self.uuid is not None:
            r["uuid"] = save(
                self.uuid, top=False, base_url=self.id, relative_uris=relative_uris
            )
        if self.in_ is not None:
            # Written back under the document key `in`.
            r["in"] = save(
                self.in_, top=False, base_url=self.id, relative_uris=relative_uris
            )
        if self.out is not None:
            r["out"] = save(
                self.out, top=False, base_url=self.id, relative_uris=relative_uris
            )
        if self.state is not None:
            r["state"] = save(
                self.state, top=False, base_url=self.id, relative_uris=relative_uris
            )
        if self.tool_state is not None:
            r["tool_state"] = save(
                self.tool_state,
                top=False,
                base_url=self.id,
                relative_uris=relative_uris,
            )
        if self.type is not None:
            r["type"] = save(
                self.type, top=False, base_url=self.id, relative_uris=relative_uris
            )
        if self.run is not None:
            # `run` is a URI-valued field, saved relative to this step's id.
            u = save_relative_uri(self.run, self.id, False, None, relative_uris)
            r["run"] = u
        if self.runtime_inputs is not None:
            r["runtime_inputs"] = save(
                self.runtime_inputs,
                top=False,
                base_url=self.id,
                relative_uris=relative_uris,
            )
        if self.when is not None:
            r["when"] = save(
                self.when, top=False, base_url=self.id, relative_uris=relative_uris
            )
        # top refers to the directory level
        if top:
            if self.loadingOptions.namespaces:
                r["$namespaces"] = self.loadingOptions.namespaces
            if self.loadingOptions.schemas:
                r["$schemas"] = self.loadingOptions.schemas
        return r

    # Recognized (non-extension) field names for this record.
    attrs = frozenset(
        [
            "id",
            "label",
            "doc",
            "position",
            "tool_id",
            "tool_shed_repository",
            "tool_version",
            "errors",
            "uuid",
            "in",
            "out",
            "state",
            "tool_state",
            "type",
            "run",
            "runtime_inputs",
            "when",
        ]
    )
class Sink(Saveable):
    """Abstract marker base class (no fields declared at this level)."""

    pass
class Report(Saveable):
    """
    Definition of an invocation report for this workflow. Currently the only
    field is 'markdown'.
    """

    def __init__(
        self,
        markdown: Any,
        extension_fields: Optional[Dict[str, Any]] = None,
        loadingOptions: Optional[LoadingOptions] = None,
    ) -> None:
        # Fall back to fresh containers when the caller supplies nothing.
        self.extension_fields = extension_fields if extension_fields else CommentedMap()
        self.loadingOptions = loadingOptions if loadingOptions else LoadingOptions()
        self.markdown = markdown

    def __eq__(self, other: Any) -> bool:
        # Structural equality over the single declared field.
        return bool(isinstance(other, Report) and self.markdown == other.markdown)

    def __hash__(self) -> int:
        return hash(self.markdown)

    @classmethod
    def fromDoc(
        cls,
        doc: Any,
        baseuri: str,
        loadingOptions: LoadingOptions,
        docRoot: Optional[str] = None,
    ) -> "Report":
        """Build a Report from a parsed document node, aggregating errors."""
        _doc = copy.copy(doc)
        if hasattr(doc, "lc"):
            # Carry over ruamel.yaml line/column metadata for error reporting.
            _doc.lc.data = doc.lc.data
            _doc.lc.filename = doc.lc.filename
        problems = []
        try:
            markdown = load_field(
                _doc.get("markdown"),
                strtype,
                baseuri,
                loadingOptions,
            )
        except ValidationException as err:
            problems.append(
                ValidationException(
                    "the `markdown` field is not valid because:",
                    SourceLine(_doc, "markdown", str),
                    [err],
                )
            )
        extension_fields: Dict[str, Any] = {}
        for key in _doc.keys():
            if key in cls.attrs:
                continue
            if ":" not in key:
                # Plain unknown key: record the error and stop scanning.
                problems.append(
                    ValidationException(
                        "invalid field `{}`, expected one of: `markdown`".format(key),
                        SourceLine(_doc, key, str),
                    )
                )
                break
            # Namespaced keys become expanded-URI extension fields.
            expanded = expand_url(
                key, "", loadingOptions, scoped_id=False, vocab_term=False
            )
            extension_fields[expanded] = _doc[key]
        if problems:
            raise ValidationException("Trying 'Report'", None, problems)
        return cls(
            markdown=markdown,
            extension_fields=extension_fields,
            loadingOptions=loadingOptions,
        )

    def save(
        self, top: bool = False, base_url: str = "", relative_uris: bool = True
    ) -> Dict[str, Any]:
        """Serialize back to a plain dict of the document representation."""
        r: Dict[str, Any] = {}
        if relative_uris:
            for ef in self.extension_fields:
                r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]
        else:
            r.update(self.extension_fields)
        if self.markdown is not None:
            r["markdown"] = save(
                self.markdown, top=False, base_url=base_url, relative_uris=relative_uris
            )
        # top refers to the directory level
        if top:
            if self.loadingOptions.namespaces:
                r["$namespaces"] = self.loadingOptions.namespaces
            if self.loadingOptions.schemas:
                r["$schemas"] = self.loadingOptions.schemas
        return r

    # Recognized (non-extension) field names for this record.
    attrs = frozenset(["markdown"])
class WorkflowStepOutput(Identified):
    """
    Associate an output parameter of the underlying process with a workflow
    parameter. The workflow parameter (given in the `id` field) be may be used
    as a `source` to connect with input parameters of other workflow steps, or
    with an output parameter of the process.

    A unique identifier for this workflow output parameter. This is
    the identifier to use in the `source` field of `WorkflowStepInput`
    to connect the output value to downstream parameters.
    """

    def __init__(
        self,
        id: Optional[Any] = None,
        add_tags: Optional[Any] = None,
        change_datatype: Optional[Any] = None,
        delete_intermediate_datasets: Optional[Any] = None,
        hide: Optional[Any] = None,
        remove_tags: Optional[Any] = None,
        rename: Optional[Any] = None,
        set_columns: Optional[Any] = None,
        extension_fields: Optional[Dict[str, Any]] = None,
        loadingOptions: Optional[LoadingOptions] = None,
    ) -> None:
        if extension_fields:
            self.extension_fields = extension_fields
        else:
            # CommentedMap keeps insertion order and ruamel.yaml metadata.
            self.extension_fields = CommentedMap()
        if loadingOptions:
            self.loadingOptions = loadingOptions
        else:
            self.loadingOptions = LoadingOptions()
        self.id = id
        self.add_tags = add_tags
        self.change_datatype = change_datatype
        self.delete_intermediate_datasets = delete_intermediate_datasets
        self.hide = hide
        self.remove_tags = remove_tags
        self.rename = rename
        self.set_columns = set_columns

    def __eq__(self, other: Any) -> bool:
        # Structural equality over the declared fields only.
        if isinstance(other, WorkflowStepOutput):
            return bool(
                self.id == other.id
                and self.add_tags == other.add_tags
                and self.change_datatype == other.change_datatype
                and self.delete_intermediate_datasets
                == other.delete_intermediate_datasets
                and self.hide == other.hide
                and self.remove_tags == other.remove_tags
                and self.rename == other.rename
                and self.set_columns == other.set_columns
            )
        return False

    def __hash__(self) -> int:
        return hash(
            (
                self.id,
                self.add_tags,
                self.change_datatype,
                self.delete_intermediate_datasets,
                self.hide,
                self.remove_tags,
                self.rename,
                self.set_columns,
            )
        )

    @classmethod
    def fromDoc(
        cls,
        doc: Any,
        baseuri: str,
        loadingOptions: LoadingOptions,
        docRoot: Optional[str] = None,
    ) -> "WorkflowStepOutput":
        """Parse *doc* into a WorkflowStepOutput.

        Each field is loaded independently; failures accumulate in
        ``_errors__`` and are raised together at the end. The `id` field is
        handled first because it may rebase `baseuri` for later fields.
        """
        _doc = copy.copy(doc)
        if hasattr(doc, "lc"):
            # Preserve ruamel.yaml line/column info for error reporting.
            _doc.lc.data = doc.lc.data
            _doc.lc.filename = doc.lc.filename
        _errors__ = []
        if "id" in _doc:
            try:
                id = load_field(
                    _doc.get("id"),
                    uri_union_of_None_type_or_strtype_True_False_None,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `id` field is not valid because:",
                        SourceLine(_doc, "id", str),
                        [e],
                    )
                )
        else:
            id = None

        # A missing id gets docRoot or a blank-node placeholder; baseuri is
        # rebased onto the id only when the document actually supplied one.
        __original_id_is_none = id is None
        if id is None:
            if docRoot is not None:
                id = docRoot
            else:
                id = "_:" + str(_uuid__.uuid4())
        if not __original_id_is_none:
            baseuri = id
        if "add_tags" in _doc:
            try:
                add_tags = load_field(
                    _doc.get("add_tags"),
                    union_of_None_type_or_array_of_strtype,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `add_tags` field is not valid because:",
                        SourceLine(_doc, "add_tags", str),
                        [e],
                    )
                )
        else:
            add_tags = None
        if "change_datatype" in _doc:
            try:
                change_datatype = load_field(
                    _doc.get("change_datatype"),
                    union_of_None_type_or_strtype,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `change_datatype` field is not valid because:",
                        SourceLine(_doc, "change_datatype", str),
                        [e],
                    )
                )
        else:
            change_datatype = None
        if "delete_intermediate_datasets" in _doc:
            try:
                delete_intermediate_datasets = load_field(
                    _doc.get("delete_intermediate_datasets"),
                    union_of_None_type_or_booltype,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `delete_intermediate_datasets` field is not valid because:",
                        SourceLine(_doc, "delete_intermediate_datasets", str),
                        [e],
                    )
                )
        else:
            delete_intermediate_datasets = None
        if "hide" in _doc:
            try:
                hide = load_field(
                    _doc.get("hide"),
                    union_of_None_type_or_booltype,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `hide` field is not valid because:",
                        SourceLine(_doc, "hide", str),
                        [e],
                    )
                )
        else:
            hide = None
        if "remove_tags" in _doc:
            try:
                remove_tags = load_field(
                    _doc.get("remove_tags"),
                    union_of_None_type_or_array_of_strtype,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `remove_tags` field is not valid because:",
                        SourceLine(_doc, "remove_tags", str),
                        [e],
                    )
                )
        else:
            remove_tags = None
        if "rename" in _doc:
            try:
                rename = load_field(
                    _doc.get("rename"),
                    union_of_None_type_or_strtype,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `rename` field is not valid because:",
                        SourceLine(_doc, "rename", str),
                        [e],
                    )
                )
        else:
            rename = None
        if "set_columns" in _doc:
            try:
                set_columns = load_field(
                    _doc.get("set_columns"),
                    union_of_None_type_or_array_of_strtype,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `set_columns` field is not valid because:",
                        SourceLine(_doc, "set_columns", str),
                        [e],
                    )
                )
        else:
            set_columns = None
        extension_fields: Dict[str, Any] = {}
        for k in _doc.keys():
            if k not in cls.attrs:
                if ":" in k:
                    # Namespaced keys become expanded-URI extension fields.
                    ex = expand_url(
                        k, "", loadingOptions, scoped_id=False, vocab_term=False
                    )
                    extension_fields[ex] = _doc[k]
                else:
                    # Plain unknown key: record the error and stop scanning.
                    _errors__.append(
                        ValidationException(
                            "invalid field `{}`, expected one of: `id`, `add_tags`, `change_datatype`, `delete_intermediate_datasets`, `hide`, `remove_tags`, `rename`, `set_columns`".format(
                                k
                            ),
                            SourceLine(_doc, k, str),
                        )
                    )
                    break
        if _errors__:
            raise ValidationException("Trying 'WorkflowStepOutput'", None, _errors__)
        _constructed = cls(
            id=id,
            add_tags=add_tags,
            change_datatype=change_datatype,
            delete_intermediate_datasets=delete_intermediate_datasets,
            hide=hide,
            remove_tags=remove_tags,
            rename=rename,
            set_columns=set_columns,
            extension_fields=extension_fields,
            loadingOptions=loadingOptions,
        )
        # Register the constructed object in the shared document index by id.
        loadingOptions.idx[id] = (_constructed, loadingOptions)
        return _constructed

    def save(
        self, top: bool = False, base_url: str = "", relative_uris: bool = True
    ) -> Dict[str, Any]:
        """Serialize back to a plain dict; child fields are relative to `id`."""
        r: Dict[str, Any] = {}
        if relative_uris:
            for ef in self.extension_fields:
                r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]
        else:
            for ef in self.extension_fields:
                r[ef] = self.extension_fields[ef]
        if self.id is not None:
            u = save_relative_uri(self.id, base_url, True, None, relative_uris)
            r["id"] = u
        if self.add_tags is not None:
            r["add_tags"] = save(
                self.add_tags, top=False, base_url=self.id, relative_uris=relative_uris
            )
        if self.change_datatype is not None:
            r["change_datatype"] = save(
                self.change_datatype,
                top=False,
                base_url=self.id,
                relative_uris=relative_uris,
            )
        if self.delete_intermediate_datasets is not None:
            r["delete_intermediate_datasets"] = save(
                self.delete_intermediate_datasets,
                top=False,
                base_url=self.id,
                relative_uris=relative_uris,
            )
        if self.hide is not None:
            r["hide"] = save(
                self.hide, top=False, base_url=self.id, relative_uris=relative_uris
            )
        if self.remove_tags is not None:
            r["remove_tags"] = save(
                self.remove_tags,
                top=False,
                base_url=self.id,
                relative_uris=relative_uris,
            )
        if self.rename is not None:
            r["rename"] = save(
                self.rename, top=False, base_url=self.id, relative_uris=relative_uris
            )
        if self.set_columns is not None:
            r["set_columns"] = save(
                self.set_columns,
                top=False,
                base_url=self.id,
                relative_uris=relative_uris,
            )
        # top refers to the directory level
        if top:
            if self.loadingOptions.namespaces:
                r["$namespaces"] = self.loadingOptions.namespaces
            if self.loadingOptions.schemas:
                r["$schemas"] = self.loadingOptions.schemas
        return r

    # Recognized (non-extension) field names for this record.
    attrs = frozenset(
        [
            "id",
            "add_tags",
            "change_datatype",
            "delete_intermediate_datasets",
            "hide",
            "remove_tags",
            "rename",
            "set_columns",
        ]
    )
class GalaxyWorkflow(Process, HasUUID):
    """
    A Galaxy workflow description. This record corresponds to the description of a workflow that should be executable
    on a Galaxy server that includes the contained tool definitions.

    The workflows API or the user interface of Galaxy instances that are of version 19.09 or newer should be able to
    import a document defining this record.

    ## A note about `label` field.

    This is the name of the workflow in the Galaxy user interface. This is the mechanism that
    users will primarily identify the workflow using. Legacy support - this may also be called 'name' and Galaxy will
    consume the workflow document fine and treat this attribute correctly - however in order to validate against this
    workflow definition schema the attribute should be called `label`.
    """

    def __init__(
        self,
        inputs: Any,
        outputs: Any,
        steps: Any,
        tags: Any,
        id: Optional[Any] = None,
        label: Optional[Any] = None,
        doc: Optional[Any] = None,
        uuid: Optional[Any] = None,
        report: Optional[Any] = None,
        creator: Optional[Any] = None,
        license: Optional[Any] = None,
        release: Optional[Any] = None,
        extension_fields: Optional[Dict[str, Any]] = None,
        loadingOptions: Optional[LoadingOptions] = None,
    ) -> None:
        """Store the field values; defaults are materialized for the optional containers."""
        # Extension (namespaced) fields default to a fresh CommentedMap so
        # round-tripping through ruamel.yaml preserves comments/ordering.
        if extension_fields:
            self.extension_fields = extension_fields
        else:
            self.extension_fields = CommentedMap()
        if loadingOptions:
            self.loadingOptions = loadingOptions
        else:
            self.loadingOptions = LoadingOptions()
        self.id = id
        self.label = label
        self.doc = doc
        self.inputs = inputs
        self.outputs = outputs
        self.uuid = uuid
        # class_ is fixed for this record type; serialized back as "class".
        self.class_ = "GalaxyWorkflow"
        self.steps = steps
        self.report = report
        self.tags = tags
        self.creator = creator
        self.license = license
        self.release = release

    def __eq__(self, other: Any) -> bool:
        """Field-by-field equality against another GalaxyWorkflow; False for other types."""
        if isinstance(other, GalaxyWorkflow):
            return bool(
                self.id == other.id
                and self.label == other.label
                and self.doc == other.doc
                and self.inputs == other.inputs
                and self.outputs == other.outputs
                and self.uuid == other.uuid
                and self.class_ == other.class_
                and self.steps == other.steps
                and self.report == other.report
                and self.tags == other.tags
                and self.creator == other.creator
                and self.license == other.license
                and self.release == other.release
            )
        return False

    def __hash__(self) -> int:
        """Hash over the same field tuple used by __eq__ (fields must be hashable)."""
        return hash(
            (
                self.id,
                self.label,
                self.doc,
                self.inputs,
                self.outputs,
                self.uuid,
                self.class_,
                self.steps,
                self.report,
                self.tags,
                self.creator,
                self.license,
                self.release,
            )
        )

    @classmethod
    def fromDoc(
        cls,
        doc: Any,
        baseuri: str,
        loadingOptions: LoadingOptions,
        docRoot: Optional[str] = None,
    ) -> "GalaxyWorkflow":
        """Construct a GalaxyWorkflow from a parsed document.

        Validation errors for individual fields are accumulated in
        ``_errors__`` and raised together as one ValidationException at the
        end, so a single pass reports every bad field.
        """
        _doc = copy.copy(doc)
        # Preserve ruamel line/column info for error reporting, if present.
        if hasattr(doc, "lc"):
            _doc.lc.data = doc.lc.data
            _doc.lc.filename = doc.lc.filename
        _errors__ = []
        if _doc.get("class") != "GalaxyWorkflow":
            raise ValidationException("Not a GalaxyWorkflow")
        if "id" in _doc:
            try:
                id = load_field(
                    _doc.get("id"),
                    uri_union_of_None_type_or_strtype_True_False_None,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `id` field is not valid because:",
                        SourceLine(_doc, "id", str),
                        [e],
                    )
                )
        else:
            id = None

        # If no id was given, synthesize one (docRoot if available, else a
        # blank-node UUID); only a document-supplied id rebases baseuri.
        __original_id_is_none = id is None
        if id is None:
            if docRoot is not None:
                id = docRoot
            else:
                id = "_:" + str(_uuid__.uuid4())
        if not __original_id_is_none:
            baseuri = id
        if "label" in _doc:
            try:
                label = load_field(
                    _doc.get("label"),
                    union_of_None_type_or_strtype,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `label` field is not valid because:",
                        SourceLine(_doc, "label", str),
                        [e],
                    )
                )
        else:
            label = None
        if "doc" in _doc:
            try:
                doc = load_field(
                    _doc.get("doc"),
                    union_of_None_type_or_strtype_or_array_of_strtype,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `doc` field is not valid because:",
                        SourceLine(_doc, "doc", str),
                        [e],
                    )
                )
        else:
            doc = None
        # `inputs`, `outputs` and `steps` are required: no `in _doc` guard.
        try:
            inputs = load_field(
                _doc.get("inputs"),
                idmap_inputs_array_of_WorkflowInputParameterLoader,
                baseuri,
                loadingOptions,
            )
        except ValidationException as e:
            _errors__.append(
                ValidationException(
                    "the `inputs` field is not valid because:",
                    SourceLine(_doc, "inputs", str),
                    [e],
                )
            )
        try:
            outputs = load_field(
                _doc.get("outputs"),
                idmap_outputs_array_of_WorkflowOutputParameterLoader,
                baseuri,
                loadingOptions,
            )
        except ValidationException as e:
            _errors__.append(
                ValidationException(
                    "the `outputs` field is not valid because:",
                    SourceLine(_doc, "outputs", str),
                    [e],
                )
            )
        if "uuid" in _doc:
            try:
                uuid = load_field(
                    _doc.get("uuid"),
                    union_of_None_type_or_strtype,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `uuid` field is not valid because:",
                        SourceLine(_doc, "uuid", str),
                        [e],
                    )
                )
        else:
            uuid = None
        try:
            steps = load_field(
                _doc.get("steps"),
                idmap_steps_union_of_array_of_WorkflowStepLoader,
                baseuri,
                loadingOptions,
            )
        except ValidationException as e:
            _errors__.append(
                ValidationException(
                    "the `steps` field is not valid because:",
                    SourceLine(_doc, "steps", str),
                    [e],
                )
            )
        if "report" in _doc:
            try:
                report = load_field(
                    _doc.get("report"),
                    union_of_None_type_or_ReportLoader,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `report` field is not valid because:",
                        SourceLine(_doc, "report", str),
                        [e],
                    )
                )
        else:
            report = None
        if "tags" in _doc:
            try:
                tags = load_field(
                    _doc.get("tags"),
                    union_of_array_of_strtype_or_None_type,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `tags` field is not valid because:",
                        SourceLine(_doc, "tags", str),
                        [e],
                    )
                )
        else:
            tags = None
        if "creator" in _doc:
            try:
                creator = load_field(
                    _doc.get("creator"),
                    union_of_None_type_or_Any_type,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `creator` field is not valid because:",
                        SourceLine(_doc, "creator", str),
                        [e],
                    )
                )
        else:
            creator = None
        if "license" in _doc:
            try:
                license = load_field(
                    _doc.get("license"),
                    union_of_None_type_or_strtype,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `license` field is not valid because:",
                        SourceLine(_doc, "license", str),
                        [e],
                    )
                )
        else:
            license = None
        if "release" in _doc:
            try:
                release = load_field(
                    _doc.get("release"),
                    union_of_None_type_or_strtype,
                    baseuri,
                    loadingOptions,
                )
            except ValidationException as e:
                _errors__.append(
                    ValidationException(
                        "the `release` field is not valid because:",
                        SourceLine(_doc, "release", str),
                        [e],
                    )
                )
        else:
            release = None

        # Any key outside `attrs` must be a namespaced (":"-containing)
        # extension field; otherwise record an invalid-field error and stop
        # scanning (note the `break` ends the loop on the first bad key).
        extension_fields: Dict[str, Any] = {}
        for k in _doc.keys():
            if k not in cls.attrs:
                if ":" in k:
                    ex = expand_url(
                        k, "", loadingOptions, scoped_id=False, vocab_term=False
                    )
                    extension_fields[ex] = _doc[k]
                else:
                    _errors__.append(
                        ValidationException(
                            "invalid field `{}`, expected one of: `id`, `label`, `doc`, `inputs`, `outputs`, `uuid`, `class`, `steps`, `report`, `tags`, `creator`, `license`, `release`".format(
                                k
                            ),
                            SourceLine(_doc, k, str),
                        )
                    )
                    break

        if _errors__:
            raise ValidationException("Trying 'GalaxyWorkflow'", None, _errors__)
        _constructed = cls(
            id=id,
            label=label,
            doc=doc,
            inputs=inputs,
            outputs=outputs,
            uuid=uuid,
            steps=steps,
            report=report,
            tags=tags,
            creator=creator,
            license=license,
            release=release,
            extension_fields=extension_fields,
            loadingOptions=loadingOptions,
        )
        # Register the constructed object in the shared document index so
        # later references to this id resolve to the same instance.
        loadingOptions.idx[id] = (_constructed, loadingOptions)
        return _constructed

    def save(
        self, top: bool = False, base_url: str = "", relative_uris: bool = True
    ) -> Dict[str, Any]:
        """Serialize back to a plain dict.

        When relative_uris is True, extension-field keys are shortened via the
        vocab and the id is made relative to base_url; child fields are saved
        relative to this record's id.
        """
        r: Dict[str, Any] = {}
        if relative_uris:
            for ef in self.extension_fields:
                r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]
        else:
            for ef in self.extension_fields:
                r[ef] = self.extension_fields[ef]
        r["class"] = "GalaxyWorkflow"
        if self.id is not None:
            u = save_relative_uri(self.id, base_url, True, None, relative_uris)
            r["id"] = u
        if self.label is not None:
            r["label"] = save(
                self.label, top=False, base_url=self.id, relative_uris=relative_uris
            )
        if self.doc is not None:
            r["doc"] = save(
                self.doc, top=False, base_url=self.id, relative_uris=relative_uris
            )
        if self.inputs is not None:
            r["inputs"] = save(
                self.inputs, top=False, base_url=self.id, relative_uris=relative_uris
            )
        if self.outputs is not None:
            r["outputs"] = save(
                self.outputs, top=False, base_url=self.id, relative_uris=relative_uris
            )
        if self.uuid is not None:
            r["uuid"] = save(
                self.uuid, top=False, base_url=self.id, relative_uris=relative_uris
            )
        if self.steps is not None:
            r["steps"] = save(
                self.steps, top=False, base_url=self.id, relative_uris=relative_uris
            )
        if self.report is not None:
            r["report"] = save(
                self.report, top=False, base_url=self.id, relative_uris=relative_uris
            )
        if self.tags is not None:
            r["tags"] = save(
                self.tags, top=False, base_url=self.id, relative_uris=relative_uris
            )
        if self.creator is not None:
            r["creator"] = save(
                self.creator, top=False, base_url=self.id, relative_uris=relative_uris
            )
        if self.license is not None:
            r["license"] = save(
                self.license, top=False, base_url=self.id, relative_uris=relative_uris
            )
        if self.release is not None:
            r["release"] = save(
                self.release, top=False, base_url=self.id, relative_uris=relative_uris
            )

        # top refers to the directory level
        if top:
            if self.loadingOptions.namespaces:
                r["$namespaces"] = self.loadingOptions.namespaces
            if self.loadingOptions.schemas:
                r["$schemas"] = self.loadingOptions.schemas
        return r

    # Known field names; anything else must be a namespaced extension field.
    attrs = frozenset(
        [
            "id",
            "label",
            "doc",
            "inputs",
            "outputs",
            "uuid",
            "class",
            "steps",
            "report",
            "tags",
            "creator",
            "license",
            "release",
        ]
    )
# Forward vocabulary: maps each short schema term to its absolute IRI.
# Used when expanding terms found in documents; the inverse table is _rvocab.
_vocab = {
    "Any": "https://w3id.org/cwl/salad#Any",
    "ArraySchema": "https://w3id.org/cwl/salad#ArraySchema",
    "Documented": "https://w3id.org/cwl/salad#Documented",
    "EnumSchema": "https://w3id.org/cwl/salad#EnumSchema",
    "File": "https://galaxyproject.org/gxformat2/v19_09#GalaxyType/File",
    "GalaxyType": "https://galaxyproject.org/gxformat2/v19_09#GalaxyType",
    "GalaxyWorkflow": "https://galaxyproject.org/gxformat2/v19_09#GalaxyWorkflow",
    "HasStepErrors": "https://galaxyproject.org/gxformat2/gxformat2common#HasStepErrors",
    "HasStepPosition": "https://galaxyproject.org/gxformat2/gxformat2common#HasStepPosition",
    "HasUUID": "https://galaxyproject.org/gxformat2/gxformat2common#HasUUID",
    "Identified": "https://w3id.org/cwl/cwl#Identified",
    "InputParameter": "https://w3id.org/cwl/cwl#InputParameter",
    "Labeled": "https://w3id.org/cwl/cwl#Labeled",
    "OutputParameter": "https://w3id.org/cwl/cwl#OutputParameter",
    "Parameter": "https://w3id.org/cwl/cwl#Parameter",
    "PrimitiveType": "https://w3id.org/cwl/salad#PrimitiveType",
    "Process": "https://w3id.org/cwl/cwl#Process",
    "RecordField": "https://w3id.org/cwl/salad#RecordField",
    "RecordSchema": "https://w3id.org/cwl/salad#RecordSchema",
    "ReferencesTool": "https://galaxyproject.org/gxformat2/gxformat2common#ReferencesTool",
    "Report": "https://galaxyproject.org/gxformat2/v19_09#Report",
    "Sink": "https://galaxyproject.org/gxformat2/v19_09#Sink",
    "StepPosition": "https://galaxyproject.org/gxformat2/gxformat2common#StepPosition",
    "ToolShedRepository": "https://galaxyproject.org/gxformat2/gxformat2common#ToolShedRepository",
    "WorkflowInputParameter": "https://galaxyproject.org/gxformat2/v19_09#WorkflowInputParameter",
    "WorkflowOutputParameter": "https://galaxyproject.org/gxformat2/v19_09#WorkflowOutputParameter",
    "WorkflowStep": "https://galaxyproject.org/gxformat2/v19_09#WorkflowStep",
    "WorkflowStepInput": "https://galaxyproject.org/gxformat2/v19_09#WorkflowStepInput",
    "WorkflowStepOutput": "https://galaxyproject.org/gxformat2/v19_09#WorkflowStepOutput",
    "WorkflowStepType": "https://galaxyproject.org/gxformat2/v19_09#WorkflowStepType",
    "array": "https://w3id.org/cwl/salad#array",
    "boolean": "http://www.w3.org/2001/XMLSchema#boolean",
    "collection": "https://galaxyproject.org/gxformat2/v19_09#GalaxyType/collection",
    "data": "https://galaxyproject.org/gxformat2/v19_09#GalaxyType/data",
    "double": "http://www.w3.org/2001/XMLSchema#double",
    "enum": "https://w3id.org/cwl/salad#enum",
    "float": "http://www.w3.org/2001/XMLSchema#float",
    "int": "http://www.w3.org/2001/XMLSchema#int",
    "integer": "https://galaxyproject.org/gxformat2/v19_09#GalaxyType/integer",
    "long": "http://www.w3.org/2001/XMLSchema#long",
    "null": "https://w3id.org/cwl/salad#null",
    "pause": "https://galaxyproject.org/gxformat2/v19_09#WorkflowStepType/pause",
    "record": "https://w3id.org/cwl/salad#record",
    "string": "http://www.w3.org/2001/XMLSchema#string",
    "subworkflow": "https://galaxyproject.org/gxformat2/v19_09#WorkflowStepType/subworkflow",
    "text": "https://galaxyproject.org/gxformat2/v19_09#GalaxyType/text",
    "tool": "https://galaxyproject.org/gxformat2/v19_09#WorkflowStepType/tool",
}
# Reverse vocabulary: maps each absolute IRI back to its short schema term.
# Exact inverse of _vocab; used when shortening IRIs during save().
_rvocab = {
    "https://w3id.org/cwl/salad#Any": "Any",
    "https://w3id.org/cwl/salad#ArraySchema": "ArraySchema",
    "https://w3id.org/cwl/salad#Documented": "Documented",
    "https://w3id.org/cwl/salad#EnumSchema": "EnumSchema",
    "https://galaxyproject.org/gxformat2/v19_09#GalaxyType/File": "File",
    "https://galaxyproject.org/gxformat2/v19_09#GalaxyType": "GalaxyType",
    "https://galaxyproject.org/gxformat2/v19_09#GalaxyWorkflow": "GalaxyWorkflow",
    "https://galaxyproject.org/gxformat2/gxformat2common#HasStepErrors": "HasStepErrors",
    "https://galaxyproject.org/gxformat2/gxformat2common#HasStepPosition": "HasStepPosition",
    "https://galaxyproject.org/gxformat2/gxformat2common#HasUUID": "HasUUID",
    "https://w3id.org/cwl/cwl#Identified": "Identified",
    "https://w3id.org/cwl/cwl#InputParameter": "InputParameter",
    "https://w3id.org/cwl/cwl#Labeled": "Labeled",
    "https://w3id.org/cwl/cwl#OutputParameter": "OutputParameter",
    "https://w3id.org/cwl/cwl#Parameter": "Parameter",
    "https://w3id.org/cwl/salad#PrimitiveType": "PrimitiveType",
    "https://w3id.org/cwl/cwl#Process": "Process",
    "https://w3id.org/cwl/salad#RecordField": "RecordField",
    "https://w3id.org/cwl/salad#RecordSchema": "RecordSchema",
    "https://galaxyproject.org/gxformat2/gxformat2common#ReferencesTool": "ReferencesTool",
    "https://galaxyproject.org/gxformat2/v19_09#Report": "Report",
    "https://galaxyproject.org/gxformat2/v19_09#Sink": "Sink",
    "https://galaxyproject.org/gxformat2/gxformat2common#StepPosition": "StepPosition",
    "https://galaxyproject.org/gxformat2/gxformat2common#ToolShedRepository": "ToolShedRepository",
    "https://galaxyproject.org/gxformat2/v19_09#WorkflowInputParameter": "WorkflowInputParameter",
    "https://galaxyproject.org/gxformat2/v19_09#WorkflowOutputParameter": "WorkflowOutputParameter",
    "https://galaxyproject.org/gxformat2/v19_09#WorkflowStep": "WorkflowStep",
    "https://galaxyproject.org/gxformat2/v19_09#WorkflowStepInput": "WorkflowStepInput",
    "https://galaxyproject.org/gxformat2/v19_09#WorkflowStepOutput": "WorkflowStepOutput",
    "https://galaxyproject.org/gxformat2/v19_09#WorkflowStepType": "WorkflowStepType",
    "https://w3id.org/cwl/salad#array": "array",
    "http://www.w3.org/2001/XMLSchema#boolean": "boolean",
    "https://galaxyproject.org/gxformat2/v19_09#GalaxyType/collection": "collection",
    "https://galaxyproject.org/gxformat2/v19_09#GalaxyType/data": "data",
    "http://www.w3.org/2001/XMLSchema#double": "double",
    "https://w3id.org/cwl/salad#enum": "enum",
    "http://www.w3.org/2001/XMLSchema#float": "float",
    "http://www.w3.org/2001/XMLSchema#int": "int",
    "https://galaxyproject.org/gxformat2/v19_09#GalaxyType/integer": "integer",
    "http://www.w3.org/2001/XMLSchema#long": "long",
    "https://w3id.org/cwl/salad#null": "null",
    "https://galaxyproject.org/gxformat2/v19_09#WorkflowStepType/pause": "pause",
    "https://w3id.org/cwl/salad#record": "record",
    "http://www.w3.org/2001/XMLSchema#string": "string",
    "https://galaxyproject.org/gxformat2/v19_09#WorkflowStepType/subworkflow": "subworkflow",
    "https://galaxyproject.org/gxformat2/v19_09#GalaxyType/text": "text",
    "https://galaxyproject.org/gxformat2/v19_09#WorkflowStepType/tool": "tool",
}
# ---------------------------------------------------------------------------
# Loader instances, composed bottom-up. Names encode the composition
# (autogenerated): primitives, enums, records, then unions/arrays/idmaps/URIs
# built from them.
# ---------------------------------------------------------------------------

# Primitive / atomic loaders.
strtype = _PrimitiveLoader(str)
inttype = _PrimitiveLoader(int)
floattype = _PrimitiveLoader(float)
booltype = _PrimitiveLoader(bool)
None_type = _PrimitiveLoader(type(None))
Any_type = _AnyLoader()

# Enum loaders for the schema's closed symbol sets.
PrimitiveTypeLoader = _EnumLoader(
    (
        "null",
        "boolean",
        "int",
        "long",
        "float",
        "double",
        "string",
    ),
    "PrimitiveType",
)
AnyLoader = _EnumLoader(("Any",), "Any")

# Record loaders delegate to the corresponding class's fromDoc().
RecordFieldLoader = _RecordLoader(RecordField)
RecordSchemaLoader = _RecordLoader(RecordSchema)
EnumSchemaLoader = _RecordLoader(EnumSchema)
ArraySchemaLoader = _RecordLoader(ArraySchema)
StepPositionLoader = _RecordLoader(StepPosition)
ToolShedRepositoryLoader = _RecordLoader(ToolShedRepository)
GalaxyTypeLoader = _EnumLoader(
    (
        "null",
        "boolean",
        "int",
        "long",
        "float",
        "double",
        "string",
        "integer",
        "text",
        "File",
        "data",
        "collection",
    ),
    "GalaxyType",
)
WorkflowStepTypeLoader = _EnumLoader(
    (
        "tool",
        "subworkflow",
        "pause",
    ),
    "WorkflowStepType",
)
WorkflowInputParameterLoader = _RecordLoader(WorkflowInputParameter)
WorkflowOutputParameterLoader = _RecordLoader(WorkflowOutputParameter)
WorkflowStepLoader = _RecordLoader(WorkflowStep)
WorkflowStepInputLoader = _RecordLoader(WorkflowStepInput)
ReportLoader = _RecordLoader(Report)
WorkflowStepOutputLoader = _RecordLoader(WorkflowStepOutput)
GalaxyWorkflowLoader = _RecordLoader(GalaxyWorkflow)

# Composite loaders (arrays, unions, type-DSL expansions, URI resolution,
# and idmap key/value conversions) built from the loaders above.
array_of_strtype = _ArrayLoader(strtype)
union_of_None_type_or_strtype_or_array_of_strtype = _UnionLoader(
    (
        None_type,
        strtype,
        array_of_strtype,
    )
)
uri_strtype_True_False_None = _URILoader(strtype, True, False, None)
union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype = _UnionLoader(
    (
        PrimitiveTypeLoader,
        RecordSchemaLoader,
        EnumSchemaLoader,
        ArraySchemaLoader,
        strtype,
    )
)
array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype = _ArrayLoader(
    union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype
)
union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype = _UnionLoader(
    (
        PrimitiveTypeLoader,
        RecordSchemaLoader,
        EnumSchemaLoader,
        ArraySchemaLoader,
        strtype,
        array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype,
    )
)
typedsl_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_2 = _TypeDSLLoader(
    union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype,
    2,
)
array_of_RecordFieldLoader = _ArrayLoader(RecordFieldLoader)
union_of_None_type_or_array_of_RecordFieldLoader = _UnionLoader(
    (
        None_type,
        array_of_RecordFieldLoader,
    )
)
idmap_fields_union_of_None_type_or_array_of_RecordFieldLoader = _IdMapLoader(
    union_of_None_type_or_array_of_RecordFieldLoader, "name", "type"
)
enum_d9cba076fca539106791a4f46d198c7fcfbdb779Loader = _EnumLoader(
    ("record",), "enum_d9cba076fca539106791a4f46d198c7fcfbdb779"
)
typedsl_enum_d9cba076fca539106791a4f46d198c7fcfbdb779Loader_2 = _TypeDSLLoader(
    enum_d9cba076fca539106791a4f46d198c7fcfbdb779Loader, 2
)
uri_array_of_strtype_True_False_None = _URILoader(array_of_strtype, True, False, None)
enum_d961d79c225752b9fadb617367615ab176b47d77Loader = _EnumLoader(
    ("enum",), "enum_d961d79c225752b9fadb617367615ab176b47d77"
)
typedsl_enum_d961d79c225752b9fadb617367615ab176b47d77Loader_2 = _TypeDSLLoader(
    enum_d961d79c225752b9fadb617367615ab176b47d77Loader, 2
)
uri_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_False_True_2 = _URILoader(
    union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype,
    False,
    True,
    2,
)
enum_d062602be0b4b8fd33e69e29a841317b6ab665bcLoader = _EnumLoader(
    ("array",), "enum_d062602be0b4b8fd33e69e29a841317b6ab665bc"
)
typedsl_enum_d062602be0b4b8fd33e69e29a841317b6ab665bcLoader_2 = _TypeDSLLoader(
    enum_d062602be0b4b8fd33e69e29a841317b6ab665bcLoader, 2
)
union_of_None_type_or_strtype = _UnionLoader(
    (
        None_type,
        strtype,
    )
)
uri_union_of_None_type_or_strtype_True_False_None = _URILoader(
    union_of_None_type_or_strtype, True, False, None
)
union_of_None_type_or_Any_type = _UnionLoader(
    (
        None_type,
        Any_type,
    )
)
union_of_WorkflowInputParameterLoader = _UnionLoader((WorkflowInputParameterLoader,))
array_of_union_of_WorkflowInputParameterLoader = _ArrayLoader(
    union_of_WorkflowInputParameterLoader
)
idmap_inputs_array_of_union_of_WorkflowInputParameterLoader = _IdMapLoader(
    array_of_union_of_WorkflowInputParameterLoader, "id", "type"
)
union_of_WorkflowOutputParameterLoader = _UnionLoader((WorkflowOutputParameterLoader,))
array_of_union_of_WorkflowOutputParameterLoader = _ArrayLoader(
    union_of_WorkflowOutputParameterLoader
)
idmap_outputs_array_of_union_of_WorkflowOutputParameterLoader = _IdMapLoader(
    array_of_union_of_WorkflowOutputParameterLoader, "id", "type"
)
union_of_None_type_or_StepPositionLoader = _UnionLoader(
    (
        None_type,
        StepPositionLoader,
    )
)
union_of_floattype_or_inttype = _UnionLoader(
    (
        floattype,
        inttype,
    )
)
union_of_None_type_or_ToolShedRepositoryLoader = _UnionLoader(
    (
        None_type,
        ToolShedRepositoryLoader,
    )
)
union_of_GalaxyTypeLoader_or_strtype_or_None_type = _UnionLoader(
    (
        GalaxyTypeLoader,
        strtype,
        None_type,
    )
)
typedsl_union_of_GalaxyTypeLoader_or_strtype_or_None_type_2 = _TypeDSLLoader(
    union_of_GalaxyTypeLoader_or_strtype_or_None_type, 2
)
union_of_booltype_or_None_type = _UnionLoader(
    (
        booltype,
        None_type,
    )
)
union_of_None_type_or_array_of_strtype = _UnionLoader(
    (
        None_type,
        array_of_strtype,
    )
)
union_of_None_type_or_GalaxyTypeLoader = _UnionLoader(
    (
        None_type,
        GalaxyTypeLoader,
    )
)
typedsl_union_of_None_type_or_GalaxyTypeLoader_2 = _TypeDSLLoader(
    union_of_None_type_or_GalaxyTypeLoader, 2
)
array_of_WorkflowStepInputLoader = _ArrayLoader(WorkflowStepInputLoader)
union_of_None_type_or_array_of_WorkflowStepInputLoader = _UnionLoader(
    (
        None_type,
        array_of_WorkflowStepInputLoader,
    )
)
idmap_in__union_of_None_type_or_array_of_WorkflowStepInputLoader = _IdMapLoader(
    union_of_None_type_or_array_of_WorkflowStepInputLoader, "id", "source"
)
union_of_strtype_or_WorkflowStepOutputLoader = _UnionLoader(
    (
        strtype,
        WorkflowStepOutputLoader,
    )
)
array_of_union_of_strtype_or_WorkflowStepOutputLoader = _ArrayLoader(
    union_of_strtype_or_WorkflowStepOutputLoader
)
union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader_or_None_type = (
    _UnionLoader(
        (
            array_of_union_of_strtype_or_WorkflowStepOutputLoader,
            None_type,
        )
    )
)
idmap_out_union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader_or_None_type = _IdMapLoader(
    union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader_or_None_type,
    "id",
    "source",
)
union_of_None_type_or_WorkflowStepTypeLoader = _UnionLoader(
    (
        None_type,
        WorkflowStepTypeLoader,
    )
)
typedsl_union_of_None_type_or_WorkflowStepTypeLoader_2 = _TypeDSLLoader(
    union_of_None_type_or_WorkflowStepTypeLoader, 2
)
union_of_None_type_or_GalaxyWorkflowLoader = _UnionLoader(
    (
        None_type,
        GalaxyWorkflowLoader,
    )
)
uri_union_of_None_type_or_GalaxyWorkflowLoader_False_False_None = _URILoader(
    union_of_None_type_or_GalaxyWorkflowLoader, False, False, None
)
uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_2 = _URILoader(
    union_of_None_type_or_strtype_or_array_of_strtype, False, False, 2
)
union_of_None_type_or_booltype = _UnionLoader(
    (
        None_type,
        booltype,
    )
)
uri_strtype_False_True_None = _URILoader(strtype, False, True, None)
array_of_WorkflowInputParameterLoader = _ArrayLoader(WorkflowInputParameterLoader)
idmap_inputs_array_of_WorkflowInputParameterLoader = _IdMapLoader(
    array_of_WorkflowInputParameterLoader, "id", "type"
)
array_of_WorkflowOutputParameterLoader = _ArrayLoader(WorkflowOutputParameterLoader)
idmap_outputs_array_of_WorkflowOutputParameterLoader = _IdMapLoader(
    array_of_WorkflowOutputParameterLoader, "id", "type"
)
array_of_WorkflowStepLoader = _ArrayLoader(WorkflowStepLoader)
union_of_array_of_WorkflowStepLoader = _UnionLoader((array_of_WorkflowStepLoader,))
# NOTE(review): the map predicate below is the literal string "None" (not the
# None object) — this matches the generator's output; verify intentional.
idmap_steps_union_of_array_of_WorkflowStepLoader = _IdMapLoader(
    union_of_array_of_WorkflowStepLoader, "id", "None"
)
union_of_None_type_or_ReportLoader = _UnionLoader(
    (
        None_type,
        ReportLoader,
    )
)
union_of_array_of_strtype_or_None_type = _UnionLoader(
    (
        array_of_strtype,
        None_type,
    )
)
union_of_GalaxyWorkflowLoader = _UnionLoader((GalaxyWorkflowLoader,))
array_of_union_of_GalaxyWorkflowLoader = _ArrayLoader(union_of_GalaxyWorkflowLoader)
# Top-level document loader: a single GalaxyWorkflow or a list of them.
union_of_GalaxyWorkflowLoader_or_array_of_union_of_GalaxyWorkflowLoader = _UnionLoader(
    (
        GalaxyWorkflowLoader,
        array_of_union_of_GalaxyWorkflowLoader,
    )
)
def load_document(
    doc: Any,
    baseuri: Optional[str] = None,
    loadingOptions: Optional[LoadingOptions] = None,
) -> Any:
    """Load a GalaxyWorkflow (or list of them) from an already-parsed document.

    When no base URI is supplied, the current working directory is used;
    a default LoadingOptions is created when none is supplied.
    """
    effective_base = (
        baseuri if baseuri is not None else file_uri(os.getcwd()) + "/"
    )
    options = loadingOptions if loadingOptions is not None else LoadingOptions()
    result, _metadata = _document_load(
        union_of_GalaxyWorkflowLoader_or_array_of_union_of_GalaxyWorkflowLoader,
        doc,
        effective_base,
        options,
    )
    return result
def load_document_by_string(
    string: Any,
    uri: str,
    loadingOptions: Optional[LoadingOptions] = None,
) -> Any:
    """Parse a YAML/JSON string and load it as a GalaxyWorkflow document.

    `uri` is attached to the parsed tree for line/column error reporting and
    serves as the document's base URI.
    """
    parsed = yaml_no_ts().load(string)
    add_lc_filename(parsed, uri)
    options = (
        loadingOptions if loadingOptions is not None else LoadingOptions(fileuri=uri)
    )
    result, _metadata = _document_load(
        union_of_GalaxyWorkflowLoader_or_array_of_union_of_GalaxyWorkflowLoader,
        parsed,
        uri,
        options,
    )
    return result
def load_document_by_yaml(
    yaml: Any,
    uri: str,
    loadingOptions: Optional[LoadingOptions] = None,
) -> Any:
    """
    Shortcut to load via a YAML object.
    yaml: must be from ruamel.yaml.main.YAML.load with preserve_quotes=True
    """
    add_lc_filename(yaml, uri)
    options = (
        loadingOptions if loadingOptions is not None else LoadingOptions(fileuri=uri)
    )
    result, _metadata = _document_load(
        union_of_GalaxyWorkflowLoader_or_array_of_union_of_GalaxyWorkflowLoader,
        yaml,
        uri,
        options,
    )
    return result