#
# This file was autogenerated using schema-salad-tool --codegen=python
# The code itself is released under the Apache 2.0 license and the help text is
# subject to the license of the original schema.
import copy
import logging
import os
import pathlib
import tempfile
import uuid as _uuid__  # pylint: disable=unused-import # noqa: F401
import xml.sax  # nosec
from abc import ABC, abstractmethod
from io import StringIO
from itertools import chain
from typing import (
    Any,
    Dict,
    List,
    MutableMapping,
    MutableSequence,
    Optional,
    Sequence,
    Tuple,
    Type,
    Union,
    cast,
)
from urllib.parse import quote, urldefrag, urlparse, urlsplit, urlunsplit
from urllib.request import pathname2url

from rdflib import Graph
from rdflib.plugins.parsers.notation3 import BadSyntax
from ruamel.yaml.comments import CommentedMap

from schema_salad.exceptions import SchemaSaladException, ValidationException
from schema_salad.fetcher import DefaultFetcher, Fetcher, MemoryCachingFetcher
from schema_salad.sourceline import SourceLine, add_lc_filename
from schema_salad.utils import CacheType, yaml_no_ts  # requires schema-salad v8.2+

_vocab: Dict[str, str] = {}
_rvocab: Dict[str, str] = {}

_logger = logging.getLogger("salad")

IdxType = MutableMapping[str, Tuple[Any, "LoadingOptions"]]


class LoadingOptions:
    idx: IdxType
    fileuri: Optional[str]
    baseuri: str
    namespaces: MutableMapping[str, str]
    schemas: MutableSequence[str]
    original_doc: Optional[Any]
    addl_metadata: MutableMapping[str, Any]
    fetcher: Fetcher
    vocab: Dict[str, str]
    rvocab: Dict[str, str]
    cache: CacheType
    imports: List[str]
    includes: List[str]
    no_link_check: Optional[bool]
    container: Optional[str]

    def __init__(
        self,
        fetcher: Optional[Fetcher] = None,
        namespaces: Optional[Dict[str, str]] = None,
        schemas: Optional[List[str]] = None,
        fileuri: Optional[str] = None,
        copyfrom: Optional["LoadingOptions"] = None,
        original_doc: Optional[Any] = None,
        addl_metadata: Optional[Dict[str, str]] = None,
        baseuri: Optional[str] = None,
        idx: Optional[IdxType] = None,
        imports: Optional[List[str]] = None,
        includes: Optional[List[str]] = None,
        no_link_check: Optional[bool] = None,
        container: Optional[str] = None,
    ) -> None:
        """Create a LoadingOptions object."""
        self.original_doc = original_doc
        if idx is not None:
            self.idx = idx
        else:
            self.idx = copyfrom.idx if copyfrom is not None else {}
        if fileuri is not None:
            self.fileuri = fileuri
        else:
            self.fileuri = copyfrom.fileuri if copyfrom is not None else None
        if baseuri is not None:
            self.baseuri = baseuri
        else:
            self.baseuri = copyfrom.baseuri if copyfrom is not None else ""
        if namespaces is not None:
            self.namespaces = namespaces
        else:
            self.namespaces = copyfrom.namespaces if copyfrom is not None else {}
        if schemas is not None:
            self.schemas = schemas
        else:
            self.schemas = copyfrom.schemas if copyfrom is not None else []
        if addl_metadata is not None:
            self.addl_metadata = addl_metadata
        else:
            self.addl_metadata = copyfrom.addl_metadata if copyfrom is not None else {}
        if imports is not None:
            self.imports = imports
        else:
            self.imports = copyfrom.imports if copyfrom is not None else []
        if includes is not None:
            self.includes = includes
        else:
            self.includes = copyfrom.includes if copyfrom is not None else []
        if no_link_check is not None:
            self.no_link_check = no_link_check
        else:
            self.no_link_check = copyfrom.no_link_check if copyfrom is not None else False
        if container is not None:
            self.container = container
        else:
            self.container = copyfrom.container if copyfrom is not None else None
        if fetcher is not None:
            self.fetcher = fetcher
        elif copyfrom is not None:
            self.fetcher = copyfrom.fetcher
        else:
            import requests
            from cachecontrol.caches import FileCache
            from cachecontrol.wrapper import CacheControl

            root = pathlib.Path(os.environ.get("HOME", tempfile.gettempdir()))
            session = CacheControl(
                requests.Session(),
                cache=FileCache(root / ".cache" / "salad"),
            )
            self.fetcher: Fetcher = DefaultFetcher({}, session)

        self.cache = self.fetcher.cache if isinstance(self.fetcher, MemoryCachingFetcher) else {}

        self.vocab = _vocab
        self.rvocab = _rvocab

        if self.namespaces is not None:
            self.vocab = self.vocab.copy()
            self.rvocab = self.rvocab.copy()
            for k, v in self.namespaces.items():
                self.vocab[k] = v
                self.rvocab[v] = k

    @property
    def graph(self) -> Graph:
        """Generate a merged rdflib.Graph from all entries in self.schemas."""
        graph = Graph()
        if not self.schemas:
            return graph
        key = str(hash(tuple(self.schemas)))
        if key in self.cache:
            return cast(Graph, self.cache[key])
        for schema in self.schemas:
            fetchurl = (
                self.fetcher.urljoin(self.fileuri, schema)
                if self.fileuri is not None
                else pathlib.Path(schema).resolve().as_uri()
            )
            if fetchurl not in self.cache or self.cache[fetchurl] is True:
                _logger.debug("Getting external schema %s", fetchurl)
                try:
                    content = self.fetcher.fetch_text(fetchurl)
                except Exception as e:
                    _logger.warning("Could not load extension schema %s: %s", fetchurl, str(e))
                    continue
                newGraph = Graph()
                err_msg = "unknown error"
                for fmt in ["xml", "turtle"]:
                    try:
                        newGraph.parse(data=content, format=fmt, publicID=str(fetchurl))
                        self.cache[fetchurl] = newGraph
                        graph += newGraph
                        break
                    except (xml.sax.SAXParseException, TypeError, BadSyntax) as e:
                        err_msg = str(e)
                else:
                    _logger.warning("Could not load extension schema %s: %s", fetchurl, err_msg)
        self.cache[key] = graph
        return graph


class Saveable(ABC):
    """Mark classes that have a save() and fromDoc() function."""

    @classmethod
    @abstractmethod
    def fromDoc(
        cls,
        _doc: Any,
        baseuri: str,
        loadingOptions: LoadingOptions,
        docRoot: Optional[str] = None,
    ) -> "Saveable":
        """Construct this object from the result of yaml.load()."""

    @abstractmethod
    def save(
        self, top: bool = False, base_url: str = "", relative_uris: bool = True
    ) -> Dict[str, Any]:
        """Convert this object to a JSON/YAML friendly dictionary."""


def load_field(
    val: Union[str, Dict[str, str]],
    fieldtype: "_Loader",
    baseuri: str,
    loadingOptions: LoadingOptions,
    lc: Optional[List[Any]] = None,
) -> Any:
    """Load field."""
    if isinstance(val, MutableMapping):
        if "$import" in val:
            if loadingOptions.fileuri is None:
                raise SchemaSaladException("Cannot load $import without fileuri")
            url = loadingOptions.fetcher.urljoin(loadingOptions.fileuri, val["$import"])
            result, metadata = _document_load_by_url(
                fieldtype,
                url,
                loadingOptions,
            )
            loadingOptions.imports.append(url)
            return result
        if "$include" in val:
            if loadingOptions.fileuri is None:
                raise SchemaSaladException("Cannot load $include without fileuri")
            url = loadingOptions.fetcher.urljoin(loadingOptions.fileuri, val["$include"])
            val = loadingOptions.fetcher.fetch_text(url)
            loadingOptions.includes.append(url)
    return fieldtype.load(val, baseuri, loadingOptions, lc=lc)


save_type = Optional[Union[MutableMapping[str, Any], MutableSequence[Any], int, float, bool, str]]


def extract_type(val_type: Type[Any]) -> str:
    """Take a type of value, and extract the value as a string."""
    val_str = str(val_type)
    return val_str.split("'")[1]


def convert_typing(val_type: str) -> str:
    """Normalize type names to schema-salad types."""
    if "None" in val_type:
        return "null"
    if "CommentedSeq" in val_type or "list" in val_type:
        return "array"
    if "CommentedMap" in val_type or "dict" in val_type:
        return "object"
    if "False" in val_type or "True" in val_type:
        return "boolean"
    return val_type


def parse_errors(error_message: str) -> Tuple[str, str, str]:
    """Parse error messages from several loaders into one error message."""
    if not error_message.startswith("Expected"):
        return error_message, "", ""
    vals = error_message.split("\n")
    if len(vals) == 1:
        return error_message, "", ""
    types = set()
    for val in vals:
        individual_vals = val.split(" ")
        if val == "":
            continue
        if individual_vals[1] == "one":
            individual_vals = val.split("(")[1].split(",")
            for t in individual_vals:
                types.add(t.strip(" ").strip(")\n"))
        elif individual_vals[2] == "<class":
            types.add(individual_vals[3].strip(">").replace("'", ""))
        elif individual_vals[0] == "Value":
            types.add(individual_vals[-1].strip("."))
        else:
            types.add(individual_vals[1].replace(",", ""))
    types = set(val for val in types if val != "NoneType")
    if "str" in types:
        types = set(convert_typing(val) for val in types if "'" not in val)
    to_print = ""
    for val in types:
        if "'" in val:
            to_print = "value" if len(types) == 1 else "values"
    if to_print == "":
        to_print = "type" if len(types) == 1 else "types"
    verb_tensage = "is" if len(types) == 1 else "are"
    return str(types).replace("{", "(").replace("}", ")").replace("'", ""), to_print, verb_tensage


def save(
    val: Any,
    top: bool = True,
    base_url: str = "",
    relative_uris: bool = True,
) -> save_type:
    if isinstance(val, Saveable):
        return val.save(top=top, base_url=base_url, relative_uris=relative_uris)
    if isinstance(val, MutableSequence):
        return [save(v, top=False, base_url=base_url, relative_uris=relative_uris) for v in val]
    if isinstance(val, MutableMapping):
        newdict = {}
        for key in val:
            newdict[key] = save(val[key], top=False, base_url=base_url, relative_uris=relative_uris)
        return newdict
    if val is None or isinstance(val, (int, float, bool, str)):
        return val
    raise Exception("Not Saveable: %s" % type(val))


def save_with_metadata(
    val: Any,
    valLoadingOpts: LoadingOptions,
    top: bool = True,
    base_url: str = "",
    relative_uris: bool = True,
) -> save_type:
    """Save and set $namespaces, $schemas, $base and any other metadata fields at the top level."""
    saved_val = save(val, top, base_url, relative_uris)
    newdict: MutableMapping[str, Any] = {}
    if isinstance(saved_val, MutableSequence):
        newdict = {"$graph": saved_val}
    elif isinstance(saved_val, MutableMapping):
        newdict = saved_val
    if valLoadingOpts.namespaces:
        newdict["$namespaces"] = valLoadingOpts.namespaces
    if valLoadingOpts.schemas:
        newdict["$schemas"] = valLoadingOpts.schemas
    if valLoadingOpts.baseuri:
        newdict["$base"] = valLoadingOpts.baseuri
    for k, v in valLoadingOpts.addl_metadata.items():
        if k not in newdict:
            newdict[k] = v
    return newdict


def expand_url(
    url: str,
    base_url: str,
    loadingOptions: LoadingOptions,
    scoped_id: bool = False,
    vocab_term: bool = False,
    scoped_ref: Optional[int] = None,
) -> str:
    if url in ("@id", "@type"):
        return url
    if vocab_term and url in loadingOptions.vocab:
        return url
    if bool(loadingOptions.vocab) and ":" in url:
        prefix = url.split(":")[0]
        if prefix in loadingOptions.vocab:
            url = loadingOptions.vocab[prefix] + url[len(prefix) + 1 :]
    split = urlsplit(url)
    if (
        (bool(split.scheme) and split.scheme in loadingOptions.fetcher.supported_schemes())
        or url.startswith("$(")
        or url.startswith("${")
    ):
        pass
    elif scoped_id and not bool(split.fragment):
        splitbase = urlsplit(base_url)
        frg = ""
        if bool(splitbase.fragment):
            frg = splitbase.fragment + "/" + split.path
        else:
            frg = split.path
        pt = splitbase.path if splitbase.path != "" else "/"
        url =
urlunsplit((splitbase.scheme, splitbase.netloc, pt, splitbase.query, frg)) elif scoped_ref is not None and not bool(split.fragment): splitbase = urlsplit(base_url) sp = splitbase.fragment.split("/") n = scoped_ref while n > 0 and len(sp) > 0: sp.pop() n -= 1 sp.append(url) url = urlunsplit( ( splitbase.scheme, splitbase.netloc, splitbase.path, splitbase.query, "/".join(sp), ) ) else: url = loadingOptions.fetcher.urljoin(base_url, url) if vocab_term: split = urlsplit(url) if bool(split.scheme): if url in loadingOptions.rvocab: return loadingOptions.rvocab[url] else: raise ValidationException(f"Term {url!r} not in vocabulary") return url class _Loader: def load( self, doc: Any, baseuri: str, loadingOptions: LoadingOptions, docRoot: Optional[str] = None, lc: Optional[List[Any]] = None, ) -> Any: pass class _AnyLoader(_Loader): def load( self, doc: Any, baseuri: str, loadingOptions: LoadingOptions, docRoot: Optional[str] = None, lc: Optional[List[Any]] = None, ) -> Any: if doc is not None: return doc raise ValidationException("Expected non-null") class _PrimitiveLoader(_Loader): def __init__(self, tp: Union[type, Tuple[Type[str], Type[str]]]) -> None: self.tp = tp def load( self, doc: Any, baseuri: str, loadingOptions: LoadingOptions, docRoot: Optional[str] = None, lc: Optional[List[Any]] = None, ) -> Any: if not isinstance(doc, self.tp): raise ValidationException( "Expected a {} but got {}".format(self.tp, doc.__class__.__name__) ) return doc def __repr__(self) -> str: return str(self.tp) class _ArrayLoader(_Loader): def __init__(self, items: _Loader) -> None: self.items = items def load( self, doc: Any, baseuri: str, loadingOptions: LoadingOptions, docRoot: Optional[str] = None, lc: Optional[List[Any]] = None, ) -> Any: if not isinstance(doc, MutableSequence): raise ValidationException( f"Value is a {convert_typing(extract_type(type(doc)))}, " f"but valid type for this field is an array." 
) r: List[Any] = [] errors: List[SchemaSaladException] = [] fields: List[str] = [] for i in range(0, len(doc)): try: lf = load_field( doc[i], _UnionLoader(([self, self.items])), baseuri, loadingOptions, lc=lc ) flatten = loadingOptions.container != "@list" if flatten and isinstance(lf, MutableSequence): r.extend(lf) else: r.append(lf) if isinstance(doc[i], CommentedMap): if doc[i].get("id") is not None: if doc[i].get("id") in fields: errors.append( ValidationException( f"Duplicate field {doc[i].get('id')!r}", SourceLine(doc[i], "id", str), [], ) ) else: fields.append(doc[i].get("id")) except ValidationException as e: e = ValidationException( "array item is invalid because", SourceLine(doc, i, str), [e] ) errors.append(e) if errors: raise ValidationException("", None, errors) return r def __repr__(self) -> str: return f"array<{self.items}>" class _MapLoader(_Loader): def __init__( self, values: _Loader, name: Optional[str] = None, container: Optional[str] = None, no_link_check: Optional[bool] = None, ) -> None: self.values = values self.name = name self.container = container self.no_link_check = no_link_check def load( self, doc: Any, baseuri: str, loadingOptions: LoadingOptions, docRoot: Optional[str] = None, lc: Optional[List[Any]] = None, ) -> Any: if not isinstance(doc, MutableMapping): raise ValidationException(f"Expected a map, was {type(doc)}") if self.container is not None or self.no_link_check is not None: loadingOptions = LoadingOptions( copyfrom=loadingOptions, container=self.container, no_link_check=self.no_link_check ) r: Dict[str, Any] = {} errors: List[SchemaSaladException] = [] for k, v in doc.items(): try: lf = load_field(v, self.values, baseuri, loadingOptions, lc) r[k] = lf except ValidationException as e: errors.append(e.with_sourceline(SourceLine(doc, k, str))) if errors: raise ValidationException("", None, errors) return r def __repr__(self) -> str: return self.name if self.name is not None else f"map" class _EnumLoader(_Loader): def __init__(self, symbols: Sequence[str], name: str) -> None: self.symbols = symbols self.name = name def load( self, doc: Any, baseuri: str, loadingOptions: LoadingOptions, docRoot: Optional[str] = None, lc: Optional[List[Any]] = None, ) -> Any: if doc in self.symbols: return doc raise ValidationException(f"Expected one of {self.symbols}") def __repr__(self) -> str: return self.name class _SecondaryDSLLoader(_Loader): def __init__(self, inner: _Loader) -> None: self.inner = inner def load( self, doc: Any, baseuri: str, loadingOptions: LoadingOptions, docRoot: Optional[str] = None, lc: Optional[List[Any]] = None, ) -> Any: r: List[Dict[str, Any]] = [] if isinstance(doc, MutableSequence): for d in doc: if isinstance(d, str): if d.endswith("?"): r.append({"pattern": d[:-1], "required": False}) else: r.append({"pattern": d}) elif isinstance(d, dict): new_dict: Dict[str, Any] = {} dict_copy = copy.deepcopy(d) if "pattern" in dict_copy: new_dict["pattern"] = dict_copy.pop("pattern") else: raise ValidationException( f"Missing pattern in secondaryFiles specification entry: {d}" ) new_dict["required"] = ( dict_copy.pop("required") if "required" in dict_copy else None ) if len(dict_copy): raise ValidationException( "Unallowed values in secondaryFiles specification entry: {}".format( dict_copy ) ) r.append(new_dict) else: raise ValidationException( "Expected a string or sequence of (strings or mappings)." 
) elif isinstance(doc, MutableMapping): new_dict = {} doc_copy = copy.deepcopy(doc) if "pattern" in doc_copy: new_dict["pattern"] = doc_copy.pop("pattern") else: raise ValidationException( f"Missing pattern in secondaryFiles specification entry: {doc}" ) new_dict["required"] = doc_copy.pop("required") if "required" in doc_copy else None if len(doc_copy): raise ValidationException( f"Unallowed values in secondaryFiles specification entry: {doc_copy}" ) r.append(new_dict) elif isinstance(doc, str): if doc.endswith("?"): r.append({"pattern": doc[:-1], "required": False}) else: r.append({"pattern": doc}) else: raise ValidationException("Expected str or sequence of str") return self.inner.load(r, baseuri, loadingOptions, docRoot, lc=lc) class _RecordLoader(_Loader): def __init__( self, classtype: Type[Saveable], container: Optional[str] = None, no_link_check: Optional[bool] = None, ) -> None: self.classtype = classtype self.container = container self.no_link_check = no_link_check def load( self, doc: Any, baseuri: str, loadingOptions: LoadingOptions, docRoot: Optional[str] = None, lc: Optional[List[Any]] = None, ) -> Any: if not isinstance(doc, MutableMapping): raise ValidationException( f"Value is a {convert_typing(extract_type(type(doc)))}, " f"but valid type for this field is an object." ) if self.container is not None or self.no_link_check is not None: loadingOptions = LoadingOptions( copyfrom=loadingOptions, container=self.container, no_link_check=self.no_link_check ) return self.classtype.fromDoc(doc, baseuri, loadingOptions, docRoot=docRoot) def __repr__(self) -> str: return str(self.classtype.__name__) class _ExpressionLoader(_Loader): def __init__(self, items: Type[str]) -> None: self.items = items def load( self, doc: Any, baseuri: str, loadingOptions: LoadingOptions, docRoot: Optional[str] = None, lc: Optional[List[Any]] = None, ) -> Any: if not isinstance(doc, str): raise ValidationException( f"Value is a {convert_typing(extract_type(type(doc)))}, " f"but valid type for this field is a str." 
) return doc class _UnionLoader(_Loader): def __init__(self, alternates: Sequence[_Loader], name: Optional[str] = None) -> None: self.alternates = alternates self.name = name def add_loaders(self, loaders: Sequence[_Loader]) -> None: self.alternates = tuple(loader for loader in chain(self.alternates, loaders)) def load( self, doc: Any, baseuri: str, loadingOptions: LoadingOptions, docRoot: Optional[str] = None, lc: Optional[List[Any]] = None, ) -> Any: errors = [] if lc is None: lc = [] for t in self.alternates: try: return t.load(doc, baseuri, loadingOptions, docRoot=docRoot, lc=lc) except ValidationException as e: if isinstance(t, _ArrayLoader) and len(self.alternates) > 1: continue if isinstance(doc, (CommentedMap, dict)): if "class" in doc: if str(doc.get("class")) == str(t): errors.append( ValidationException( f"Object `{baseuri.split('/')[-1]}` is not valid because:", SourceLine(doc, next(iter(doc)), str), [e], ) ) else: if "array" in str(t): continue else: if "id" in doc: id = baseuri.split("/")[-1] + "#" + str(doc.get("id")) if "id" in lc: errors.append( ValidationException( f"checking object `{id}`", SourceLine(lc, "id", str), [e], ) ) else: errors.append( ValidationException( f"checking object `{id}`", SourceLine(lc, doc.get("id"), str), [e], ) ) else: if not isinstance( t, (_PrimitiveLoader) ): # avoids 'tried was {x}' errors errors.append( ValidationException(f"tried `{t}` but", None, [e]) ) else: # avoids "tried but x" and instead returns the values for parsing errors.append(ValidationException("", None, [e])) if isinstance(doc, (CommentedMap, dict)) and "class" in doc: if str(doc.get("class")) not in str(self.alternates): errors.append( ValidationException( "Field `class` contains undefined reference to " + "`" + "/".join(baseuri.split("/")[0:-1]) + "/" + str(doc.get("class")) + "`", SourceLine(doc, "class", str), [], ) ) raise ValidationException("", None, errors, "*") def __repr__(self) -> str: return self.name if self.name is not None else " | ".join(str(a) for a in self.alternates) class _URILoader(_Loader): def __init__( self, inner: _Loader, scoped_id: bool, vocab_term: bool, scoped_ref: Optional[int], no_link_check: Optional[bool], ) -> None: self.inner = inner self.scoped_id = scoped_id self.vocab_term = vocab_term self.scoped_ref = scoped_ref self.no_link_check = no_link_check def load( self, doc: Any, baseuri: str, loadingOptions: LoadingOptions, docRoot: Optional[str] = None, lc: Optional[List[Any]] = None, ) -> Any: if self.no_link_check is not None: loadingOptions = LoadingOptions( copyfrom=loadingOptions, no_link_check=self.no_link_check ) if isinstance(doc, MutableSequence): newdoc = [] for i in doc: if isinstance(i, str): newdoc.append( expand_url( i, baseuri, loadingOptions, self.scoped_id, self.vocab_term, self.scoped_ref, ) ) else: newdoc.append(i) doc = newdoc elif isinstance(doc, str): doc = expand_url( doc, baseuri, loadingOptions, self.scoped_id, self.vocab_term, self.scoped_ref, ) if isinstance(doc, str): if not loadingOptions.no_link_check: errors = [] try: if not loadingOptions.fetcher.check_exists(doc): errors.append( ValidationException(f"contains undefined reference to `{doc}`") ) except ValidationException: pass if len(errors) > 0: raise ValidationException("", None, errors) return self.inner.load(doc, baseuri, loadingOptions, lc=lc) class _TypeDSLLoader(_Loader): def __init__(self, inner: _Loader, refScope: Optional[int], salad_version: str) -> None: self.inner = inner self.refScope = refScope self.salad_version = salad_version def resolve( 
self, doc: str, baseuri: str, loadingOptions: LoadingOptions, ) -> Union[List[Union[Dict[str, Any], str]], Dict[str, Any], str]: doc_ = doc optional = False if doc_.endswith("?"): optional = True doc_ = doc_[0:-1] if doc_.endswith("[]"): salad_versions = [int(v) for v in self.salad_version[1:].split(".")] items: Union[List[Union[Dict[str, Any], str]], Dict[str, Any], str] = "" rest = doc_[0:-2] if salad_versions < [1, 3]: if rest.endswith("[]"): # To show the error message with the original type return doc else: items = expand_url(rest, baseuri, loadingOptions, False, True, self.refScope) else: items = self.resolve(rest, baseuri, loadingOptions) if isinstance(items, str): items = expand_url(items, baseuri, loadingOptions, False, True, self.refScope) expanded: Union[Dict[str, Any], str] = {"type": "array", "items": items} else: expanded = expand_url(doc_, baseuri, loadingOptions, False, True, self.refScope) if optional: return ["null", expanded] else: return expanded def load( self, doc: Any, baseuri: str, loadingOptions: LoadingOptions, docRoot: Optional[str] = None, lc: Optional[List[Any]] = None, ) -> Any: if isinstance(doc, MutableSequence): r: List[Any] = [] for d in doc: if isinstance(d, str): resolved = self.resolve(d, baseuri, loadingOptions) if isinstance(resolved, MutableSequence): for i in resolved: if i not in r: r.append(i) else: if resolved not in r: r.append(resolved) else: r.append(d) doc = r elif isinstance(doc, str): doc = self.resolve(doc, baseuri, loadingOptions) return self.inner.load(doc, baseuri, loadingOptions, lc=lc) class _IdMapLoader(_Loader): def __init__(self, inner: _Loader, mapSubject: str, mapPredicate: Optional[str]) -> None: self.inner = inner self.mapSubject = mapSubject self.mapPredicate = mapPredicate def load( self, doc: Any, baseuri: str, loadingOptions: LoadingOptions, docRoot: Optional[str] = None, lc: Optional[List[Any]] = None, ) -> Any: if isinstance(doc, MutableMapping): r: List[Any] = [] for k in sorted(doc.keys()): val = doc[k] if isinstance(val, CommentedMap): v = copy.copy(val) v.lc.data = val.lc.data v.lc.filename = val.lc.filename v[self.mapSubject] = k r.append(v) elif isinstance(val, MutableMapping): v2 = copy.copy(val) v2[self.mapSubject] = k r.append(v2) else: if self.mapPredicate: v3 = {self.mapPredicate: val} v3[self.mapSubject] = k r.append(v3) else: raise ValidationException("No mapPredicate") doc = r return self.inner.load(doc, baseuri, loadingOptions, lc=lc) def _document_load( loader: _Loader, doc: Union[str, MutableMapping[str, Any], MutableSequence[Any]], baseuri: str, loadingOptions: LoadingOptions, addl_metadata_fields: Optional[MutableSequence[str]] = None, ) -> Tuple[Any, LoadingOptions]: if isinstance(doc, str): return _document_load_by_url( loader, loadingOptions.fetcher.urljoin(baseuri, doc), loadingOptions, addl_metadata_fields=addl_metadata_fields, ) if isinstance(doc, MutableMapping): addl_metadata = {} if addl_metadata_fields is not None: for mf in addl_metadata_fields: if mf in doc: addl_metadata[mf] = doc[mf] docuri = baseuri if "$base" in doc: baseuri = doc["$base"] loadingOptions = LoadingOptions( copyfrom=loadingOptions, namespaces=doc.get("$namespaces", None), schemas=doc.get("$schemas", None), baseuri=doc.get("$base", None), addl_metadata=addl_metadata, ) doc = copy.copy(doc) if "$namespaces" in doc: doc.pop("$namespaces") if "$schemas" in doc: doc.pop("$schemas") if "$base" in doc: doc.pop("$base") if "$graph" in doc: loadingOptions.idx[baseuri] = ( loader.load(doc["$graph"], baseuri, loadingOptions), 
loadingOptions, ) else: loadingOptions.idx[baseuri] = ( loader.load(doc, baseuri, loadingOptions, docRoot=baseuri), loadingOptions, ) if docuri != baseuri: loadingOptions.idx[docuri] = loadingOptions.idx[baseuri] return loadingOptions.idx[baseuri] if isinstance(doc, MutableSequence): loadingOptions.idx[baseuri] = ( loader.load(doc, baseuri, loadingOptions), loadingOptions, ) return loadingOptions.idx[baseuri] raise ValidationException( "Expected URI string, MutableMapping or MutableSequence, got %s" % type(doc) ) def _document_load_by_url( loader: _Loader, url: str, loadingOptions: LoadingOptions, addl_metadata_fields: Optional[MutableSequence[str]] = None, ) -> Tuple[Any, LoadingOptions]: if url in loadingOptions.idx: return loadingOptions.idx[url] doc_url, frg = urldefrag(url) text = loadingOptions.fetcher.fetch_text(doc_url) textIO = StringIO(text) textIO.name = str(doc_url) yaml = yaml_no_ts() result = yaml.load(textIO) add_lc_filename(result, doc_url) loadingOptions = LoadingOptions(copyfrom=loadingOptions, fileuri=doc_url) _document_load( loader, result, doc_url, loadingOptions, addl_metadata_fields=addl_metadata_fields, ) return loadingOptions.idx[url] def file_uri(path: str, split_frag: bool = False) -> str: """Transform a file path into a URL with file scheme.""" if path.startswith("file://"): return path if split_frag: pathsp = path.split("#", 2) frag = "#" + quote(str(pathsp[1])) if len(pathsp) == 2 else "" urlpath = pathname2url(str(pathsp[0])) else: urlpath = pathname2url(path) frag = "" if urlpath.startswith("//"): return f"file:{urlpath}{frag}" return f"file://{urlpath}{frag}" def prefix_url(url: str, namespaces: Dict[str, str]) -> str: """Expand short forms into full URLs using the given namespace dictionary.""" for k, v in namespaces.items(): if url.startswith(v): return k + ":" + url[len(v) :] return url def save_relative_uri( uri: Any, base_url: str, scoped_id: bool, ref_scope: Optional[int], relative_uris: bool, ) -> Any: """Convert any URI to a relative one, obeying the scoping rules.""" if isinstance(uri, MutableSequence): return [save_relative_uri(u, base_url, scoped_id, ref_scope, relative_uris) for u in uri] elif isinstance(uri, str): if not relative_uris or uri == base_url: return uri urisplit = urlsplit(uri) basesplit = urlsplit(base_url) if urisplit.scheme == basesplit.scheme and urisplit.netloc == basesplit.netloc: if urisplit.path != basesplit.path: p = os.path.relpath(urisplit.path, os.path.dirname(basesplit.path)) if urisplit.fragment: p = p + "#" + urisplit.fragment return p basefrag = basesplit.fragment + "/" if ref_scope: sp = basefrag.split("/") i = 0 while i < ref_scope: sp.pop() i += 1 basefrag = "/".join(sp) if urisplit.fragment.startswith(basefrag): return urisplit.fragment[len(basefrag) :] return urisplit.fragment return uri else: return save(uri, top=False, base_url=base_url, relative_uris=relative_uris) def shortname(inputid: str) -> str: """ Compute the shortname of a fully qualified identifier. See https://w3id.org/cwl/v1.2/SchemaSalad.html#Short_names. """ parsed_id = urlparse(inputid) if parsed_id.fragment: return parsed_id.fragment.split("/")[-1] return parsed_id.path.split("/")[-1] def parser_info() -> str: return "org.w3id.cwl.salad" class Documented(Saveable): pass class RecordField(Documented): """ A field of a record. 
""" def __init__( self, name: Any, type_: Any, doc: Optional[Any] = None, extension_fields: Optional[Dict[str, Any]] = None, loadingOptions: Optional[LoadingOptions] = None, ) -> None: if extension_fields: self.extension_fields = extension_fields else: self.extension_fields = CommentedMap() if loadingOptions: self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() self.doc = doc self.name = name self.type_ = type_ def __eq__(self, other: Any) -> bool: if isinstance(other, RecordField): return bool( self.doc == other.doc and self.name == other.name and self.type_ == other.type_ ) return False def __hash__(self) -> int: return hash((self.doc, self.name, self.type_)) @classmethod def fromDoc( cls, doc: Any, baseuri: str, loadingOptions: LoadingOptions, docRoot: Optional[str] = None ) -> "RecordField": _doc = copy.copy(doc) if hasattr(doc, "lc"): _doc.lc.data = doc.lc.data _doc.lc.filename = doc.lc.filename _errors__ = [] name = None if "name" in _doc: try: name = load_field( _doc.get("name"), uri_strtype_True_False_None_None, baseuri, loadingOptions, lc=_doc.get("name") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) if str(e) == "missing required field `name`": _errors__.append( ValidationException( str(e), None ) ) else: if error_message != str(e): val_type = convert_typing(extract_type(type(_doc.get("name")))) _errors__.append( ValidationException( "the `name` field is not valid because:", SourceLine(_doc, "name", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}")], ) ) else: _errors__.append( ValidationException( "the `name` field is not valid because:", SourceLine(_doc, "name", str), [e], ) ) __original_name_is_none = name is None if name is None: if docRoot is not None: name = docRoot else: _errors__.append(ValidationException("missing name")) if not __original_name_is_none: baseuri = cast(str, name) doc = None if "doc" in _doc: try: doc = load_field( _doc.get("doc"), union_of_None_type_or_strtype_or_array_of_strtype, baseuri, loadingOptions, lc=_doc.get("doc") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) if str(e) == "missing required field `doc`": _errors__.append( ValidationException( str(e), None ) ) else: if error_message != str(e): val_type = convert_typing(extract_type(type(_doc.get("doc")))) _errors__.append( ValidationException( "the `doc` field is not valid because:", SourceLine(_doc, "doc", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}")], ) ) else: _errors__.append( ValidationException( "the `doc` field is not valid because:", SourceLine(_doc, "doc", str), [e], ) ) try: if _doc.get("type") is None: raise ValidationException("missing required field `type`", None, []) type_ = load_field( _doc.get("type"), typedsl_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_2, baseuri, loadingOptions, lc=_doc.get("type") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) if str(e) == "missing required field `type`": _errors__.append( ValidationException( str(e), None ) ) else: if error_message != str(e): val_type = 
convert_typing(extract_type(type(_doc.get("type")))) _errors__.append( ValidationException( "the `type` field is not valid because:", SourceLine(_doc, "type", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}")], ) ) else: _errors__.append( ValidationException( "the `type` field is not valid because:", SourceLine(_doc, "type", str), [e], ) ) extension_fields: Dict[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: _errors__.append( ValidationException("mapping with implicit null key") ) elif ":" in k: ex = expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( "invalid field `{}`, expected one of: `doc`, `name`, `type`".format( k ), SourceLine(_doc, k, str), ) ) if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( doc=doc, name=name, type_=type_, extension_fields=extension_fields, loadingOptions=loadingOptions, ) loadingOptions.idx[cast(str, name)] = (_constructed, loadingOptions) return _constructed def save( self, top: bool = False, base_url: str = "", relative_uris: bool = True ) -> Dict[str, Any]: r: Dict[str, Any] = {} if relative_uris: for ef in self.extension_fields: r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] if self.name is not None: u = save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u if self.doc is not None: r["doc"] = save( self.doc, top=False, base_url=self.name, relative_uris=relative_uris ) if self.type_ is not None: r["type"] = save( self.type_, top=False, base_url=self.name, relative_uris=relative_uris ) # top refers to the directory level if top: if self.loadingOptions.namespaces: r["$namespaces"] = self.loadingOptions.namespaces if self.loadingOptions.schemas: r["$schemas"] = self.loadingOptions.schemas return r attrs = frozenset(["doc", "name", "type"]) class RecordSchema(Saveable): def __init__( self, type_: Any, fields: Optional[Any] = None, extension_fields: Optional[Dict[str, Any]] = None, loadingOptions: Optional[LoadingOptions] = None, ) -> None: if extension_fields: self.extension_fields = extension_fields else: self.extension_fields = CommentedMap() if loadingOptions: self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() self.fields = fields self.type_ = type_ def __eq__(self, other: Any) -> bool: if isinstance(other, RecordSchema): return bool(self.fields == other.fields and self.type_ == other.type_) return False def __hash__(self) -> int: return hash((self.fields, self.type_)) @classmethod def fromDoc( cls, doc: Any, baseuri: str, loadingOptions: LoadingOptions, docRoot: Optional[str] = None ) -> "RecordSchema": _doc = copy.copy(doc) if hasattr(doc, "lc"): _doc.lc.data = doc.lc.data _doc.lc.filename = doc.lc.filename _errors__ = [] fields = None if "fields" in _doc: try: fields = load_field( _doc.get("fields"), idmap_fields_union_of_None_type_or_array_of_RecordFieldLoader, baseuri, loadingOptions, lc=_doc.get("fields") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) if str(e) == "missing required field `fields`": _errors__.append( ValidationException( str(e), None ) ) else: if error_message != str(e): val_type = convert_typing(extract_type(type(_doc.get("fields")))) _errors__.append( ValidationException( "the `fields` field is not valid 
because:", SourceLine(_doc, "fields", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}")], ) ) else: _errors__.append( ValidationException( "the `fields` field is not valid because:", SourceLine(_doc, "fields", str), [e], ) ) try: if _doc.get("type") is None: raise ValidationException("missing required field `type`", None, []) type_ = load_field( _doc.get("type"), typedsl_Record_nameLoader_2, baseuri, loadingOptions, lc=_doc.get("type") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) if str(e) == "missing required field `type`": _errors__.append( ValidationException( str(e), None ) ) else: if error_message != str(e): val_type = convert_typing(extract_type(type(_doc.get("type")))) _errors__.append( ValidationException( "the `type` field is not valid because:", SourceLine(_doc, "type", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}")], ) ) else: _errors__.append( ValidationException( "the `type` field is not valid because:", SourceLine(_doc, "type", str), [e], ) ) extension_fields: Dict[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: _errors__.append( ValidationException("mapping with implicit null key") ) elif ":" in k: ex = expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( "invalid field `{}`, expected one of: `fields`, `type`".format( k ), SourceLine(_doc, k, str), ) ) if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( fields=fields, type_=type_, extension_fields=extension_fields, loadingOptions=loadingOptions, ) return _constructed def save( self, top: bool = False, base_url: str = "", relative_uris: bool = True ) -> Dict[str, Any]: r: Dict[str, Any] = {} if relative_uris: for ef in self.extension_fields: r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] if self.fields is not None: r["fields"] = save( self.fields, top=False, base_url=base_url, relative_uris=relative_uris ) if self.type_ is not None: r["type"] = save( self.type_, top=False, base_url=base_url, relative_uris=relative_uris ) # top refers to the directory level if top: if self.loadingOptions.namespaces: r["$namespaces"] = self.loadingOptions.namespaces if self.loadingOptions.schemas: r["$schemas"] = self.loadingOptions.schemas return r attrs = frozenset(["fields", "type"]) class EnumSchema(Saveable): """ Define an enumerated type. 
""" def __init__( self, symbols: Any, type_: Any, name: Optional[Any] = None, extension_fields: Optional[Dict[str, Any]] = None, loadingOptions: Optional[LoadingOptions] = None, ) -> None: if extension_fields: self.extension_fields = extension_fields else: self.extension_fields = CommentedMap() if loadingOptions: self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() self.name = name self.symbols = symbols self.type_ = type_ def __eq__(self, other: Any) -> bool: if isinstance(other, EnumSchema): return bool( self.name == other.name and self.symbols == other.symbols and self.type_ == other.type_ ) return False def __hash__(self) -> int: return hash((self.name, self.symbols, self.type_)) @classmethod def fromDoc( cls, doc: Any, baseuri: str, loadingOptions: LoadingOptions, docRoot: Optional[str] = None ) -> "EnumSchema": _doc = copy.copy(doc) if hasattr(doc, "lc"): _doc.lc.data = doc.lc.data _doc.lc.filename = doc.lc.filename _errors__ = [] name = None if "name" in _doc: try: name = load_field( _doc.get("name"), uri_union_of_None_type_or_strtype_True_False_None_None, baseuri, loadingOptions, lc=_doc.get("name") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) if str(e) == "missing required field `name`": _errors__.append( ValidationException( str(e), None ) ) else: if error_message != str(e): val_type = convert_typing(extract_type(type(_doc.get("name")))) _errors__.append( ValidationException( "the `name` field is not valid because:", SourceLine(_doc, "name", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}")], ) ) else: _errors__.append( ValidationException( "the `name` field is not valid because:", SourceLine(_doc, "name", str), [e], ) ) __original_name_is_none = name is None if name is None: if docRoot is not None: name = docRoot else: name = "_:" + str(_uuid__.uuid4()) if not __original_name_is_none: baseuri = cast(str, name) try: if _doc.get("symbols") is None: raise ValidationException("missing required field `symbols`", None, []) symbols = load_field( _doc.get("symbols"), uri_array_of_strtype_True_False_None_None, baseuri, loadingOptions, lc=_doc.get("symbols") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) if str(e) == "missing required field `symbols`": _errors__.append( ValidationException( str(e), None ) ) else: if error_message != str(e): val_type = convert_typing(extract_type(type(_doc.get("symbols")))) _errors__.append( ValidationException( "the `symbols` field is not valid because:", SourceLine(_doc, "symbols", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}")], ) ) else: _errors__.append( ValidationException( "the `symbols` field is not valid because:", SourceLine(_doc, "symbols", str), [e], ) ) try: if _doc.get("type") is None: raise ValidationException("missing required field `type`", None, []) type_ = load_field( _doc.get("type"), typedsl_Enum_nameLoader_2, baseuri, loadingOptions, lc=_doc.get("type") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) if str(e) == "missing required field `type`": _errors__.append( ValidationException( str(e), None ) ) else: if error_message != str(e): val_type = convert_typing(extract_type(type(_doc.get("type")))) _errors__.append( ValidationException( "the `type` field is not valid because:", SourceLine(_doc, "type", 
str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}")], ) ) else: _errors__.append( ValidationException( "the `type` field is not valid because:", SourceLine(_doc, "type", str), [e], ) ) extension_fields: Dict[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: _errors__.append( ValidationException("mapping with implicit null key") ) elif ":" in k: ex = expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( "invalid field `{}`, expected one of: `name`, `symbols`, `type`".format( k ), SourceLine(_doc, k, str), ) ) if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( name=name, symbols=symbols, type_=type_, extension_fields=extension_fields, loadingOptions=loadingOptions, ) loadingOptions.idx[cast(str, name)] = (_constructed, loadingOptions) return _constructed def save( self, top: bool = False, base_url: str = "", relative_uris: bool = True ) -> Dict[str, Any]: r: Dict[str, Any] = {} if relative_uris: for ef in self.extension_fields: r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] if self.name is not None: u = save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u if self.symbols is not None: u = save_relative_uri(self.symbols, self.name, True, None, relative_uris) r["symbols"] = u if self.type_ is not None: r["type"] = save( self.type_, top=False, base_url=self.name, relative_uris=relative_uris ) # top refers to the directory level if top: if self.loadingOptions.namespaces: r["$namespaces"] = self.loadingOptions.namespaces if self.loadingOptions.schemas: r["$schemas"] = self.loadingOptions.schemas return r attrs = frozenset(["name", "symbols", "type"]) class ArraySchema(Saveable): def __init__( self, items: Any, type_: Any, extension_fields: Optional[Dict[str, Any]] = None, loadingOptions: Optional[LoadingOptions] = None, ) -> None: if extension_fields: self.extension_fields = extension_fields else: self.extension_fields = CommentedMap() if loadingOptions: self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() self.items = items self.type_ = type_ def __eq__(self, other: Any) -> bool: if isinstance(other, ArraySchema): return bool(self.items == other.items and self.type_ == other.type_) return False def __hash__(self) -> int: return hash((self.items, self.type_)) @classmethod def fromDoc( cls, doc: Any, baseuri: str, loadingOptions: LoadingOptions, docRoot: Optional[str] = None ) -> "ArraySchema": _doc = copy.copy(doc) if hasattr(doc, "lc"): _doc.lc.data = doc.lc.data _doc.lc.filename = doc.lc.filename _errors__ = [] try: if _doc.get("items") is None: raise ValidationException("missing required field `items`", None, []) items = load_field( _doc.get("items"), uri_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_False_True_2_None, baseuri, loadingOptions, lc=_doc.get("items") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) if str(e) == "missing required field `items`": _errors__.append( ValidationException( str(e), None ) ) else: if 
error_message != str(e): val_type = convert_typing(extract_type(type(_doc.get("items")))) _errors__.append( ValidationException( "the `items` field is not valid because:", SourceLine(_doc, "items", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}")], ) ) else: _errors__.append( ValidationException( "the `items` field is not valid because:", SourceLine(_doc, "items", str), [e], ) ) try: if _doc.get("type") is None: raise ValidationException("missing required field `type`", None, []) type_ = load_field( _doc.get("type"), typedsl_Array_nameLoader_2, baseuri, loadingOptions, lc=_doc.get("type") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) if str(e) == "missing required field `type`": _errors__.append( ValidationException( str(e), None ) ) else: if error_message != str(e): val_type = convert_typing(extract_type(type(_doc.get("type")))) _errors__.append( ValidationException( "the `type` field is not valid because:", SourceLine(_doc, "type", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}")], ) ) else: _errors__.append( ValidationException( "the `type` field is not valid because:", SourceLine(_doc, "type", str), [e], ) ) extension_fields: Dict[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: _errors__.append( ValidationException("mapping with implicit null key") ) elif ":" in k: ex = expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( "invalid field `{}`, expected one of: `items`, `type`".format( k ), SourceLine(_doc, k, str), ) ) if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( items=items, type_=type_, extension_fields=extension_fields, loadingOptions=loadingOptions, ) return _constructed def save( self, top: bool = False, base_url: str = "", relative_uris: bool = True ) -> Dict[str, Any]: r: Dict[str, Any] = {} if relative_uris: for ef in self.extension_fields: r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] if self.items is not None: u = save_relative_uri(self.items, base_url, False, 2, relative_uris) r["items"] = u if self.type_ is not None: r["type"] = save( self.type_, top=False, base_url=base_url, relative_uris=relative_uris ) # top refers to the directory level if top: if self.loadingOptions.namespaces: r["$namespaces"] = self.loadingOptions.namespaces if self.loadingOptions.schemas: r["$schemas"] = self.loadingOptions.schemas return r attrs = frozenset(["items", "type"]) class MapSchema(Saveable): def __init__( self, type_: Any, values: Any, extension_fields: Optional[Dict[str, Any]] = None, loadingOptions: Optional[LoadingOptions] = None, ) -> None: if extension_fields: self.extension_fields = extension_fields else: self.extension_fields = CommentedMap() if loadingOptions: self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() self.type_ = type_ self.values = values def __eq__(self, other: Any) -> bool: if isinstance(other, MapSchema): return bool(self.type_ == other.type_ and self.values == other.values) return False def __hash__(self) -> int: return hash((self.type_, self.values)) @classmethod def fromDoc( cls, doc: Any, baseuri: str, loadingOptions: LoadingOptions, docRoot: Optional[str] = None ) -> 
"MapSchema": _doc = copy.copy(doc) if hasattr(doc, "lc"): _doc.lc.data = doc.lc.data _doc.lc.filename = doc.lc.filename _errors__ = [] try: if _doc.get("type") is None: raise ValidationException("missing required field `type`", None, []) type_ = load_field( _doc.get("type"), typedsl_Map_nameLoader_2, baseuri, loadingOptions, lc=_doc.get("type") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) if str(e) == "missing required field `type`": _errors__.append( ValidationException( str(e), None ) ) else: if error_message != str(e): val_type = convert_typing(extract_type(type(_doc.get("type")))) _errors__.append( ValidationException( "the `type` field is not valid because:", SourceLine(_doc, "type", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}")], ) ) else: _errors__.append( ValidationException( "the `type` field is not valid because:", SourceLine(_doc, "type", str), [e], ) ) try: if _doc.get("values") is None: raise ValidationException("missing required field `values`", None, []) values = load_field( _doc.get("values"), uri_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_False_True_2_None, baseuri, loadingOptions, lc=_doc.get("values") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) if str(e) == "missing required field `values`": _errors__.append( ValidationException( str(e), None ) ) else: if error_message != str(e): val_type = convert_typing(extract_type(type(_doc.get("values")))) _errors__.append( ValidationException( "the `values` field is not valid because:", SourceLine(_doc, "values", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}")], ) ) else: _errors__.append( ValidationException( "the `values` field is not valid because:", SourceLine(_doc, "values", str), [e], ) ) extension_fields: Dict[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: _errors__.append( ValidationException("mapping with implicit null key") ) elif ":" in k: ex = expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( "invalid field `{}`, expected one of: `type`, `values`".format( k ), SourceLine(_doc, k, str), ) ) if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( type_=type_, values=values, extension_fields=extension_fields, loadingOptions=loadingOptions, ) return _constructed def save( self, top: bool = False, base_url: str = "", relative_uris: bool = True ) -> Dict[str, Any]: r: Dict[str, Any] = {} if relative_uris: for ef in self.extension_fields: r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] if self.type_ is not None: r["type"] = save( self.type_, top=False, base_url=base_url, relative_uris=relative_uris ) if self.values is not None: u = save_relative_uri(self.values, base_url, False, 2, relative_uris) r["values"] = u # top refers to the directory level if top: if self.loadingOptions.namespaces: r["$namespaces"] = self.loadingOptions.namespaces if self.loadingOptions.schemas: 
r["$schemas"] = self.loadingOptions.schemas return r attrs = frozenset(["type", "values"]) class UnionSchema(Saveable): def __init__( self, names: Any, type_: Any, extension_fields: Optional[Dict[str, Any]] = None, loadingOptions: Optional[LoadingOptions] = None, ) -> None: if extension_fields: self.extension_fields = extension_fields else: self.extension_fields = CommentedMap() if loadingOptions: self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() self.names = names self.type_ = type_ def __eq__(self, other: Any) -> bool: if isinstance(other, UnionSchema): return bool(self.names == other.names and self.type_ == other.type_) return False def __hash__(self) -> int: return hash((self.names, self.type_)) @classmethod def fromDoc( cls, doc: Any, baseuri: str, loadingOptions: LoadingOptions, docRoot: Optional[str] = None ) -> "UnionSchema": _doc = copy.copy(doc) if hasattr(doc, "lc"): _doc.lc.data = doc.lc.data _doc.lc.filename = doc.lc.filename _errors__ = [] try: if _doc.get("names") is None: raise ValidationException("missing required field `names`", None, []) names = load_field( _doc.get("names"), uri_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_False_True_2_None, baseuri, loadingOptions, lc=_doc.get("names") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) if str(e) == "missing required field `names`": _errors__.append( ValidationException( str(e), None ) ) else: if error_message != str(e): val_type = convert_typing(extract_type(type(_doc.get("names")))) _errors__.append( ValidationException( "the `names` field is not valid because:", SourceLine(_doc, "names", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}")], ) ) else: _errors__.append( ValidationException( "the `names` field is not valid because:", SourceLine(_doc, "names", str), [e], ) ) try: if _doc.get("type") is None: raise ValidationException("missing required field `type`", None, []) type_ = load_field( _doc.get("type"), typedsl_Union_nameLoader_2, baseuri, loadingOptions, lc=_doc.get("type") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) if str(e) == "missing required field `type`": _errors__.append( ValidationException( str(e), None ) ) else: if error_message != str(e): val_type = convert_typing(extract_type(type(_doc.get("type")))) _errors__.append( ValidationException( "the `type` field is not valid because:", SourceLine(_doc, "type", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}")], ) ) else: _errors__.append( ValidationException( "the `type` field is not valid because:", SourceLine(_doc, "type", str), [e], ) ) extension_fields: Dict[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: _errors__.append( ValidationException("mapping with implicit null key") ) elif ":" in k: ex = expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( "invalid field `{}`, expected one of: `names`, `type`".format( k ), SourceLine(_doc, k, str), ) ) if _errors__: raise ValidationException("", None, _errors__, 
"*") _constructed = cls( names=names, type_=type_, extension_fields=extension_fields, loadingOptions=loadingOptions, ) return _constructed def save( self, top: bool = False, base_url: str = "", relative_uris: bool = True ) -> Dict[str, Any]: r: Dict[str, Any] = {} if relative_uris: for ef in self.extension_fields: r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] if self.names is not None: u = save_relative_uri(self.names, base_url, False, 2, relative_uris) r["names"] = u if self.type_ is not None: r["type"] = save( self.type_, top=False, base_url=base_url, relative_uris=relative_uris ) # top refers to the directory level if top: if self.loadingOptions.namespaces: r["$namespaces"] = self.loadingOptions.namespaces if self.loadingOptions.schemas: r["$schemas"] = self.loadingOptions.schemas return r attrs = frozenset(["names", "type"]) class JsonldPredicate(Saveable): """ Attached to a record field to define how the parent record field is handled for URI resolution and JSON-LD context generation. """ def __init__( self, _id: Optional[Any] = None, _type: Optional[Any] = None, _container: Optional[Any] = None, identity: Optional[Any] = None, noLinkCheck: Optional[Any] = None, mapSubject: Optional[Any] = None, mapPredicate: Optional[Any] = None, refScope: Optional[Any] = None, typeDSL: Optional[Any] = None, secondaryFilesDSL: Optional[Any] = None, subscope: Optional[Any] = None, extension_fields: Optional[Dict[str, Any]] = None, loadingOptions: Optional[LoadingOptions] = None, ) -> None: if extension_fields: self.extension_fields = extension_fields else: self.extension_fields = CommentedMap() if loadingOptions: self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() self._id = _id self._type = _type self._container = _container self.identity = identity self.noLinkCheck = noLinkCheck self.mapSubject = mapSubject self.mapPredicate = mapPredicate self.refScope = refScope self.typeDSL = typeDSL self.secondaryFilesDSL = secondaryFilesDSL self.subscope = subscope def __eq__(self, other: Any) -> bool: if isinstance(other, JsonldPredicate): return bool( self._id == other._id and self._type == other._type and self._container == other._container and self.identity == other.identity and self.noLinkCheck == other.noLinkCheck and self.mapSubject == other.mapSubject and self.mapPredicate == other.mapPredicate and self.refScope == other.refScope and self.typeDSL == other.typeDSL and self.secondaryFilesDSL == other.secondaryFilesDSL and self.subscope == other.subscope ) return False def __hash__(self) -> int: return hash( ( self._id, self._type, self._container, self.identity, self.noLinkCheck, self.mapSubject, self.mapPredicate, self.refScope, self.typeDSL, self.secondaryFilesDSL, self.subscope, ) ) @classmethod def fromDoc( cls, doc: Any, baseuri: str, loadingOptions: LoadingOptions, docRoot: Optional[str] = None ) -> "JsonldPredicate": _doc = copy.copy(doc) if hasattr(doc, "lc"): _doc.lc.data = doc.lc.data _doc.lc.filename = doc.lc.filename _errors__ = [] _id = None if "_id" in _doc: try: _id = load_field( _doc.get("_id"), uri_union_of_None_type_or_strtype_True_False_None_None, baseuri, loadingOptions, lc=_doc.get("_id") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) if str(e) == "missing required field `_id`": _errors__.append( ValidationException( str(e), None ) ) else: if error_message != str(e): val_type = 
convert_typing(extract_type(type(_doc.get("_id")))) _errors__.append( ValidationException( "the `_id` field is not valid because:", SourceLine(_doc, "_id", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}")], ) ) else: _errors__.append( ValidationException( "the `_id` field is not valid because:", SourceLine(_doc, "_id", str), [e], ) ) _type = None if "_type" in _doc: try: _type = load_field( _doc.get("_type"), union_of_None_type_or_strtype, baseuri, loadingOptions, lc=_doc.get("_type") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) if str(e) == "missing required field `_type`": _errors__.append( ValidationException( str(e), None ) ) else: if error_message != str(e): val_type = convert_typing(extract_type(type(_doc.get("_type")))) _errors__.append( ValidationException( "the `_type` field is not valid because:", SourceLine(_doc, "_type", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}")], ) ) else: _errors__.append( ValidationException( "the `_type` field is not valid because:", SourceLine(_doc, "_type", str), [e], ) ) _container = None if "_container" in _doc: try: _container = load_field( _doc.get("_container"), union_of_None_type_or_strtype, baseuri, loadingOptions, lc=_doc.get("_container") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) if str(e) == "missing required field `_container`": _errors__.append( ValidationException( str(e), None ) ) else: if error_message != str(e): val_type = convert_typing(extract_type(type(_doc.get("_container")))) _errors__.append( ValidationException( "the `_container` field is not valid because:", SourceLine(_doc, "_container", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}")], ) ) else: _errors__.append( ValidationException( "the `_container` field is not valid because:", SourceLine(_doc, "_container", str), [e], ) ) identity = None if "identity" in _doc: try: identity = load_field( _doc.get("identity"), union_of_None_type_or_booltype, baseuri, loadingOptions, lc=_doc.get("identity") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) if str(e) == "missing required field `identity`": _errors__.append( ValidationException( str(e), None ) ) else: if error_message != str(e): val_type = convert_typing(extract_type(type(_doc.get("identity")))) _errors__.append( ValidationException( "the `identity` field is not valid because:", SourceLine(_doc, "identity", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}")], ) ) else: _errors__.append( ValidationException( "the `identity` field is not valid because:", SourceLine(_doc, "identity", str), [e], ) ) noLinkCheck = None if "noLinkCheck" in _doc: try: noLinkCheck = load_field( _doc.get("noLinkCheck"), union_of_None_type_or_booltype, baseuri, loadingOptions, lc=_doc.get("noLinkCheck") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) if str(e) == "missing required field `noLinkCheck`": _errors__.append( ValidationException( str(e), None ) ) else: if error_message != str(e): val_type = convert_typing(extract_type(type(_doc.get("noLinkCheck")))) _errors__.append( ValidationException( "the `noLinkCheck` field is not valid 
because:", SourceLine(_doc, "noLinkCheck", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}")], ) ) else: _errors__.append( ValidationException( "the `noLinkCheck` field is not valid because:", SourceLine(_doc, "noLinkCheck", str), [e], ) ) mapSubject = None if "mapSubject" in _doc: try: mapSubject = load_field( _doc.get("mapSubject"), union_of_None_type_or_strtype, baseuri, loadingOptions, lc=_doc.get("mapSubject") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) if str(e) == "missing required field `mapSubject`": _errors__.append( ValidationException( str(e), None ) ) else: if error_message != str(e): val_type = convert_typing(extract_type(type(_doc.get("mapSubject")))) _errors__.append( ValidationException( "the `mapSubject` field is not valid because:", SourceLine(_doc, "mapSubject", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}")], ) ) else: _errors__.append( ValidationException( "the `mapSubject` field is not valid because:", SourceLine(_doc, "mapSubject", str), [e], ) ) mapPredicate = None if "mapPredicate" in _doc: try: mapPredicate = load_field( _doc.get("mapPredicate"), union_of_None_type_or_strtype, baseuri, loadingOptions, lc=_doc.get("mapPredicate") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) if str(e) == "missing required field `mapPredicate`": _errors__.append( ValidationException( str(e), None ) ) else: if error_message != str(e): val_type = convert_typing(extract_type(type(_doc.get("mapPredicate")))) _errors__.append( ValidationException( "the `mapPredicate` field is not valid because:", SourceLine(_doc, "mapPredicate", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}")], ) ) else: _errors__.append( ValidationException( "the `mapPredicate` field is not valid because:", SourceLine(_doc, "mapPredicate", str), [e], ) ) refScope = None if "refScope" in _doc: try: refScope = load_field( _doc.get("refScope"), union_of_None_type_or_inttype, baseuri, loadingOptions, lc=_doc.get("refScope") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) if str(e) == "missing required field `refScope`": _errors__.append( ValidationException( str(e), None ) ) else: if error_message != str(e): val_type = convert_typing(extract_type(type(_doc.get("refScope")))) _errors__.append( ValidationException( "the `refScope` field is not valid because:", SourceLine(_doc, "refScope", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}")], ) ) else: _errors__.append( ValidationException( "the `refScope` field is not valid because:", SourceLine(_doc, "refScope", str), [e], ) ) typeDSL = None if "typeDSL" in _doc: try: typeDSL = load_field( _doc.get("typeDSL"), union_of_None_type_or_booltype, baseuri, loadingOptions, lc=_doc.get("typeDSL") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) if str(e) == "missing required field `typeDSL`": _errors__.append( ValidationException( str(e), None ) ) else: if error_message != str(e): val_type = convert_typing(extract_type(type(_doc.get("typeDSL")))) _errors__.append( ValidationException( "the `typeDSL` field is not valid because:", SourceLine(_doc, "typeDSL", str), 
[ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}")], ) ) else: _errors__.append( ValidationException( "the `typeDSL` field is not valid because:", SourceLine(_doc, "typeDSL", str), [e], ) ) secondaryFilesDSL = None if "secondaryFilesDSL" in _doc: try: secondaryFilesDSL = load_field( _doc.get("secondaryFilesDSL"), union_of_None_type_or_booltype, baseuri, loadingOptions, lc=_doc.get("secondaryFilesDSL") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) if str(e) == "missing required field `secondaryFilesDSL`": _errors__.append( ValidationException( str(e), None ) ) else: if error_message != str(e): val_type = convert_typing(extract_type(type(_doc.get("secondaryFilesDSL")))) _errors__.append( ValidationException( "the `secondaryFilesDSL` field is not valid because:", SourceLine(_doc, "secondaryFilesDSL", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}")], ) ) else: _errors__.append( ValidationException( "the `secondaryFilesDSL` field is not valid because:", SourceLine(_doc, "secondaryFilesDSL", str), [e], ) ) subscope = None if "subscope" in _doc: try: subscope = load_field( _doc.get("subscope"), union_of_None_type_or_strtype, baseuri, loadingOptions, lc=_doc.get("subscope") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) if str(e) == "missing required field `subscope`": _errors__.append( ValidationException( str(e), None ) ) else: if error_message != str(e): val_type = convert_typing(extract_type(type(_doc.get("subscope")))) _errors__.append( ValidationException( "the `subscope` field is not valid because:", SourceLine(_doc, "subscope", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}")], ) ) else: _errors__.append( ValidationException( "the `subscope` field is not valid because:", SourceLine(_doc, "subscope", str), [e], ) ) extension_fields: Dict[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: _errors__.append( ValidationException("mapping with implicit null key") ) elif ":" in k: ex = expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( "invalid field `{}`, expected one of: `_id`, `_type`, `_container`, `identity`, `noLinkCheck`, `mapSubject`, `mapPredicate`, `refScope`, `typeDSL`, `secondaryFilesDSL`, `subscope`".format( k ), SourceLine(_doc, k, str), ) ) if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( _id=_id, _type=_type, _container=_container, identity=identity, noLinkCheck=noLinkCheck, mapSubject=mapSubject, mapPredicate=mapPredicate, refScope=refScope, typeDSL=typeDSL, secondaryFilesDSL=secondaryFilesDSL, subscope=subscope, extension_fields=extension_fields, loadingOptions=loadingOptions, ) return _constructed def save( self, top: bool = False, base_url: str = "", relative_uris: bool = True ) -> Dict[str, Any]: r: Dict[str, Any] = {} if relative_uris: for ef in self.extension_fields: r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] if self._id is not None: u = save_relative_uri(self._id, base_url, True, None, relative_uris) r["_id"] = u if self._type is not None: r["_type"] = save( self._type, top=False, 
base_url=base_url, relative_uris=relative_uris ) if self._container is not None: r["_container"] = save( self._container, top=False, base_url=base_url, relative_uris=relative_uris, ) if self.identity is not None: r["identity"] = save( self.identity, top=False, base_url=base_url, relative_uris=relative_uris ) if self.noLinkCheck is not None: r["noLinkCheck"] = save( self.noLinkCheck, top=False, base_url=base_url, relative_uris=relative_uris, ) if self.mapSubject is not None: r["mapSubject"] = save( self.mapSubject, top=False, base_url=base_url, relative_uris=relative_uris, ) if self.mapPredicate is not None: r["mapPredicate"] = save( self.mapPredicate, top=False, base_url=base_url, relative_uris=relative_uris, ) if self.refScope is not None: r["refScope"] = save( self.refScope, top=False, base_url=base_url, relative_uris=relative_uris ) if self.typeDSL is not None: r["typeDSL"] = save( self.typeDSL, top=False, base_url=base_url, relative_uris=relative_uris ) if self.secondaryFilesDSL is not None: r["secondaryFilesDSL"] = save( self.secondaryFilesDSL, top=False, base_url=base_url, relative_uris=relative_uris, ) if self.subscope is not None: r["subscope"] = save( self.subscope, top=False, base_url=base_url, relative_uris=relative_uris ) # top refers to the directory level if top: if self.loadingOptions.namespaces: r["$namespaces"] = self.loadingOptions.namespaces if self.loadingOptions.schemas: r["$schemas"] = self.loadingOptions.schemas return r attrs = frozenset( [ "_id", "_type", "_container", "identity", "noLinkCheck", "mapSubject", "mapPredicate", "refScope", "typeDSL", "secondaryFilesDSL", "subscope", ] ) class SpecializeDef(Saveable): def __init__( self, specializeFrom: Any, specializeTo: Any, extension_fields: Optional[Dict[str, Any]] = None, loadingOptions: Optional[LoadingOptions] = None, ) -> None: if extension_fields: self.extension_fields = extension_fields else: self.extension_fields = CommentedMap() if loadingOptions: self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() self.specializeFrom = specializeFrom self.specializeTo = specializeTo def __eq__(self, other: Any) -> bool: if isinstance(other, SpecializeDef): return bool( self.specializeFrom == other.specializeFrom and self.specializeTo == other.specializeTo ) return False def __hash__(self) -> int: return hash((self.specializeFrom, self.specializeTo)) @classmethod def fromDoc( cls, doc: Any, baseuri: str, loadingOptions: LoadingOptions, docRoot: Optional[str] = None ) -> "SpecializeDef": _doc = copy.copy(doc) if hasattr(doc, "lc"): _doc.lc.data = doc.lc.data _doc.lc.filename = doc.lc.filename _errors__ = [] try: if _doc.get("specializeFrom") is None: raise ValidationException("missing required field `specializeFrom`", None, []) specializeFrom = load_field( _doc.get("specializeFrom"), uri_strtype_False_False_1_None, baseuri, loadingOptions, lc=_doc.get("specializeFrom") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) if str(e) == "missing required field `specializeFrom`": _errors__.append( ValidationException( str(e), None ) ) else: if error_message != str(e): val_type = convert_typing(extract_type(type(_doc.get("specializeFrom")))) _errors__.append( ValidationException( "the `specializeFrom` field is not valid because:", SourceLine(_doc, "specializeFrom", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}")], ) ) else: _errors__.append( ValidationException( "the 
`specializeFrom` field is not valid because:",
                            SourceLine(_doc, "specializeFrom", str),
                            [e],
                        )
                    )
        try:
            if _doc.get("specializeTo") is None:
                raise ValidationException("missing required field `specializeTo`", None, [])

            specializeTo = load_field(
                _doc.get("specializeTo"),
                uri_strtype_False_False_1_None,
                baseuri,
                loadingOptions,
                lc=_doc.get("specializeTo")
            )
        except ValidationException as e:
            error_message, to_print, verb_tensage = parse_errors(str(e))

            if str(e) == "missing required field `specializeTo`":
                _errors__.append(
                    ValidationException(
                        str(e),
                        None
                    )
                )
            else:
                if error_message != str(e):
                    val_type = convert_typing(extract_type(type(_doc.get("specializeTo"))))
                    _errors__.append(
                        ValidationException(
                            "the `specializeTo` field is not valid because:",
                            SourceLine(_doc, "specializeTo", str),
                            [ValidationException(f"Value is a {val_type}, "
                                                 f"but valid {to_print} for this field "
                                                 f"{verb_tensage} {error_message}")],
                        )
                    )
                else:
                    _errors__.append(
                        ValidationException(
                            "the `specializeTo` field is not valid because:",
                            SourceLine(_doc, "specializeTo", str),
                            [e],
                        )
                    )
        extension_fields: Dict[str, Any] = {}
        for k in _doc.keys():
            if k not in cls.attrs:
                if not k:
                    _errors__.append(
                        ValidationException("mapping with implicit null key")
                    )
                elif ":" in k:
                    ex = expand_url(
                        k, "", loadingOptions, scoped_id=False, vocab_term=False
                    )
                    extension_fields[ex] = _doc[k]
                else:
                    _errors__.append(
                        ValidationException(
                            "invalid field `{}`, expected one of: `specializeFrom`, `specializeTo`".format(
                                k
                            ),
                            SourceLine(_doc, k, str),
                        )
                    )

        if _errors__:
            raise ValidationException("", None, _errors__, "*")
        _constructed = cls(
            specializeFrom=specializeFrom,
            specializeTo=specializeTo,
            extension_fields=extension_fields,
            loadingOptions=loadingOptions,
        )
        return _constructed

    def save(
        self, top: bool = False, base_url: str = "", relative_uris: bool = True
    ) -> Dict[str, Any]:
        r: Dict[str, Any] = {}

        if relative_uris:
            for ef in self.extension_fields:
                r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef]
        else:
            for ef in self.extension_fields:
                r[ef] = self.extension_fields[ef]
        if self.specializeFrom is not None:
            u = save_relative_uri(
                self.specializeFrom, base_url, False, 1, relative_uris
            )
            r["specializeFrom"] = u
        if self.specializeTo is not None:
            u = save_relative_uri(self.specializeTo, base_url, False, 1, relative_uris)
            r["specializeTo"] = u

        # top refers to the directory level
        if top:
            if self.loadingOptions.namespaces:
                r["$namespaces"] = self.loadingOptions.namespaces
            if self.loadingOptions.schemas:
                r["$schemas"] = self.loadingOptions.schemas
        return r

    attrs = frozenset(["specializeFrom", "specializeTo"])


class NamedType(Saveable):
    pass


class DocType(Documented):
    pass


class SchemaDefinedType(DocType):
    """
    Abstract base for schema-defined types.
    """

    pass


class SaladRecordField(RecordField):
    """
    A field of a record.
""" def __init__( self, name: Any, type_: Any, doc: Optional[Any] = None, jsonldPredicate: Optional[Any] = None, default: Optional[Any] = None, extension_fields: Optional[Dict[str, Any]] = None, loadingOptions: Optional[LoadingOptions] = None, ) -> None: if extension_fields: self.extension_fields = extension_fields else: self.extension_fields = CommentedMap() if loadingOptions: self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() self.doc = doc self.name = name self.type_ = type_ self.jsonldPredicate = jsonldPredicate self.default = default def __eq__(self, other: Any) -> bool: if isinstance(other, SaladRecordField): return bool( self.doc == other.doc and self.name == other.name and self.type_ == other.type_ and self.jsonldPredicate == other.jsonldPredicate and self.default == other.default ) return False def __hash__(self) -> int: return hash( (self.doc, self.name, self.type_, self.jsonldPredicate, self.default) ) @classmethod def fromDoc( cls, doc: Any, baseuri: str, loadingOptions: LoadingOptions, docRoot: Optional[str] = None ) -> "SaladRecordField": _doc = copy.copy(doc) if hasattr(doc, "lc"): _doc.lc.data = doc.lc.data _doc.lc.filename = doc.lc.filename _errors__ = [] name = None if "name" in _doc: try: name = load_field( _doc.get("name"), uri_strtype_True_False_None_None, baseuri, loadingOptions, lc=_doc.get("name") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) if str(e) == "missing required field `name`": _errors__.append( ValidationException( str(e), None ) ) else: if error_message != str(e): val_type = convert_typing(extract_type(type(_doc.get("name")))) _errors__.append( ValidationException( "the `name` field is not valid because:", SourceLine(_doc, "name", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}")], ) ) else: _errors__.append( ValidationException( "the `name` field is not valid because:", SourceLine(_doc, "name", str), [e], ) ) __original_name_is_none = name is None if name is None: if docRoot is not None: name = docRoot else: _errors__.append(ValidationException("missing name")) if not __original_name_is_none: baseuri = cast(str, name) doc = None if "doc" in _doc: try: doc = load_field( _doc.get("doc"), union_of_None_type_or_strtype_or_array_of_strtype, baseuri, loadingOptions, lc=_doc.get("doc") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) if str(e) == "missing required field `doc`": _errors__.append( ValidationException( str(e), None ) ) else: if error_message != str(e): val_type = convert_typing(extract_type(type(_doc.get("doc")))) _errors__.append( ValidationException( "the `doc` field is not valid because:", SourceLine(_doc, "doc", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}")], ) ) else: _errors__.append( ValidationException( "the `doc` field is not valid because:", SourceLine(_doc, "doc", str), [e], ) ) try: if _doc.get("type") is None: raise ValidationException("missing required field `type`", None, []) type_ = load_field( _doc.get("type"), typedsl_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_2, baseuri, loadingOptions, 
lc=_doc.get("type") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) if str(e) == "missing required field `type`": _errors__.append( ValidationException( str(e), None ) ) else: if error_message != str(e): val_type = convert_typing(extract_type(type(_doc.get("type")))) _errors__.append( ValidationException( "the `type` field is not valid because:", SourceLine(_doc, "type", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}")], ) ) else: _errors__.append( ValidationException( "the `type` field is not valid because:", SourceLine(_doc, "type", str), [e], ) ) jsonldPredicate = None if "jsonldPredicate" in _doc: try: jsonldPredicate = load_field( _doc.get("jsonldPredicate"), union_of_None_type_or_strtype_or_JsonldPredicateLoader, baseuri, loadingOptions, lc=_doc.get("jsonldPredicate") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) if str(e) == "missing required field `jsonldPredicate`": _errors__.append( ValidationException( str(e), None ) ) else: if error_message != str(e): val_type = convert_typing(extract_type(type(_doc.get("jsonldPredicate")))) _errors__.append( ValidationException( "the `jsonldPredicate` field is not valid because:", SourceLine(_doc, "jsonldPredicate", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}")], ) ) else: _errors__.append( ValidationException( "the `jsonldPredicate` field is not valid because:", SourceLine(_doc, "jsonldPredicate", str), [e], ) ) default = None if "default" in _doc: try: default = load_field( _doc.get("default"), union_of_None_type_or_Any_type, baseuri, loadingOptions, lc=_doc.get("default") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) if str(e) == "missing required field `default`": _errors__.append( ValidationException( str(e), None ) ) else: if error_message != str(e): val_type = convert_typing(extract_type(type(_doc.get("default")))) _errors__.append( ValidationException( "the `default` field is not valid because:", SourceLine(_doc, "default", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}")], ) ) else: _errors__.append( ValidationException( "the `default` field is not valid because:", SourceLine(_doc, "default", str), [e], ) ) extension_fields: Dict[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: _errors__.append( ValidationException("mapping with implicit null key") ) elif ":" in k: ex = expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( "invalid field `{}`, expected one of: `doc`, `name`, `type`, `jsonldPredicate`, `default`".format( k ), SourceLine(_doc, k, str), ) ) if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( doc=doc, name=name, type_=type_, jsonldPredicate=jsonldPredicate, default=default, extension_fields=extension_fields, loadingOptions=loadingOptions, ) loadingOptions.idx[cast(str, name)] = (_constructed, loadingOptions) return _constructed def save( self, top: bool = False, base_url: str = "", relative_uris: bool = True ) -> Dict[str, Any]: r: Dict[str, Any] = {} if relative_uris: for ef in self.extension_fields: r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] else: for ef in 
self.extension_fields: r[ef] = self.extension_fields[ef] if self.name is not None: u = save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u if self.doc is not None: r["doc"] = save( self.doc, top=False, base_url=self.name, relative_uris=relative_uris ) if self.type_ is not None: r["type"] = save( self.type_, top=False, base_url=self.name, relative_uris=relative_uris ) if self.jsonldPredicate is not None: r["jsonldPredicate"] = save( self.jsonldPredicate, top=False, base_url=self.name, relative_uris=relative_uris, ) if self.default is not None: r["default"] = save( self.default, top=False, base_url=self.name, relative_uris=relative_uris ) # top refers to the directory level if top: if self.loadingOptions.namespaces: r["$namespaces"] = self.loadingOptions.namespaces if self.loadingOptions.schemas: r["$schemas"] = self.loadingOptions.schemas return r attrs = frozenset(["doc", "name", "type", "jsonldPredicate", "default"]) class SaladRecordSchema(NamedType, RecordSchema, SchemaDefinedType): def __init__( self, name: Any, type_: Any, inVocab: Optional[Any] = None, fields: Optional[Any] = None, doc: Optional[Any] = None, docParent: Optional[Any] = None, docChild: Optional[Any] = None, docAfter: Optional[Any] = None, jsonldPredicate: Optional[Any] = None, documentRoot: Optional[Any] = None, abstract: Optional[Any] = None, extends: Optional[Any] = None, specialize: Optional[Any] = None, extension_fields: Optional[Dict[str, Any]] = None, loadingOptions: Optional[LoadingOptions] = None, ) -> None: if extension_fields: self.extension_fields = extension_fields else: self.extension_fields = CommentedMap() if loadingOptions: self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() self.name = name self.inVocab = inVocab self.fields = fields self.type_ = type_ self.doc = doc self.docParent = docParent self.docChild = docChild self.docAfter = docAfter self.jsonldPredicate = jsonldPredicate self.documentRoot = documentRoot self.abstract = abstract self.extends = extends self.specialize = specialize def __eq__(self, other: Any) -> bool: if isinstance(other, SaladRecordSchema): return bool( self.name == other.name and self.inVocab == other.inVocab and self.fields == other.fields and self.type_ == other.type_ and self.doc == other.doc and self.docParent == other.docParent and self.docChild == other.docChild and self.docAfter == other.docAfter and self.jsonldPredicate == other.jsonldPredicate and self.documentRoot == other.documentRoot and self.abstract == other.abstract and self.extends == other.extends and self.specialize == other.specialize ) return False def __hash__(self) -> int: return hash( ( self.name, self.inVocab, self.fields, self.type_, self.doc, self.docParent, self.docChild, self.docAfter, self.jsonldPredicate, self.documentRoot, self.abstract, self.extends, self.specialize, ) ) @classmethod def fromDoc( cls, doc: Any, baseuri: str, loadingOptions: LoadingOptions, docRoot: Optional[str] = None ) -> "SaladRecordSchema": _doc = copy.copy(doc) if hasattr(doc, "lc"): _doc.lc.data = doc.lc.data _doc.lc.filename = doc.lc.filename _errors__ = [] name = None if "name" in _doc: try: name = load_field( _doc.get("name"), uri_strtype_True_False_None_None, baseuri, loadingOptions, lc=_doc.get("name") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) if str(e) == "missing required field `name`": _errors__.append( ValidationException( str(e), None ) ) else: if error_message != str(e): val_type = 
convert_typing(extract_type(type(_doc.get("name")))) _errors__.append( ValidationException( "the `name` field is not valid because:", SourceLine(_doc, "name", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}")], ) ) else: _errors__.append( ValidationException( "the `name` field is not valid because:", SourceLine(_doc, "name", str), [e], ) ) __original_name_is_none = name is None if name is None: if docRoot is not None: name = docRoot else: _errors__.append(ValidationException("missing name")) if not __original_name_is_none: baseuri = cast(str, name) inVocab = None if "inVocab" in _doc: try: inVocab = load_field( _doc.get("inVocab"), union_of_None_type_or_booltype, baseuri, loadingOptions, lc=_doc.get("inVocab") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) if str(e) == "missing required field `inVocab`": _errors__.append( ValidationException( str(e), None ) ) else: if error_message != str(e): val_type = convert_typing(extract_type(type(_doc.get("inVocab")))) _errors__.append( ValidationException( "the `inVocab` field is not valid because:", SourceLine(_doc, "inVocab", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}")], ) ) else: _errors__.append( ValidationException( "the `inVocab` field is not valid because:", SourceLine(_doc, "inVocab", str), [e], ) ) fields = None if "fields" in _doc: try: fields = load_field( _doc.get("fields"), idmap_fields_union_of_None_type_or_array_of_SaladRecordFieldLoader, baseuri, loadingOptions, lc=_doc.get("fields") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) if str(e) == "missing required field `fields`": _errors__.append( ValidationException( str(e), None ) ) else: if error_message != str(e): val_type = convert_typing(extract_type(type(_doc.get("fields")))) _errors__.append( ValidationException( "the `fields` field is not valid because:", SourceLine(_doc, "fields", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}")], ) ) else: _errors__.append( ValidationException( "the `fields` field is not valid because:", SourceLine(_doc, "fields", str), [e], ) ) try: if _doc.get("type") is None: raise ValidationException("missing required field `type`", None, []) type_ = load_field( _doc.get("type"), typedsl_Record_nameLoader_2, baseuri, loadingOptions, lc=_doc.get("type") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) if str(e) == "missing required field `type`": _errors__.append( ValidationException( str(e), None ) ) else: if error_message != str(e): val_type = convert_typing(extract_type(type(_doc.get("type")))) _errors__.append( ValidationException( "the `type` field is not valid because:", SourceLine(_doc, "type", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}")], ) ) else: _errors__.append( ValidationException( "the `type` field is not valid because:", SourceLine(_doc, "type", str), [e], ) ) doc = None if "doc" in _doc: try: doc = load_field( _doc.get("doc"), union_of_None_type_or_strtype_or_array_of_strtype, baseuri, loadingOptions, lc=_doc.get("doc") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) if str(e) == "missing required field `doc`": _errors__.append( 
ValidationException( str(e), None ) ) else: if error_message != str(e): val_type = convert_typing(extract_type(type(_doc.get("doc")))) _errors__.append( ValidationException( "the `doc` field is not valid because:", SourceLine(_doc, "doc", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}")], ) ) else: _errors__.append( ValidationException( "the `doc` field is not valid because:", SourceLine(_doc, "doc", str), [e], ) ) docParent = None if "docParent" in _doc: try: docParent = load_field( _doc.get("docParent"), uri_union_of_None_type_or_strtype_False_False_None_None, baseuri, loadingOptions, lc=_doc.get("docParent") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) if str(e) == "missing required field `docParent`": _errors__.append( ValidationException( str(e), None ) ) else: if error_message != str(e): val_type = convert_typing(extract_type(type(_doc.get("docParent")))) _errors__.append( ValidationException( "the `docParent` field is not valid because:", SourceLine(_doc, "docParent", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}")], ) ) else: _errors__.append( ValidationException( "the `docParent` field is not valid because:", SourceLine(_doc, "docParent", str), [e], ) ) docChild = None if "docChild" in _doc: try: docChild = load_field( _doc.get("docChild"), uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_None_None, baseuri, loadingOptions, lc=_doc.get("docChild") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) if str(e) == "missing required field `docChild`": _errors__.append( ValidationException( str(e), None ) ) else: if error_message != str(e): val_type = convert_typing(extract_type(type(_doc.get("docChild")))) _errors__.append( ValidationException( "the `docChild` field is not valid because:", SourceLine(_doc, "docChild", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}")], ) ) else: _errors__.append( ValidationException( "the `docChild` field is not valid because:", SourceLine(_doc, "docChild", str), [e], ) ) docAfter = None if "docAfter" in _doc: try: docAfter = load_field( _doc.get("docAfter"), uri_union_of_None_type_or_strtype_False_False_None_None, baseuri, loadingOptions, lc=_doc.get("docAfter") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) if str(e) == "missing required field `docAfter`": _errors__.append( ValidationException( str(e), None ) ) else: if error_message != str(e): val_type = convert_typing(extract_type(type(_doc.get("docAfter")))) _errors__.append( ValidationException( "the `docAfter` field is not valid because:", SourceLine(_doc, "docAfter", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}")], ) ) else: _errors__.append( ValidationException( "the `docAfter` field is not valid because:", SourceLine(_doc, "docAfter", str), [e], ) ) jsonldPredicate = None if "jsonldPredicate" in _doc: try: jsonldPredicate = load_field( _doc.get("jsonldPredicate"), union_of_None_type_or_strtype_or_JsonldPredicateLoader, baseuri, loadingOptions, lc=_doc.get("jsonldPredicate") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) if str(e) == "missing required field 
`jsonldPredicate`": _errors__.append( ValidationException( str(e), None ) ) else: if error_message != str(e): val_type = convert_typing(extract_type(type(_doc.get("jsonldPredicate")))) _errors__.append( ValidationException( "the `jsonldPredicate` field is not valid because:", SourceLine(_doc, "jsonldPredicate", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}")], ) ) else: _errors__.append( ValidationException( "the `jsonldPredicate` field is not valid because:", SourceLine(_doc, "jsonldPredicate", str), [e], ) ) documentRoot = None if "documentRoot" in _doc: try: documentRoot = load_field( _doc.get("documentRoot"), union_of_None_type_or_booltype, baseuri, loadingOptions, lc=_doc.get("documentRoot") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) if str(e) == "missing required field `documentRoot`": _errors__.append( ValidationException( str(e), None ) ) else: if error_message != str(e): val_type = convert_typing(extract_type(type(_doc.get("documentRoot")))) _errors__.append( ValidationException( "the `documentRoot` field is not valid because:", SourceLine(_doc, "documentRoot", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}")], ) ) else: _errors__.append( ValidationException( "the `documentRoot` field is not valid because:", SourceLine(_doc, "documentRoot", str), [e], ) ) abstract = None if "abstract" in _doc: try: abstract = load_field( _doc.get("abstract"), union_of_None_type_or_booltype, baseuri, loadingOptions, lc=_doc.get("abstract") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) if str(e) == "missing required field `abstract`": _errors__.append( ValidationException( str(e), None ) ) else: if error_message != str(e): val_type = convert_typing(extract_type(type(_doc.get("abstract")))) _errors__.append( ValidationException( "the `abstract` field is not valid because:", SourceLine(_doc, "abstract", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}")], ) ) else: _errors__.append( ValidationException( "the `abstract` field is not valid because:", SourceLine(_doc, "abstract", str), [e], ) ) extends = None if "extends" in _doc: try: extends = load_field( _doc.get("extends"), uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_1_None, baseuri, loadingOptions, lc=_doc.get("extends") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) if str(e) == "missing required field `extends`": _errors__.append( ValidationException( str(e), None ) ) else: if error_message != str(e): val_type = convert_typing(extract_type(type(_doc.get("extends")))) _errors__.append( ValidationException( "the `extends` field is not valid because:", SourceLine(_doc, "extends", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}")], ) ) else: _errors__.append( ValidationException( "the `extends` field is not valid because:", SourceLine(_doc, "extends", str), [e], ) ) specialize = None if "specialize" in _doc: try: specialize = load_field( _doc.get("specialize"), idmap_specialize_union_of_None_type_or_array_of_SpecializeDefLoader, baseuri, loadingOptions, lc=_doc.get("specialize") ) except ValidationException as e: error_message, to_print, verb_tensage = 
parse_errors(str(e)) if str(e) == "missing required field `specialize`": _errors__.append( ValidationException( str(e), None ) ) else: if error_message != str(e): val_type = convert_typing(extract_type(type(_doc.get("specialize")))) _errors__.append( ValidationException( "the `specialize` field is not valid because:", SourceLine(_doc, "specialize", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}")], ) ) else: _errors__.append( ValidationException( "the `specialize` field is not valid because:", SourceLine(_doc, "specialize", str), [e], ) ) extension_fields: Dict[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: _errors__.append( ValidationException("mapping with implicit null key") ) elif ":" in k: ex = expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( "invalid field `{}`, expected one of: `name`, `inVocab`, `fields`, `type`, `doc`, `docParent`, `docChild`, `docAfter`, `jsonldPredicate`, `documentRoot`, `abstract`, `extends`, `specialize`".format( k ), SourceLine(_doc, k, str), ) ) if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( name=name, inVocab=inVocab, fields=fields, type_=type_, doc=doc, docParent=docParent, docChild=docChild, docAfter=docAfter, jsonldPredicate=jsonldPredicate, documentRoot=documentRoot, abstract=abstract, extends=extends, specialize=specialize, extension_fields=extension_fields, loadingOptions=loadingOptions, ) loadingOptions.idx[cast(str, name)] = (_constructed, loadingOptions) return _constructed def save( self, top: bool = False, base_url: str = "", relative_uris: bool = True ) -> Dict[str, Any]: r: Dict[str, Any] = {} if relative_uris: for ef in self.extension_fields: r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] if self.name is not None: u = save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u if self.inVocab is not None: r["inVocab"] = save( self.inVocab, top=False, base_url=self.name, relative_uris=relative_uris ) if self.fields is not None: r["fields"] = save( self.fields, top=False, base_url=self.name, relative_uris=relative_uris ) if self.type_ is not None: r["type"] = save( self.type_, top=False, base_url=self.name, relative_uris=relative_uris ) if self.doc is not None: r["doc"] = save( self.doc, top=False, base_url=self.name, relative_uris=relative_uris ) if self.docParent is not None: u = save_relative_uri(self.docParent, self.name, False, None, relative_uris) r["docParent"] = u if self.docChild is not None: u = save_relative_uri(self.docChild, self.name, False, None, relative_uris) r["docChild"] = u if self.docAfter is not None: u = save_relative_uri(self.docAfter, self.name, False, None, relative_uris) r["docAfter"] = u if self.jsonldPredicate is not None: r["jsonldPredicate"] = save( self.jsonldPredicate, top=False, base_url=self.name, relative_uris=relative_uris, ) if self.documentRoot is not None: r["documentRoot"] = save( self.documentRoot, top=False, base_url=self.name, relative_uris=relative_uris, ) if self.abstract is not None: r["abstract"] = save( self.abstract, top=False, base_url=self.name, relative_uris=relative_uris, ) if self.extends is not None: u = save_relative_uri(self.extends, self.name, False, 1, relative_uris) r["extends"] = u if self.specialize is not None: r["specialize"] = 
save( self.specialize, top=False, base_url=self.name, relative_uris=relative_uris, ) # top refers to the directory level if top: if self.loadingOptions.namespaces: r["$namespaces"] = self.loadingOptions.namespaces if self.loadingOptions.schemas: r["$schemas"] = self.loadingOptions.schemas return r attrs = frozenset( [ "name", "inVocab", "fields", "type", "doc", "docParent", "docChild", "docAfter", "jsonldPredicate", "documentRoot", "abstract", "extends", "specialize", ] ) class SaladEnumSchema(NamedType, EnumSchema, SchemaDefinedType): """ Define an enumerated type. """ def __init__( self, symbols: Any, type_: Any, name: Optional[Any] = None, inVocab: Optional[Any] = None, doc: Optional[Any] = None, docParent: Optional[Any] = None, docChild: Optional[Any] = None, docAfter: Optional[Any] = None, jsonldPredicate: Optional[Any] = None, documentRoot: Optional[Any] = None, extends: Optional[Any] = None, extension_fields: Optional[Dict[str, Any]] = None, loadingOptions: Optional[LoadingOptions] = None, ) -> None: if extension_fields: self.extension_fields = extension_fields else: self.extension_fields = CommentedMap() if loadingOptions: self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() self.name = name self.inVocab = inVocab self.symbols = symbols self.type_ = type_ self.doc = doc self.docParent = docParent self.docChild = docChild self.docAfter = docAfter self.jsonldPredicate = jsonldPredicate self.documentRoot = documentRoot self.extends = extends def __eq__(self, other: Any) -> bool: if isinstance(other, SaladEnumSchema): return bool( self.name == other.name and self.inVocab == other.inVocab and self.symbols == other.symbols and self.type_ == other.type_ and self.doc == other.doc and self.docParent == other.docParent and self.docChild == other.docChild and self.docAfter == other.docAfter and self.jsonldPredicate == other.jsonldPredicate and self.documentRoot == other.documentRoot and self.extends == other.extends ) return False def __hash__(self) -> int: return hash( ( self.name, self.inVocab, self.symbols, self.type_, self.doc, self.docParent, self.docChild, self.docAfter, self.jsonldPredicate, self.documentRoot, self.extends, ) ) @classmethod def fromDoc( cls, doc: Any, baseuri: str, loadingOptions: LoadingOptions, docRoot: Optional[str] = None ) -> "SaladEnumSchema": _doc = copy.copy(doc) if hasattr(doc, "lc"): _doc.lc.data = doc.lc.data _doc.lc.filename = doc.lc.filename _errors__ = [] name = None if "name" in _doc: try: name = load_field( _doc.get("name"), uri_union_of_None_type_or_strtype_True_False_None_None, baseuri, loadingOptions, lc=_doc.get("name") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) if str(e) == "missing required field `name`": _errors__.append( ValidationException( str(e), None ) ) else: if error_message != str(e): val_type = convert_typing(extract_type(type(_doc.get("name")))) _errors__.append( ValidationException( "the `name` field is not valid because:", SourceLine(_doc, "name", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}")], ) ) else: _errors__.append( ValidationException( "the `name` field is not valid because:", SourceLine(_doc, "name", str), [e], ) ) __original_name_is_none = name is None if name is None: if docRoot is not None: name = docRoot else: name = "_:" + str(_uuid__.uuid4()) if not __original_name_is_none: baseuri = cast(str, name) inVocab = None if "inVocab" in _doc: try: inVocab = 
load_field( _doc.get("inVocab"), union_of_None_type_or_booltype, baseuri, loadingOptions, lc=_doc.get("inVocab") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) if str(e) == "missing required field `inVocab`": _errors__.append( ValidationException( str(e), None ) ) else: if error_message != str(e): val_type = convert_typing(extract_type(type(_doc.get("inVocab")))) _errors__.append( ValidationException( "the `inVocab` field is not valid because:", SourceLine(_doc, "inVocab", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}")], ) ) else: _errors__.append( ValidationException( "the `inVocab` field is not valid because:", SourceLine(_doc, "inVocab", str), [e], ) ) try: if _doc.get("symbols") is None: raise ValidationException("missing required field `symbols`", None, []) symbols = load_field( _doc.get("symbols"), uri_array_of_strtype_True_False_None_None, baseuri, loadingOptions, lc=_doc.get("symbols") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) if str(e) == "missing required field `symbols`": _errors__.append( ValidationException( str(e), None ) ) else: if error_message != str(e): val_type = convert_typing(extract_type(type(_doc.get("symbols")))) _errors__.append( ValidationException( "the `symbols` field is not valid because:", SourceLine(_doc, "symbols", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}")], ) ) else: _errors__.append( ValidationException( "the `symbols` field is not valid because:", SourceLine(_doc, "symbols", str), [e], ) ) try: if _doc.get("type") is None: raise ValidationException("missing required field `type`", None, []) type_ = load_field( _doc.get("type"), typedsl_Enum_nameLoader_2, baseuri, loadingOptions, lc=_doc.get("type") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) if str(e) == "missing required field `type`": _errors__.append( ValidationException( str(e), None ) ) else: if error_message != str(e): val_type = convert_typing(extract_type(type(_doc.get("type")))) _errors__.append( ValidationException( "the `type` field is not valid because:", SourceLine(_doc, "type", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}")], ) ) else: _errors__.append( ValidationException( "the `type` field is not valid because:", SourceLine(_doc, "type", str), [e], ) ) doc = None if "doc" in _doc: try: doc = load_field( _doc.get("doc"), union_of_None_type_or_strtype_or_array_of_strtype, baseuri, loadingOptions, lc=_doc.get("doc") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) if str(e) == "missing required field `doc`": _errors__.append( ValidationException( str(e), None ) ) else: if error_message != str(e): val_type = convert_typing(extract_type(type(_doc.get("doc")))) _errors__.append( ValidationException( "the `doc` field is not valid because:", SourceLine(_doc, "doc", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}")], ) ) else: _errors__.append( ValidationException( "the `doc` field is not valid because:", SourceLine(_doc, "doc", str), [e], ) ) docParent = None if "docParent" in _doc: try: docParent = load_field( _doc.get("docParent"), 
uri_union_of_None_type_or_strtype_False_False_None_None, baseuri, loadingOptions, lc=_doc.get("docParent") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) if str(e) == "missing required field `docParent`": _errors__.append( ValidationException( str(e), None ) ) else: if error_message != str(e): val_type = convert_typing(extract_type(type(_doc.get("docParent")))) _errors__.append( ValidationException( "the `docParent` field is not valid because:", SourceLine(_doc, "docParent", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}")], ) ) else: _errors__.append( ValidationException( "the `docParent` field is not valid because:", SourceLine(_doc, "docParent", str), [e], ) ) docChild = None if "docChild" in _doc: try: docChild = load_field( _doc.get("docChild"), uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_None_None, baseuri, loadingOptions, lc=_doc.get("docChild") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) if str(e) == "missing required field `docChild`": _errors__.append( ValidationException( str(e), None ) ) else: if error_message != str(e): val_type = convert_typing(extract_type(type(_doc.get("docChild")))) _errors__.append( ValidationException( "the `docChild` field is not valid because:", SourceLine(_doc, "docChild", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}")], ) ) else: _errors__.append( ValidationException( "the `docChild` field is not valid because:", SourceLine(_doc, "docChild", str), [e], ) ) docAfter = None if "docAfter" in _doc: try: docAfter = load_field( _doc.get("docAfter"), uri_union_of_None_type_or_strtype_False_False_None_None, baseuri, loadingOptions, lc=_doc.get("docAfter") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) if str(e) == "missing required field `docAfter`": _errors__.append( ValidationException( str(e), None ) ) else: if error_message != str(e): val_type = convert_typing(extract_type(type(_doc.get("docAfter")))) _errors__.append( ValidationException( "the `docAfter` field is not valid because:", SourceLine(_doc, "docAfter", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}")], ) ) else: _errors__.append( ValidationException( "the `docAfter` field is not valid because:", SourceLine(_doc, "docAfter", str), [e], ) ) jsonldPredicate = None if "jsonldPredicate" in _doc: try: jsonldPredicate = load_field( _doc.get("jsonldPredicate"), union_of_None_type_or_strtype_or_JsonldPredicateLoader, baseuri, loadingOptions, lc=_doc.get("jsonldPredicate") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) if str(e) == "missing required field `jsonldPredicate`": _errors__.append( ValidationException( str(e), None ) ) else: if error_message != str(e): val_type = convert_typing(extract_type(type(_doc.get("jsonldPredicate")))) _errors__.append( ValidationException( "the `jsonldPredicate` field is not valid because:", SourceLine(_doc, "jsonldPredicate", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}")], ) ) else: _errors__.append( ValidationException( "the `jsonldPredicate` field is not valid because:", SourceLine(_doc, "jsonldPredicate", str), [e], ) ) 
documentRoot = None if "documentRoot" in _doc: try: documentRoot = load_field( _doc.get("documentRoot"), union_of_None_type_or_booltype, baseuri, loadingOptions, lc=_doc.get("documentRoot") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) if str(e) == "missing required field `documentRoot`": _errors__.append( ValidationException( str(e), None ) ) else: if error_message != str(e): val_type = convert_typing(extract_type(type(_doc.get("documentRoot")))) _errors__.append( ValidationException( "the `documentRoot` field is not valid because:", SourceLine(_doc, "documentRoot", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}")], ) ) else: _errors__.append( ValidationException( "the `documentRoot` field is not valid because:", SourceLine(_doc, "documentRoot", str), [e], ) ) extends = None if "extends" in _doc: try: extends = load_field( _doc.get("extends"), uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_1_None, baseuri, loadingOptions, lc=_doc.get("extends") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) if str(e) == "missing required field `extends`": _errors__.append( ValidationException( str(e), None ) ) else: if error_message != str(e): val_type = convert_typing(extract_type(type(_doc.get("extends")))) _errors__.append( ValidationException( "the `extends` field is not valid because:", SourceLine(_doc, "extends", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}")], ) ) else: _errors__.append( ValidationException( "the `extends` field is not valid because:", SourceLine(_doc, "extends", str), [e], ) ) extension_fields: Dict[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: _errors__.append( ValidationException("mapping with implicit null key") ) elif ":" in k: ex = expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( "invalid field `{}`, expected one of: `name`, `inVocab`, `symbols`, `type`, `doc`, `docParent`, `docChild`, `docAfter`, `jsonldPredicate`, `documentRoot`, `extends`".format( k ), SourceLine(_doc, k, str), ) ) if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( name=name, inVocab=inVocab, symbols=symbols, type_=type_, doc=doc, docParent=docParent, docChild=docChild, docAfter=docAfter, jsonldPredicate=jsonldPredicate, documentRoot=documentRoot, extends=extends, extension_fields=extension_fields, loadingOptions=loadingOptions, ) loadingOptions.idx[cast(str, name)] = (_constructed, loadingOptions) return _constructed def save( self, top: bool = False, base_url: str = "", relative_uris: bool = True ) -> Dict[str, Any]: r: Dict[str, Any] = {} if relative_uris: for ef in self.extension_fields: r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] if self.name is not None: u = save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u if self.inVocab is not None: r["inVocab"] = save( self.inVocab, top=False, base_url=self.name, relative_uris=relative_uris ) if self.symbols is not None: u = save_relative_uri(self.symbols, self.name, True, None, relative_uris) r["symbols"] = u if self.type_ is not None: r["type"] = save( self.type_, top=False, base_url=self.name, 
relative_uris=relative_uris ) if self.doc is not None: r["doc"] = save( self.doc, top=False, base_url=self.name, relative_uris=relative_uris ) if self.docParent is not None: u = save_relative_uri(self.docParent, self.name, False, None, relative_uris) r["docParent"] = u if self.docChild is not None: u = save_relative_uri(self.docChild, self.name, False, None, relative_uris) r["docChild"] = u if self.docAfter is not None: u = save_relative_uri(self.docAfter, self.name, False, None, relative_uris) r["docAfter"] = u if self.jsonldPredicate is not None: r["jsonldPredicate"] = save( self.jsonldPredicate, top=False, base_url=self.name, relative_uris=relative_uris, ) if self.documentRoot is not None: r["documentRoot"] = save( self.documentRoot, top=False, base_url=self.name, relative_uris=relative_uris, ) if self.extends is not None: u = save_relative_uri(self.extends, self.name, False, 1, relative_uris) r["extends"] = u # top refers to the directory level if top: if self.loadingOptions.namespaces: r["$namespaces"] = self.loadingOptions.namespaces if self.loadingOptions.schemas: r["$schemas"] = self.loadingOptions.schemas return r attrs = frozenset( [ "name", "inVocab", "symbols", "type", "doc", "docParent", "docChild", "docAfter", "jsonldPredicate", "documentRoot", "extends", ] ) class SaladMapSchema(NamedType, MapSchema, SchemaDefinedType): """ Define a map type. """ def __init__( self, name: Any, type_: Any, values: Any, inVocab: Optional[Any] = None, doc: Optional[Any] = None, docParent: Optional[Any] = None, docChild: Optional[Any] = None, docAfter: Optional[Any] = None, jsonldPredicate: Optional[Any] = None, documentRoot: Optional[Any] = None, extension_fields: Optional[Dict[str, Any]] = None, loadingOptions: Optional[LoadingOptions] = None, ) -> None: if extension_fields: self.extension_fields = extension_fields else: self.extension_fields = CommentedMap() if loadingOptions: self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() self.name = name self.inVocab = inVocab self.type_ = type_ self.values = values self.doc = doc self.docParent = docParent self.docChild = docChild self.docAfter = docAfter self.jsonldPredicate = jsonldPredicate self.documentRoot = documentRoot def __eq__(self, other: Any) -> bool: if isinstance(other, SaladMapSchema): return bool( self.name == other.name and self.inVocab == other.inVocab and self.type_ == other.type_ and self.values == other.values and self.doc == other.doc and self.docParent == other.docParent and self.docChild == other.docChild and self.docAfter == other.docAfter and self.jsonldPredicate == other.jsonldPredicate and self.documentRoot == other.documentRoot ) return False def __hash__(self) -> int: return hash( ( self.name, self.inVocab, self.type_, self.values, self.doc, self.docParent, self.docChild, self.docAfter, self.jsonldPredicate, self.documentRoot, ) ) @classmethod def fromDoc( cls, doc: Any, baseuri: str, loadingOptions: LoadingOptions, docRoot: Optional[str] = None ) -> "SaladMapSchema": _doc = copy.copy(doc) if hasattr(doc, "lc"): _doc.lc.data = doc.lc.data _doc.lc.filename = doc.lc.filename _errors__ = [] name = None if "name" in _doc: try: name = load_field( _doc.get("name"), uri_strtype_True_False_None_None, baseuri, loadingOptions, lc=_doc.get("name") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) if str(e) == "missing required field `name`": _errors__.append( ValidationException( str(e), None ) ) else: if error_message != str(e): val_type = 
convert_typing(extract_type(type(_doc.get("name")))) _errors__.append( ValidationException( "the `name` field is not valid because:", SourceLine(_doc, "name", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}")], ) ) else: _errors__.append( ValidationException( "the `name` field is not valid because:", SourceLine(_doc, "name", str), [e], ) ) __original_name_is_none = name is None if name is None: if docRoot is not None: name = docRoot else: _errors__.append(ValidationException("missing name")) if not __original_name_is_none: baseuri = cast(str, name) inVocab = None if "inVocab" in _doc: try: inVocab = load_field( _doc.get("inVocab"), union_of_None_type_or_booltype, baseuri, loadingOptions, lc=_doc.get("inVocab") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) if str(e) == "missing required field `inVocab`": _errors__.append( ValidationException( str(e), None ) ) else: if error_message != str(e): val_type = convert_typing(extract_type(type(_doc.get("inVocab")))) _errors__.append( ValidationException( "the `inVocab` field is not valid because:", SourceLine(_doc, "inVocab", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}")], ) ) else: _errors__.append( ValidationException( "the `inVocab` field is not valid because:", SourceLine(_doc, "inVocab", str), [e], ) ) try: if _doc.get("type") is None: raise ValidationException("missing required field `type`", None, []) type_ = load_field( _doc.get("type"), typedsl_Map_nameLoader_2, baseuri, loadingOptions, lc=_doc.get("type") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) if str(e) == "missing required field `type`": _errors__.append( ValidationException( str(e), None ) ) else: if error_message != str(e): val_type = convert_typing(extract_type(type(_doc.get("type")))) _errors__.append( ValidationException( "the `type` field is not valid because:", SourceLine(_doc, "type", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}")], ) ) else: _errors__.append( ValidationException( "the `type` field is not valid because:", SourceLine(_doc, "type", str), [e], ) ) try: if _doc.get("values") is None: raise ValidationException("missing required field `values`", None, []) values = load_field( _doc.get("values"), uri_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_False_True_2_None, baseuri, loadingOptions, lc=_doc.get("values") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) if str(e) == "missing required field `values`": _errors__.append( ValidationException( str(e), None ) ) else: if error_message != str(e): val_type = convert_typing(extract_type(type(_doc.get("values")))) _errors__.append( ValidationException( "the `values` field is not valid because:", SourceLine(_doc, "values", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}")], ) ) else: _errors__.append( ValidationException( "the `values` field is not valid because:", SourceLine(_doc, "values", str), [e], ) ) doc = 
None if "doc" in _doc: try: doc = load_field( _doc.get("doc"), union_of_None_type_or_strtype_or_array_of_strtype, baseuri, loadingOptions, lc=_doc.get("doc") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) if str(e) == "missing required field `doc`": _errors__.append( ValidationException( str(e), None ) ) else: if error_message != str(e): val_type = convert_typing(extract_type(type(_doc.get("doc")))) _errors__.append( ValidationException( "the `doc` field is not valid because:", SourceLine(_doc, "doc", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}")], ) ) else: _errors__.append( ValidationException( "the `doc` field is not valid because:", SourceLine(_doc, "doc", str), [e], ) ) docParent = None if "docParent" in _doc: try: docParent = load_field( _doc.get("docParent"), uri_union_of_None_type_or_strtype_False_False_None_None, baseuri, loadingOptions, lc=_doc.get("docParent") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) if str(e) == "missing required field `docParent`": _errors__.append( ValidationException( str(e), None ) ) else: if error_message != str(e): val_type = convert_typing(extract_type(type(_doc.get("docParent")))) _errors__.append( ValidationException( "the `docParent` field is not valid because:", SourceLine(_doc, "docParent", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}")], ) ) else: _errors__.append( ValidationException( "the `docParent` field is not valid because:", SourceLine(_doc, "docParent", str), [e], ) ) docChild = None if "docChild" in _doc: try: docChild = load_field( _doc.get("docChild"), uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_None_None, baseuri, loadingOptions, lc=_doc.get("docChild") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) if str(e) == "missing required field `docChild`": _errors__.append( ValidationException( str(e), None ) ) else: if error_message != str(e): val_type = convert_typing(extract_type(type(_doc.get("docChild")))) _errors__.append( ValidationException( "the `docChild` field is not valid because:", SourceLine(_doc, "docChild", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}")], ) ) else: _errors__.append( ValidationException( "the `docChild` field is not valid because:", SourceLine(_doc, "docChild", str), [e], ) ) docAfter = None if "docAfter" in _doc: try: docAfter = load_field( _doc.get("docAfter"), uri_union_of_None_type_or_strtype_False_False_None_None, baseuri, loadingOptions, lc=_doc.get("docAfter") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) if str(e) == "missing required field `docAfter`": _errors__.append( ValidationException( str(e), None ) ) else: if error_message != str(e): val_type = convert_typing(extract_type(type(_doc.get("docAfter")))) _errors__.append( ValidationException( "the `docAfter` field is not valid because:", SourceLine(_doc, "docAfter", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}")], ) ) else: _errors__.append( ValidationException( "the `docAfter` field is not valid because:", SourceLine(_doc, "docAfter", str), [e], ) ) jsonldPredicate = None if "jsonldPredicate" in _doc: try: 
jsonldPredicate = load_field( _doc.get("jsonldPredicate"), union_of_None_type_or_strtype_or_JsonldPredicateLoader, baseuri, loadingOptions, lc=_doc.get("jsonldPredicate") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) if str(e) == "missing required field `jsonldPredicate`": _errors__.append( ValidationException( str(e), None ) ) else: if error_message != str(e): val_type = convert_typing(extract_type(type(_doc.get("jsonldPredicate")))) _errors__.append( ValidationException( "the `jsonldPredicate` field is not valid because:", SourceLine(_doc, "jsonldPredicate", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}")], ) ) else: _errors__.append( ValidationException( "the `jsonldPredicate` field is not valid because:", SourceLine(_doc, "jsonldPredicate", str), [e], ) ) documentRoot = None if "documentRoot" in _doc: try: documentRoot = load_field( _doc.get("documentRoot"), union_of_None_type_or_booltype, baseuri, loadingOptions, lc=_doc.get("documentRoot") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) if str(e) == "missing required field `documentRoot`": _errors__.append( ValidationException( str(e), None ) ) else: if error_message != str(e): val_type = convert_typing(extract_type(type(_doc.get("documentRoot")))) _errors__.append( ValidationException( "the `documentRoot` field is not valid because:", SourceLine(_doc, "documentRoot", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}")], ) ) else: _errors__.append( ValidationException( "the `documentRoot` field is not valid because:", SourceLine(_doc, "documentRoot", str), [e], ) ) extension_fields: Dict[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: _errors__.append( ValidationException("mapping with implicit null key") ) elif ":" in k: ex = expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( "invalid field `{}`, expected one of: `name`, `inVocab`, `type`, `values`, `doc`, `docParent`, `docChild`, `docAfter`, `jsonldPredicate`, `documentRoot`".format( k ), SourceLine(_doc, k, str), ) ) if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( name=name, inVocab=inVocab, type_=type_, values=values, doc=doc, docParent=docParent, docChild=docChild, docAfter=docAfter, jsonldPredicate=jsonldPredicate, documentRoot=documentRoot, extension_fields=extension_fields, loadingOptions=loadingOptions, ) loadingOptions.idx[cast(str, name)] = (_constructed, loadingOptions) return _constructed def save( self, top: bool = False, base_url: str = "", relative_uris: bool = True ) -> Dict[str, Any]: r: Dict[str, Any] = {} if relative_uris: for ef in self.extension_fields: r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] if self.name is not None: u = save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u if self.inVocab is not None: r["inVocab"] = save( self.inVocab, top=False, base_url=self.name, relative_uris=relative_uris ) if self.type_ is not None: r["type"] = save( self.type_, top=False, base_url=self.name, relative_uris=relative_uris ) if self.values is not None: u = save_relative_uri(self.values, self.name, False, 2, relative_uris) r["values"] = u if 
self.doc is not None: r["doc"] = save( self.doc, top=False, base_url=self.name, relative_uris=relative_uris ) if self.docParent is not None: u = save_relative_uri(self.docParent, self.name, False, None, relative_uris) r["docParent"] = u if self.docChild is not None: u = save_relative_uri(self.docChild, self.name, False, None, relative_uris) r["docChild"] = u if self.docAfter is not None: u = save_relative_uri(self.docAfter, self.name, False, None, relative_uris) r["docAfter"] = u if self.jsonldPredicate is not None: r["jsonldPredicate"] = save( self.jsonldPredicate, top=False, base_url=self.name, relative_uris=relative_uris, ) if self.documentRoot is not None: r["documentRoot"] = save( self.documentRoot, top=False, base_url=self.name, relative_uris=relative_uris, ) # top refers to the directory level if top: if self.loadingOptions.namespaces: r["$namespaces"] = self.loadingOptions.namespaces if self.loadingOptions.schemas: r["$schemas"] = self.loadingOptions.schemas return r attrs = frozenset( [ "name", "inVocab", "type", "values", "doc", "docParent", "docChild", "docAfter", "jsonldPredicate", "documentRoot", ] ) class SaladUnionSchema(NamedType, UnionSchema, DocType): """ Define a union type. """ def __init__( self, name: Any, names: Any, type_: Any, inVocab: Optional[Any] = None, doc: Optional[Any] = None, docParent: Optional[Any] = None, docChild: Optional[Any] = None, docAfter: Optional[Any] = None, documentRoot: Optional[Any] = None, extension_fields: Optional[Dict[str, Any]] = None, loadingOptions: Optional[LoadingOptions] = None, ) -> None: if extension_fields: self.extension_fields = extension_fields else: self.extension_fields = CommentedMap() if loadingOptions: self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() self.name = name self.inVocab = inVocab self.names = names self.type_ = type_ self.doc = doc self.docParent = docParent self.docChild = docChild self.docAfter = docAfter self.documentRoot = documentRoot def __eq__(self, other: Any) -> bool: if isinstance(other, SaladUnionSchema): return bool( self.name == other.name and self.inVocab == other.inVocab and self.names == other.names and self.type_ == other.type_ and self.doc == other.doc and self.docParent == other.docParent and self.docChild == other.docChild and self.docAfter == other.docAfter and self.documentRoot == other.documentRoot ) return False def __hash__(self) -> int: return hash( ( self.name, self.inVocab, self.names, self.type_, self.doc, self.docParent, self.docChild, self.docAfter, self.documentRoot, ) ) @classmethod def fromDoc( cls, doc: Any, baseuri: str, loadingOptions: LoadingOptions, docRoot: Optional[str] = None ) -> "SaladUnionSchema": _doc = copy.copy(doc) if hasattr(doc, "lc"): _doc.lc.data = doc.lc.data _doc.lc.filename = doc.lc.filename _errors__ = [] name = None if "name" in _doc: try: name = load_field( _doc.get("name"), uri_strtype_True_False_None_None, baseuri, loadingOptions, lc=_doc.get("name") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) if str(e) == "missing required field `name`": _errors__.append( ValidationException( str(e), None ) ) else: if error_message != str(e): val_type = convert_typing(extract_type(type(_doc.get("name")))) _errors__.append( ValidationException( "the `name` field is not valid because:", SourceLine(_doc, "name", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}")], ) ) else: _errors__.append( 
ValidationException( "the `name` field is not valid because:", SourceLine(_doc, "name", str), [e], ) ) __original_name_is_none = name is None if name is None: if docRoot is not None: name = docRoot else: _errors__.append(ValidationException("missing name")) if not __original_name_is_none: baseuri = cast(str, name) inVocab = None if "inVocab" in _doc: try: inVocab = load_field( _doc.get("inVocab"), union_of_None_type_or_booltype, baseuri, loadingOptions, lc=_doc.get("inVocab") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) if str(e) == "missing required field `inVocab`": _errors__.append( ValidationException( str(e), None ) ) else: if error_message != str(e): val_type = convert_typing(extract_type(type(_doc.get("inVocab")))) _errors__.append( ValidationException( "the `inVocab` field is not valid because:", SourceLine(_doc, "inVocab", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}")], ) ) else: _errors__.append( ValidationException( "the `inVocab` field is not valid because:", SourceLine(_doc, "inVocab", str), [e], ) ) try: if _doc.get("names") is None: raise ValidationException("missing required field `names`", None, []) names = load_field( _doc.get("names"), uri_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_False_True_2_None, baseuri, loadingOptions, lc=_doc.get("names") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) if str(e) == "missing required field `names`": _errors__.append( ValidationException( str(e), None ) ) else: if error_message != str(e): val_type = convert_typing(extract_type(type(_doc.get("names")))) _errors__.append( ValidationException( "the `names` field is not valid because:", SourceLine(_doc, "names", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}")], ) ) else: _errors__.append( ValidationException( "the `names` field is not valid because:", SourceLine(_doc, "names", str), [e], ) ) try: if _doc.get("type") is None: raise ValidationException("missing required field `type`", None, []) type_ = load_field( _doc.get("type"), typedsl_Union_nameLoader_2, baseuri, loadingOptions, lc=_doc.get("type") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) if str(e) == "missing required field `type`": _errors__.append( ValidationException( str(e), None ) ) else: if error_message != str(e): val_type = convert_typing(extract_type(type(_doc.get("type")))) _errors__.append( ValidationException( "the `type` field is not valid because:", SourceLine(_doc, "type", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}")], ) ) else: _errors__.append( ValidationException( "the `type` field is not valid because:", SourceLine(_doc, "type", str), [e], ) ) doc = None if "doc" in _doc: try: doc = load_field( _doc.get("doc"), union_of_None_type_or_strtype_or_array_of_strtype, baseuri, loadingOptions, lc=_doc.get("doc") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) if str(e) == "missing required field `doc`": _errors__.append( 
ValidationException( str(e), None ) ) else: if error_message != str(e): val_type = convert_typing(extract_type(type(_doc.get("doc")))) _errors__.append( ValidationException( "the `doc` field is not valid because:", SourceLine(_doc, "doc", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}")], ) ) else: _errors__.append( ValidationException( "the `doc` field is not valid because:", SourceLine(_doc, "doc", str), [e], ) ) docParent = None if "docParent" in _doc: try: docParent = load_field( _doc.get("docParent"), uri_union_of_None_type_or_strtype_False_False_None_None, baseuri, loadingOptions, lc=_doc.get("docParent") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) if str(e) == "missing required field `docParent`": _errors__.append( ValidationException( str(e), None ) ) else: if error_message != str(e): val_type = convert_typing(extract_type(type(_doc.get("docParent")))) _errors__.append( ValidationException( "the `docParent` field is not valid because:", SourceLine(_doc, "docParent", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}")], ) ) else: _errors__.append( ValidationException( "the `docParent` field is not valid because:", SourceLine(_doc, "docParent", str), [e], ) ) docChild = None if "docChild" in _doc: try: docChild = load_field( _doc.get("docChild"), uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_None_None, baseuri, loadingOptions, lc=_doc.get("docChild") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) if str(e) == "missing required field `docChild`": _errors__.append( ValidationException( str(e), None ) ) else: if error_message != str(e): val_type = convert_typing(extract_type(type(_doc.get("docChild")))) _errors__.append( ValidationException( "the `docChild` field is not valid because:", SourceLine(_doc, "docChild", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}")], ) ) else: _errors__.append( ValidationException( "the `docChild` field is not valid because:", SourceLine(_doc, "docChild", str), [e], ) ) docAfter = None if "docAfter" in _doc: try: docAfter = load_field( _doc.get("docAfter"), uri_union_of_None_type_or_strtype_False_False_None_None, baseuri, loadingOptions, lc=_doc.get("docAfter") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) if str(e) == "missing required field `docAfter`": _errors__.append( ValidationException( str(e), None ) ) else: if error_message != str(e): val_type = convert_typing(extract_type(type(_doc.get("docAfter")))) _errors__.append( ValidationException( "the `docAfter` field is not valid because:", SourceLine(_doc, "docAfter", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}")], ) ) else: _errors__.append( ValidationException( "the `docAfter` field is not valid because:", SourceLine(_doc, "docAfter", str), [e], ) ) documentRoot = None if "documentRoot" in _doc: try: documentRoot = load_field( _doc.get("documentRoot"), union_of_None_type_or_booltype, baseuri, loadingOptions, lc=_doc.get("documentRoot") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) if str(e) == "missing required field `documentRoot`": _errors__.append( 
ValidationException( str(e), None ) ) else: if error_message != str(e): val_type = convert_typing(extract_type(type(_doc.get("documentRoot")))) _errors__.append( ValidationException( "the `documentRoot` field is not valid because:", SourceLine(_doc, "documentRoot", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}")], ) ) else: _errors__.append( ValidationException( "the `documentRoot` field is not valid because:", SourceLine(_doc, "documentRoot", str), [e], ) ) extension_fields: Dict[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: _errors__.append( ValidationException("mapping with implicit null key") ) elif ":" in k: ex = expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( "invalid field `{}`, expected one of: `name`, `inVocab`, `names`, `type`, `doc`, `docParent`, `docChild`, `docAfter`, `documentRoot`".format( k ), SourceLine(_doc, k, str), ) ) if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( name=name, inVocab=inVocab, names=names, type_=type_, doc=doc, docParent=docParent, docChild=docChild, docAfter=docAfter, documentRoot=documentRoot, extension_fields=extension_fields, loadingOptions=loadingOptions, ) loadingOptions.idx[cast(str, name)] = (_constructed, loadingOptions) return _constructed def save( self, top: bool = False, base_url: str = "", relative_uris: bool = True ) -> Dict[str, Any]: r: Dict[str, Any] = {} if relative_uris: for ef in self.extension_fields: r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] if self.name is not None: u = save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u if self.inVocab is not None: r["inVocab"] = save( self.inVocab, top=False, base_url=self.name, relative_uris=relative_uris ) if self.names is not None: u = save_relative_uri(self.names, self.name, False, 2, relative_uris) r["names"] = u if self.type_ is not None: r["type"] = save( self.type_, top=False, base_url=self.name, relative_uris=relative_uris ) if self.doc is not None: r["doc"] = save( self.doc, top=False, base_url=self.name, relative_uris=relative_uris ) if self.docParent is not None: u = save_relative_uri(self.docParent, self.name, False, None, relative_uris) r["docParent"] = u if self.docChild is not None: u = save_relative_uri(self.docChild, self.name, False, None, relative_uris) r["docChild"] = u if self.docAfter is not None: u = save_relative_uri(self.docAfter, self.name, False, None, relative_uris) r["docAfter"] = u if self.documentRoot is not None: r["documentRoot"] = save( self.documentRoot, top=False, base_url=self.name, relative_uris=relative_uris, ) # top refers to the directory level if top: if self.loadingOptions.namespaces: r["$namespaces"] = self.loadingOptions.namespaces if self.loadingOptions.schemas: r["$schemas"] = self.loadingOptions.schemas return r attrs = frozenset( [ "name", "inVocab", "names", "type", "doc", "docParent", "docChild", "docAfter", "documentRoot", ] ) class Documentation(NamedType, DocType): """ A documentation section. This type exists to facilitate self-documenting schemas but has no role in formal validation. 
""" def __init__( self, name: Any, type_: Any, inVocab: Optional[Any] = None, doc: Optional[Any] = None, docParent: Optional[Any] = None, docChild: Optional[Any] = None, docAfter: Optional[Any] = None, extension_fields: Optional[Dict[str, Any]] = None, loadingOptions: Optional[LoadingOptions] = None, ) -> None: if extension_fields: self.extension_fields = extension_fields else: self.extension_fields = CommentedMap() if loadingOptions: self.loadingOptions = loadingOptions else: self.loadingOptions = LoadingOptions() self.name = name self.inVocab = inVocab self.doc = doc self.docParent = docParent self.docChild = docChild self.docAfter = docAfter self.type_ = type_ def __eq__(self, other: Any) -> bool: if isinstance(other, Documentation): return bool( self.name == other.name and self.inVocab == other.inVocab and self.doc == other.doc and self.docParent == other.docParent and self.docChild == other.docChild and self.docAfter == other.docAfter and self.type_ == other.type_ ) return False def __hash__(self) -> int: return hash( ( self.name, self.inVocab, self.doc, self.docParent, self.docChild, self.docAfter, self.type_, ) ) @classmethod def fromDoc( cls, doc: Any, baseuri: str, loadingOptions: LoadingOptions, docRoot: Optional[str] = None ) -> "Documentation": _doc = copy.copy(doc) if hasattr(doc, "lc"): _doc.lc.data = doc.lc.data _doc.lc.filename = doc.lc.filename _errors__ = [] name = None if "name" in _doc: try: name = load_field( _doc.get("name"), uri_strtype_True_False_None_None, baseuri, loadingOptions, lc=_doc.get("name") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) if str(e) == "missing required field `name`": _errors__.append( ValidationException( str(e), None ) ) else: if error_message != str(e): val_type = convert_typing(extract_type(type(_doc.get("name")))) _errors__.append( ValidationException( "the `name` field is not valid because:", SourceLine(_doc, "name", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}")], ) ) else: _errors__.append( ValidationException( "the `name` field is not valid because:", SourceLine(_doc, "name", str), [e], ) ) __original_name_is_none = name is None if name is None: if docRoot is not None: name = docRoot else: _errors__.append(ValidationException("missing name")) if not __original_name_is_none: baseuri = cast(str, name) inVocab = None if "inVocab" in _doc: try: inVocab = load_field( _doc.get("inVocab"), union_of_None_type_or_booltype, baseuri, loadingOptions, lc=_doc.get("inVocab") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) if str(e) == "missing required field `inVocab`": _errors__.append( ValidationException( str(e), None ) ) else: if error_message != str(e): val_type = convert_typing(extract_type(type(_doc.get("inVocab")))) _errors__.append( ValidationException( "the `inVocab` field is not valid because:", SourceLine(_doc, "inVocab", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}")], ) ) else: _errors__.append( ValidationException( "the `inVocab` field is not valid because:", SourceLine(_doc, "inVocab", str), [e], ) ) doc = None if "doc" in _doc: try: doc = load_field( _doc.get("doc"), union_of_None_type_or_strtype_or_array_of_strtype, baseuri, loadingOptions, lc=_doc.get("doc") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) if str(e) == 
"missing required field `doc`": _errors__.append( ValidationException( str(e), None ) ) else: if error_message != str(e): val_type = convert_typing(extract_type(type(_doc.get("doc")))) _errors__.append( ValidationException( "the `doc` field is not valid because:", SourceLine(_doc, "doc", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}")], ) ) else: _errors__.append( ValidationException( "the `doc` field is not valid because:", SourceLine(_doc, "doc", str), [e], ) ) docParent = None if "docParent" in _doc: try: docParent = load_field( _doc.get("docParent"), uri_union_of_None_type_or_strtype_False_False_None_None, baseuri, loadingOptions, lc=_doc.get("docParent") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) if str(e) == "missing required field `docParent`": _errors__.append( ValidationException( str(e), None ) ) else: if error_message != str(e): val_type = convert_typing(extract_type(type(_doc.get("docParent")))) _errors__.append( ValidationException( "the `docParent` field is not valid because:", SourceLine(_doc, "docParent", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}")], ) ) else: _errors__.append( ValidationException( "the `docParent` field is not valid because:", SourceLine(_doc, "docParent", str), [e], ) ) docChild = None if "docChild" in _doc: try: docChild = load_field( _doc.get("docChild"), uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_None_None, baseuri, loadingOptions, lc=_doc.get("docChild") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) if str(e) == "missing required field `docChild`": _errors__.append( ValidationException( str(e), None ) ) else: if error_message != str(e): val_type = convert_typing(extract_type(type(_doc.get("docChild")))) _errors__.append( ValidationException( "the `docChild` field is not valid because:", SourceLine(_doc, "docChild", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}")], ) ) else: _errors__.append( ValidationException( "the `docChild` field is not valid because:", SourceLine(_doc, "docChild", str), [e], ) ) docAfter = None if "docAfter" in _doc: try: docAfter = load_field( _doc.get("docAfter"), uri_union_of_None_type_or_strtype_False_False_None_None, baseuri, loadingOptions, lc=_doc.get("docAfter") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) if str(e) == "missing required field `docAfter`": _errors__.append( ValidationException( str(e), None ) ) else: if error_message != str(e): val_type = convert_typing(extract_type(type(_doc.get("docAfter")))) _errors__.append( ValidationException( "the `docAfter` field is not valid because:", SourceLine(_doc, "docAfter", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}")], ) ) else: _errors__.append( ValidationException( "the `docAfter` field is not valid because:", SourceLine(_doc, "docAfter", str), [e], ) ) try: if _doc.get("type") is None: raise ValidationException("missing required field `type`", None, []) type_ = load_field( _doc.get("type"), typedsl_Documentation_nameLoader_2, baseuri, loadingOptions, lc=_doc.get("type") ) except ValidationException as e: error_message, to_print, verb_tensage = parse_errors(str(e)) if 
str(e) == "missing required field `type`": _errors__.append( ValidationException( str(e), None ) ) else: if error_message != str(e): val_type = convert_typing(extract_type(type(_doc.get("type")))) _errors__.append( ValidationException( "the `type` field is not valid because:", SourceLine(_doc, "type", str), [ValidationException(f"Value is a {val_type}, " f"but valid {to_print} for this field " f"{verb_tensage} {error_message}")], ) ) else: _errors__.append( ValidationException( "the `type` field is not valid because:", SourceLine(_doc, "type", str), [e], ) ) extension_fields: Dict[str, Any] = {} for k in _doc.keys(): if k not in cls.attrs: if not k: _errors__.append( ValidationException("mapping with implicit null key") ) elif ":" in k: ex = expand_url( k, "", loadingOptions, scoped_id=False, vocab_term=False ) extension_fields[ex] = _doc[k] else: _errors__.append( ValidationException( "invalid field `{}`, expected one of: `name`, `inVocab`, `doc`, `docParent`, `docChild`, `docAfter`, `type`".format( k ), SourceLine(_doc, k, str), ) ) if _errors__: raise ValidationException("", None, _errors__, "*") _constructed = cls( name=name, inVocab=inVocab, doc=doc, docParent=docParent, docChild=docChild, docAfter=docAfter, type_=type_, extension_fields=extension_fields, loadingOptions=loadingOptions, ) loadingOptions.idx[cast(str, name)] = (_constructed, loadingOptions) return _constructed def save( self, top: bool = False, base_url: str = "", relative_uris: bool = True ) -> Dict[str, Any]: r: Dict[str, Any] = {} if relative_uris: for ef in self.extension_fields: r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] else: for ef in self.extension_fields: r[ef] = self.extension_fields[ef] if self.name is not None: u = save_relative_uri(self.name, base_url, True, None, relative_uris) r["name"] = u if self.inVocab is not None: r["inVocab"] = save( self.inVocab, top=False, base_url=self.name, relative_uris=relative_uris ) if self.doc is not None: r["doc"] = save( self.doc, top=False, base_url=self.name, relative_uris=relative_uris ) if self.docParent is not None: u = save_relative_uri(self.docParent, self.name, False, None, relative_uris) r["docParent"] = u if self.docChild is not None: u = save_relative_uri(self.docChild, self.name, False, None, relative_uris) r["docChild"] = u if self.docAfter is not None: u = save_relative_uri(self.docAfter, self.name, False, None, relative_uris) r["docAfter"] = u if self.type_ is not None: r["type"] = save( self.type_, top=False, base_url=self.name, relative_uris=relative_uris ) # top refers to the directory level if top: if self.loadingOptions.namespaces: r["$namespaces"] = self.loadingOptions.namespaces if self.loadingOptions.schemas: r["$schemas"] = self.loadingOptions.schemas return r attrs = frozenset( ["name", "inVocab", "doc", "docParent", "docChild", "docAfter", "type"] ) _vocab = { "Any": "https://w3id.org/cwl/salad#Any", "ArraySchema": "https://w3id.org/cwl/salad#ArraySchema", "DocType": "https://w3id.org/cwl/salad#DocType", "Documentation": "https://w3id.org/cwl/salad#Documentation", "Documented": "https://w3id.org/cwl/salad#Documented", "EnumSchema": "https://w3id.org/cwl/salad#EnumSchema", "JsonldPredicate": "https://w3id.org/cwl/salad#JsonldPredicate", "MapSchema": "https://w3id.org/cwl/salad#MapSchema", "NamedType": "https://w3id.org/cwl/salad#NamedType", "PrimitiveType": "https://w3id.org/cwl/salad#PrimitiveType", "RecordField": "https://w3id.org/cwl/salad#RecordField", "RecordSchema": "https://w3id.org/cwl/salad#RecordSchema", 
"SaladEnumSchema": "https://w3id.org/cwl/salad#SaladEnumSchema", "SaladMapSchema": "https://w3id.org/cwl/salad#SaladMapSchema", "SaladRecordField": "https://w3id.org/cwl/salad#SaladRecordField", "SaladRecordSchema": "https://w3id.org/cwl/salad#SaladRecordSchema", "SaladUnionSchema": "https://w3id.org/cwl/salad#SaladUnionSchema", "SchemaDefinedType": "https://w3id.org/cwl/salad#SchemaDefinedType", "SpecializeDef": "https://w3id.org/cwl/salad#SpecializeDef", "UnionSchema": "https://w3id.org/cwl/salad#UnionSchema", "array": "https://w3id.org/cwl/salad#array", "boolean": "http://www.w3.org/2001/XMLSchema#boolean", "documentation": "https://w3id.org/cwl/salad#documentation", "double": "http://www.w3.org/2001/XMLSchema#double", "enum": "https://w3id.org/cwl/salad#enum", "float": "http://www.w3.org/2001/XMLSchema#float", "int": "http://www.w3.org/2001/XMLSchema#int", "long": "http://www.w3.org/2001/XMLSchema#long", "map": "https://w3id.org/cwl/salad#map", "null": "https://w3id.org/cwl/salad#null", "record": "https://w3id.org/cwl/salad#record", "string": "http://www.w3.org/2001/XMLSchema#string", "union": "https://w3id.org/cwl/salad#union", } _rvocab = { "https://w3id.org/cwl/salad#Any": "Any", "https://w3id.org/cwl/salad#ArraySchema": "ArraySchema", "https://w3id.org/cwl/salad#DocType": "DocType", "https://w3id.org/cwl/salad#Documentation": "Documentation", "https://w3id.org/cwl/salad#Documented": "Documented", "https://w3id.org/cwl/salad#EnumSchema": "EnumSchema", "https://w3id.org/cwl/salad#JsonldPredicate": "JsonldPredicate", "https://w3id.org/cwl/salad#MapSchema": "MapSchema", "https://w3id.org/cwl/salad#NamedType": "NamedType", "https://w3id.org/cwl/salad#PrimitiveType": "PrimitiveType", "https://w3id.org/cwl/salad#RecordField": "RecordField", "https://w3id.org/cwl/salad#RecordSchema": "RecordSchema", "https://w3id.org/cwl/salad#SaladEnumSchema": "SaladEnumSchema", "https://w3id.org/cwl/salad#SaladMapSchema": "SaladMapSchema", "https://w3id.org/cwl/salad#SaladRecordField": "SaladRecordField", "https://w3id.org/cwl/salad#SaladRecordSchema": "SaladRecordSchema", "https://w3id.org/cwl/salad#SaladUnionSchema": "SaladUnionSchema", "https://w3id.org/cwl/salad#SchemaDefinedType": "SchemaDefinedType", "https://w3id.org/cwl/salad#SpecializeDef": "SpecializeDef", "https://w3id.org/cwl/salad#UnionSchema": "UnionSchema", "https://w3id.org/cwl/salad#array": "array", "http://www.w3.org/2001/XMLSchema#boolean": "boolean", "https://w3id.org/cwl/salad#documentation": "documentation", "http://www.w3.org/2001/XMLSchema#double": "double", "https://w3id.org/cwl/salad#enum": "enum", "http://www.w3.org/2001/XMLSchema#float": "float", "http://www.w3.org/2001/XMLSchema#int": "int", "http://www.w3.org/2001/XMLSchema#long": "long", "https://w3id.org/cwl/salad#map": "map", "https://w3id.org/cwl/salad#null": "null", "https://w3id.org/cwl/salad#record": "record", "http://www.w3.org/2001/XMLSchema#string": "string", "https://w3id.org/cwl/salad#union": "union", } strtype = _PrimitiveLoader(str) inttype = _PrimitiveLoader(int) floattype = _PrimitiveLoader(float) booltype = _PrimitiveLoader(bool) None_type = _PrimitiveLoader(type(None)) Any_type = _AnyLoader() PrimitiveTypeLoader = _EnumLoader( ( "null", "boolean", "int", "long", "float", "double", "string", ), "PrimitiveType", ) """ Names of salad data types (based on Avro schema declarations). Refer to the [Avro schema declaration documentation](https://avro.apache.org/docs/current/spec.html#schemas) for detailed information. 
null: no value boolean: a binary value int: 32-bit signed integer long: 64-bit signed integer float: single precision (32-bit) IEEE 754 floating-point number double: double precision (64-bit) IEEE 754 floating-point number string: Unicode character sequence """ AnyLoader = _EnumLoader(("Any",), "Any") """ The **Any** type validates for any non-null value. """ RecordFieldLoader = _RecordLoader(RecordField, None, None) RecordSchemaLoader = _RecordLoader(RecordSchema, None, None) EnumSchemaLoader = _RecordLoader(EnumSchema, None, None) ArraySchemaLoader = _RecordLoader(ArraySchema, None, None) MapSchemaLoader = _RecordLoader(MapSchema, None, None) UnionSchemaLoader = _RecordLoader(UnionSchema, None, None) JsonldPredicateLoader = _RecordLoader(JsonldPredicate, None, None) SpecializeDefLoader = _RecordLoader(SpecializeDef, None, None) SaladRecordFieldLoader = _RecordLoader(SaladRecordField, None, None) SaladRecordSchemaLoader = _RecordLoader(SaladRecordSchema, None, None) SaladEnumSchemaLoader = _RecordLoader(SaladEnumSchema, None, None) SaladMapSchemaLoader = _RecordLoader(SaladMapSchema, None, None) SaladUnionSchemaLoader = _RecordLoader(SaladUnionSchema, None, None) DocumentationLoader = _RecordLoader(Documentation, None, None) array_of_strtype = _ArrayLoader(strtype) union_of_None_type_or_strtype_or_array_of_strtype = _UnionLoader( ( None_type, strtype, array_of_strtype, ) ) uri_strtype_True_False_None_None = _URILoader(strtype, True, False, None, None) union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype = _UnionLoader( ( PrimitiveTypeLoader, RecordSchemaLoader, EnumSchemaLoader, ArraySchemaLoader, MapSchemaLoader, UnionSchemaLoader, strtype, ) ) array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype = _ArrayLoader( union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype ) union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype = _UnionLoader( ( PrimitiveTypeLoader, RecordSchemaLoader, EnumSchemaLoader, ArraySchemaLoader, MapSchemaLoader, UnionSchemaLoader, strtype, array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype, ) ) typedsl_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_2 = _TypeDSLLoader( union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype, 2, "v1.1", ) array_of_RecordFieldLoader = _ArrayLoader(RecordFieldLoader) union_of_None_type_or_array_of_RecordFieldLoader = _UnionLoader( ( None_type, array_of_RecordFieldLoader, ) ) idmap_fields_union_of_None_type_or_array_of_RecordFieldLoader = 
_IdMapLoader( union_of_None_type_or_array_of_RecordFieldLoader, "name", "type" ) Record_nameLoader = _EnumLoader(("record",), "Record_name") typedsl_Record_nameLoader_2 = _TypeDSLLoader(Record_nameLoader, 2, "v1.1") union_of_None_type_or_strtype = _UnionLoader( ( None_type, strtype, ) ) uri_union_of_None_type_or_strtype_True_False_None_None = _URILoader( union_of_None_type_or_strtype, True, False, None, None ) uri_array_of_strtype_True_False_None_None = _URILoader( array_of_strtype, True, False, None, None ) Enum_nameLoader = _EnumLoader(("enum",), "Enum_name") typedsl_Enum_nameLoader_2 = _TypeDSLLoader(Enum_nameLoader, 2, "v1.1") uri_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_False_True_2_None = _URILoader( union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_MapSchemaLoader_or_UnionSchemaLoader_or_strtype, False, True, 2, None, ) Array_nameLoader = _EnumLoader(("array",), "Array_name") typedsl_Array_nameLoader_2 = _TypeDSLLoader(Array_nameLoader, 2, "v1.1") Map_nameLoader = _EnumLoader(("map",), "Map_name") typedsl_Map_nameLoader_2 = _TypeDSLLoader(Map_nameLoader, 2, "v1.1") Union_nameLoader = _EnumLoader(("union",), "Union_name") typedsl_Union_nameLoader_2 = _TypeDSLLoader(Union_nameLoader, 2, "v1.1") union_of_None_type_or_booltype = _UnionLoader( ( None_type, booltype, ) ) union_of_None_type_or_inttype = _UnionLoader( ( None_type, inttype, ) ) uri_strtype_False_False_1_None = _URILoader(strtype, False, False, 1, None) uri_union_of_None_type_or_strtype_False_False_None_None = _URILoader( union_of_None_type_or_strtype, False, False, None, None ) uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_None_None = ( _URILoader( union_of_None_type_or_strtype_or_array_of_strtype, False, False, None, None ) ) union_of_None_type_or_strtype_or_JsonldPredicateLoader = _UnionLoader( ( None_type, strtype, JsonldPredicateLoader, ) ) union_of_None_type_or_Any_type = _UnionLoader( ( None_type, Any_type, ) ) array_of_SaladRecordFieldLoader = _ArrayLoader(SaladRecordFieldLoader) union_of_None_type_or_array_of_SaladRecordFieldLoader = _UnionLoader( ( None_type, array_of_SaladRecordFieldLoader, ) ) idmap_fields_union_of_None_type_or_array_of_SaladRecordFieldLoader = _IdMapLoader( union_of_None_type_or_array_of_SaladRecordFieldLoader, "name", "type" ) uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_1_None = _URILoader( union_of_None_type_or_strtype_or_array_of_strtype, False, False, 1, None ) array_of_SpecializeDefLoader = _ArrayLoader(SpecializeDefLoader) union_of_None_type_or_array_of_SpecializeDefLoader = _UnionLoader( ( None_type, array_of_SpecializeDefLoader, ) ) idmap_specialize_union_of_None_type_or_array_of_SpecializeDefLoader = _IdMapLoader( union_of_None_type_or_array_of_SpecializeDefLoader, "specializeFrom", "specializeTo" ) Documentation_nameLoader = _EnumLoader(("documentation",), "Documentation_name") typedsl_Documentation_nameLoader_2 = _TypeDSLLoader(Documentation_nameLoader, 2, "v1.1") union_of_SaladRecordSchemaLoader_or_SaladEnumSchemaLoader_or_SaladMapSchemaLoader_or_SaladUnionSchemaLoader_or_DocumentationLoader = 
_UnionLoader(
    (
        SaladRecordSchemaLoader,
        SaladEnumSchemaLoader,
        SaladMapSchemaLoader,
        SaladUnionSchemaLoader,
        DocumentationLoader,
    )
)
array_of_union_of_SaladRecordSchemaLoader_or_SaladEnumSchemaLoader_or_SaladMapSchemaLoader_or_SaladUnionSchemaLoader_or_DocumentationLoader = _ArrayLoader(
    union_of_SaladRecordSchemaLoader_or_SaladEnumSchemaLoader_or_SaladMapSchemaLoader_or_SaladUnionSchemaLoader_or_DocumentationLoader
)
union_of_SaladRecordSchemaLoader_or_SaladEnumSchemaLoader_or_SaladMapSchemaLoader_or_SaladUnionSchemaLoader_or_DocumentationLoader_or_array_of_union_of_SaladRecordSchemaLoader_or_SaladEnumSchemaLoader_or_SaladMapSchemaLoader_or_SaladUnionSchemaLoader_or_DocumentationLoader = _UnionLoader(
    (
        SaladRecordSchemaLoader,
        SaladEnumSchemaLoader,
        SaladMapSchemaLoader,
        SaladUnionSchemaLoader,
        DocumentationLoader,
        array_of_union_of_SaladRecordSchemaLoader_or_SaladEnumSchemaLoader_or_SaladMapSchemaLoader_or_SaladUnionSchemaLoader_or_DocumentationLoader,
    )
)


def load_document(
    doc: Any,
    baseuri: Optional[str] = None,
    loadingOptions: Optional[LoadingOptions] = None,
) -> Any:
    if baseuri is None:
        baseuri = file_uri(os.getcwd()) + "/"
    if loadingOptions is None:
        loadingOptions = LoadingOptions()
    result, metadata = _document_load(
        union_of_SaladRecordSchemaLoader_or_SaladEnumSchemaLoader_or_SaladMapSchemaLoader_or_SaladUnionSchemaLoader_or_DocumentationLoader_or_array_of_union_of_SaladRecordSchemaLoader_or_SaladEnumSchemaLoader_or_SaladMapSchemaLoader_or_SaladUnionSchemaLoader_or_DocumentationLoader,
        doc,
        baseuri,
        loadingOptions,
    )
    return result


def load_document_with_metadata(
    doc: Any,
    baseuri: Optional[str] = None,
    loadingOptions: Optional[LoadingOptions] = None,
    addl_metadata_fields: Optional[MutableSequence[str]] = None,
) -> Any:
    if baseuri is None:
        baseuri = file_uri(os.getcwd()) + "/"
    if loadingOptions is None:
        loadingOptions = LoadingOptions(fileuri=baseuri)
    return _document_load(
        union_of_SaladRecordSchemaLoader_or_SaladEnumSchemaLoader_or_SaladMapSchemaLoader_or_SaladUnionSchemaLoader_or_DocumentationLoader_or_array_of_union_of_SaladRecordSchemaLoader_or_SaladEnumSchemaLoader_or_SaladMapSchemaLoader_or_SaladUnionSchemaLoader_or_DocumentationLoader,
        doc,
        baseuri,
        loadingOptions,
        addl_metadata_fields=addl_metadata_fields,
    )


def load_document_by_string(
    string: Any,
    uri: str,
    loadingOptions: Optional[LoadingOptions] = None,
) -> Any:
    yaml = yaml_no_ts()
    result = yaml.load(string)
    add_lc_filename(result, uri)
    if loadingOptions is None:
        loadingOptions = LoadingOptions(fileuri=uri)
    result, metadata = _document_load(
        union_of_SaladRecordSchemaLoader_or_SaladEnumSchemaLoader_or_SaladMapSchemaLoader_or_SaladUnionSchemaLoader_or_DocumentationLoader_or_array_of_union_of_SaladRecordSchemaLoader_or_SaladEnumSchemaLoader_or_SaladMapSchemaLoader_or_SaladUnionSchemaLoader_or_DocumentationLoader,
        result,
        uri,
        loadingOptions,
    )
    return result


def load_document_by_yaml(
    yaml: Any,
    uri: str,
    loadingOptions: Optional[LoadingOptions] = None,
) -> Any:
    """
    Shortcut to load via a YAML object.
    yaml: must be from ruamel.yaml.main.YAML.load with preserve_quotes=True
    """
    add_lc_filename(yaml, uri)
    if loadingOptions is None:
        loadingOptions = LoadingOptions(fileuri=uri)
    result, metadata = _document_load(
        union_of_SaladRecordSchemaLoader_or_SaladEnumSchemaLoader_or_SaladMapSchemaLoader_or_SaladUnionSchemaLoader_or_DocumentationLoader_or_array_of_union_of_SaladRecordSchemaLoader_or_SaladEnumSchemaLoader_or_SaladMapSchemaLoader_or_SaladUnionSchemaLoader_or_DocumentationLoader,
        yaml,
        uri,
        loadingOptions,
    )
    return result
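

# ---------------------------------------------------------------------------
# Editor's note: the block below is a hedged usage sketch, not part of the
# autogenerated API. It only uses entry points defined above
# (``load_document_by_string`` and the per-class ``save`` methods); the
# document text and the ``file://`` URI are made-up values for illustration.
if __name__ == "__main__":
    _example_text = (
        "name: ExampleDocumentation\n"
        "type: documentation\n"
        "doc: A minimal documentation entry used to exercise the loader.\n"
    )
    try:
        # Parse the YAML text, resolve identifiers against the supplied base
        # URI, and construct the corresponding class instance (here, a
        # ``Documentation`` object).
        _example = load_document_by_string(
            _example_text, "file:///example/example-schema.yml"
        )
        # Round-trip back to a JSON/YAML-friendly dict; ``top=True`` also
        # emits ``$namespaces``/``$schemas`` metadata when present.
        print(_example.save(top=True, relative_uris=True))
    except ValidationException as exc:
        _logger.error("example document failed validation: %s", exc)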