diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 1b08d489..a5ba8211 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -9,7 +9,7 @@ repos: - id: check-yaml - id: check-added-large-files - repo: https://github.com/psf/black-pre-commit-mirror - rev: 26.3.0 + rev: 26.3.1 hooks: - id: black - repo: https://github.com/PyCQA/isort diff --git a/dandischema/models.py b/dandischema/models.py index 58e57355..6815ca49 100644 --- a/dandischema/models.py +++ b/dandischema/models.py @@ -1,2005 +1,9 @@ -from __future__ import annotations +from .models_linkml import * # noqa: F401,F403 +from .models_orig import DANDI_INSTANCE_URL_PATTERN # noqa: F401 -from datetime import date, datetime -from enum import Enum -import re -from typing import ( - TYPE_CHECKING, - Annotated, - Any, - Dict, - List, - Literal, - Optional, - Sequence, - Type, - TypeVar, - Union, -) -from warnings import warn +# TODO: temporary imports of consts etc which might need to be 'redone' +# so we do not duplicate them -from pydantic import ( - UUID4, - AnyHttpUrl, - BaseModel, - ConfigDict, - EmailStr, - Field, - GetJsonSchemaHandler, - SerializerFunctionWrapHandler, - StringConstraints, - ValidationInfo, - field_serializer, - field_validator, - model_validator, -) -from pydantic.json_schema import JsonSchemaValue -from pydantic_core import CoreSchema -from zarr_checksum.checksum import InvalidZarrChecksum, ZarrDirectoryDigest -from dandischema.conf import ( - DEFAULT_INSTANCE_NAME, - UNVENDORED_DOI_PREFIX_PATTERN, - UNVENDORED_ID_PATTERN, - get_instance_config, -) - -from .consts import DANDI_SCHEMA_VERSION -from .digests.dandietag import DandiETag -from .types import ByteSizeJsonSchema -from .utils import name2title - -try: - from anys import AnyBase -except ImportError: - _has_anys = False -else: - _has_anys = True - -# Load needed configurations into constants -_INSTANCE_CONFIG = get_instance_config() - -# Regex pattern for the prefix of identifiers -ID_PATTERN = ( - 
_INSTANCE_CONFIG.instance_name - if _INSTANCE_CONFIG.instance_name != DEFAULT_INSTANCE_NAME - else UNVENDORED_ID_PATTERN -) - -# The pattern that a DOI prefix of a dandiset must conform to -DOI_PREFIX_PATTERN = ( - re.escape(_INSTANCE_CONFIG.doi_prefix) - if _INSTANCE_CONFIG.doi_prefix is not None - else UNVENDORED_DOI_PREFIX_PATTERN -) - -# The pattern of the DANDI instance URL -DANDI_INSTANCE_URL_PATTERN = ( - ".*" - if _INSTANCE_CONFIG.instance_url is None - else re.escape(str(_INSTANCE_CONFIG.instance_url).rstrip("/")) -) - -NAME_PATTERN = r"^([\w\s\-\.']+),\s+([\w\s\-\.']+)$" -UUID_PATTERN = ( - "[a-f0-9]{8}[-]*[a-f0-9]{4}[-]*" "[a-f0-9]{4}[-]*[a-f0-9]{4}[-]*[a-f0-9]{12}$" -) -ASSET_UUID_PATTERN = r"^dandiasset:" + UUID_PATTERN -VERSION_NUM_PATTERN = r"\d+\.\d+\.\d+" -VERSION_PATTERN = rf"\d{{6}}/{VERSION_NUM_PATTERN}" -_INNER_DANDI_DOI_PATTERN = ( - rf"{DOI_PREFIX_PATTERN}/{ID_PATTERN.lower()}\.{VERSION_PATTERN}" -) -DANDI_DOI_PATTERN = ( - rf"^{_INNER_DANDI_DOI_PATTERN}$" - if _INSTANCE_CONFIG.doi_prefix is not None - else rf"^({_INNER_DANDI_DOI_PATTERN}|)$" # This matches an empty string as well -) -DANDI_PUBID_PATTERN = rf"^{ID_PATTERN}:{VERSION_PATTERN}$" -DANDI_NSKEY = "dandi" # Namespace for DANDI ontology - -PUBLISHED_VERSION_URL_PATTERN = ( - rf"^{DANDI_INSTANCE_URL_PATTERN}/dandiset/{VERSION_PATTERN}$" -) -MD5_PATTERN = r"[0-9a-f]{32}" -SHA256_PATTERN = r"[0-9a-f]{64}" - -M = TypeVar("M", bound=BaseModel) - - -def diff_models(model1: M, model2: M) -> None: - """Perform a field-wise diff""" - for field in model1.model_fields: - if getattr(model1, field) != getattr(model2, field): - print(f"{field} is different") - - -if TYPE_CHECKING: - # This is just a placeholder for static type checking - class LicenseType(Enum): - ... 
# fmt: skip - -else: - LicenseType = Enum( - "LicenseType", - [(license_.name, license_.value) for license_ in _INSTANCE_CONFIG.licenses], - ) - r""" - An enumeration of supported licenses - - The value of each member is a string that matches the regex pattern of - `^([^:\s]+):(\S+)$` in which the first group matches the license scheme such - as `"spdx"`, and the second group matches the license identifier such as - `"CC-BY-4.0"`. - """ - - -class AccessType(Enum): - """An enumeration of access status options""" - - #: The dandiset is openly accessible - OpenAccess = f"{DANDI_NSKEY}:OpenAccess" - - #: The dandiset is embargoed - EmbargoedAccess = f"{DANDI_NSKEY}:EmbargoedAccess" - - """ - Uncomment when restricted access is implemented: - #: The dandiset is restricted - RestrictedAccess = f"{DANDI_NSKEY}:RestrictedAccess" - """ - - -class DigestType(Enum): - """An enumeration of checksum types""" - - #: MD5 checksum - md5 = f"{DANDI_NSKEY}:md5" - - #: SHA1 checksum - sha1 = f"{DANDI_NSKEY}:sha1" - - #: SHA2-256 checksum - sha2_256 = f"{DANDI_NSKEY}:sha2-256" - - #: SHA3-256 checksum - sha3_256 = f"{DANDI_NSKEY}:sha3-256" - - #: BLAKE2B-256 checksum - blake2b_256 = f"{DANDI_NSKEY}:blake2b-256" - - #: BLAKE3-256 checksum - blake3 = f"{DANDI_NSKEY}:blake3" - - #: S3-style ETag - dandi_etag = f"{DANDI_NSKEY}:dandi-etag" - - #: DANDI Zarr checksum - dandi_zarr_checksum = f"{DANDI_NSKEY}:dandi-zarr-checksum" - - -class IdentifierType(Enum): - """An enumeration of identifiers""" - - doi = f"{DANDI_NSKEY}:doi" - orcid = f"{DANDI_NSKEY}:orcid" - ror = f"{DANDI_NSKEY}:ror" - dandi = f"{DANDI_NSKEY}:dandi" - rrid = f"{DANDI_NSKEY}:rrid" - - -class RelationType(Enum): - """An enumeration of resource relations""" - - #: Indicates that B includes A in a citation - IsCitedBy = "dcite:IsCitedBy" - - #: Indicates that A includes B in a citation - Cites = "dcite:Cites" - - #: Indicates that A is a supplement to B - IsSupplementTo = "dcite:IsSupplementTo" - - #: Indicates that B is a 
supplement to A - IsSupplementedBy = "dcite:IsSupplementedBy" - - #: Indicates A is continued by the work B - IsContinuedBy = "dcite:IsContinuedBy" - - #: Indicates A is a continuation of the work B - Continues = "dcite:Continues" - - #: Indicates A describes B - Describes = "dcite:Describes" - - #: Indicates A is described by B - IsDescribedBy = "dcite:IsDescribedBy" - - #: Indicates resource A has additional metadata B - HasMetadata = "dcite:HasMetadata" - - #: Indicates additional metadata A for a resource B - IsMetadataFor = "dcite:IsMetadataFor" - - #: Indicates A has a version (B) - HasVersion = "dcite:HasVersion" - - #: Indicates A is a version of B - IsVersionOf = "dcite:IsVersionOf" - - #: Indicates A is a new edition of B - IsNewVersionOf = "dcite:IsNewVersionOf" - - #: Indicates A is a previous edition of B - IsPreviousVersionOf = "dcite:IsPreviousVersionOf" - - #: Indicates A is a portion of B - IsPartOf = "dcite:IsPartOf" - - #: Indicates A includes the part B - HasPart = "dcite:HasPart" - - #: Indicates A is used as a source of information by B - IsReferencedBy = "dcite:IsReferencedBy" - - #: Indicates B is used as a source of information for A - References = "dcite:References" - - #: Indicates B is documentation about/explaining A - IsDocumentedBy = "dcite:IsDocumentedBy" - - #: Indicates A is documentation about B - Documents = "dcite:Documents" - - #: Indicates B is used to compile or create A - IsCompiledBy = "dcite:IsCompiledBy" - - #: Indicates B is the result of a compile or creation event using A - Compiles = "dcite:Compiles" - - #: Indicates A is a variant or different form of B - IsVariantFormOf = "dcite:IsVariantFormOf" - - #: Indicates A is the original form of B - IsOriginalFormOf = "dcite:IsOriginalFormOf" - - #: Indicates that A is identical to B - IsIdenticalTo = "dcite:IsIdenticalTo" - - #: Indicates that A is reviewed by B - IsReviewedBy = "dcite:IsReviewedBy" - - #: Indicates that A is a review of B - Reviews = "dcite:Reviews" - - 
#: Indicates B is a source upon which A is based - IsDerivedFrom = "dcite:IsDerivedFrom" - - #: Indicates A is a source upon which B is based - IsSourceOf = "dcite:IsSourceOf" - - #: Indicates A is required by B - IsRequiredBy = "dcite:IsRequiredBy" - - #: Indicates A requires B - Requires = "dcite:Requires" - - #: Indicates A replaces B - Obsoletes = "dcite:Obsoletes" - - #: Indicates A is replaced by B - IsObsoletedBy = "dcite:IsObsoletedBy" - - #: Indicates A is published in B - IsPublishedIn = "dcite:IsPublishedIn" - - -class ParticipantRelationType(Enum): - """An enumeration of participant relations""" - - #: Indicates that A is a child of B - isChildOf = f"{DANDI_NSKEY}:isChildOf" - - #: Indicates that A is a parent of B - isParentOf = f"{DANDI_NSKEY}:isParentOf" - - #: Indicates that A is a sibling of B - isSiblingOf = f"{DANDI_NSKEY}:isSiblingOf" - - #: Indicates that A is a monozygotic twin of B - isMonozygoticTwinOf = f"{DANDI_NSKEY}:isMonozygoticTwinOf" - - #: Indicates that A is a dizygotic twin of B - isDizygoticTwinOf = f"{DANDI_NSKEY}:isDizygoticTwinOf" - - -class RoleType(Enum): - """An enumeration of roles""" - - #: Author - Author = "dcite:Author" - - #: Conceptualization - Conceptualization = "dcite:Conceptualization" - - #: Contact Person - ContactPerson = "dcite:ContactPerson" - - #: Data Collector - DataCollector = "dcite:DataCollector" - - #: Data Curator - DataCurator = "dcite:DataCurator" - - #: Data Manager - DataManager = "dcite:DataManager" - - #: Formal Analysis - FormalAnalysis = "dcite:FormalAnalysis" - - #: Funding Acquisition - FundingAcquisition = "dcite:FundingAcquisition" - - #: Investigation - Investigation = "dcite:Investigation" - - #: Maintainer - Maintainer = "dcite:Maintainer" - - #: Methodology - Methodology = "dcite:Methodology" - - #: Producer - Producer = "dcite:Producer" - - #: Project Leader - ProjectLeader = "dcite:ProjectLeader" - - #: Project Manager - ProjectManager = "dcite:ProjectManager" - - #: Project Member - 
ProjectMember = "dcite:ProjectMember" - - #: Project Administration - ProjectAdministration = "dcite:ProjectAdministration" - - #: Researcher - Researcher = "dcite:Researcher" - - #: Resources - Resources = "dcite:Resources" - - #: Software - Software = "dcite:Software" - - #: Supervision - Supervision = "dcite:Supervision" - - #: Validation - Validation = "dcite:Validation" - - #: Visualization - Visualization = "dcite:Visualization" - - #: Funder - Funder = "dcite:Funder" - - #: Sponsor - Sponsor = "dcite:Sponsor" - - #: Participant in a study - StudyParticipant = "dcite:StudyParticipant" - - #: Affiliated with an entity - Affiliation = "dcite:Affiliation" - - #: Approved ethics protocol - EthicsApproval = "dcite:EthicsApproval" - - #: Other - Other = "dcite:Other" - - -class ResourceType(Enum): - """An enumeration of resource types""" - - #: Audiovisual: A series of visual representations imparting an impression of motion - # when shown in succession. May or may not include sound. - Audiovisual = "dcite:Audiovisual" - - #: Book: A medium for recording information in the form of writing or images, - # typically composed of many pages bound together and protected by a cover. - Book = "dcite:Book" - - #: BookChapter: One of the main divisions of a book. - BookChapter = "dcite:BookChapter" - - #: Collection: An aggregation of resources, which may encompass collections of one - # resourceType as well as those of mixed types. A collection is described as a - # group; its parts may also be separately described. - Collection = "dcite:Collection" - - #: ComputationalNotebook: A virtual notebook environment used for literate - # programming. - ComputationalNotebook = "dcite:ComputationalNotebook" - - #: ConferencePaper: Article that is written with the goal of being accepted to a - # conference. - ConferencePaper = "dcite:ConferencePaper" - - #: ConferenceProceeding: Collection of academic papers published in the context of - # an academic conference. 
- ConferenceProceeding = "dcite:ConferenceProceeding" - - #: DataPaper: A factual and objective publication with a focused intent to identify - # and describe specific data, sets of data, or data collections to facilitate - # discoverability. - DataPaper = "dcite:DataPaper" - - #: Dataset: Data encoded in a defined structure. - Dataset = "dcite:Dataset" - - #: Dissertation: A written essay, treatise, or thesis, especially one written by a - # candidate for the degree of Doctor of Philosophy. - Dissertation = "dcite:Dissertation" - - #: Event: A non-persistent, time-based occurrence. - Event = "dcite:Event" - - #: Image: A visual representation other than text. - Image = "dcite:Image" - - #: Instrument: A device, tool or apparatus used to obtain, measure and/or analyze - # data. - Instrument = "dcite:Instrument" - - #: InteractiveResource: A resource requiring interaction from the user to be - # understood, executed, or experienced. - InteractiveResource = "dcite:InteractiveResource" - - #: Journal: A scholarly publication consisting of articles that is published - # regularly throughout the year. - Journal = "dcite:Journal" - - #: JournalArticle: A written composition on a topic of interest, which forms a - # separate part of a journal. - JournalArticle = "dcite:JournalArticle" - - #: Model: An abstract, conceptual, graphical, mathematical or visualization model - # that represents empirical objects, phenomena, or physical processes. - Model = "dcite:Model" - - #: OutputManagementPlan: A formal document that outlines how research outputs are to - # be handled both during a research project and after the project is completed. - OutputManagementPlan = "dcite:OutputManagementPlan" - - #: PeerReview: Evaluation of scientific, academic, or professional work by others - # working in the same field. - PeerReview = "dcite:PeerReview" - - #: PhysicalObject: A physical object or substance. 
- PhysicalObject = "dcite:PhysicalObject" - - #: Preprint: A version of a scholarly or scientific paper that precedes formal peer - # review and publication in a peer-reviewed scholarly or scientific journal. - Preprint = "dcite:Preprint" - - #: Report: A document that presents information in an organized format for a - # specific audience and purpose. - Report = "dcite:Report" - - #: Service: An organized system of apparatus, appliances, staff, etc., for supplying - # some function(s) required by end users. - Service = "dcite:Service" - - #: Software: A computer program other than a computational notebook, in either - # source code (text) or compiled form. Use this type for general software components - # supporting scholarly research. Use the “ComputationalNotebook” value for virtual - # notebooks. - Software = "dcite:Software" - - #: Sound: A resource primarily intended to be heard. - Sound = "dcite:Sound" - - #: Standard: Something established by authority, custom, or general consent as a - # model, example, or point of reference. - Standard = "dcite:Standard" - - #: StudyRegistration: A detailed, time-stamped description of a research plan, often - # openly shared in a registry or published in a journal before the study is - # conducted to lend accountability and transparency in the hypothesis generating and - # testing process. - StudyRegistration = "dcite:StudyRegistration" - - #: Text: A resource consisting primarily of words for reading that is not covered by - # any other textual resource type in this list. - Text = "dcite:Text" - - #: Workflow: A structured series of steps which can be executed to produce a final - # outcome, allowing users a means to specify and enact their work in a more - # reproducible manner. - Workflow = "dcite:Workflow" - - #: Other: A resource that does not fit into any of the other categories. 
- Other = "dcite:Other" - - -class AgeReferenceType(Enum): - """An enumeration of age reference""" - - #: Age since Birth - BirthReference = f"{DANDI_NSKEY}:BirthReference" - - #: Age of a pregnancy (https://en.wikipedia.org/wiki/Gestational_age) - GestationalReference = f"{DANDI_NSKEY}:GestationalReference" - - -class DandiBaseModel(BaseModel): - id: Optional[str] = Field( - default=None, - description="Uniform resource identifier", - json_schema_extra={"readOnly": True}, - ) - schemaKey: str = Field( - "DandiBaseModel", validate_default=True, json_schema_extra={"readOnly": True} - ) - - def json_dict(self) -> dict: - """ - Recursively convert the instance to a `dict` of JSONable values, - including converting enum values to strings. `None` fields - are omitted. - """ - warn( - "`DandiBaseModel.json_dict()` is deprecated. Use " - "`pydantic.BaseModel.model_dump(mode='json', exclude_none=True)` instead.", - DeprecationWarning, - stacklevel=2, - ) - return self.model_dump(mode="json", exclude_none=True) - - if _has_anys: - - @field_serializer("*", mode="wrap") - def preserve_anys_values( - self, value: Any, handler: SerializerFunctionWrapHandler - ) -> Any: - return value if isinstance(value, AnyBase) else handler(value) - - @field_validator("schemaKey") - @classmethod - def ensure_schemakey(cls, val: str) -> str: - tempval = val - if "Published" in cls.__name__: - tempval = "Published" + tempval - elif "BareAsset" == cls.__name__: - tempval = "Bare" + tempval - if tempval != cls.__name__: - raise ValueError( - f"schemaKey {tempval} does not match classname {cls.__name__}" - ) - return val - - @classmethod - def unvalidated(__pydantic_cls__: Type[M], **data: Any) -> M: - """Allow model to be returned without validation""" - - warn( - "`DandiBaseModel.unvalidated()` is deprecated. 
" - "Use `pydantic.BaseModel.model_construct()` instead.", - DeprecationWarning, - stacklevel=2, - ) - - return __pydantic_cls__.model_construct(**data) - - @classmethod - def to_dictrepr(__pydantic_cls__: Type["DandiBaseModel"]) -> str: - return ( - __pydantic_cls__.model_construct() - .__repr__() - .replace(__pydantic_cls__.__name__, "dict") - ) - - @classmethod - def __get_pydantic_json_schema__( - cls, - core_schema_: CoreSchema, - handler: GetJsonSchemaHandler, - ) -> JsonSchemaValue: - schema = handler(core_schema_) - schema = handler.resolve_ref_schema(schema) - - if schema["title"] == "PropertyValue": - schema["required"] = sorted({"value"}.union(schema.get("required", []))) - schema["title"] = name2title(schema["title"]) - if schema["type"] == "object": - schema["required"] = sorted({"schemaKey"}.union(schema.get("required", []))) - for prop, value in schema.get("properties", {}).items(): - if schema["title"] == "Person": - if prop == "name": - # JSON schema doesn't support validating unicode - # characters using the \w pattern, but Python does. So - # we are dropping the regex pattern for the schema. 
- del value["pattern"] - if value.get("title") is None or value["title"] == prop.title(): - value["title"] = name2title(prop) - if re.match("\\^https?://", value.get("pattern", "")): - # triggers only for ROR in identifier - value["format"] = "uri" - if value.get("format", None) == "uri": - value["maxLength"] = 1000 - allOf = value.get("allOf") - anyOf = value.get("anyOf") - items = value.get("items") - if allOf is not None: - if len(allOf) == 1 and "$ref" in allOf[0]: - value["$ref"] = allOf[0]["$ref"] - del value["allOf"] - elif len(allOf) > 1: - value["oneOf"] = value["allOf"] - value["type"] = "object" - del value["allOf"] - if anyOf is not None: - if len(anyOf) > 1 and any(["$ref" in val for val in anyOf]): - value["type"] = "object" - if items is not None: - anyOf = items.get("anyOf") - if ( - anyOf is not None - and len(anyOf) > 1 - and any(["$ref" in val for val in anyOf]) - ): - value["items"]["type"] = "object" - # In pydantic 1.8+ all Literals are mapped on to enum - # This presently breaks the schema editor UI. Revert - # to const when generating the schema. 
- # Note: this no longer happens with custom metaclass - if prop == "schemaKey": - if "enum" in value and len(value["enum"]) == 1: - value["const"] = value["enum"][0] - del value["enum"] - else: - value["const"] = value["default"] - if "readOnly" in value: - del value["readOnly"] - - return schema - - -class PropertyValue(DandiBaseModel): - maxValue: Optional[float] = Field(None, json_schema_extra={"nskey": "schema"}) - minValue: Optional[float] = Field(None, json_schema_extra={"nskey": "schema"}) - unitText: Optional[str] = Field(None, json_schema_extra={"nskey": "schema"}) - value: Union[Any, List[Any]] = Field( - None, - validate_default=True, - json_schema_extra={"nskey": "schema"}, - description="The value associated with this property.", - ) - valueReference: Optional["PropertyValue"] = Field( - None, json_schema_extra={"nskey": "schema"} - ) # Note: recursive (circular or not) - propertyID: Optional[Union[IdentifierType, AnyHttpUrl]] = Field( - None, - description="A commonly used identifier for " - "the characteristic represented by the property. " - "For example, a known prefix like DOI or a full URL.", - json_schema_extra={"nskey": "schema"}, - ) - schemaKey: Literal["PropertyValue"] = Field( - "PropertyValue", validate_default=True, json_schema_extra={"readOnly": True} - ) - - @field_validator("value") - @classmethod - def ensure_value(cls, val: Union[Any, List[Any]]) -> Union[Any, List[Any]]: - if not val: - raise ValueError( - "The value field of a PropertyValue cannot be None or empty." 
- ) - return val - - _ldmeta = {"nskey": "schema"} - - -# This is mostly not needed at all since self-referencing models -# are automatically resolved by Pydantic in a pretty consistent way even in Pydantic V1 -# https://docs.pydantic.dev/1.10/usage/postponed_annotations/#self-referencing-models -# and continue to be so in Pydantic V2 -# https://docs.pydantic.dev/latest/concepts/postponed_annotations/#self-referencing-or-recursive-models -PropertyValue.model_rebuild() - -Identifier = str -ORCID = str -RORID = str -DANDI = str -RRID = str - - -class BaseType(DandiBaseModel): - """Base class for enumerated types""" - - identifier: Optional[ - Annotated[ - Union[ - AnyHttpUrl, - Annotated[ - str, StringConstraints(pattern=r"^[a-zA-Z0-9-]+:[a-zA-Z0-9-/\._]+$") - ], - ], - Field(union_mode="left_to_right"), - ] - ] = Field( - None, - description="The identifier can be any url or a compact URI, preferably" - " supported by identifiers.org.", - json_schema_extra={"nskey": "schema"}, - ) - name: Optional[str] = Field( - None, - description="The name of the item.", - max_length=150, - json_schema_extra={"nskey": "schema"}, - ) - schemaKey: str = Field( - "BaseType", validate_default=True, json_schema_extra={"readOnly": True} - ) - _ldmeta = {"rdfs:subClassOf": ["prov:Entity", "schema:Thing"], "nskey": DANDI_NSKEY} - - @classmethod - def __get_pydantic_json_schema__( - cls, - core_schema_: CoreSchema, - handler: GetJsonSchemaHandler, - ) -> JsonSchemaValue: - schema = super().__get_pydantic_json_schema__(core_schema_, handler) - - for prop, value in schema.get("properties", {}).items(): - # This check removes the anyOf field from the identifier property - # in the schema generation. This relates to a UI issue where two - # basic properties, in this case "string", is dropped from the UI. 
- if prop == "identifier": - for option in value.pop("anyOf", []): - if option.get("format", "") == "uri": - value.update(**option) - value["maxLength"] = 1000 - - return schema - - -class AssayType(BaseType): - """OBI based identifier for the assay(s) used""" - - schemaKey: Literal["AssayType"] = Field( - "AssayType", validate_default=True, json_schema_extra={"readOnly": True} - ) - - -class SampleType(BaseType): - """OBI based identifier for the sample type used""" - - schemaKey: Literal["SampleType"] = Field( - "SampleType", validate_default=True, json_schema_extra={"readOnly": True} - ) - - -class Anatomy(BaseType): - """UBERON or other identifier for anatomical part studied""" - - schemaKey: Literal["Anatomy"] = Field( - "Anatomy", validate_default=True, json_schema_extra={"readOnly": True} - ) - - -class StrainType(BaseType): - """Identifier for the strain of the sample""" - - schemaKey: Literal["StrainType"] = Field( - "StrainType", validate_default=True, json_schema_extra={"readOnly": True} - ) - - -class SexType(BaseType): - """Identifier for the sex of the sample""" - - schemaKey: Literal["SexType"] = Field( - "SexType", validate_default=True, json_schema_extra={"readOnly": True} - ) - - -class SpeciesType(BaseType): - """Identifier for species of the sample""" - - schemaKey: Literal["SpeciesType"] = Field( - "SpeciesType", validate_default=True, json_schema_extra={"readOnly": True} - ) - - -class Disorder(BaseType): - """Biolink, SNOMED, or other identifier for disorder studied""" - - dxdate: Optional[List[Union[date, datetime]]] = Field( - None, - title="Dates of diagnosis", - description="Dates of diagnosis", - json_schema_extra={"nskey": DANDI_NSKEY, "rangeIncludes": "schema:Date"}, - ) - schemaKey: Literal["Disorder"] = Field( - "Disorder", validate_default=True, json_schema_extra={"readOnly": True} - ) - - -class GenericType(BaseType): - """An object to capture any type for about""" - - schemaKey: Literal["GenericType"] = Field( - "GenericType", 
validate_default=True, json_schema_extra={"readOnly": True} - ) - - -class ApproachType(BaseType): - """Identifier for approach used""" - - schemaKey: Literal["ApproachType"] = Field( - "ApproachType", validate_default=True, json_schema_extra={"readOnly": True} - ) - - -class MeasurementTechniqueType(BaseType): - """Identifier for measurement technique used""" - - schemaKey: Literal["MeasurementTechniqueType"] = Field( - "MeasurementTechniqueType", - validate_default=True, - json_schema_extra={"readOnly": True}, - ) - - -class StandardsType(BaseType): - """Identifier for data standard used""" - - schemaKey: Literal["StandardsType"] = Field( - "StandardsType", validate_default=True, json_schema_extra={"readOnly": True} - ) - - -nwb_standard = StandardsType( - name="Neurodata Without Borders (NWB)", - identifier="RRID:SCR_015242", -).model_dump(mode="json", exclude_none=True) - -bids_standard = StandardsType( - name="Brain Imaging Data Structure (BIDS)", - identifier="RRID:SCR_016124", -).model_dump(mode="json", exclude_none=True) - -ome_ngff_standard = StandardsType( - name="OME/NGFF Standard", - identifier="DOI:10.25504/FAIRsharing.9af712", -).model_dump(mode="json", exclude_none=True) - - -class ContactPoint(DandiBaseModel): - email: Optional[EmailStr] = Field( - None, - description="Email address of contact.", - json_schema_extra={"nskey": "schema"}, - ) - url: Optional[AnyHttpUrl] = Field( - None, - description="A Web page to find information on how to contact.", - json_schema_extra={"nskey": "schema"}, - ) - schemaKey: Literal["ContactPoint"] = Field( - "ContactPoint", validate_default=True, json_schema_extra={"readOnly": True} - ) - - _ldmeta = {"nskey": "schema"} - - -class Contributor(DandiBaseModel): - identifier: Optional[Identifier] = Field( - None, - title="A common identifier", - description="Use a common identifier such as ORCID (orcid.org) for " - "people or ROR (ror.org) for institutions.", - json_schema_extra={"nskey": "schema"}, - ) - name: 
Optional[str] = Field(None, json_schema_extra={"nskey": "schema"}) - email: Optional[EmailStr] = Field(None, json_schema_extra={"nskey": "schema"}) - url: Optional[AnyHttpUrl] = Field(None, json_schema_extra={"nskey": "schema"}) - roleName: Optional[List[RoleType]] = Field( - None, - title="Role", - description="Role(s) of the contributor. Multiple roles can be selected.", - json_schema_extra={"nskey": "schema"}, - ) - includeInCitation: bool = Field( - True, - title="Include contributor in citation", - description="A flag to indicate whether a contributor should be included " - "when generating a citation for the item.", - json_schema_extra={"nskey": DANDI_NSKEY}, - ) - awardNumber: Optional[Identifier] = Field( - None, - title="Identifier for an award", - description="Identifier associated with a sponsored or gift award.", - json_schema_extra={"nskey": DANDI_NSKEY}, - ) - schemaKey: Literal["Contributor", "Organization", "Person"] = Field( - "Contributor", validate_default=True, json_schema_extra={"readOnly": True} - ) - - @model_validator(mode="after") - def ensure_contact_person_has_email(self) -> Contributor: - role_names = self.roleName - - if role_names is not None and RoleType.ContactPerson in role_names: - if self.email is None: - raise ValueError("Contact person must have an email address.") - - return self - - -class Organization(Contributor): - identifier: Optional[RORID] = Field( - None, - title="A ror.org identifier", - description="Use an ror.org identifier for institutions.", - pattern=r"^https://ror.org/[a-z0-9]+$", - json_schema_extra={"nskey": "schema"}, - ) - - includeInCitation: bool = Field( - False, - title="Include contributor in citation", - description="A flag to indicate whether a contributor should be included " - "when generating a citation for the item", - json_schema_extra={"nskey": DANDI_NSKEY}, - ) - contactPoint: Optional[List[ContactPoint]] = Field( - None, - title="Organization contact information", - description="Contact for the 
organization", - json_schema_extra={"nskey": "schema"}, - ) - schemaKey: Literal["Organization"] = Field( - "Organization", validate_default=True, json_schema_extra={"readOnly": True} - ) - _ldmeta = { - "rdfs:subClassOf": ["schema:Organization", "prov:Organization"], - "nskey": DANDI_NSKEY, - } - - -class Affiliation(DandiBaseModel): - identifier: Optional[RORID] = Field( - None, - title="A ror.org identifier", - description="Use an ror.org identifier for institutions.", - pattern=r"^https://ror.org/[a-z0-9]+$", - json_schema_extra={"nskey": "schema"}, - ) - name: str = Field( - json_schema_extra={"nskey": "schema"}, description="Name of organization" - ) - schemaKey: Literal["Affiliation"] = Field( - "Affiliation", validate_default=True, json_schema_extra={"readOnly": True} - ) - - _ldmeta = { - "rdfs:subClassOf": ["schema:Organization", "prov:Organization"], - "nskey": DANDI_NSKEY, - } - - -class Person(Contributor): - identifier: Optional[ORCID] = Field( - None, - title="An ORCID identifier", - description="An ORCID (orcid.org) identifier for an individual.", - pattern=r"^\d{4}-\d{4}-\d{4}-(\d{3}X|\d{4})$", - json_schema_extra={"nskey": "schema"}, - ) - name: str = Field( - title="Use Last, First. 
Example: Lovelace, Augusta Ada", - description="Use the format: familyname, given names ...", - pattern=NAME_PATTERN, - json_schema_extra={"nskey": "schema"}, - ) - affiliation: Optional[List[Affiliation]] = Field( - None, - description="An organization that this person is affiliated with.", - json_schema_extra={"nskey": "schema"}, - ) - schemaKey: Literal["Person"] = Field( - "Person", validate_default=True, json_schema_extra={"readOnly": True} - ) - - _ldmeta = { - "rdfs:subClassOf": ["schema:Person", "prov:Person"], - "nskey": DANDI_NSKEY, - } - - -class Software(DandiBaseModel): - identifier: Optional[RRID] = Field( - None, - pattern=r"^RRID:.*", - title="Research resource identifier", - description="RRID of the software from scicrunch.org.", - json_schema_extra={"nskey": "schema"}, - ) - name: str = Field(json_schema_extra={"nskey": "schema"}) - version: str = Field(json_schema_extra={"nskey": "schema"}) - url: Optional[AnyHttpUrl] = Field( - None, - description="Web page for the software.", - json_schema_extra={"nskey": "schema"}, - ) - schemaKey: Literal["Software"] = Field( - "Software", validate_default=True, json_schema_extra={"readOnly": True} - ) - - _ldmeta = { - "rdfs:subClassOf": ["schema:SoftwareApplication", "prov:Software"], - "nskey": DANDI_NSKEY, - } - - -class Agent(DandiBaseModel): - identifier: Optional[Identifier] = Field( - None, - title="Identifier", - description="Identifier for an agent.", - json_schema_extra={"nskey": "schema"}, - ) - name: str = Field( - json_schema_extra={"nskey": "schema"}, - ) - url: Optional[AnyHttpUrl] = Field(None, json_schema_extra={"nskey": "schema"}) - schemaKey: Literal["Agent"] = Field( - "Agent", validate_default=True, json_schema_extra={"readOnly": True} - ) - - _ldmeta = { - "rdfs:subClassOf": ["prov:Agent"], - "nskey": DANDI_NSKEY, - } - - -class EthicsApproval(DandiBaseModel): - """Information about ethics committee approval for project""" - - identifier: Identifier = Field( - 
json_schema_extra={"nskey": "schema"}, - title="Approved protocol identifier", - description="Approved Protocol identifier, often a number or alphanumeric string.", - ) - contactPoint: Optional[ContactPoint] = Field( - None, - description="Information about the ethics approval committee.", - json_schema_extra={"nskey": "schema"}, - ) - schemaKey: Literal["EthicsApproval"] = Field( - "EthicsApproval", validate_default=True, json_schema_extra={"readOnly": True} - ) - - _ldmeta = {"rdfs:subClassOf": ["schema:Thing", "prov:Entity"], "nskey": DANDI_NSKEY} - - -class Resource(DandiBaseModel): - identifier: Optional[Identifier] = Field( - None, json_schema_extra={"nskey": "schema"} - ) - name: Optional[str] = Field( - None, title="A title of the resource", json_schema_extra={"nskey": "schema"} - ) - url: Optional[AnyHttpUrl] = Field( - None, title="URL of the resource", json_schema_extra={"nskey": "schema"} - ) - repository: Optional[str] = Field( - None, - title="Name of the repository", - description="Name of the repository in which the resource is housed.", - json_schema_extra={"nskey": DANDI_NSKEY}, - ) - relation: RelationType = Field( - title="Resource relation", - description="Indicates how the resource is related to the dataset. 
" - "This relation should satisfy: dandiset resource.", - json_schema_extra={"nskey": DANDI_NSKEY}, - ) - resourceType: Optional[ResourceType] = Field( - default=None, - title="Resource type", - description="The type of resource.", - json_schema_extra={"nskey": DANDI_NSKEY}, - ) - - schemaKey: Literal["Resource"] = Field( - "Resource", validate_default=True, json_schema_extra={"readOnly": True} - ) - - _ldmeta = { - "rdfs:subClassOf": ["schema:CreativeWork", "prov:Entity"], - "rdfs:comment": "A resource related to the project (e.g., another " - "dataset, publication, Webpage)", - "nskey": DANDI_NSKEY, - } - - @model_validator(mode="after") - def identifier_or_url(self) -> "Resource": - identifier, url = self.identifier, self.url - if identifier is None and url is None: - raise ValueError("Both identifier and url cannot be None") - return self - - -class AccessRequirements(DandiBaseModel): - """Information about access options for the dataset""" - - status: AccessType = Field( - title="Access status", - description="The access status of the item.", - json_schema_extra={"nskey": DANDI_NSKEY}, - ) - contactPoint: Optional[ContactPoint] = Field( - None, - description="Who or where to look for information about access.", - json_schema_extra={"nskey": "schema"}, - ) - description: Optional[str] = Field( - None, - description="Information about access requirements when embargoed or restricted", - json_schema_extra={"nskey": "schema"}, - ) - embargoedUntil: Optional[date] = Field( - None, - title="Embargo end date", - description="Date on which embargo ends.", - json_schema_extra={ - "readOnly": True, - "nskey": DANDI_NSKEY, - "rangeIncludes": "schema:Date", - }, - ) - schemaKey: Literal["AccessRequirements"] = Field( - "AccessRequirements", - validate_default=True, - json_schema_extra={"readOnly": True}, - ) - - _ldmeta = {"rdfs:subClassOf": ["schema:Thing", "prov:Entity"], "nskey": DANDI_NSKEY} - - @model_validator(mode="after") - def open_or_embargoed(self) -> 
"AccessRequirements": - status, embargoed = self.status, self.embargoedUntil - if status == AccessType.EmbargoedAccess and embargoed is None: - raise ValueError( - "An embargo end date is required for NIH awards to be in " - "compliance with NIH resource sharing policy." - ) - return self - - -class AssetsSummary(DandiBaseModel): - """Summary over assets contained in a dandiset (published or not)""" - - # stats which are not stats - numberOfBytes: int = Field( - json_schema_extra={"readOnly": True, "sameas": "schema:contentSize"} - ) - numberOfFiles: int = Field(json_schema_extra={"readOnly": True}) # universe - numberOfSubjects: Optional[int] = Field( - None, json_schema_extra={"readOnly": True} - ) # NWB + BIDS - numberOfSamples: Optional[int] = Field( - None, json_schema_extra={"readOnly": True} - ) # more of NWB - numberOfCells: Optional[int] = Field(None, json_schema_extra={"readOnly": True}) - - dataStandard: Optional[List[StandardsType]] = Field( - None, json_schema_extra={"readOnly": True} - ) - # Web UI: icons per each modality? - approach: Optional[List[ApproachType]] = Field( - None, json_schema_extra={"readOnly": True} - ) - # Web UI: could be an icon with number, which if hovered on show a list? 
- measurementTechnique: Optional[List[MeasurementTechniqueType]] = Field( - None, json_schema_extra={"readOnly": True, "nskey": "schema"} - ) - variableMeasured: Optional[List[str]] = Field( - None, json_schema_extra={"readOnly": True, "nskey": "schema"} - ) - - species: Optional[List[SpeciesType]] = Field( - None, json_schema_extra={"readOnly": True} - ) - schemaKey: Literal["AssetsSummary"] = Field( - "AssetsSummary", validate_default=True, json_schema_extra={"readOnly": True} - ) - - _ldmeta = { - "rdfs:subClassOf": ["schema:CreativeWork", "prov:Entity"], - "nskey": DANDI_NSKEY, - } - - -class Equipment(DandiBaseModel): - identifier: Optional[Identifier] = Field( - None, json_schema_extra={"nskey": "schema"} - ) - name: str = Field( - title="Title", - description="A name for the equipment.", - max_length=150, - json_schema_extra={"nskey": "schema"}, - ) - description: Optional[str] = Field( - None, - description="The description of the equipment.", - json_schema_extra={"nskey": "schema"}, - ) - schemaKey: Literal["Equipment"] = Field( - "Equipment", validate_default=True, json_schema_extra={"readOnly": True} - ) - - _ldmeta = { - "rdfs:subClassOf": ["schema:CreativeWork", "prov:Entity"], - "nskey": DANDI_NSKEY, - } - - -class Activity(DandiBaseModel): - """Information about the Project activity""" - - identifier: Optional[Identifier] = Field( - None, json_schema_extra={"nskey": "schema"} - ) - name: str = Field( - title="Title", - description="The name of the activity.", - max_length=150, - json_schema_extra={"nskey": "schema"}, - ) - description: Optional[str] = Field( - None, - description="The description of the activity.", - json_schema_extra={"nskey": "schema"}, - ) - startDate: Optional[datetime] = Field(None, json_schema_extra={"nskey": "schema"}) - endDate: Optional[datetime] = Field(None, json_schema_extra={"nskey": "schema"}) - - # isPartOf: Optional["Activity"] = Field(None, json_schema_extra={"nskey": "schema"}) - # hasPart: Optional["Activity"] = 
Field(None, json_schema_extra={"nskey": "schema"}) - wasAssociatedWith: Optional[ - List[ - Annotated[ - Union[Person, Organization, Software, Agent], - Field(discriminator="schemaKey"), - ] - ] - ] = Field(None, json_schema_extra={"nskey": "prov"}) - used: Optional[List[Equipment]] = Field( - None, - description="A listing of equipment used for the activity.", - json_schema_extra={"nskey": "prov"}, - ) - schemaKey: Literal["Activity", "Project", "Session", "PublishActivity"] = Field( - "Activity", validate_default=True, json_schema_extra={"readOnly": True} - ) - - _ldmeta = { - "rdfs:subClassOf": ["prov:Activity", "schema:Thing"], - "nskey": DANDI_NSKEY, - } - - -class Project(Activity): - name: str = Field( - title="Name of project", - description="The name of the project that generated this Dandiset or asset.", - max_length=150, - json_schema_extra={"nskey": "schema"}, - ) - description: Optional[str] = Field( - None, - description="A brief description of the project.", - json_schema_extra={"nskey": "schema"}, - ) - schemaKey: Literal["Project"] = Field( - "Project", validate_default=True, json_schema_extra={"readOnly": True} - ) - - -class Session(Activity): - name: str = Field( - title="Name of session", - description="The name of the logical session associated with the asset.", - max_length=150, - json_schema_extra={"nskey": "schema"}, - ) - description: Optional[str] = Field( - None, - description="A brief description of the session.", - json_schema_extra={"nskey": "schema"}, - ) - schemaKey: Literal["Session"] = Field( - "Session", validate_default=True, json_schema_extra={"readOnly": True} - ) - - -class PublishActivity(Activity): - schemaKey: Literal["PublishActivity"] = Field( - "PublishActivity", validate_default=True, json_schema_extra={"readOnly": True} - ) - - -class Locus(DandiBaseModel): - identifier: Union[Identifier, List[Identifier]] = Field( - description="Identifier for genotyping locus.", - json_schema_extra={"nskey": "schema"}, - ) - 
locusType: Optional[str] = Field(None) - schemaKey: Literal["Locus"] = Field( - "Locus", validate_default=True, json_schema_extra={"readOnly": True} - ) - _ldmeta = {"nskey": DANDI_NSKEY} - - -class Allele(DandiBaseModel): - identifier: Union[Identifier, List[Identifier]] = Field( - description="Identifier for genotyping allele.", - json_schema_extra={"nskey": "schema"}, - ) - alleleSymbol: Optional[str] = Field(None) - alleleType: Optional[str] = Field(None) - schemaKey: Literal["Allele"] = Field( - "Allele", validate_default=True, json_schema_extra={"readOnly": True} - ) - _ldmeta = {"nskey": DANDI_NSKEY} - - -class GenotypeInfo(DandiBaseModel): - locus: Locus = Field(description="Locus at which information was extracted.") - alleles: List[Allele] = Field(description="Information about alleles at the locus.") - wasGeneratedBy: Optional[List["Session"]] = Field( - None, - description="Information about session activity used to determine genotype.", - json_schema_extra={"nskey": "prov"}, - ) - schemaKey: Literal["GenotypeInfo"] = Field( - "GenotypeInfo", validate_default=True, json_schema_extra={"readOnly": True} - ) - _ldmeta = {"nskey": DANDI_NSKEY} - - -class RelatedParticipant(DandiBaseModel): - identifier: Optional[Identifier] = Field( - None, json_schema_extra={"nskey": "schema"} - ) - name: Optional[str] = Field( - None, - title="Name of the participant or subject", - json_schema_extra={"nskey": "schema"}, - ) - url: Optional[AnyHttpUrl] = Field( - None, - title="URL of the related participant or subject", - json_schema_extra={"nskey": "schema"}, - ) - relation: ParticipantRelationType = Field( - title="Participant or subject relation", - description="Indicates how the current participant or subject is related " - "to the other participant or subject. 
This relation should " - "satisfy: Participant/Subject relatedParticipant/Subject.", - json_schema_extra={"nskey": DANDI_NSKEY}, - ) - schemaKey: Literal["RelatedParticipant"] = Field( - "RelatedParticipant", - validate_default=True, - json_schema_extra={"readOnly": True}, - ) - - _ldmeta = { - "rdfs:subClassOf": ["schema:CreativeWork", "prov:Entity"], - "rdfs:comment": "Another participant or subject related to the current " - "participant or subject (e.g., another parent, sibling, child).", - "nskey": DANDI_NSKEY, - } - - -class Participant(DandiBaseModel): - """Description about the Participant or Subject studied. - - The Participant or Subject can be any individual or synthesized Agent. The - properties of the Participant or Subject refers to information at the timepoint - when the Participant or Subject engaged in the production of data being described. - """ - - identifier: Identifier = Field(json_schema_extra={"nskey": "schema"}) - altName: Optional[List[Identifier]] = Field( - None, json_schema_extra={"nskey": DANDI_NSKEY} - ) - - strain: Optional[StrainType] = Field( - None, - description="Identifier for the strain of the participant or subject.", - json_schema_extra={"nskey": DANDI_NSKEY}, - ) - cellLine: Optional[Identifier] = Field( - None, - description="Cell line associated with the participant or subject.", - json_schema_extra={"nskey": DANDI_NSKEY}, - ) - vendor: Optional[Organization] = Field( - None, json_schema_extra={"nskey": DANDI_NSKEY} - ) - age: Optional[PropertyValue] = Field( - None, - description="A representation of age using ISO 8601 duration. This " - "should include a valueReference if anything other than " - "date of birth is used.", - json_schema_extra={"nskey": DANDI_NSKEY, "rangeIncludes": "schema:Duration"}, - ) - - sex: Optional[SexType] = Field( - None, - description="Identifier for sex of the participant or subject if " - "available. (e.g. 
from OBI)", - json_schema_extra={"nskey": DANDI_NSKEY}, - ) - genotype: Optional[Union[List[GenotypeInfo], Identifier]] = Field( - None, - description="Genotype descriptor of participant or subject if available", - json_schema_extra={"nskey": DANDI_NSKEY}, - ) - species: Optional[SpeciesType] = Field( - None, - description="An identifier indicating the taxonomic classification of " - "the participant or subject.", - json_schema_extra={"nskey": DANDI_NSKEY}, - ) - disorder: Optional[List[Disorder]] = Field( - None, - description="Any current diagnosed disease or disorder associated with " - "the participant or subject.", - json_schema_extra={"nskey": DANDI_NSKEY}, - ) - - relatedParticipant: Optional[List[RelatedParticipant]] = Field( - None, - description="Information about related participants or subjects in a " - "study or across studies.", - json_schema_extra={"nskey": DANDI_NSKEY}, - ) - sameAs: Optional[List[Identifier]] = Field( - None, - description="An identifier to link participants or subjects across datasets.", - json_schema_extra={"nskey": "schema"}, - ) - schemaKey: Literal["Participant"] = Field( - "Participant", validate_default=True, json_schema_extra={"readOnly": True} - ) - - _ldmeta = { - "rdfs:subClassOf": ["prov:Agent"], - "rdfs:label": "Information about the participant or subject.", - "nskey": DANDI_NSKEY, - } - - -class BioSample(DandiBaseModel): - """Description of the sample that was studied""" - - identifier: Identifier = Field(json_schema_extra={"nskey": "schema"}) - sampleType: SampleType = Field( - description="Identifier for the sample characteristics (e.g., from OBI, Encode).", - json_schema_extra={"nskey": DANDI_NSKEY}, - ) - assayType: Optional[List[AssayType]] = Field( - None, - description="Identifier for the assay(s) used (e.g., OBI).", - json_schema_extra={"nskey": DANDI_NSKEY}, - ) - anatomy: Optional[List[Anatomy]] = Field( - None, - description="Identifier for what organ the sample belongs " - "to. 
Use the most specific descriptor from sources such as UBERON.", - json_schema_extra={"nskey": DANDI_NSKEY}, - ) - - wasDerivedFrom: Optional[List["BioSample"]] = Field( - None, - description="Describes the hierarchy of sample derivation or aggregation.", - json_schema_extra={"nskey": "prov"}, - ) - wasAttributedTo: Optional[List[Participant]] = Field( - None, - description="Participant(s) or Subject(s) associated with this sample.", - json_schema_extra={"nskey": "prov"}, - ) - sameAs: Optional[List[Identifier]] = Field( - None, json_schema_extra={"nskey": "schema"} - ) - hasMember: Optional[List[Identifier]] = Field( - None, json_schema_extra={"nskey": "prov"} - ) - schemaKey: Literal["BioSample"] = Field( - "BioSample", validate_default=True, json_schema_extra={"readOnly": True} - ) - - _ldmeta = { - "rdfs:subClassOf": ["schema:Thing", "prov:Entity"], - "rdfs:label": "Information about the biosample.", - "nskey": DANDI_NSKEY, - } - - -# This is mostly not needed at all since self-referencing models -# are automatically resolved by Pydantic in a pretty consistent way even in Pydantic V1 -# https://docs.pydantic.dev/1.10/usage/postponed_annotations/#self-referencing-models -# and continue to be so in Pydantic V2 -# https://docs.pydantic.dev/latest/concepts/postponed_annotations/#self-referencing-or-recursive-models -BioSample.model_rebuild() - - -class CommonModel(DandiBaseModel): - schemaVersion: str = Field( - default=DANDI_SCHEMA_VERSION, - json_schema_extra={"readOnly": True, "nskey": "schema"}, - ) - name: Optional[str] = Field( - None, - title="Title", - description="The name of the item.", - max_length=150, - json_schema_extra={"nskey": "schema"}, - ) - description: Optional[str] = Field( - None, - description="A description of the item.", - json_schema_extra={"nskey": "schema"}, - ) - contributor: Optional[ - List[Annotated[Union[Person, Organization], Field(discriminator="schemaKey")]] - ] = Field( - None, - title="Contributors", - description="Contributors 
to this item: persons or organizations.", - json_schema_extra={"nskey": "schema"}, - ) - about: Optional[ - List[ - Annotated[ - Union[Disorder, Anatomy, GenericType], Field(discriminator="schemaKey") - ] - ] - ] = Field( - None, - title="Subject matter of the dataset", - description="The subject matter of the content, such as disorders, brain anatomy.", - json_schema_extra={"nskey": "schema"}, - ) - studyTarget: Optional[List[str]] = Field( - None, - description="Objectives or specific questions of the study.", - json_schema_extra={"nskey": DANDI_NSKEY}, - ) - license: Optional[List[LicenseType]] = Field( - None, - description="Licenses associated with the item. DANDI only supports a " - "subset of Creative Commons Licenses (creativecommons.org) " - "applicable to datasets.", - json_schema_extra={"nskey": "schema"}, - ) - protocol: Optional[List[AnyHttpUrl]] = Field( - None, - description="A list of persistent URLs describing the protocol (e.g. " - "protocols.io, or other DOIs).", - json_schema_extra={"nskey": DANDI_NSKEY}, - ) - ethicsApproval: Optional[List[EthicsApproval]] = Field( - None, title="Ethics approvals", json_schema_extra={"nskey": DANDI_NSKEY} - ) - keywords: Optional[List[str]] = Field( - None, - description="Keywords used to describe this content.", - json_schema_extra={"nskey": "schema"}, - ) - acknowledgement: Optional[str] = Field( - None, - description="Any acknowledgments not covered by contributors or external resources.", - json_schema_extra={"nskey": DANDI_NSKEY}, - ) - - # Linking to this dandiset or the larger thing - access: List[AccessRequirements] = Field( - title="Access information", - default_factory=lambda: [AccessRequirements(status=AccessType.OpenAccess)], - json_schema_extra={"nskey": DANDI_NSKEY, "readOnly": True}, - ) - url: Optional[AnyHttpUrl] = Field( - None, - description="permalink to the item", - json_schema_extra={"readOnly": True, "nskey": "schema"}, - ) - repository: Optional[AnyHttpUrl] = Field( - 
default=_INSTANCE_CONFIG.instance_url, - description="location of the item", - json_schema_extra={"nskey": DANDI_NSKEY, "readOnly": True}, - ) - relatedResource: Optional[List[Resource]] = Field( - None, json_schema_extra={"nskey": DANDI_NSKEY} - ) - - wasGeneratedBy: Optional[Sequence[Activity]] = Field( - None, json_schema_extra={"nskey": "prov"} - ) - schemaKey: str = Field( - "CommonModel", validate_default=True, json_schema_extra={"readOnly": True} - ) - - -class Dandiset(CommonModel): - """A body of structured information describing a DANDI dataset.""" - - model_config = ConfigDict(extra="allow") - - @field_validator("contributor") - @classmethod - def contributor_musthave_contact( - cls, values: List[Union[Person, Organization]] - ) -> List[Union[Person, Organization]]: - contacts = [] - for val in values: - if val.roleName and RoleType.ContactPerson in val.roleName: - contacts.append(val) - if len(contacts) == 0: - raise ValueError("At least one contributor must have role ContactPerson") - return values - - id: str = Field( - description="Uniform resource identifier", - pattern=( - rf"^({ID_PATTERN}|{ID_PATTERN.lower()}):\d{{6}}(/(draft|{VERSION_NUM_PATTERN}))$" - ), - json_schema_extra={"readOnly": True}, - ) - - identifier: DANDI = Field( - title="Dandiset identifier", - description="A Dandiset identifier that can be resolved by identifiers.org.", - pattern=rf"^{ID_PATTERN}:\d{{6}}$", - json_schema_extra={"readOnly": True, "nskey": "schema"}, - ) - - sameAs: Annotated[ - Optional[ - list[ - Annotated[ - str, - StringConstraints( - pattern=( - rf"^dandi://{UNVENDORED_ID_PATTERN}/\d{{6}}" - rf"(@(draft|{VERSION_NUM_PATTERN}))?(/\S+)?$" - ) - ), - ] - ] - ], - Field( - default=None, - description="Known DANDI URLs of the Dandiset at other DANDI instances.", - json_schema_extra={"nskey": "schema"}, - ), - ] - - name: str = Field( - title="Dandiset title", - description="A title associated with the Dandiset.", - max_length=150, - json_schema_extra={"nskey": 
"schema"}, - ) - description: str = Field( - description="A description of the Dandiset", - max_length=10000, - json_schema_extra={"nskey": "schema"}, - ) - contributor: List[ - Annotated[Union[Person, Organization], Field(discriminator="schemaKey")] - ] = Field( - title="Dandiset contributors", - description="People or Organizations that have contributed to this Dandiset.", - json_schema_extra={"nskey": "schema"}, - min_length=1, - ) - dateCreated: Optional[datetime] = Field( - None, - json_schema_extra={"nskey": "schema", "readOnly": True}, - title="Dandiset creation date and time.", - ) - dateModified: Optional[datetime] = Field( - None, - json_schema_extra={"nskey": "schema", "readOnly": True}, - title="Last modification date and time.", - ) - - license: List[LicenseType] = Field( - min_length=1, - description="Licenses associated with the item. DANDI only supports a " - "subset of Creative Commons Licenses (creativecommons.org) " - "applicable to datasets.", - json_schema_extra={"nskey": "schema"}, - ) - - citation: str = Field(json_schema_extra={"readOnly": True, "nskey": "schema"}) - - # From assets - assetsSummary: AssetsSummary = Field( - json_schema_extra={"nskey": DANDI_NSKEY, "readOnly": True} - ) - - # From server (requested by users even for drafts) - manifestLocation: List[AnyHttpUrl] = Field( - min_length=1, json_schema_extra={"nskey": DANDI_NSKEY, "readOnly": True} - ) - - version: str = Field(json_schema_extra={"nskey": "schema", "readOnly": True}) - - wasGeneratedBy: Optional[Sequence[Project]] = Field( - None, - title="Associated projects", - description="Project(s) that generated this Dandiset.", - json_schema_extra={"nskey": "prov"}, - ) - - schemaKey: Literal["Dandiset"] = Field( - "Dandiset", validate_default=True, json_schema_extra={"readOnly": True} - ) - - _ldmeta = { - "rdfs:subClassOf": ["schema:Dataset", "prov:Entity"], - "rdfs:label": "Information about the dataset", - "nskey": DANDI_NSKEY, - } - - -class BareAsset(CommonModel): - 
"""Metadata used to describe an asset anywhere (local or server). - - Derived from C2M2 (Level 0 and 1) and schema.org - """ - - contentSize: ByteSizeJsonSchema = Field(json_schema_extra={"nskey": "schema"}) - encodingFormat: Union[AnyHttpUrl, str] = Field( - title="File encoding format", json_schema_extra={"nskey": "schema"} - ) - digest: Dict[DigestType, str] = Field( - title="A map of dandi digests to their values", - json_schema_extra={"nskey": DANDI_NSKEY}, - ) - path: str = Field(json_schema_extra={"nskey": DANDI_NSKEY}) - - dateModified: Optional[datetime] = Field( - None, - json_schema_extra={"nskey": "schema"}, - title="Asset (file or metadata) modification date and time", - ) - blobDateModified: Optional[datetime] = Field( - None, - json_schema_extra={"nskey": DANDI_NSKEY}, - title="Asset file modification date and time.", - ) - # overload to restrict with max_items=1 - access: List[AccessRequirements] = Field( - title="Access information", - default_factory=lambda: [AccessRequirements(status=AccessType.OpenAccess)], - json_schema_extra={"nskey": DANDI_NSKEY}, - max_length=1, - ) - - # this is from C2M2 level 1 - using EDAM vocabularies - in our case we would - # need to come up with things for neurophys - # TODO: waiting on input - dataType: Optional[AnyHttpUrl] = Field( - None, json_schema_extra={"nskey": DANDI_NSKEY} - ) - - sameAs: Optional[List[AnyHttpUrl]] = Field( - None, json_schema_extra={"nskey": "schema"} - ) - - # TODO - approach: Optional[List[ApproachType]] = Field( - None, json_schema_extra={"readOnly": True, "nskey": DANDI_NSKEY} - ) - measurementTechnique: Optional[List[MeasurementTechniqueType]] = Field( - None, json_schema_extra={"readOnly": True, "nskey": "schema"} - ) - variableMeasured: Optional[List[PropertyValue]] = Field( - None, json_schema_extra={"readOnly": True, "nskey": "schema"} - ) - - wasDerivedFrom: Optional[List[BioSample]] = Field( - None, json_schema_extra={"nskey": "prov"} - ) - wasAttributedTo: 
Optional[List[Participant]] = Field( - None, - description="Associated participant(s) or subject(s).", - json_schema_extra={"nskey": "prov"}, - ) - wasGeneratedBy: Optional[List[Union[Session, Project, Activity]]] = Field( - None, - title="Name of the session, project or activity.", - description="Describe the session, project or activity that generated this asset.", - json_schema_extra={"nskey": "prov"}, - ) - - # Bare asset is to be just Asset. - schemaKey: Literal["Asset"] = Field( - "Asset", validate_default=True, json_schema_extra={"readOnly": True} - ) - - _ldmeta = { - "rdfs:subClassOf": ["schema:CreativeWork", "prov:Entity"], - "rdfs:label": "Information about the asset", - "nskey": DANDI_NSKEY, - } - - @field_validator("digest") - @classmethod - def digest_check( - cls, v: Dict[DigestType, str], info: ValidationInfo - ) -> Dict[DigestType, str]: - values = info.data - if values.get("encodingFormat") == "application/x-zarr": - if DigestType.dandi_zarr_checksum not in v: - raise ValueError("A zarr asset must have a zarr checksum.") - if v.get(DigestType.dandi_etag): - raise ValueError("Digest cannot have both etag and zarr checksums.") - digest = v[DigestType.dandi_zarr_checksum] - try: - chksum = ZarrDirectoryDigest.parse(digest) - except InvalidZarrChecksum: - raise ValueError( - "Digest must have an appropriate dandi-zarr-checksum value." - f" Got {digest}" - ) - zarr_size = chksum.size - content_size = values.get("contentSize") - if content_size != zarr_size: - raise ValueError( - f"contentSize {content_size} is not equal to the checksum size {zarr_size}." - ) - else: - if DigestType.dandi_etag not in v: - raise ValueError("A non-zarr asset must have a dandi-etag.") - if v.get(DigestType.dandi_zarr_checksum): - raise ValueError("Digest cannot have both etag and zarr checksums.") - digest = v[DigestType.dandi_etag] - if not re.fullmatch(DandiETag.REGEX, digest): - raise ValueError( - f"Digest must have an appropriate dandi-etag value. 
" - f"Got {digest}" - ) - return v - - -class Asset(BareAsset): - """Metadata used to describe an asset on the server.""" - - # all of the following are set by server - id: str = Field( - json_schema_extra={"readOnly": True}, description="Uniform resource identifier." - ) - identifier: UUID4 = Field(json_schema_extra={"readOnly": True, "nskey": "schema"}) - contentUrl: List[AnyHttpUrl] = Field( - json_schema_extra={"readOnly": True, "nskey": "schema"} - ) - - -class Publishable(DandiBaseModel): - publishedBy: Union[AnyHttpUrl, PublishActivity] = Field( - description="The URL should contain the provenance of the publishing process.", - json_schema_extra={"readOnly": True, "nskey": DANDI_NSKEY}, - ) - datePublished: datetime = Field( - json_schema_extra={"readOnly": True, "nskey": "schema"} - ) - schemaKey: Literal["Publishable", "Dandiset", "Asset"] = Field( - "Publishable", validate_default=True, json_schema_extra={"readOnly": True} - ) - - -_doi_field_kwargs: dict[str, Any] = { - "title": "DOI", - "pattern": DANDI_DOI_PATTERN, - "json_schema_extra": {"readOnly": True, "nskey": DANDI_NSKEY}, -} -if _INSTANCE_CONFIG.doi_prefix is None: - _doi_field_kwargs["default"] = "" - - -class PublishedDandiset(Dandiset, Publishable): - id: str = Field( - description="Uniform resource identifier.", - pattern=DANDI_PUBID_PATTERN, - json_schema_extra={"readOnly": True}, - ) - doi: str = Field(**_doi_field_kwargs) - """ - The DOI of the published Dandiset - - The value of the empty string indicates that there is no DOI for the published - Dandiset. 
- """ - - url: AnyHttpUrl = Field( - description="Permalink to the Dandiset.", - json_schema_extra={"readOnly": True, "nskey": "schema"}, - ) - releaseNotes: Optional[str] = Field( - None, - description="The description of the release", - json_schema_extra={"readOnly": True, "nskey": "schema"}, - ) - - schemaKey: Literal["Dandiset"] = Field( - "Dandiset", validate_default=True, json_schema_extra={"readOnly": True} - ) - - @field_validator("assetsSummary") - @classmethod - def check_filesbytes(cls, values: AssetsSummary) -> AssetsSummary: - if values.numberOfBytes == 0 or values.numberOfFiles == 0: - raise ValueError( - "A Dandiset containing no files or zero bytes is not publishable" - ) - return values - - @field_validator("url") - @classmethod - def check_url(cls, url: AnyHttpUrl) -> AnyHttpUrl: - if not re.match(PUBLISHED_VERSION_URL_PATTERN, str(url)): - raise ValueError( - f'string does not match regex "{PUBLISHED_VERSION_URL_PATTERN}"' - ) - return url - - -class PublishedAsset(Asset, Publishable): - id: str = Field( - description="Uniform resource identifier.", - pattern=ASSET_UUID_PATTERN, - json_schema_extra={"readOnly": True}, - ) - - schemaKey: Literal["Asset"] = Field( - "Asset", validate_default=True, json_schema_extra={"readOnly": True} - ) - - @field_validator("digest") - @classmethod - def digest_sha256check( - cls, v: Dict[DigestType, str], info: ValidationInfo - ) -> Dict[DigestType, str]: - values = info.data - if values.get("encodingFormat") != "application/x-zarr": - if DigestType.sha2_256 not in v: - raise ValueError("A non-zarr asset must have a sha2_256.") - digest = v[DigestType.sha2_256] - if not re.fullmatch(SHA256_PATTERN, digest): - raise ValueError( - f"Digest must have an appropriate sha2_256 value. 
Got {digest}" - ) - return v - - -def get_schema_version() -> str: - return DANDI_SCHEMA_VERSION +# TODO: do the extra tune ups like linking extra validations etc, +# potentially copied from models_orig.py diff --git a/dandischema/models.yaml b/dandischema/models.yaml new file mode 100644 index 00000000..fa5365a9 --- /dev/null +++ b/dandischema/models.yaml @@ -0,0 +1,1493 @@ +name: dandi-schema +status: eunal:concept-status/DRAFT +id: https://schema.dandiarchive.org/s/dandi/v0.7 +version: 0.7.0 +imports: +- linkml:types +prefixes: + dandiasset: + prefix_reference: http://dandiarchive.org/asset/ + DANDI: + prefix_reference: http://dandiarchive.org/dandiset/ + dandi: + prefix_reference: http://schema.dandiarchive.org/ + dcite: + prefix_reference: http://schema.dandiarchive.org/datacite/ + dct: + prefix_reference: http://purl.org/dc/terms/ + linkml: + prefix_reference: https://w3id.org/linkml/ + nidm: + prefix_reference: http://purl.org/nidash/nidm# + ORCID: + prefix_reference: https://orcid.org/ + owl: + prefix_reference: http://www.w3.org/2002/07/owl# + PATO: + prefix_reference: http://purl.obolibrary.org/obo/PATO_ + pav: + prefix_reference: http://purl.org/pav/ + prov: + prefix_reference: http://www.w3.org/ns/prov# + rdfa: + prefix_reference: http://www.w3.org/ns/rdfa# + rdf: + prefix_reference: http://www.w3.org/1999/02/22-rdf-syntax-ns# + rdfs: + prefix_reference: http://www.w3.org/2000/01/rdf-schema# + ROR: + prefix_reference: https://ror.org/ + RRID: + prefix_reference: 'https://scicrunch.org/resolver/RRID:' + rs: + prefix_reference: http://schema.repronim.org/ + schema: + prefix_reference: http://schema.org/ + skos: + prefix_reference: http://www.w3.org/2004/02/skos/core# + spdx: + prefix_reference: http://spdx.org/licenses/ + uuid: + prefix_reference: http://uuid.repronim.org/ + xsd: + prefix_reference: http://www.w3.org/2001/XMLSchema# +default_prefix: dandi +default_range: string + +enums: + AccessType: + description: An enumeration of access status options 
+ permissible_values: + dandi:OpenAccess: + meaning: dandi:OpenAccess + dandi:EmbargoedAccess: + meaning: dandi:EmbargoedAccess + AgeReferenceType: + description: An enumeration of age reference + permissible_values: + dandi:BirthReference: + meaning: dandi:BirthReference + dandi:GestationalReference: + meaning: dandi:GestationalReference + DigestType: + description: An enumeration of checksum types + permissible_values: + dandi:md5: + meaning: dandi:md5 + dandi:sha1: + meaning: dandi:sha1 + dandi:sha2-256: + meaning: dandi:sha2-256 + dandi:sha3-256: + meaning: dandi:sha3-256 + dandi:blake2b-256: + meaning: dandi:blake2b-256 + dandi:blake3: + meaning: dandi:blake3 + dandi:dandi-etag: + meaning: dandi:dandi-etag + dandi:dandi-zarr-checksum: + meaning: dandi:dandi-zarr-checksum + IdentifierType: + description: An enumeration of identifiers + permissible_values: + dandi:doi: + meaning: dandi:doi + dandi:orcid: + meaning: dandi:orcid + dandi:ror: + meaning: dandi:ror + dandi:dandi: + meaning: dandi:dandi + dandi:rrid: + meaning: dandi:rrid + LicenseType: + description: An enumeration. 
+ permissible_values: + spdx:CC0-1.0: + meaning: spdx:CC0-1.0 + spdx:CC-BY-4.0: + meaning: spdx:CC-BY-4.0 + ParticipantRelationType: + description: An enumeration of participant relations + permissible_values: + dandi:isChildOf: + meaning: dandi:isChildOf + dandi:isParentOf: + meaning: dandi:isParentOf + dandi:isSiblingOf: + meaning: dandi:isSiblingOf + dandi:isMonozygoticTwinOf: + meaning: dandi:isMonozygoticTwinOf + dandi:isDizygoticTwinOf: + meaning: dandi:isDizygoticTwinOf + RelationType: + description: An enumeration of resource relations + permissible_values: + dcite:IsCitedBy: + meaning: dcite:IsCitedBy + dcite:Cites: + meaning: dcite:Cites + dcite:IsSupplementTo: + meaning: dcite:IsSupplementTo + dcite:IsSupplementedBy: + meaning: dcite:IsSupplementedBy + dcite:IsContinuedBy: + meaning: dcite:IsContinuedBy + dcite:Continues: + meaning: dcite:Continues + dcite:Describes: + meaning: dcite:Describes + dcite:IsDescribedBy: + meaning: dcite:IsDescribedBy + dcite:HasMetadata: + meaning: dcite:HasMetadata + dcite:IsMetadataFor: + meaning: dcite:IsMetadataFor + dcite:HasVersion: + meaning: dcite:HasVersion + dcite:IsVersionOf: + meaning: dcite:IsVersionOf + dcite:IsNewVersionOf: + meaning: dcite:IsNewVersionOf + dcite:IsPreviousVersionOf: + meaning: dcite:IsPreviousVersionOf + dcite:IsPartOf: + meaning: dcite:IsPartOf + dcite:HasPart: + meaning: dcite:HasPart + dcite:IsReferencedBy: + meaning: dcite:IsReferencedBy + dcite:References: + meaning: dcite:References + dcite:IsDocumentedBy: + meaning: dcite:IsDocumentedBy + dcite:Documents: + meaning: dcite:Documents + dcite:IsCompiledBy: + meaning: dcite:IsCompiledBy + dcite:Compiles: + meaning: dcite:Compiles + dcite:IsVariantFormOf: + meaning: dcite:IsVariantFormOf + dcite:IsOriginalFormOf: + meaning: dcite:IsOriginalFormOf + dcite:IsIdenticalTo: + meaning: dcite:IsIdenticalTo + dcite:IsReviewedBy: + meaning: dcite:IsReviewedBy + dcite:Reviews: + meaning: dcite:Reviews + dcite:IsDerivedFrom: + meaning: 
dcite:IsDerivedFrom + dcite:IsSourceOf: + meaning: dcite:IsSourceOf + dcite:IsRequiredBy: + meaning: dcite:IsRequiredBy + dcite:Requires: + meaning: dcite:Requires + dcite:Obsoletes: + meaning: dcite:Obsoletes + dcite:IsObsoletedBy: + meaning: dcite:IsObsoletedBy + dcite:IsPublishedIn: + meaning: dcite:IsPublishedIn + ResourceType: + description: An enumeration of resource types + permissible_values: + dcite:Audiovisual: + meaning: dcite:Audiovisual + dcite:Book: + meaning: dcite:Book + dcite:BookChapter: + meaning: dcite:BookChapter + dcite:Collection: + meaning: dcite:Collection + dcite:ComputationalNotebook: + meaning: dcite:ComputationalNotebook + dcite:ConferencePaper: + meaning: dcite:ConferencePaper + dcite:ConferenceProceeding: + meaning: dcite:ConferenceProceeding + dcite:DataPaper: + meaning: dcite:DataPaper + dcite:Dataset: + meaning: dcite:Dataset + dcite:Dissertation: + meaning: dcite:Dissertation + dcite:Event: + meaning: dcite:Event + dcite:Image: + meaning: dcite:Image + dcite:Instrument: + meaning: dcite:Instrument + dcite:InteractiveResource: + meaning: dcite:InteractiveResource + dcite:Journal: + meaning: dcite:Journal + dcite:JournalArticle: + meaning: dcite:JournalArticle + dcite:Model: + meaning: dcite:Model + dcite:OutputManagementPlan: + meaning: dcite:OutputManagementPlan + dcite:PeerReview: + meaning: dcite:PeerReview + dcite:PhysicalObject: + meaning: dcite:PhysicalObject + dcite:Preprint: + meaning: dcite:Preprint + dcite:Report: + meaning: dcite:Report + dcite:Service: + meaning: dcite:Service + dcite:Software: + meaning: dcite:Software + dcite:Sound: + meaning: dcite:Sound + dcite:Standard: + meaning: dcite:Standard + dcite:StudyRegistration: + meaning: dcite:StudyRegistration + dcite:Text: + meaning: dcite:Text + dcite:Workflow: + meaning: dcite:Workflow + dcite:Other: + meaning: dcite:Other + RoleType: + description: An enumeration of roles + permissible_values: + dcite:Author: + meaning: dcite:Author + dcite:Conceptualization: + 
meaning: dcite:Conceptualization + dcite:ContactPerson: + meaning: dcite:ContactPerson + dcite:DataCollector: + meaning: dcite:DataCollector + dcite:DataCurator: + meaning: dcite:DataCurator + dcite:DataManager: + meaning: dcite:DataManager + dcite:FormalAnalysis: + meaning: dcite:FormalAnalysis + dcite:FundingAcquisition: + meaning: dcite:FundingAcquisition + dcite:Investigation: + meaning: dcite:Investigation + dcite:Maintainer: + meaning: dcite:Maintainer + dcite:Methodology: + meaning: dcite:Methodology + dcite:Producer: + meaning: dcite:Producer + dcite:ProjectLeader: + meaning: dcite:ProjectLeader + dcite:ProjectManager: + meaning: dcite:ProjectManager + dcite:ProjectMember: + meaning: dcite:ProjectMember + dcite:ProjectAdministration: + meaning: dcite:ProjectAdministration + dcite:Researcher: + meaning: dcite:Researcher + dcite:Resources: + meaning: dcite:Resources + dcite:Software: + meaning: dcite:Software + dcite:Supervision: + meaning: dcite:Supervision + dcite:Validation: + meaning: dcite:Validation + dcite:Visualization: + meaning: dcite:Visualization + dcite:Funder: + meaning: dcite:Funder + dcite:Sponsor: + meaning: dcite:Sponsor + dcite:StudyParticipant: + meaning: dcite:StudyParticipant + dcite:Affiliation: + meaning: dcite:Affiliation + dcite:EthicsApproval: + meaning: dcite:EthicsApproval + dcite:Other: + meaning: dcite:Other + +slots: + about: + range: Any + required: false + multivalued: true + any_of: + - range: Disorder + - range: Anatomy + - range: GenericType + access: + notes: + - 'pydantic2linkml: Unable to express the default factory, + at 0xADDRESS>, in LinkML.' + - 'pydantic2linkml: Unable to translate the logic contained in the after validation + function, .' 
+ range: AccessRequirements + required: false + multivalued: true + acknowledgement: + range: string + required: false + affiliation: + range: Affiliation + required: false + multivalued: true + age: + range: PropertyValue + required: false + alleles: + range: Allele + required: true + multivalued: true + alleleSymbol: + range: string + required: false + alleleType: + range: string + required: false + altName: + range: string + required: false + multivalued: true + anatomy: + range: Anatomy + required: false + multivalued: true + approach: + range: ApproachType + required: false + multivalued: true + assayType: + range: AssayType + required: false + multivalued: true + assetsSummary: + range: AssetsSummary + required: true + awardNumber: + range: string + required: false + blobDateModified: + notes: + - 'pydantic2linkml: Unable to express the microseconds precision constraint of + truncate. LinkML lacks direct support for this restriction.' + range: datetime + required: false + cellLine: + range: string + required: false + citation: + range: string + required: true + contactPoint: + range: ContactPoint + required: false + contentSize: + notes: + - 'pydantic2linkml: Unable to translate the logic contained in the after validation + function, >.' + range: Any + required: true + any_of: + - range: string + pattern: ^\s*(\d*\.?\d+)\s*(\w+)? + - range: integer + minimum_value: 0 + contentUrl: + notes: + - 'pydantic2linkml: Unable to translate the logic contained in the wrap validation + function, .wrap_val + at 0xADDRESS>.' + range: uri + required: true + multivalued: true + pattern: ^(?i:http|https)://[^\s]+$ + contributor: + range: Any + required: false + multivalued: true + any_of: + - notes: + - 'pydantic2linkml: Unable to translate the logic contained in the after validation + function, .' + range: Person + - notes: + - 'pydantic2linkml: Unable to translate the logic contained in the after validation + function, .' 
+ range: Organization + dataStandard: + range: StandardsType + required: false + multivalued: true + dataType: + notes: + - 'pydantic2linkml: Unable to translate the logic contained in the wrap validation + function, .wrap_val + at 0xADDRESS>.' + range: uri + required: false + pattern: ^(?i:http|https)://[^\s]+$ + dateCreated: + notes: + - 'pydantic2linkml: Unable to express the microseconds precision constraint of + truncate. LinkML lacks direct support for this restriction.' + range: datetime + required: false + dateModified: + notes: + - 'pydantic2linkml: Unable to express the microseconds precision constraint of + truncate. LinkML lacks direct support for this restriction.' + range: datetime + required: false + datePublished: + notes: + - 'pydantic2linkml: Unable to express the microseconds precision constraint of + truncate. LinkML lacks direct support for this restriction.' + range: datetime + required: true + description: + range: string + required: false + digest: + notes: + - 'pydantic2linkml: Unable to translate the logic contained in the after validation + function, >.' + - 'pydantic2linkml: Warning: The translation is incomplete. `dict` types are yet + to be supported.' + required: true + disorder: + range: Disorder + required: false + multivalued: true + doi: + ifabsent: string() + range: string + required: false + pattern: ^(10\.\d{4,}/[a-z][-a-z]*\.\d{6}/\d+\.\d+\.\d+|)$ + dxdate: + range: Any + required: false + multivalued: true + any_of: + - range: date + - notes: + - 'pydantic2linkml: Unable to express the microseconds precision constraint + of truncate. LinkML lacks direct support for this restriction.' + range: datetime + email: + notes: + - 'pydantic2linkml: Unable to translate the logic contained in the after validation + function, >.' 
+ range: string + required: false + embargoedUntil: + range: date + required: false + encodingFormat: + range: Any + required: true + any_of: + - notes: + - 'pydantic2linkml: Unable to translate the logic contained in the wrap validation + function, .wrap_val + at 0xADDRESS>.' + range: uri + pattern: ^(?i:http|https)://[^\s]+$ + - range: string + endDate: + notes: + - 'pydantic2linkml: Unable to express the microseconds precision constraint of + truncate. LinkML lacks direct support for this restriction.' + range: datetime + required: false + ethicsApproval: + range: EthicsApproval + required: false + multivalued: true + genotype: + range: Any + required: false + any_of: + - range: GenotypeInfo + multivalued: true + - range: string + hasMember: + range: string + required: false + multivalued: true + id: + range: string + required: false + identifier: {} + includeInCitation: + ifabsent: 'True' + range: boolean + required: false + keywords: + range: string + required: false + multivalued: true + license: + range: LicenseType + required: false + multivalued: true + locus: + range: Locus + required: true + locusType: + range: string + required: false + manifestLocation: + notes: + - 'pydantic2linkml: Unable to translate the logic contained in the wrap validation + function, .wrap_val + at 0xADDRESS>.' + range: uri + required: true + multivalued: true + pattern: ^(?i:http|https)://[^\s]+$ + minimum_cardinality: 1 + maxValue: + notes: + - 'pydantic2linkml: LinkML does not have support for `''+inf''`, `''-inf''`, and + `''NaN''` values. Support for these values is not translated.' + range: float + required: false + measurementTechnique: + range: MeasurementTechniqueType + required: false + multivalued: true + minValue: + notes: + - 'pydantic2linkml: LinkML does not have support for `''+inf''`, `''-inf''`, and + `''NaN''` values. Support for these values is not translated.' 
+ range: float + required: false + name: + range: string + numberOfBytes: + range: integer + required: true + numberOfCells: + range: integer + required: false + numberOfFiles: + range: integer + required: true + numberOfSamples: + range: integer + required: false + numberOfSubjects: + range: integer + required: false + path: + range: string + required: true + propertyID: + range: Any + required: false + any_of: + - range: IdentifierType + - notes: + - 'pydantic2linkml: Unable to translate the logic contained in the wrap validation + function, .wrap_val + at 0xADDRESS>.' + range: uri + pattern: ^(?i:http|https)://[^\s]+$ + protocol: + notes: + - 'pydantic2linkml: Unable to translate the logic contained in the wrap validation + function, .wrap_val + at 0xADDRESS>.' + range: uri + required: false + multivalued: true + pattern: ^(?i:http|https)://[^\s]+$ + publishedBy: + range: Any + required: true + any_of: + - notes: + - 'pydantic2linkml: Unable to translate the logic contained in the wrap validation + function, .wrap_val + at 0xADDRESS>.' + range: uri + pattern: ^(?i:http|https)://[^\s]+$ + - range: PublishActivity + relatedParticipant: + range: RelatedParticipant + required: false + multivalued: true + relatedResource: + notes: + - 'pydantic2linkml: Unable to translate the logic contained in the after validation + function, .' + range: Resource + required: false + multivalued: true + relation: + required: true + releaseNotes: + range: string + required: false + repository: + required: false + resourceType: + range: ResourceType + required: false + roleName: + range: RoleType + required: false + multivalued: true + sameAs: + required: false + multivalued: true + sampleType: + range: SampleType + required: true + schemaKey: + notes: + - 'pydantic2linkml: Unable to translate the logic contained in the after validation + function, >.' 
+ ifabsent: string(DandiBaseModel) + designates_type: true + range: string + required: false + schemaVersion: + ifabsent: string(0.7.0) + range: string + required: false + sex: + range: SexType + required: false + species: + range: SpeciesType + required: false + startDate: + notes: + - 'pydantic2linkml: Unable to express the microseconds precision constraint of + truncate. LinkML lacks direct support for this restriction.' + range: datetime + required: false + status: + range: AccessType + required: true + strain: + range: StrainType + required: false + studyTarget: + range: string + required: false + multivalued: true + unitText: + range: string + required: false + url: + range: uri + required: false + pattern: ^(?i:http|https)://[^\s]+$ + used: + range: Equipment + required: false + multivalued: true + value: + notes: + - 'pydantic2linkml: Unable to translate the logic contained in the after validation + function, >.' + range: Any + required: false + any_of: + - range: Any + - range: Any + multivalued: true + valueReference: + range: PropertyValue + required: false + variableMeasured: + required: false + multivalued: true + vendor: + notes: + - 'pydantic2linkml: Unable to translate the logic contained in the after validation + function, .' + range: Organization + required: false + version: + range: string + required: true + wasAssociatedWith: + range: Any + required: false + multivalued: true + any_of: + - notes: + - 'pydantic2linkml: Unable to translate the logic contained in the after validation + function, .' + range: Person + - notes: + - 'pydantic2linkml: Unable to translate the logic contained in the after validation + function, .' 
+ range: Organization + - range: Software + - range: Agent + wasAttributedTo: + range: Participant + required: false + multivalued: true + wasDerivedFrom: + range: BioSample + required: false + multivalued: true + wasGeneratedBy: + required: false + multivalued: true + +classes: + Any: + description: Any object + class_uri: linkml:Any + AccessRequirements: + description: Information about access options for the dataset + notes: + - 'pydantic2linkml: Impossible to generate slot usage entry for the schemaKey + slot. The slot representation of the schemaKey field in the AccessRequirements + Pydantic model has changes in value in meta slots: [''ifabsent'', ''notes'', + ''range''] .' + is_a: DandiBaseModel + slots: + - contactPoint + - description + - embargoedUntil + - status + Activity: + description: Information about the Project activity + notes: + - 'pydantic2linkml: Impossible to generate slot usage entry for the schemaKey + slot. The slot representation of the schemaKey field in the Activity Pydantic + model has changes in value in meta slots: [''ifabsent'', ''notes'', ''range''] + .' + is_a: DandiBaseModel + slots: + - description + - endDate + - identifier + - name + - startDate + - used + - wasAssociatedWith + slot_usage: + identifier: + range: string + required: false + name: + notes: + - 'pydantic2linkml: LinkML does not have direct support for max length constraints. + The max length constraint of 150 is incorporated into the pattern of the + slot.' + required: true + pattern: ^(?=.{,150}$) + Affiliation: + notes: + - 'pydantic2linkml: Impossible to generate slot usage entry for the schemaKey + slot. The slot representation of the schemaKey field in the Affiliation Pydantic + model has changes in value in meta slots: [''ifabsent'', ''notes'', ''range''] + .' 
+ is_a: DandiBaseModel + slots: + - identifier + - name + slot_usage: + identifier: + range: string + required: false + pattern: ^https://ror.org/[a-z0-9]+$ + name: + required: true + Agent: + notes: + - 'pydantic2linkml: Impossible to generate slot usage entry for the schemaKey + slot. The slot representation of the schemaKey field in the Agent Pydantic model + has changes in value in meta slots: [''ifabsent'', ''notes'', ''range''] .' + is_a: DandiBaseModel + slots: + - identifier + - name + - url + slot_usage: + identifier: + range: string + required: false + name: + required: true + url: + notes: + - 'pydantic2linkml: Unable to translate the logic contained in the wrap validation + function, .wrap_val + at 0xADDRESS>.' + Allele: + notes: + - 'pydantic2linkml: Impossible to generate slot usage entry for the schemaKey + slot. The slot representation of the schemaKey field in the Allele Pydantic + model has changes in value in meta slots: [''ifabsent'', ''notes'', ''range''] + .' + is_a: DandiBaseModel + slots: + - alleleSymbol + - alleleType + - identifier + slot_usage: + identifier: + range: Any + required: true + any_of: + - range: string + - range: string + multivalued: true + Anatomy: + description: UBERON or other identifier for anatomical part studied + notes: + - 'pydantic2linkml: Impossible to generate slot usage entry for the schemaKey + slot. The slot representation of the schemaKey field in the Anatomy Pydantic + model has changes in value in meta slots: [''ifabsent'', ''notes'', ''range''] + .' + is_a: BaseType + ApproachType: + description: Identifier for approach used + notes: + - 'pydantic2linkml: Impossible to generate slot usage entry for the schemaKey + slot. The slot representation of the schemaKey field in the ApproachType Pydantic + model has changes in value in meta slots: [''ifabsent'', ''notes'', ''range''] + .' 
+ is_a: BaseType + AssayType: + description: OBI based identifier for the assay(s) used + notes: + - 'pydantic2linkml: Impossible to generate slot usage entry for the schemaKey + slot. The slot representation of the schemaKey field in the AssayType Pydantic + model has changes in value in meta slots: [''ifabsent'', ''notes'', ''range''] + .' + is_a: BaseType + Asset: + description: Metadata used to describe an asset on the server. + notes: + - 'pydantic2linkml: Impossible to generate slot usage entry for the id slot. The + slot representation of the id field in the Asset Pydantic model has changes + in value in meta slots: [''required''] .' + is_a: BareAsset + slots: + - contentUrl + - identifier + slot_usage: + identifier: + range: string + required: true + pattern: ^(?:urn:uuid:)?[0-9a-fA-F]{8}-?[0-9a-fA-F]{4}-?4[0-9a-fA-F]{3}-?[89abAB][0-9a-fA-F]{3}-?[0-9a-fA-F]{12}$ + AssetsSummary: + description: Summary over assets contained in a dandiset (published or not) + notes: + - 'pydantic2linkml: Impossible to generate slot usage entry for the schemaKey + slot. The slot representation of the schemaKey field in the AssetsSummary Pydantic + model has changes in value in meta slots: [''ifabsent'', ''notes'', ''range''] + .' + is_a: DandiBaseModel + slots: + - approach + - dataStandard + - measurementTechnique + - numberOfBytes + - numberOfCells + - numberOfFiles + - numberOfSamples + - numberOfSubjects + - species + - variableMeasured + slot_usage: + species: + multivalued: true + variableMeasured: + range: string + BareAsset: + description: Metadata used to describe an asset anywhere (local or server). Derived + from C2M2 (Level 0 and 1) and schema.org + notes: + - 'pydantic2linkml: Impossible to generate slot usage entry for the access slot. + The slot representation of the access field in the BareAsset Pydantic model + has changes in value in meta slots: [''notes''] .' + - 'pydantic2linkml: Impossible to generate slot usage entry for the schemaKey + slot. 
The slot representation of the schemaKey field in the BareAsset Pydantic + model has changes in value in meta slots: [''ifabsent'', ''notes'', ''range''] + .' + - 'pydantic2linkml: Impossible to generate slot usage entry for the wasGeneratedBy + slot. The slot representation of the wasGeneratedBy field in the BareAsset Pydantic + model has changes in value in meta slots: [''range''] .' + is_a: CommonModel + slots: + - approach + - blobDateModified + - contentSize + - dataType + - dateModified + - digest + - encodingFormat + - measurementTechnique + - path + - sameAs + - variableMeasured + - wasAttributedTo + - wasDerivedFrom + slot_usage: + sameAs: + notes: + - 'pydantic2linkml: Unable to translate the logic contained in the wrap validation + function, .wrap_val + at 0xADDRESS>.' + range: uri + pattern: ^(?i:http|https)://[^\s]+$ + variableMeasured: + range: PropertyValue + BaseType: + description: Base class for enumerated types + notes: + - 'pydantic2linkml: Impossible to generate slot usage entry for the schemaKey + slot. The slot representation of the schemaKey field in the BaseType Pydantic + model has changes in value in meta slots: [''ifabsent'', ''notes''] .' + is_a: DandiBaseModel + slots: + - identifier + - name + slot_usage: + identifier: + range: Any + required: false + any_of: + - notes: + - 'pydantic2linkml: Unable to translate the logic contained in the wrap + validation function, .wrap_val + at 0xADDRESS>.' + range: uri + pattern: ^(?i:http|https)://[^\s]+$ + - range: string + pattern: ^[a-zA-Z0-9-]+:[a-zA-Z0-9-/\._]+$ + name: + notes: + - 'pydantic2linkml: LinkML does not have direct support for max length constraints. + The max length constraint of 150 is incorporated into the pattern of the + slot.' + required: false + pattern: ^(?=.{,150}$) + BioSample: + description: Description of the sample that was studied + notes: + - 'pydantic2linkml: Impossible to generate slot usage entry for the schemaKey + slot. 
The slot representation of the schemaKey field in the BioSample Pydantic + model has changes in value in meta slots: [''ifabsent'', ''notes'', ''range''] + .' + is_a: DandiBaseModel + slots: + - anatomy + - assayType + - hasMember + - identifier + - sameAs + - sampleType + - wasAttributedTo + - wasDerivedFrom + slot_usage: + identifier: + range: string + required: true + sameAs: + range: string + CommonModel: + notes: + - 'pydantic2linkml: Impossible to generate slot usage entry for the schemaKey + slot. The slot representation of the schemaKey field in the CommonModel Pydantic + model has changes in value in meta slots: [''ifabsent'', ''notes''] .' + is_a: DandiBaseModel + slots: + - about + - access + - acknowledgement + - contributor + - description + - ethicsApproval + - keywords + - license + - name + - protocol + - relatedResource + - repository + - schemaVersion + - studyTarget + - url + - wasGeneratedBy + slot_usage: + name: + notes: + - 'pydantic2linkml: LinkML does not have direct support for max length constraints. + The max length constraint of 150 is incorporated into the pattern of the + slot.' + required: false + pattern: ^(?=.{,150}$) + repository: + notes: + - 'pydantic2linkml: Unable to translate the logic contained in the wrap validation + function, .wrap_val + at 0xADDRESS>.' + range: uri + pattern: ^(?i:http|https)://[^\s]+$ + url: + notes: + - 'pydantic2linkml: Unable to translate the logic contained in the wrap validation + function, .wrap_val + at 0xADDRESS>.' + wasGeneratedBy: + range: Activity + ContactPoint: + notes: + - 'pydantic2linkml: Impossible to generate slot usage entry for the schemaKey + slot. The slot representation of the schemaKey field in the ContactPoint Pydantic + model has changes in value in meta slots: [''ifabsent'', ''notes'', ''range''] + .' 
+ is_a: DandiBaseModel + slots: + - email + - url + slot_usage: + url: + notes: + - 'pydantic2linkml: Unable to translate the logic contained in the wrap validation + function, .wrap_val + at 0xADDRESS>.' + Contributor: + notes: + - 'pydantic2linkml: Impossible to generate slot usage entry for the schemaKey + slot. The slot representation of the schemaKey field in the Contributor Pydantic + model has changes in value in meta slots: [''ifabsent'', ''notes'', ''range''] + .' + is_a: DandiBaseModel + slots: + - awardNumber + - email + - identifier + - includeInCitation + - name + - roleName + - url + slot_usage: + identifier: + range: string + required: false + name: + required: false + url: + notes: + - 'pydantic2linkml: Unable to translate the logic contained in the wrap validation + function, .wrap_val + at 0xADDRESS>.' + DandiBaseModel: + slots: + - id + - schemaKey + Dandiset: + description: A body of structured information describing a DANDI dataset. + notes: + - 'pydantic2linkml: Impossible to generate slot usage entry for the contributor + slot. The slot representation of the contributor field in the Dandiset Pydantic + model has changes in value in meta slots: [''required''] .' + - 'pydantic2linkml: Impossible to generate slot usage entry for the description + slot. The slot representation of the description field in the Dandiset Pydantic + model has changes in value in meta slots: [''required''] .' + - 'pydantic2linkml: Impossible to generate slot usage entry for the id slot. The + slot representation of the id field in the Dandiset Pydantic model has changes + in value in meta slots: [''required''] .' + - 'pydantic2linkml: Impossible to generate slot usage entry for the license slot. + The slot representation of the license field in the Dandiset Pydantic model + has changes in value in meta slots: [''required''] .' + - 'pydantic2linkml: Impossible to generate slot usage entry for the name slot. 
+ The slot representation of the name field in the Dandiset Pydantic model has + changes in value in meta slots: [''required''] .' + - 'pydantic2linkml: Impossible to generate slot usage entry for the schemaKey + slot. The slot representation of the schemaKey field in the Dandiset Pydantic + model has changes in value in meta slots: [''ifabsent'', ''notes'', ''range''] + .' + - 'pydantic2linkml: Impossible to generate slot usage entry for the wasGeneratedBy + slot. The slot representation of the wasGeneratedBy field in the Dandiset Pydantic + model has changes in value in meta slots: [''range''] .' + is_a: CommonModel + slots: + - assetsSummary + - citation + - dateCreated + - dateModified + - identifier + - manifestLocation + - sameAs + - version + slot_usage: + identifier: + range: string + required: true + pattern: ^[A-Z][-A-Z]*:\d{6}$ + sameAs: + range: string + pattern: ^dandi://[A-Z][-A-Z]*/\d{6}(@(draft|\d+\.\d+\.\d+))?(/\S+)?$ + Disorder: + description: Biolink, SNOMED, or other identifier for disorder studied + notes: + - 'pydantic2linkml: Impossible to generate slot usage entry for the schemaKey + slot. The slot representation of the schemaKey field in the Disorder Pydantic + model has changes in value in meta slots: [''ifabsent'', ''notes'', ''range''] + .' + is_a: BaseType + slots: + - dxdate + Equipment: + notes: + - 'pydantic2linkml: Impossible to generate slot usage entry for the schemaKey + slot. The slot representation of the schemaKey field in the Equipment Pydantic + model has changes in value in meta slots: [''ifabsent'', ''notes'', ''range''] + .' + is_a: DandiBaseModel + slots: + - description + - identifier + - name + slot_usage: + identifier: + range: string + required: false + name: + notes: + - 'pydantic2linkml: LinkML does not have direct support for max length constraints. + The max length constraint of 150 is incorporated into the pattern of the + slot.' 
+ required: true + pattern: ^(?=.{,150}$) + EthicsApproval: + description: Information about ethics committee approval for project + notes: + - 'pydantic2linkml: Impossible to generate slot usage entry for the schemaKey + slot. The slot representation of the schemaKey field in the EthicsApproval Pydantic + model has changes in value in meta slots: [''ifabsent'', ''notes'', ''range''] + .' + is_a: DandiBaseModel + slots: + - contactPoint + - identifier + slot_usage: + identifier: + range: string + required: true + GenericType: + description: An object to capture any type for about + notes: + - 'pydantic2linkml: Impossible to generate slot usage entry for the schemaKey + slot. The slot representation of the schemaKey field in the GenericType Pydantic + model has changes in value in meta slots: [''ifabsent'', ''notes'', ''range''] + .' + is_a: BaseType + GenotypeInfo: + notes: + - 'pydantic2linkml: Impossible to generate slot usage entry for the schemaKey + slot. The slot representation of the schemaKey field in the GenotypeInfo Pydantic + model has changes in value in meta slots: [''ifabsent'', ''notes'', ''range''] + .' + is_a: DandiBaseModel + slots: + - alleles + - locus + - wasGeneratedBy + slot_usage: + wasGeneratedBy: + range: Session + Locus: + notes: + - 'pydantic2linkml: Impossible to generate slot usage entry for the schemaKey + slot. The slot representation of the schemaKey field in the Locus Pydantic model + has changes in value in meta slots: [''ifabsent'', ''notes'', ''range''] .' + is_a: DandiBaseModel + slots: + - identifier + - locusType + slot_usage: + identifier: + range: Any + required: true + any_of: + - range: string + - range: string + multivalued: true + MeasurementTechniqueType: + description: Identifier for measurement technique used + notes: + - 'pydantic2linkml: Impossible to generate slot usage entry for the schemaKey + slot. 
The slot representation of the schemaKey field in the MeasurementTechniqueType + Pydantic model has changes in value in meta slots: [''ifabsent'', ''notes'', + ''range''] .' + is_a: BaseType + Organization: + notes: + - 'pydantic2linkml: Impossible to generate slot usage entry for the includeInCitation + slot. The slot representation of the includeInCitation field in the Organization + Pydantic model has changes in value in meta slots: [''ifabsent''] .' + - 'pydantic2linkml: Impossible to generate slot usage entry for the schemaKey + slot. The slot representation of the schemaKey field in the Organization Pydantic + model has changes in value in meta slots: [''any_of'', ''ifabsent'', ''notes''] + .' + is_a: Contributor + slots: + - contactPoint + slot_usage: + contactPoint: + multivalued: true + identifier: + pattern: ^https://ror.org/[a-z0-9]+$ + Participant: + description: Description about the Participant or Subject studied. The Participant + or Subject can be any individual or synthesized Agent. The properties of the + Participant or Subject refers to information at the timepoint when the Participant + or Subject engaged in the production of data being described. + notes: + - 'pydantic2linkml: Impossible to generate slot usage entry for the schemaKey + slot. The slot representation of the schemaKey field in the Participant Pydantic + model has changes in value in meta slots: [''ifabsent'', ''notes'', ''range''] + .' + is_a: DandiBaseModel + slots: + - age + - altName + - cellLine + - disorder + - genotype + - identifier + - relatedParticipant + - sameAs + - sex + - species + - strain + - vendor + slot_usage: + identifier: + range: string + required: true + sameAs: + range: string + Person: + notes: + - 'pydantic2linkml: Impossible to generate slot usage entry for the name slot. + The slot representation of the name field in the Person Pydantic model has changes + in value in meta slots: [''required''] .' 
+ - 'pydantic2linkml: Impossible to generate slot usage entry for the schemaKey + slot. The slot representation of the schemaKey field in the Person Pydantic + model has changes in value in meta slots: [''any_of'', ''ifabsent'', ''notes''] + .' + is_a: Contributor + slots: + - affiliation + slot_usage: + identifier: + pattern: ^\d{4}-\d{4}-\d{4}-(\d{3}X|\d{4})$ + Project: + notes: + - 'pydantic2linkml: Impossible to generate slot usage entry for the schemaKey + slot. The slot representation of the schemaKey field in the Project Pydantic + model has changes in value in meta slots: [''any_of'', ''ifabsent'', ''notes''] + .' + is_a: Activity + PropertyValue: + notes: + - 'pydantic2linkml: Impossible to generate slot usage entry for the schemaKey + slot. The slot representation of the schemaKey field in the PropertyValue Pydantic + model has changes in value in meta slots: [''ifabsent'', ''notes'', ''range''] + .' + is_a: DandiBaseModel + slots: + - maxValue + - minValue + - propertyID + - unitText + - value + - valueReference + Publishable: + notes: + - 'pydantic2linkml: Impossible to generate slot usage entry for the schemaKey + slot. The slot representation of the schemaKey field in the Publishable Pydantic + model has changes in value in meta slots: [''ifabsent'', ''notes'', ''range''] + .' + is_a: DandiBaseModel + slots: + - datePublished + - publishedBy + PublishActivity: + notes: + - 'pydantic2linkml: Impossible to generate slot usage entry for the schemaKey + slot. The slot representation of the schemaKey field in the PublishActivity + Pydantic model has changes in value in meta slots: [''any_of'', ''ifabsent'', + ''notes''] .' + is_a: Activity + PublishedAsset: + notes: + - 'pydantic2linkml: Impossible to generate slot usage entry for the schemaKey + slot. The slot representation of the schemaKey field in the PublishedAsset Pydantic + model has changes in value in meta slots: [''notes''] .' 
+ - 'pydantic2linkml: Warning: LinkML does not support multiple inheritance. Publishable + is not specified as a parent, through the `is_a` meta slot, but as a mixin.' + is_a: Asset + mixins: + - Publishable + slot_usage: + id: + pattern: ^dandiasset:[a-f0-9]{8}[-]*[a-f0-9]{4}[-]*[a-f0-9]{4}[-]*[a-f0-9]{4}[-]*[a-f0-9]{12}$ + PublishedDandiset: + notes: + - 'pydantic2linkml: Impossible to generate slot usage entry for the id slot. The + slot representation of the id field in the PublishedDandiset Pydantic model + has changes in value in meta slots: [''pattern''] .' + - 'pydantic2linkml: Impossible to generate slot usage entry for the schemaKey + slot. The slot representation of the schemaKey field in the PublishedDandiset + Pydantic model has changes in value in meta slots: [''notes''] .' + - 'pydantic2linkml: Impossible to generate slot usage entry for the url slot. + The slot representation of the url field in the PublishedDandiset Pydantic model + has changes in value in meta slots: [''notes'', ''required''] .' + - 'pydantic2linkml: Warning: LinkML does not support multiple inheritance. Publishable + is not specified as a parent, through the `is_a` meta slot, but as a mixin.' + is_a: Dandiset + mixins: + - Publishable + slots: + - doi + - releaseNotes + RelatedParticipant: + notes: + - 'pydantic2linkml: Impossible to generate slot usage entry for the schemaKey + slot. The slot representation of the schemaKey field in the RelatedParticipant + Pydantic model has changes in value in meta slots: [''ifabsent'', ''notes'', + ''range''] .' + is_a: DandiBaseModel + slots: + - identifier + - name + - relation + - url + slot_usage: + identifier: + range: string + required: false + name: + required: false + relation: + range: ParticipantRelationType + url: + notes: + - 'pydantic2linkml: Unable to translate the logic contained in the wrap validation + function, .wrap_val + at 0xADDRESS>.' 
+ Resource: + notes: + - 'pydantic2linkml: Impossible to generate slot usage entry for the schemaKey + slot. The slot representation of the schemaKey field in the Resource Pydantic + model has changes in value in meta slots: [''ifabsent'', ''notes'', ''range''] + .' + is_a: DandiBaseModel + slots: + - identifier + - name + - relation + - repository + - resourceType + - url + slot_usage: + identifier: + range: string + required: false + name: + required: false + relation: + range: RelationType + repository: + range: string + url: + notes: + - 'pydantic2linkml: Unable to translate the logic contained in the wrap validation + function, .wrap_val + at 0xADDRESS>.' + SampleType: + description: OBI based identifier for the sample type used + notes: + - 'pydantic2linkml: Impossible to generate slot usage entry for the schemaKey + slot. The slot representation of the schemaKey field in the SampleType Pydantic + model has changes in value in meta slots: [''ifabsent'', ''notes'', ''range''] + .' + is_a: BaseType + Session: + notes: + - 'pydantic2linkml: Impossible to generate slot usage entry for the schemaKey + slot. The slot representation of the schemaKey field in the Session Pydantic + model has changes in value in meta slots: [''any_of'', ''ifabsent'', ''notes''] + .' + is_a: Activity + SexType: + description: Identifier for the sex of the sample + notes: + - 'pydantic2linkml: Impossible to generate slot usage entry for the schemaKey + slot. The slot representation of the schemaKey field in the SexType Pydantic + model has changes in value in meta slots: [''ifabsent'', ''notes'', ''range''] + .' + is_a: BaseType + Software: + notes: + - 'pydantic2linkml: Impossible to generate slot usage entry for the schemaKey + slot. The slot representation of the schemaKey field in the Software Pydantic + model has changes in value in meta slots: [''ifabsent'', ''notes'', ''range''] + .' 
+ is_a: DandiBaseModel + slots: + - identifier + - name + - url + - version + slot_usage: + identifier: + range: string + required: false + pattern: ^RRID:.* + name: + required: true + url: + notes: + - 'pydantic2linkml: Unable to translate the logic contained in the wrap validation + function, .wrap_val + at 0xADDRESS>.' + SpeciesType: + description: Identifier for species of the sample + notes: + - 'pydantic2linkml: Impossible to generate slot usage entry for the schemaKey + slot. The slot representation of the schemaKey field in the SpeciesType Pydantic + model has changes in value in meta slots: [''ifabsent'', ''notes'', ''range''] + .' + is_a: BaseType + StandardsType: + description: Identifier for data standard used + notes: + - 'pydantic2linkml: Impossible to generate slot usage entry for the schemaKey + slot. The slot representation of the schemaKey field in the StandardsType Pydantic + model has changes in value in meta slots: [''ifabsent'', ''notes'', ''range''] + .' + is_a: BaseType + StrainType: + description: Identifier for the strain of the sample + notes: + - 'pydantic2linkml: Impossible to generate slot usage entry for the schemaKey + slot. The slot representation of the schemaKey field in the StrainType Pydantic + model has changes in value in meta slots: [''ifabsent'', ''notes'', ''range''] + .' 
+ is_a: BaseType diff --git a/dandischema/models_importstab.py b/dandischema/models_importstab.py deleted file mode 100644 index 6815ca49..00000000 --- a/dandischema/models_importstab.py +++ /dev/null @@ -1,9 +0,0 @@ -from .models_linkml import * # noqa: F401,F403 -from .models_orig import DANDI_INSTANCE_URL_PATTERN # noqa: F401 - -# TODO: temporary imports of consts etc which might need to be 'redone' -# so we do not duplicate them - - -# TODO: do the extra tune ups like linking extra validations etc, -# potentially copied from models_orig.py diff --git a/dandischema/models_linkml.py b/dandischema/models_linkml.py new file mode 100644 index 00000000..03fdb9f7 --- /dev/null +++ b/dandischema/models_linkml.py @@ -0,0 +1,7558 @@ +from __future__ import annotations + +from datetime import date, datetime, time +from decimal import Decimal +from enum import Enum +import re +import sys +from typing import Any, ClassVar, Literal, Optional, Union + +from pydantic import ( + BaseModel, + ConfigDict, + Field, + RootModel, + SerializationInfo, + SerializerFunctionWrapHandler, + field_validator, + model_serializer, +) + +metamodel_version = "1.7.0" +version = "0.7.0" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + serialize_by_alias=True, + validate_by_name=True, + validate_assignment=True, + validate_default=True, + extra="forbid", + arbitrary_types_allowed=True, + use_enum_values=True, + strict=False, + ) + + +class LinkMLMeta(RootModel): + root: dict[str, Any] = {} + model_config = ConfigDict(frozen=True) + + def __getattr__(self, key: str): + return getattr(self.root, key) + + def __getitem__(self, key: str): + return self.root[key] + + def __setitem__(self, key: str, value): + self.root[key] = value + + def __contains__(self, key: str) -> bool: + return key in self.root + + +linkml_meta = LinkMLMeta( + { + "default_prefix": "dandi", + "default_range": "string", + "id": "https://schema.dandiarchive.org/s/dandi/v0.7", + "imports": ["linkml:types"], 
+ "name": "dandi-schema", + "prefixes": { + "DANDI": { + "prefix_prefix": "DANDI", + "prefix_reference": "http://dandiarchive.org/dandiset/", + }, + "ORCID": { + "prefix_prefix": "ORCID", + "prefix_reference": "https://orcid.org/", + }, + "PATO": { + "prefix_prefix": "PATO", + "prefix_reference": "http://purl.obolibrary.org/obo/PATO_", + }, + "ROR": {"prefix_prefix": "ROR", "prefix_reference": "https://ror.org/"}, + "RRID": { + "prefix_prefix": "RRID", + "prefix_reference": "https://scicrunch.org/resolver/RRID:", + }, + "dandi": { + "prefix_prefix": "dandi", + "prefix_reference": "http://schema.dandiarchive.org/", + }, + "dandiasset": { + "prefix_prefix": "dandiasset", + "prefix_reference": "http://dandiarchive.org/asset/", + }, + "dcite": { + "prefix_prefix": "dcite", + "prefix_reference": "http://schema.dandiarchive.org/datacite/", + }, + "dct": { + "prefix_prefix": "dct", + "prefix_reference": "http://purl.org/dc/terms/", + }, + "linkml": { + "prefix_prefix": "linkml", + "prefix_reference": "https://w3id.org/linkml/", + }, + "nidm": { + "prefix_prefix": "nidm", + "prefix_reference": "http://purl.org/nidash/nidm#", + }, + "owl": { + "prefix_prefix": "owl", + "prefix_reference": "http://www.w3.org/2002/07/owl#", + }, + "pav": {"prefix_prefix": "pav", "prefix_reference": "http://purl.org/pav/"}, + "prov": { + "prefix_prefix": "prov", + "prefix_reference": "http://www.w3.org/ns/prov#", + }, + "rdf": { + "prefix_prefix": "rdf", + "prefix_reference": "http://www.w3.org/1999/02/22-rdf-syntax-ns#", + }, + "rdfa": { + "prefix_prefix": "rdfa", + "prefix_reference": "http://www.w3.org/ns/rdfa#", + }, + "rdfs": { + "prefix_prefix": "rdfs", + "prefix_reference": "http://www.w3.org/2000/01/rdf-schema#", + }, + "rs": { + "prefix_prefix": "rs", + "prefix_reference": "http://schema.repronim.org/", + }, + "schema": { + "prefix_prefix": "schema", + "prefix_reference": "http://schema.org/", + }, + "skos": { + "prefix_prefix": "skos", + "prefix_reference": 
"http://www.w3.org/2004/02/skos/core#", + }, + "spdx": { + "prefix_prefix": "spdx", + "prefix_reference": "http://spdx.org/licenses/", + }, + "uuid": { + "prefix_prefix": "uuid", + "prefix_reference": "http://uuid.repronim.org/", + }, + "xsd": { + "prefix_prefix": "xsd", + "prefix_reference": "http://www.w3.org/2001/XMLSchema#", + }, + }, + "source_file": "dandischema/models.yaml", + "status": "eunal:concept-status/DRAFT", + } +) + + +class AccessType(str, Enum): + """ + An enumeration of access status options + """ + + dandiCOLONOpenAccess = "dandi:OpenAccess" + dandiCOLONEmbargoedAccess = "dandi:EmbargoedAccess" + + +class AgeReferenceType(str, Enum): + """ + An enumeration of age reference + """ + + dandiCOLONBirthReference = "dandi:BirthReference" + dandiCOLONGestationalReference = "dandi:GestationalReference" + + +class DigestType(str, Enum): + """ + An enumeration of checksum types + """ + + dandiCOLONmd5 = "dandi:md5" + dandiCOLONsha1 = "dandi:sha1" + dandiCOLONsha2_256 = "dandi:sha2-256" + dandiCOLONsha3_256 = "dandi:sha3-256" + dandiCOLONblake2b_256 = "dandi:blake2b-256" + dandiCOLONblake3 = "dandi:blake3" + dandiCOLONdandi_etag = "dandi:dandi-etag" + dandiCOLONdandi_zarr_checksum = "dandi:dandi-zarr-checksum" + + +class IdentifierType(str, Enum): + """ + An enumeration of identifiers + """ + + dandiCOLONdoi = "dandi:doi" + dandiCOLONorcid = "dandi:orcid" + dandiCOLONror = "dandi:ror" + dandiCOLONdandi = "dandi:dandi" + dandiCOLONrrid = "dandi:rrid" + + +class LicenseType(str, Enum): + """ + An enumeration. 
+ """ + + spdxCOLONCC0_1FULL_STOP0 = "spdx:CC0-1.0" + spdxCOLONCC_BY_4FULL_STOP0 = "spdx:CC-BY-4.0" + + +class ParticipantRelationType(str, Enum): + """ + An enumeration of participant relations + """ + + dandiCOLONisChildOf = "dandi:isChildOf" + dandiCOLONisParentOf = "dandi:isParentOf" + dandiCOLONisSiblingOf = "dandi:isSiblingOf" + dandiCOLONisMonozygoticTwinOf = "dandi:isMonozygoticTwinOf" + dandiCOLONisDizygoticTwinOf = "dandi:isDizygoticTwinOf" + + +class RelationType(str, Enum): + """ + An enumeration of resource relations + """ + + IsCitedBy = "dcite:IsCitedBy" + Cites = "dcite:Cites" + IsSupplementTo = "dcite:IsSupplementTo" + IsSupplementedBy = "dcite:IsSupplementedBy" + IsContinuedBy = "dcite:IsContinuedBy" + Continues = "dcite:Continues" + Describes = "dcite:Describes" + IsDescribedBy = "dcite:IsDescribedBy" + HasMetadata = "dcite:HasMetadata" + IsMetadataFor = "dcite:IsMetadataFor" + HasVersion = "dcite:HasVersion" + IsVersionOf = "dcite:IsVersionOf" + IsNewVersionOf = "dcite:IsNewVersionOf" + IsPreviousVersionOf = "dcite:IsPreviousVersionOf" + IsPartOf = "dcite:IsPartOf" + HasPart = "dcite:HasPart" + IsReferencedBy = "dcite:IsReferencedBy" + References = "dcite:References" + IsDocumentedBy = "dcite:IsDocumentedBy" + Documents = "dcite:Documents" + IsCompiledBy = "dcite:IsCompiledBy" + Compiles = "dcite:Compiles" + IsVariantFormOf = "dcite:IsVariantFormOf" + IsOriginalFormOf = "dcite:IsOriginalFormOf" + IsIdenticalTo = "dcite:IsIdenticalTo" + IsReviewedBy = "dcite:IsReviewedBy" + Reviews = "dcite:Reviews" + IsDerivedFrom = "dcite:IsDerivedFrom" + IsSourceOf = "dcite:IsSourceOf" + IsRequiredBy = "dcite:IsRequiredBy" + Requires = "dcite:Requires" + Obsoletes = "dcite:Obsoletes" + IsObsoletedBy = "dcite:IsObsoletedBy" + IsPublishedIn = "dcite:IsPublishedIn" + + +class ResourceType(str, Enum): + """ + An enumeration of resource types + """ + + Audiovisual = "dcite:Audiovisual" + Book = "dcite:Book" + BookChapter = "dcite:BookChapter" + Collection = 
"dcite:Collection" + ComputationalNotebook = "dcite:ComputationalNotebook" + ConferencePaper = "dcite:ConferencePaper" + ConferenceProceeding = "dcite:ConferenceProceeding" + DataPaper = "dcite:DataPaper" + Dataset = "dcite:Dataset" + Dissertation = "dcite:Dissertation" + Event = "dcite:Event" + Image = "dcite:Image" + Instrument = "dcite:Instrument" + InteractiveResource = "dcite:InteractiveResource" + Journal = "dcite:Journal" + JournalArticle = "dcite:JournalArticle" + Model = "dcite:Model" + OutputManagementPlan = "dcite:OutputManagementPlan" + PeerReview = "dcite:PeerReview" + PhysicalObject = "dcite:PhysicalObject" + Preprint = "dcite:Preprint" + Report = "dcite:Report" + Service = "dcite:Service" + Software = "dcite:Software" + Sound = "dcite:Sound" + Standard = "dcite:Standard" + StudyRegistration = "dcite:StudyRegistration" + Text = "dcite:Text" + Workflow = "dcite:Workflow" + Other = "dcite:Other" + + +class RoleType(str, Enum): + """ + An enumeration of roles + """ + + Author = "dcite:Author" + Conceptualization = "dcite:Conceptualization" + ContactPerson = "dcite:ContactPerson" + DataCollector = "dcite:DataCollector" + DataCurator = "dcite:DataCurator" + DataManager = "dcite:DataManager" + FormalAnalysis = "dcite:FormalAnalysis" + FundingAcquisition = "dcite:FundingAcquisition" + Investigation = "dcite:Investigation" + Maintainer = "dcite:Maintainer" + Methodology = "dcite:Methodology" + Producer = "dcite:Producer" + ProjectLeader = "dcite:ProjectLeader" + ProjectManager = "dcite:ProjectManager" + ProjectMember = "dcite:ProjectMember" + ProjectAdministration = "dcite:ProjectAdministration" + Researcher = "dcite:Researcher" + Resources = "dcite:Resources" + Software = "dcite:Software" + Supervision = "dcite:Supervision" + Validation = "dcite:Validation" + Visualization = "dcite:Visualization" + Funder = "dcite:Funder" + Sponsor = "dcite:Sponsor" + StudyParticipant = "dcite:StudyParticipant" + Affiliation = "dcite:Affiliation" + EthicsApproval = 
"dcite:EthicsApproval" + Other = "dcite:Other" + + +class DandiBaseModel(ConfiguredBaseModel): + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "https://schema.dandiarchive.org/s/dandi/v0.7"} + ) + + id: Optional[str] = Field( + default=None, + json_schema_extra={"linkml_meta": {"domain_of": ["DandiBaseModel"]}}, + ) + schemaKey: Literal["DandiBaseModel"] = Field( + default="DandiBaseModel", + json_schema_extra={ + "linkml_meta": { + "designates_type": True, + "domain_of": ["DandiBaseModel"], + "ifabsent": "string(DandiBaseModel)", + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "after validation function, >." + ], + } + }, + ) + + +class AccessRequirements(DandiBaseModel): + """ + Information about access options for the dataset + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + { + "from_schema": "https://schema.dandiarchive.org/s/dandi/v0.7", + "notes": [ + "pydantic2linkml: Impossible to generate slot usage entry for the " + "schemaKey slot. The slot representation of the schemaKey field in " + "the AccessRequirements Pydantic model has changes in value in meta " + "slots: ['ifabsent', 'notes', 'range'] ." 
+ ], + } + ) + + contactPoint: Optional[ContactPoint] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": ["AccessRequirements", "EthicsApproval", "Organization"] + } + }, + ) + description: Optional[str] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": [ + "AccessRequirements", + "Activity", + "CommonModel", + "Equipment", + ] + } + }, + ) + embargoedUntil: Optional[date] = Field( + default=None, + json_schema_extra={"linkml_meta": {"domain_of": ["AccessRequirements"]}}, + ) + status: AccessType = Field( + default=..., + json_schema_extra={"linkml_meta": {"domain_of": ["AccessRequirements"]}}, + ) + id: Optional[str] = Field( + default=None, + json_schema_extra={"linkml_meta": {"domain_of": ["DandiBaseModel"]}}, + ) + schemaKey: Literal["AccessRequirements"] = Field( + default="AccessRequirements", + json_schema_extra={ + "linkml_meta": { + "designates_type": True, + "domain_of": ["DandiBaseModel"], + "ifabsent": "string(DandiBaseModel)", + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "after validation function, >." + ], + } + }, + ) + + +class Activity(DandiBaseModel): + """ + Information about the Project activity + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + { + "from_schema": "https://schema.dandiarchive.org/s/dandi/v0.7", + "notes": [ + "pydantic2linkml: Impossible to generate slot usage entry for the " + "schemaKey slot. The slot representation of the schemaKey field in " + "the Activity Pydantic model has changes in value in meta slots: " + "['ifabsent', 'notes', 'range'] ." + ], + "slot_usage": { + "identifier": { + "name": "identifier", + "range": "string", + "required": False, + }, + "name": { + "name": "name", + "notes": [ + "pydantic2linkml: LinkML does not have " + "direct support for max length constraints. " + "The max length constraint of 150 is " + "incorporated into the pattern of the " + "slot." 
+ ], + "pattern": "^(?=.{,150}$)", + "required": True, + }, + }, + } + ) + + description: Optional[str] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": [ + "AccessRequirements", + "Activity", + "CommonModel", + "Equipment", + ] + } + }, + ) + endDate: Optional[datetime] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": ["Activity"], + "notes": [ + "pydantic2linkml: Unable to express the microseconds precision " + "constraint of truncate. LinkML lacks direct support for this " + "restriction." + ], + } + }, + ) + identifier: Optional[str] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": [ + "Activity", + "Affiliation", + "Agent", + "Allele", + "Asset", + "BaseType", + "BioSample", + "Contributor", + "Dandiset", + "Equipment", + "EthicsApproval", + "Locus", + "Participant", + "RelatedParticipant", + "Resource", + "Software", + ] + } + }, + ) + name: str = Field( + default=..., + json_schema_extra={ + "linkml_meta": { + "domain_of": [ + "Activity", + "Affiliation", + "Agent", + "BaseType", + "CommonModel", + "Contributor", + "Equipment", + "RelatedParticipant", + "Resource", + "Software", + ], + "notes": [ + "pydantic2linkml: LinkML does not have direct support for max " + "length constraints. The max length constraint of 150 is " + "incorporated into the pattern of the slot." + ], + } + }, + ) + startDate: Optional[datetime] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": ["Activity"], + "notes": [ + "pydantic2linkml: Unable to express the microseconds precision " + "constraint of truncate. LinkML lacks direct support for this " + "restriction." 
+ ], + } + }, + ) + used: Optional[list[Equipment]] = Field( + default=None, json_schema_extra={"linkml_meta": {"domain_of": ["Activity"]}} + ) + wasAssociatedWith: Optional[list[Union[Agent, Organization, Person, Software]]] = ( + Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "any_of": [ + { + "notes": [ + "pydantic2linkml: Unable to translate the logic " + "contained in the after validation function, ." + ], + "range": "Person", + }, + { + "notes": [ + "pydantic2linkml: Unable to translate the logic " + "contained in the after validation function, ." + ], + "range": "Organization", + }, + {"range": "Software"}, + {"range": "Agent"}, + ], + "domain_of": ["Activity"], + } + }, + ) + ) + id: Optional[str] = Field( + default=None, + json_schema_extra={"linkml_meta": {"domain_of": ["DandiBaseModel"]}}, + ) + schemaKey: Literal["Activity"] = Field( + default="Activity", + json_schema_extra={ + "linkml_meta": { + "designates_type": True, + "domain_of": ["DandiBaseModel"], + "ifabsent": "string(DandiBaseModel)", + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "after validation function, >." + ], + } + }, + ) + + @field_validator("name") + def pattern_name(cls, v): + pattern = re.compile(r"^(?=.{,150}$)") + if isinstance(v, list): + for element in v: + if isinstance(element, str) and not pattern.match(element): + err_msg = f"Invalid name format: {element}" + raise ValueError(err_msg) + elif isinstance(v, str) and not pattern.match(v): + err_msg = f"Invalid name format: {v}" + raise ValueError(err_msg) + return v + + +class Affiliation(DandiBaseModel): + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + { + "from_schema": "https://schema.dandiarchive.org/s/dandi/v0.7", + "notes": [ + "pydantic2linkml: Impossible to generate slot usage entry for the " + "schemaKey slot. 
The slot representation of the schemaKey field in " + "the Affiliation Pydantic model has changes in value in meta slots: " + "['ifabsent', 'notes', 'range'] ." + ], + "slot_usage": { + "identifier": { + "name": "identifier", + "pattern": "^https://ror.org/[a-z0-9]+$", + "range": "string", + "required": False, + }, + "name": {"name": "name", "required": True}, + }, + } + ) + + identifier: Optional[str] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": [ + "Activity", + "Affiliation", + "Agent", + "Allele", + "Asset", + "BaseType", + "BioSample", + "Contributor", + "Dandiset", + "Equipment", + "EthicsApproval", + "Locus", + "Participant", + "RelatedParticipant", + "Resource", + "Software", + ] + } + }, + ) + name: str = Field( + default=..., + json_schema_extra={ + "linkml_meta": { + "domain_of": [ + "Activity", + "Affiliation", + "Agent", + "BaseType", + "CommonModel", + "Contributor", + "Equipment", + "RelatedParticipant", + "Resource", + "Software", + ] + } + }, + ) + id: Optional[str] = Field( + default=None, + json_schema_extra={"linkml_meta": {"domain_of": ["DandiBaseModel"]}}, + ) + schemaKey: Literal["Affiliation"] = Field( + default="Affiliation", + json_schema_extra={ + "linkml_meta": { + "designates_type": True, + "domain_of": ["DandiBaseModel"], + "ifabsent": "string(DandiBaseModel)", + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "after validation function, >." 
+ ], + } + }, + ) + + @field_validator("identifier") + def pattern_identifier(cls, v): + pattern = re.compile(r"^https://ror.org/[a-z0-9]+$") + if isinstance(v, list): + for element in v: + if isinstance(element, str) and not pattern.match(element): + err_msg = f"Invalid identifier format: {element}" + raise ValueError(err_msg) + elif isinstance(v, str) and not pattern.match(v): + err_msg = f"Invalid identifier format: {v}" + raise ValueError(err_msg) + return v + + +class Agent(DandiBaseModel): + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + { + "from_schema": "https://schema.dandiarchive.org/s/dandi/v0.7", + "notes": [ + "pydantic2linkml: Impossible to generate slot usage entry for the " + "schemaKey slot. The slot representation of the schemaKey field in " + "the Agent Pydantic model has changes in value in meta slots: " + "['ifabsent', 'notes', 'range'] ." + ], + "slot_usage": { + "identifier": { + "name": "identifier", + "range": "string", + "required": False, + }, + "name": {"name": "name", "required": True}, + "url": { + "name": "url", + "notes": [ + "pydantic2linkml: Unable to translate the " + "logic contained in the wrap validation " + "function, .wrap_val " + "at 0xADDRESS>." 
+ ], + }, + }, + } + ) + + identifier: Optional[str] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": [ + "Activity", + "Affiliation", + "Agent", + "Allele", + "Asset", + "BaseType", + "BioSample", + "Contributor", + "Dandiset", + "Equipment", + "EthicsApproval", + "Locus", + "Participant", + "RelatedParticipant", + "Resource", + "Software", + ] + } + }, + ) + name: str = Field( + default=..., + json_schema_extra={ + "linkml_meta": { + "domain_of": [ + "Activity", + "Affiliation", + "Agent", + "BaseType", + "CommonModel", + "Contributor", + "Equipment", + "RelatedParticipant", + "Resource", + "Software", + ] + } + }, + ) + url: Optional[str] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": [ + "Agent", + "CommonModel", + "ContactPoint", + "Contributor", + "RelatedParticipant", + "Resource", + "Software", + ], + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "wrap validation function, .wrap_val at " + "0xADDRESS>." + ], + } + }, + ) + id: Optional[str] = Field( + default=None, + json_schema_extra={"linkml_meta": {"domain_of": ["DandiBaseModel"]}}, + ) + schemaKey: Literal["Agent"] = Field( + default="Agent", + json_schema_extra={ + "linkml_meta": { + "designates_type": True, + "domain_of": ["DandiBaseModel"], + "ifabsent": "string(DandiBaseModel)", + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "after validation function, >." 
+ ], + } + }, + ) + + @field_validator("url") + def pattern_url(cls, v): + pattern = re.compile(r"^(?i:http|https)://[^\s]+$") + if isinstance(v, list): + for element in v: + if isinstance(element, str) and not pattern.match(element): + err_msg = f"Invalid url format: {element}" + raise ValueError(err_msg) + elif isinstance(v, str) and not pattern.match(v): + err_msg = f"Invalid url format: {v}" + raise ValueError(err_msg) + return v + + +class Allele(DandiBaseModel): + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + { + "from_schema": "https://schema.dandiarchive.org/s/dandi/v0.7", + "notes": [ + "pydantic2linkml: Impossible to generate slot usage entry for the " + "schemaKey slot. The slot representation of the schemaKey field in " + "the Allele Pydantic model has changes in value in meta slots: " + "['ifabsent', 'notes', 'range'] ." + ], + "slot_usage": { + "identifier": { + "any_of": [ + {"range": "string"}, + {"multivalued": True, "range": "string"}, + ], + "name": "identifier", + "range": "Any", + "required": True, + } + }, + } + ) + + alleleSymbol: Optional[str] = Field( + default=None, json_schema_extra={"linkml_meta": {"domain_of": ["Allele"]}} + ) + alleleType: Optional[str] = Field( + default=None, json_schema_extra={"linkml_meta": {"domain_of": ["Allele"]}} + ) + identifier: str = Field( + default=..., + json_schema_extra={ + "linkml_meta": { + "any_of": [ + {"range": "string"}, + {"multivalued": True, "range": "string"}, + ], + "domain_of": [ + "Activity", + "Affiliation", + "Agent", + "Allele", + "Asset", + "BaseType", + "BioSample", + "Contributor", + "Dandiset", + "Equipment", + "EthicsApproval", + "Locus", + "Participant", + "RelatedParticipant", + "Resource", + "Software", + ], + } + }, + ) + id: Optional[str] = Field( + default=None, + json_schema_extra={"linkml_meta": {"domain_of": ["DandiBaseModel"]}}, + ) + schemaKey: Literal["Allele"] = Field( + default="Allele", + json_schema_extra={ + "linkml_meta": { + "designates_type": True, + 
"domain_of": ["DandiBaseModel"], + "ifabsent": "string(DandiBaseModel)", + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "after validation function, >." + ], + } + }, + ) + + +class AssetsSummary(DandiBaseModel): + """ + Summary over assets contained in a dandiset (published or not) + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + { + "from_schema": "https://schema.dandiarchive.org/s/dandi/v0.7", + "notes": [ + "pydantic2linkml: Impossible to generate slot usage entry for the " + "schemaKey slot. The slot representation of the schemaKey field in " + "the AssetsSummary Pydantic model has changes in value in meta " + "slots: ['ifabsent', 'notes', 'range'] ." + ], + "slot_usage": { + "species": {"multivalued": True, "name": "species"}, + "variableMeasured": {"name": "variableMeasured", "range": "string"}, + }, + } + ) + + approach: Optional[list[ApproachType]] = Field( + default=None, + json_schema_extra={ + "linkml_meta": {"domain_of": ["AssetsSummary", "BareAsset"]} + }, + ) + dataStandard: Optional[list[StandardsType]] = Field( + default=None, + json_schema_extra={"linkml_meta": {"domain_of": ["AssetsSummary"]}}, + ) + measurementTechnique: Optional[list[MeasurementTechniqueType]] = Field( + default=None, + json_schema_extra={ + "linkml_meta": {"domain_of": ["AssetsSummary", "BareAsset"]} + }, + ) + numberOfBytes: int = Field( + default=..., json_schema_extra={"linkml_meta": {"domain_of": ["AssetsSummary"]}} + ) + numberOfCells: Optional[int] = Field( + default=None, + json_schema_extra={"linkml_meta": {"domain_of": ["AssetsSummary"]}}, + ) + numberOfFiles: int = Field( + default=..., json_schema_extra={"linkml_meta": {"domain_of": ["AssetsSummary"]}} + ) + numberOfSamples: Optional[int] = Field( + default=None, + json_schema_extra={"linkml_meta": {"domain_of": ["AssetsSummary"]}}, + ) + numberOfSubjects: Optional[int] = Field( + default=None, + json_schema_extra={"linkml_meta": {"domain_of": ["AssetsSummary"]}}, + ) + 
species: Optional[list[SpeciesType]] = Field( + default=None, + json_schema_extra={ + "linkml_meta": {"domain_of": ["AssetsSummary", "Participant"]} + }, + ) + variableMeasured: Optional[list[str]] = Field( + default=None, + json_schema_extra={ + "linkml_meta": {"domain_of": ["AssetsSummary", "BareAsset"]} + }, + ) + id: Optional[str] = Field( + default=None, + json_schema_extra={"linkml_meta": {"domain_of": ["DandiBaseModel"]}}, + ) + schemaKey: Literal["AssetsSummary"] = Field( + default="AssetsSummary", + json_schema_extra={ + "linkml_meta": { + "designates_type": True, + "domain_of": ["DandiBaseModel"], + "ifabsent": "string(DandiBaseModel)", + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "after validation function, >." + ], + } + }, + ) + + +class BaseType(DandiBaseModel): + """ + Base class for enumerated types + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + { + "from_schema": "https://schema.dandiarchive.org/s/dandi/v0.7", + "notes": [ + "pydantic2linkml: Impossible to generate slot usage entry for the " + "schemaKey slot. The slot representation of the schemaKey field in " + "the BaseType Pydantic model has changes in value in meta slots: " + "['ifabsent', 'notes'] ." + ], + "slot_usage": { + "identifier": { + "any_of": [ + { + "notes": [ + "pydantic2linkml: Unable " + "to translate the logic " + "contained in the wrap " + "validation function, " + ".wrap_val " + "at 0xADDRESS>." + ], + "pattern": "^(?i:http|https)://[^\\s]+$", + "range": "uri", + }, + { + "pattern": "^[a-zA-Z0-9-]+:[a-zA-Z0-9-/\\._]+$", + "range": "string", + }, + ], + "name": "identifier", + "range": "Any", + "required": False, + }, + "name": { + "name": "name", + "notes": [ + "pydantic2linkml: LinkML does not have " + "direct support for max length constraints. " + "The max length constraint of 150 is " + "incorporated into the pattern of the " + "slot." 
+ ], + "pattern": "^(?=.{,150}$)", + "required": False, + }, + }, + } + ) + + identifier: Optional[str] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "any_of": [ + { + "notes": [ + "pydantic2linkml: Unable to translate the logic " + "contained in the wrap validation function, .wrap_val " + "at 0xADDRESS>." + ], + "pattern": "^(?i:http|https)://[^\\s]+$", + "range": "uri", + }, + { + "pattern": "^[a-zA-Z0-9-]+:[a-zA-Z0-9-/\\._]+$", + "range": "string", + }, + ], + "domain_of": [ + "Activity", + "Affiliation", + "Agent", + "Allele", + "Asset", + "BaseType", + "BioSample", + "Contributor", + "Dandiset", + "Equipment", + "EthicsApproval", + "Locus", + "Participant", + "RelatedParticipant", + "Resource", + "Software", + ], + } + }, + ) + name: Optional[str] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": [ + "Activity", + "Affiliation", + "Agent", + "BaseType", + "CommonModel", + "Contributor", + "Equipment", + "RelatedParticipant", + "Resource", + "Software", + ], + "notes": [ + "pydantic2linkml: LinkML does not have direct support for max " + "length constraints. The max length constraint of 150 is " + "incorporated into the pattern of the slot." + ], + } + }, + ) + id: Optional[str] = Field( + default=None, + json_schema_extra={"linkml_meta": {"domain_of": ["DandiBaseModel"]}}, + ) + schemaKey: Literal["BaseType"] = Field( + default="BaseType", + json_schema_extra={ + "linkml_meta": { + "designates_type": True, + "domain_of": ["DandiBaseModel"], + "ifabsent": "string(DandiBaseModel)", + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "after validation function, >." 
+ ], + } + }, + ) + + @field_validator("name") + def pattern_name(cls, v): + pattern = re.compile(r"^(?=.{,150}$)") + if isinstance(v, list): + for element in v: + if isinstance(element, str) and not pattern.match(element): + err_msg = f"Invalid name format: {element}" + raise ValueError(err_msg) + elif isinstance(v, str) and not pattern.match(v): + err_msg = f"Invalid name format: {v}" + raise ValueError(err_msg) + return v + + +class Anatomy(BaseType): + """ + UBERON or other identifier for anatomical part studied + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + { + "from_schema": "https://schema.dandiarchive.org/s/dandi/v0.7", + "notes": [ + "pydantic2linkml: Impossible to generate slot usage entry for the " + "schemaKey slot. The slot representation of the schemaKey field in " + "the Anatomy Pydantic model has changes in value in meta slots: " + "['ifabsent', 'notes', 'range'] ." + ], + } + ) + + identifier: Optional[str] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "any_of": [ + { + "notes": [ + "pydantic2linkml: Unable to translate the logic " + "contained in the wrap validation function, .wrap_val " + "at 0xADDRESS>." + ], + "pattern": "^(?i:http|https)://[^\\s]+$", + "range": "uri", + }, + { + "pattern": "^[a-zA-Z0-9-]+:[a-zA-Z0-9-/\\._]+$", + "range": "string", + }, + ], + "domain_of": [ + "Activity", + "Affiliation", + "Agent", + "Allele", + "Asset", + "BaseType", + "BioSample", + "Contributor", + "Dandiset", + "Equipment", + "EthicsApproval", + "Locus", + "Participant", + "RelatedParticipant", + "Resource", + "Software", + ], + } + }, + ) + name: Optional[str] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": [ + "Activity", + "Affiliation", + "Agent", + "BaseType", + "CommonModel", + "Contributor", + "Equipment", + "RelatedParticipant", + "Resource", + "Software", + ], + "notes": [ + "pydantic2linkml: LinkML does not have direct support for max " + "length constraints. 
The max length constraint of 150 is " + "incorporated into the pattern of the slot." + ], + } + }, + ) + id: Optional[str] = Field( + default=None, + json_schema_extra={"linkml_meta": {"domain_of": ["DandiBaseModel"]}}, + ) + schemaKey: Literal["Anatomy"] = Field( + default="Anatomy", + json_schema_extra={ + "linkml_meta": { + "designates_type": True, + "domain_of": ["DandiBaseModel"], + "ifabsent": "string(DandiBaseModel)", + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "after validation function, >." + ], + } + }, + ) + + @field_validator("name") + def pattern_name(cls, v): + pattern = re.compile(r"^(?=.{,150}$)") + if isinstance(v, list): + for element in v: + if isinstance(element, str) and not pattern.match(element): + err_msg = f"Invalid name format: {element}" + raise ValueError(err_msg) + elif isinstance(v, str) and not pattern.match(v): + err_msg = f"Invalid name format: {v}" + raise ValueError(err_msg) + return v + + +class ApproachType(BaseType): + """ + Identifier for approach used + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + { + "from_schema": "https://schema.dandiarchive.org/s/dandi/v0.7", + "notes": [ + "pydantic2linkml: Impossible to generate slot usage entry for the " + "schemaKey slot. The slot representation of the schemaKey field in " + "the ApproachType Pydantic model has changes in value in meta " + "slots: ['ifabsent', 'notes', 'range'] ." + ], + } + ) + + identifier: Optional[str] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "any_of": [ + { + "notes": [ + "pydantic2linkml: Unable to translate the logic " + "contained in the wrap validation function, .wrap_val " + "at 0xADDRESS>." 
+ ], + "pattern": "^(?i:http|https)://[^\\s]+$", + "range": "uri", + }, + { + "pattern": "^[a-zA-Z0-9-]+:[a-zA-Z0-9-/\\._]+$", + "range": "string", + }, + ], + "domain_of": [ + "Activity", + "Affiliation", + "Agent", + "Allele", + "Asset", + "BaseType", + "BioSample", + "Contributor", + "Dandiset", + "Equipment", + "EthicsApproval", + "Locus", + "Participant", + "RelatedParticipant", + "Resource", + "Software", + ], + } + }, + ) + name: Optional[str] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": [ + "Activity", + "Affiliation", + "Agent", + "BaseType", + "CommonModel", + "Contributor", + "Equipment", + "RelatedParticipant", + "Resource", + "Software", + ], + "notes": [ + "pydantic2linkml: LinkML does not have direct support for max " + "length constraints. The max length constraint of 150 is " + "incorporated into the pattern of the slot." + ], + } + }, + ) + id: Optional[str] = Field( + default=None, + json_schema_extra={"linkml_meta": {"domain_of": ["DandiBaseModel"]}}, + ) + schemaKey: Literal["ApproachType"] = Field( + default="ApproachType", + json_schema_extra={ + "linkml_meta": { + "designates_type": True, + "domain_of": ["DandiBaseModel"], + "ifabsent": "string(DandiBaseModel)", + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "after validation function, >." 
+ ], + } + }, + ) + + @field_validator("name") + def pattern_name(cls, v): + pattern = re.compile(r"^(?=.{,150}$)") + if isinstance(v, list): + for element in v: + if isinstance(element, str) and not pattern.match(element): + err_msg = f"Invalid name format: {element}" + raise ValueError(err_msg) + elif isinstance(v, str) and not pattern.match(v): + err_msg = f"Invalid name format: {v}" + raise ValueError(err_msg) + return v + + +class AssayType(BaseType): + """ + OBI based identifier for the assay(s) used + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + { + "from_schema": "https://schema.dandiarchive.org/s/dandi/v0.7", + "notes": [ + "pydantic2linkml: Impossible to generate slot usage entry for the " + "schemaKey slot. The slot representation of the schemaKey field in " + "the AssayType Pydantic model has changes in value in meta slots: " + "['ifabsent', 'notes', 'range'] ." + ], + } + ) + + identifier: Optional[str] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "any_of": [ + { + "notes": [ + "pydantic2linkml: Unable to translate the logic " + "contained in the wrap validation function, .wrap_val " + "at 0xADDRESS>." + ], + "pattern": "^(?i:http|https)://[^\\s]+$", + "range": "uri", + }, + { + "pattern": "^[a-zA-Z0-9-]+:[a-zA-Z0-9-/\\._]+$", + "range": "string", + }, + ], + "domain_of": [ + "Activity", + "Affiliation", + "Agent", + "Allele", + "Asset", + "BaseType", + "BioSample", + "Contributor", + "Dandiset", + "Equipment", + "EthicsApproval", + "Locus", + "Participant", + "RelatedParticipant", + "Resource", + "Software", + ], + } + }, + ) + name: Optional[str] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": [ + "Activity", + "Affiliation", + "Agent", + "BaseType", + "CommonModel", + "Contributor", + "Equipment", + "RelatedParticipant", + "Resource", + "Software", + ], + "notes": [ + "pydantic2linkml: LinkML does not have direct support for max " + "length constraints. 
The max length constraint of 150 is " + "incorporated into the pattern of the slot." + ], + } + }, + ) + id: Optional[str] = Field( + default=None, + json_schema_extra={"linkml_meta": {"domain_of": ["DandiBaseModel"]}}, + ) + schemaKey: Literal["AssayType"] = Field( + default="AssayType", + json_schema_extra={ + "linkml_meta": { + "designates_type": True, + "domain_of": ["DandiBaseModel"], + "ifabsent": "string(DandiBaseModel)", + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "after validation function, >." + ], + } + }, + ) + + @field_validator("name") + def pattern_name(cls, v): + pattern = re.compile(r"^(?=.{,150}$)") + if isinstance(v, list): + for element in v: + if isinstance(element, str) and not pattern.match(element): + err_msg = f"Invalid name format: {element}" + raise ValueError(err_msg) + elif isinstance(v, str) and not pattern.match(v): + err_msg = f"Invalid name format: {v}" + raise ValueError(err_msg) + return v + + +class BioSample(DandiBaseModel): + """ + Description of the sample that was studied + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + { + "from_schema": "https://schema.dandiarchive.org/s/dandi/v0.7", + "notes": [ + "pydantic2linkml: Impossible to generate slot usage entry for the " + "schemaKey slot. The slot representation of the schemaKey field in " + "the BioSample Pydantic model has changes in value in meta slots: " + "['ifabsent', 'notes', 'range'] ." 
+ ], + "slot_usage": { + "identifier": { + "name": "identifier", + "range": "string", + "required": True, + }, + "sameAs": {"name": "sameAs", "range": "string"}, + }, + } + ) + + anatomy: Optional[list[Anatomy]] = Field( + default=None, json_schema_extra={"linkml_meta": {"domain_of": ["BioSample"]}} + ) + assayType: Optional[list[AssayType]] = Field( + default=None, json_schema_extra={"linkml_meta": {"domain_of": ["BioSample"]}} + ) + hasMember: Optional[list[str]] = Field( + default=None, json_schema_extra={"linkml_meta": {"domain_of": ["BioSample"]}} + ) + identifier: str = Field( + default=..., + json_schema_extra={ + "linkml_meta": { + "domain_of": [ + "Activity", + "Affiliation", + "Agent", + "Allele", + "Asset", + "BaseType", + "BioSample", + "Contributor", + "Dandiset", + "Equipment", + "EthicsApproval", + "Locus", + "Participant", + "RelatedParticipant", + "Resource", + "Software", + ] + } + }, + ) + sameAs: Optional[list[str]] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": ["BareAsset", "BioSample", "Dandiset", "Participant"] + } + }, + ) + sampleType: SampleType = Field( + default=..., json_schema_extra={"linkml_meta": {"domain_of": ["BioSample"]}} + ) + wasAttributedTo: Optional[list[Participant]] = Field( + default=None, + json_schema_extra={"linkml_meta": {"domain_of": ["BareAsset", "BioSample"]}}, + ) + wasDerivedFrom: Optional[list[BioSample]] = Field( + default=None, + json_schema_extra={"linkml_meta": {"domain_of": ["BareAsset", "BioSample"]}}, + ) + id: Optional[str] = Field( + default=None, + json_schema_extra={"linkml_meta": {"domain_of": ["DandiBaseModel"]}}, + ) + schemaKey: Literal["BioSample"] = Field( + default="BioSample", + json_schema_extra={ + "linkml_meta": { + "designates_type": True, + "domain_of": ["DandiBaseModel"], + "ifabsent": "string(DandiBaseModel)", + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "after validation function, >." 
+ ], + } + }, + ) + + +class CommonModel(DandiBaseModel): + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + { + "from_schema": "https://schema.dandiarchive.org/s/dandi/v0.7", + "notes": [ + "pydantic2linkml: Impossible to generate slot usage entry for the " + "schemaKey slot. The slot representation of the schemaKey field in " + "the CommonModel Pydantic model has changes in value in meta slots: " + "['ifabsent', 'notes'] ." + ], + "slot_usage": { + "name": { + "name": "name", + "notes": [ + "pydantic2linkml: LinkML does not have " + "direct support for max length constraints. " + "The max length constraint of 150 is " + "incorporated into the pattern of the " + "slot." + ], + "pattern": "^(?=.{,150}$)", + "required": False, + }, + "repository": { + "name": "repository", + "notes": [ + "pydantic2linkml: Unable to translate " + "the logic contained in the wrap " + "validation function, .wrap_val " + "at 0xADDRESS>." + ], + "pattern": "^(?i:http|https)://[^\\s]+$", + "range": "uri", + }, + "url": { + "name": "url", + "notes": [ + "pydantic2linkml: Unable to translate the " + "logic contained in the wrap validation " + "function, .wrap_val " + "at 0xADDRESS>." 
+ ], + }, + "wasGeneratedBy": {"name": "wasGeneratedBy", "range": "Activity"}, + }, + } + ) + + about: Optional[list[Union[Anatomy, Disorder, GenericType]]] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "any_of": [ + {"range": "Disorder"}, + {"range": "Anatomy"}, + {"range": "GenericType"}, + ], + "domain_of": ["CommonModel"], + } + }, + ) + access: Optional[list[AccessRequirements]] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": ["CommonModel"], + "notes": [ + "pydantic2linkml: Unable to express the default factory, at 0xADDRESS>, in LinkML.", + "pydantic2linkml: Unable to translate the logic contained in the " + "after validation function, .", + ], + } + }, + ) + acknowledgement: Optional[str] = Field( + default=None, json_schema_extra={"linkml_meta": {"domain_of": ["CommonModel"]}} + ) + contributor: Optional[list[Union[Organization, Person]]] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "any_of": [ + { + "notes": [ + "pydantic2linkml: Unable to translate the logic " + "contained in the after validation function, ." + ], + "range": "Person", + }, + { + "notes": [ + "pydantic2linkml: Unable to translate the logic " + "contained in the after validation function, ." 
+ ], + "range": "Organization", + }, + ], + "domain_of": ["CommonModel"], + } + }, + ) + description: Optional[str] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": [ + "AccessRequirements", + "Activity", + "CommonModel", + "Equipment", + ] + } + }, + ) + ethicsApproval: Optional[list[EthicsApproval]] = Field( + default=None, json_schema_extra={"linkml_meta": {"domain_of": ["CommonModel"]}} + ) + keywords: Optional[list[str]] = Field( + default=None, json_schema_extra={"linkml_meta": {"domain_of": ["CommonModel"]}} + ) + license: Optional[list[LicenseType]] = Field( + default=None, json_schema_extra={"linkml_meta": {"domain_of": ["CommonModel"]}} + ) + name: Optional[str] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": [ + "Activity", + "Affiliation", + "Agent", + "BaseType", + "CommonModel", + "Contributor", + "Equipment", + "RelatedParticipant", + "Resource", + "Software", + ], + "notes": [ + "pydantic2linkml: LinkML does not have direct support for max " + "length constraints. The max length constraint of 150 is " + "incorporated into the pattern of the slot." + ], + } + }, + ) + protocol: Optional[list[str]] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": ["CommonModel"], + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "wrap validation function, .wrap_val at " + "0xADDRESS>." + ], + } + }, + ) + relatedResource: Optional[list[Resource]] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": ["CommonModel"], + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "after validation function, ." 
+ ], + } + }, + ) + repository: Optional[str] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": ["CommonModel", "Resource"], + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "wrap validation function, .wrap_val at " + "0xADDRESS>." + ], + } + }, + ) + schemaVersion: Optional[str] = Field( + default="0.7.0", + json_schema_extra={ + "linkml_meta": {"domain_of": ["CommonModel"], "ifabsent": "string(0.7.0)"} + }, + ) + studyTarget: Optional[list[str]] = Field( + default=None, json_schema_extra={"linkml_meta": {"domain_of": ["CommonModel"]}} + ) + url: Optional[str] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": [ + "Agent", + "CommonModel", + "ContactPoint", + "Contributor", + "RelatedParticipant", + "Resource", + "Software", + ], + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "wrap validation function, .wrap_val at " + "0xADDRESS>." + ], + } + }, + ) + wasGeneratedBy: Optional[ + list[Union[Activity, Project, PublishActivity, Session]] + ] = Field( + default=None, + json_schema_extra={ + "linkml_meta": {"domain_of": ["CommonModel", "GenotypeInfo"]} + }, + ) + id: Optional[str] = Field( + default=None, + json_schema_extra={"linkml_meta": {"domain_of": ["DandiBaseModel"]}}, + ) + schemaKey: Literal["CommonModel"] = Field( + default="CommonModel", + json_schema_extra={ + "linkml_meta": { + "designates_type": True, + "domain_of": ["DandiBaseModel"], + "ifabsent": "string(DandiBaseModel)", + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "after validation function, >." 
+ ], + } + }, + ) + + @field_validator("name") + def pattern_name(cls, v): + pattern = re.compile(r"^(?=.{,150}$)") + if isinstance(v, list): + for element in v: + if isinstance(element, str) and not pattern.match(element): + err_msg = f"Invalid name format: {element}" + raise ValueError(err_msg) + elif isinstance(v, str) and not pattern.match(v): + err_msg = f"Invalid name format: {v}" + raise ValueError(err_msg) + return v + + @field_validator("protocol") + def pattern_protocol(cls, v): + pattern = re.compile(r"^(?i:http|https)://[^\s]+$") + if isinstance(v, list): + for element in v: + if isinstance(element, str) and not pattern.match(element): + err_msg = f"Invalid protocol format: {element}" + raise ValueError(err_msg) + elif isinstance(v, str) and not pattern.match(v): + err_msg = f"Invalid protocol format: {v}" + raise ValueError(err_msg) + return v + + @field_validator("repository") + def pattern_repository(cls, v): + pattern = re.compile(r"^(?i:http|https)://[^\s]+$") + if isinstance(v, list): + for element in v: + if isinstance(element, str) and not pattern.match(element): + err_msg = f"Invalid repository format: {element}" + raise ValueError(err_msg) + elif isinstance(v, str) and not pattern.match(v): + err_msg = f"Invalid repository format: {v}" + raise ValueError(err_msg) + return v + + @field_validator("url") + def pattern_url(cls, v): + pattern = re.compile(r"^(?i:http|https)://[^\s]+$") + if isinstance(v, list): + for element in v: + if isinstance(element, str) and not pattern.match(element): + err_msg = f"Invalid url format: {element}" + raise ValueError(err_msg) + elif isinstance(v, str) and not pattern.match(v): + err_msg = f"Invalid url format: {v}" + raise ValueError(err_msg) + return v + + +class BareAsset(CommonModel): + """ + Metadata used to describe an asset anywhere (local or server). 
Derived from C2M2 (Level 0 and 1) and schema.org + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + { + "from_schema": "https://schema.dandiarchive.org/s/dandi/v0.7", + "notes": [ + "pydantic2linkml: Impossible to generate slot usage entry for the " + "access slot. The slot representation of the access field in the " + "BareAsset Pydantic model has changes in value in meta slots: " + "['notes'] .", + "pydantic2linkml: Impossible to generate slot usage entry for the " + "schemaKey slot. The slot representation of the schemaKey field in " + "the BareAsset Pydantic model has changes in value in meta slots: " + "['ifabsent', 'notes', 'range'] .", + "pydantic2linkml: Impossible to generate slot usage entry for the " + "wasGeneratedBy slot. The slot representation of the wasGeneratedBy " + "field in the BareAsset Pydantic model has changes in value in meta " + "slots: ['range'] .", + ], + "slot_usage": { + "sameAs": { + "name": "sameAs", + "notes": [ + "pydantic2linkml: Unable to translate the " + "logic contained in the wrap validation " + "function, .wrap_val " + "at 0xADDRESS>." + ], + "pattern": "^(?i:http|https)://[^\\s]+$", + "range": "uri", + }, + "variableMeasured": { + "name": "variableMeasured", + "range": "PropertyValue", + }, + }, + } + ) + + approach: Optional[list[ApproachType]] = Field( + default=None, + json_schema_extra={ + "linkml_meta": {"domain_of": ["AssetsSummary", "BareAsset"]} + }, + ) + blobDateModified: Optional[datetime] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": ["BareAsset"], + "notes": [ + "pydantic2linkml: Unable to express the microseconds precision " + "constraint of truncate. LinkML lacks direct support for this " + "restriction." 
+ ], + } + }, + ) + contentSize: Union[int, str] = Field( + default=..., + json_schema_extra={ + "linkml_meta": { + "any_of": [ + {"pattern": "^\\s*(\\d*\\.?\\d+)\\s*(\\w+)?", "range": "string"}, + {"minimum_value": 0, "range": "integer"}, + ], + "domain_of": ["BareAsset"], + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "after validation function, >." + ], + } + }, + ) + dataType: Optional[str] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": ["BareAsset"], + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "wrap validation function, .wrap_val at " + "0xADDRESS>." + ], + } + }, + ) + dateModified: Optional[datetime] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": ["BareAsset", "Dandiset"], + "notes": [ + "pydantic2linkml: Unable to express the microseconds precision " + "constraint of truncate. LinkML lacks direct support for this " + "restriction." + ], + } + }, + ) + digest: str = Field( + default=..., + json_schema_extra={ + "linkml_meta": { + "domain_of": ["BareAsset"], + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "after validation function, >.", + "pydantic2linkml: Warning: The translation is incomplete. `dict` " + "types are yet to be supported.", + ], + } + }, + ) + encodingFormat: str = Field( + default=..., + json_schema_extra={ + "linkml_meta": { + "any_of": [ + { + "notes": [ + "pydantic2linkml: Unable to translate the logic " + "contained in the wrap validation function, .wrap_val " + "at 0xADDRESS>." 
+ ], + "pattern": "^(?i:http|https)://[^\\s]+$", + "range": "uri", + }, + {"range": "string"}, + ], + "domain_of": ["BareAsset"], + } + }, + ) + measurementTechnique: Optional[list[MeasurementTechniqueType]] = Field( + default=None, + json_schema_extra={ + "linkml_meta": {"domain_of": ["AssetsSummary", "BareAsset"]} + }, + ) + path: str = Field( + default=..., json_schema_extra={"linkml_meta": {"domain_of": ["BareAsset"]}} + ) + sameAs: Optional[list[str]] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": ["BareAsset", "BioSample", "Dandiset", "Participant"], + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "wrap validation function, .wrap_val at " + "0xADDRESS>." + ], + } + }, + ) + variableMeasured: Optional[list[PropertyValue]] = Field( + default=None, + json_schema_extra={ + "linkml_meta": {"domain_of": ["AssetsSummary", "BareAsset"]} + }, + ) + wasAttributedTo: Optional[list[Participant]] = Field( + default=None, + json_schema_extra={"linkml_meta": {"domain_of": ["BareAsset", "BioSample"]}}, + ) + wasDerivedFrom: Optional[list[BioSample]] = Field( + default=None, + json_schema_extra={"linkml_meta": {"domain_of": ["BareAsset", "BioSample"]}}, + ) + about: Optional[list[Union[Anatomy, Disorder, GenericType]]] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "any_of": [ + {"range": "Disorder"}, + {"range": "Anatomy"}, + {"range": "GenericType"}, + ], + "domain_of": ["CommonModel"], + } + }, + ) + access: Optional[list[AccessRequirements]] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": ["CommonModel"], + "notes": [ + "pydantic2linkml: Unable to express the default factory, at 0xADDRESS>, in LinkML.", + "pydantic2linkml: Unable to translate the logic contained in the " + "after validation function, .", + ], + } + }, + ) + acknowledgement: Optional[str] = Field( + default=None, json_schema_extra={"linkml_meta": {"domain_of": ["CommonModel"]}} + ) + 
contributor: Optional[list[Union[Organization, Person]]] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "any_of": [ + { + "notes": [ + "pydantic2linkml: Unable to translate the logic " + "contained in the after validation function, ." + ], + "range": "Person", + }, + { + "notes": [ + "pydantic2linkml: Unable to translate the logic " + "contained in the after validation function, ." + ], + "range": "Organization", + }, + ], + "domain_of": ["CommonModel"], + } + }, + ) + description: Optional[str] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": [ + "AccessRequirements", + "Activity", + "CommonModel", + "Equipment", + ] + } + }, + ) + ethicsApproval: Optional[list[EthicsApproval]] = Field( + default=None, json_schema_extra={"linkml_meta": {"domain_of": ["CommonModel"]}} + ) + keywords: Optional[list[str]] = Field( + default=None, json_schema_extra={"linkml_meta": {"domain_of": ["CommonModel"]}} + ) + license: Optional[list[LicenseType]] = Field( + default=None, json_schema_extra={"linkml_meta": {"domain_of": ["CommonModel"]}} + ) + name: Optional[str] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": [ + "Activity", + "Affiliation", + "Agent", + "BaseType", + "CommonModel", + "Contributor", + "Equipment", + "RelatedParticipant", + "Resource", + "Software", + ], + "notes": [ + "pydantic2linkml: LinkML does not have direct support for max " + "length constraints. The max length constraint of 150 is " + "incorporated into the pattern of the slot." + ], + } + }, + ) + protocol: Optional[list[str]] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": ["CommonModel"], + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "wrap validation function, .wrap_val at " + "0xADDRESS>." 
+ ], + } + }, + ) + relatedResource: Optional[list[Resource]] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": ["CommonModel"], + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "after validation function, ." + ], + } + }, + ) + repository: Optional[str] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": ["CommonModel", "Resource"], + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "wrap validation function, .wrap_val at " + "0xADDRESS>." + ], + } + }, + ) + schemaVersion: Optional[str] = Field( + default="0.7.0", + json_schema_extra={ + "linkml_meta": {"domain_of": ["CommonModel"], "ifabsent": "string(0.7.0)"} + }, + ) + studyTarget: Optional[list[str]] = Field( + default=None, json_schema_extra={"linkml_meta": {"domain_of": ["CommonModel"]}} + ) + url: Optional[str] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": [ + "Agent", + "CommonModel", + "ContactPoint", + "Contributor", + "RelatedParticipant", + "Resource", + "Software", + ], + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "wrap validation function, .wrap_val at " + "0xADDRESS>." + ], + } + }, + ) + wasGeneratedBy: Optional[ + list[Union[Activity, Project, PublishActivity, Session]] + ] = Field( + default=None, + json_schema_extra={ + "linkml_meta": {"domain_of": ["CommonModel", "GenotypeInfo"]} + }, + ) + id: Optional[str] = Field( + default=None, + json_schema_extra={"linkml_meta": {"domain_of": ["DandiBaseModel"]}}, + ) + schemaKey: Literal["BareAsset"] = Field( + default="BareAsset", + json_schema_extra={ + "linkml_meta": { + "designates_type": True, + "domain_of": ["DandiBaseModel"], + "ifabsent": "string(DandiBaseModel)", + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "after validation function, >." 
+ ], + } + }, + ) + + @field_validator("dataType") + def pattern_dataType(cls, v): + pattern = re.compile(r"^(?i:http|https)://[^\s]+$") + if isinstance(v, list): + for element in v: + if isinstance(element, str) and not pattern.match(element): + err_msg = f"Invalid dataType format: {element}" + raise ValueError(err_msg) + elif isinstance(v, str) and not pattern.match(v): + err_msg = f"Invalid dataType format: {v}" + raise ValueError(err_msg) + return v + + @field_validator("sameAs") + def pattern_sameAs(cls, v): + pattern = re.compile(r"^(?i:http|https)://[^\s]+$") + if isinstance(v, list): + for element in v: + if isinstance(element, str) and not pattern.match(element): + err_msg = f"Invalid sameAs format: {element}" + raise ValueError(err_msg) + elif isinstance(v, str) and not pattern.match(v): + err_msg = f"Invalid sameAs format: {v}" + raise ValueError(err_msg) + return v + + @field_validator("name") + def pattern_name(cls, v): + pattern = re.compile(r"^(?=.{,150}$)") + if isinstance(v, list): + for element in v: + if isinstance(element, str) and not pattern.match(element): + err_msg = f"Invalid name format: {element}" + raise ValueError(err_msg) + elif isinstance(v, str) and not pattern.match(v): + err_msg = f"Invalid name format: {v}" + raise ValueError(err_msg) + return v + + @field_validator("protocol") + def pattern_protocol(cls, v): + pattern = re.compile(r"^(?i:http|https)://[^\s]+$") + if isinstance(v, list): + for element in v: + if isinstance(element, str) and not pattern.match(element): + err_msg = f"Invalid protocol format: {element}" + raise ValueError(err_msg) + elif isinstance(v, str) and not pattern.match(v): + err_msg = f"Invalid protocol format: {v}" + raise ValueError(err_msg) + return v + + @field_validator("repository") + def pattern_repository(cls, v): + pattern = re.compile(r"^(?i:http|https)://[^\s]+$") + if isinstance(v, list): + for element in v: + if isinstance(element, str) and not pattern.match(element): + err_msg = f"Invalid 
repository format: {element}" + raise ValueError(err_msg) + elif isinstance(v, str) and not pattern.match(v): + err_msg = f"Invalid repository format: {v}" + raise ValueError(err_msg) + return v + + @field_validator("url") + def pattern_url(cls, v): + pattern = re.compile(r"^(?i:http|https)://[^\s]+$") + if isinstance(v, list): + for element in v: + if isinstance(element, str) and not pattern.match(element): + err_msg = f"Invalid url format: {element}" + raise ValueError(err_msg) + elif isinstance(v, str) and not pattern.match(v): + err_msg = f"Invalid url format: {v}" + raise ValueError(err_msg) + return v + + +class Asset(BareAsset): + """ + Metadata used to describe an asset on the server. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + { + "from_schema": "https://schema.dandiarchive.org/s/dandi/v0.7", + "notes": [ + "pydantic2linkml: Impossible to generate slot usage entry for the " + "id slot. The slot representation of the id field in the Asset " + "Pydantic model has changes in value in meta slots: ['required'] ." + ], + "slot_usage": { + "identifier": { + "name": "identifier", + "pattern": "^(?:urn:uuid:)?[0-9a-fA-F]{8}-?[0-9a-fA-F]{4}-?4[0-9a-fA-F]{3}-?[89abAB][0-9a-fA-F]{3}-?[0-9a-fA-F]{12}$", + "range": "string", + "required": True, + } + }, + } + ) + + contentUrl: list[str] = Field( + default=..., + json_schema_extra={ + "linkml_meta": { + "domain_of": ["Asset"], + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "wrap validation function, .wrap_val at " + "0xADDRESS>." 
+ ], + } + }, + ) + identifier: str = Field( + default=..., + json_schema_extra={ + "linkml_meta": { + "domain_of": [ + "Activity", + "Affiliation", + "Agent", + "Allele", + "Asset", + "BaseType", + "BioSample", + "Contributor", + "Dandiset", + "Equipment", + "EthicsApproval", + "Locus", + "Participant", + "RelatedParticipant", + "Resource", + "Software", + ] + } + }, + ) + approach: Optional[list[ApproachType]] = Field( + default=None, + json_schema_extra={ + "linkml_meta": {"domain_of": ["AssetsSummary", "BareAsset"]} + }, + ) + blobDateModified: Optional[datetime] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": ["BareAsset"], + "notes": [ + "pydantic2linkml: Unable to express the microseconds precision " + "constraint of truncate. LinkML lacks direct support for this " + "restriction." + ], + } + }, + ) + contentSize: Union[int, str] = Field( + default=..., + json_schema_extra={ + "linkml_meta": { + "any_of": [ + {"pattern": "^\\s*(\\d*\\.?\\d+)\\s*(\\w+)?", "range": "string"}, + {"minimum_value": 0, "range": "integer"}, + ], + "domain_of": ["BareAsset"], + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "after validation function, >." + ], + } + }, + ) + dataType: Optional[str] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": ["BareAsset"], + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "wrap validation function, .wrap_val at " + "0xADDRESS>." + ], + } + }, + ) + dateModified: Optional[datetime] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": ["BareAsset", "Dandiset"], + "notes": [ + "pydantic2linkml: Unable to express the microseconds precision " + "constraint of truncate. LinkML lacks direct support for this " + "restriction." 
+ ], + } + }, + ) + digest: str = Field( + default=..., + json_schema_extra={ + "linkml_meta": { + "domain_of": ["BareAsset"], + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "after validation function, >.", + "pydantic2linkml: Warning: The translation is incomplete. `dict` " + "types are yet to be supported.", + ], + } + }, + ) + encodingFormat: str = Field( + default=..., + json_schema_extra={ + "linkml_meta": { + "any_of": [ + { + "notes": [ + "pydantic2linkml: Unable to translate the logic " + "contained in the wrap validation function, .wrap_val " + "at 0xADDRESS>." + ], + "pattern": "^(?i:http|https)://[^\\s]+$", + "range": "uri", + }, + {"range": "string"}, + ], + "domain_of": ["BareAsset"], + } + }, + ) + measurementTechnique: Optional[list[MeasurementTechniqueType]] = Field( + default=None, + json_schema_extra={ + "linkml_meta": {"domain_of": ["AssetsSummary", "BareAsset"]} + }, + ) + path: str = Field( + default=..., json_schema_extra={"linkml_meta": {"domain_of": ["BareAsset"]}} + ) + sameAs: Optional[list[str]] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": ["BareAsset", "BioSample", "Dandiset", "Participant"], + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "wrap validation function, .wrap_val at " + "0xADDRESS>." 
+ ], + } + }, + ) + variableMeasured: Optional[list[PropertyValue]] = Field( + default=None, + json_schema_extra={ + "linkml_meta": {"domain_of": ["AssetsSummary", "BareAsset"]} + }, + ) + wasAttributedTo: Optional[list[Participant]] = Field( + default=None, + json_schema_extra={"linkml_meta": {"domain_of": ["BareAsset", "BioSample"]}}, + ) + wasDerivedFrom: Optional[list[BioSample]] = Field( + default=None, + json_schema_extra={"linkml_meta": {"domain_of": ["BareAsset", "BioSample"]}}, + ) + about: Optional[list[Union[Anatomy, Disorder, GenericType]]] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "any_of": [ + {"range": "Disorder"}, + {"range": "Anatomy"}, + {"range": "GenericType"}, + ], + "domain_of": ["CommonModel"], + } + }, + ) + access: Optional[list[AccessRequirements]] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": ["CommonModel"], + "notes": [ + "pydantic2linkml: Unable to express the default factory, at 0xADDRESS>, in LinkML.", + "pydantic2linkml: Unable to translate the logic contained in the " + "after validation function, .", + ], + } + }, + ) + acknowledgement: Optional[str] = Field( + default=None, json_schema_extra={"linkml_meta": {"domain_of": ["CommonModel"]}} + ) + contributor: Optional[list[Union[Organization, Person]]] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "any_of": [ + { + "notes": [ + "pydantic2linkml: Unable to translate the logic " + "contained in the after validation function, ." + ], + "range": "Person", + }, + { + "notes": [ + "pydantic2linkml: Unable to translate the logic " + "contained in the after validation function, ." 
+ ], + "range": "Organization", + }, + ], + "domain_of": ["CommonModel"], + } + }, + ) + description: Optional[str] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": [ + "AccessRequirements", + "Activity", + "CommonModel", + "Equipment", + ] + } + }, + ) + ethicsApproval: Optional[list[EthicsApproval]] = Field( + default=None, json_schema_extra={"linkml_meta": {"domain_of": ["CommonModel"]}} + ) + keywords: Optional[list[str]] = Field( + default=None, json_schema_extra={"linkml_meta": {"domain_of": ["CommonModel"]}} + ) + license: Optional[list[LicenseType]] = Field( + default=None, json_schema_extra={"linkml_meta": {"domain_of": ["CommonModel"]}} + ) + name: Optional[str] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": [ + "Activity", + "Affiliation", + "Agent", + "BaseType", + "CommonModel", + "Contributor", + "Equipment", + "RelatedParticipant", + "Resource", + "Software", + ], + "notes": [ + "pydantic2linkml: LinkML does not have direct support for max " + "length constraints. The max length constraint of 150 is " + "incorporated into the pattern of the slot." + ], + } + }, + ) + protocol: Optional[list[str]] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": ["CommonModel"], + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "wrap validation function, .wrap_val at " + "0xADDRESS>." + ], + } + }, + ) + relatedResource: Optional[list[Resource]] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": ["CommonModel"], + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "after validation function, ." 
+ ], + } + }, + ) + repository: Optional[str] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": ["CommonModel", "Resource"], + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "wrap validation function, .wrap_val at " + "0xADDRESS>." + ], + } + }, + ) + schemaVersion: Optional[str] = Field( + default="0.7.0", + json_schema_extra={ + "linkml_meta": {"domain_of": ["CommonModel"], "ifabsent": "string(0.7.0)"} + }, + ) + studyTarget: Optional[list[str]] = Field( + default=None, json_schema_extra={"linkml_meta": {"domain_of": ["CommonModel"]}} + ) + url: Optional[str] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": [ + "Agent", + "CommonModel", + "ContactPoint", + "Contributor", + "RelatedParticipant", + "Resource", + "Software", + ], + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "wrap validation function, .wrap_val at " + "0xADDRESS>." + ], + } + }, + ) + wasGeneratedBy: Optional[ + list[Union[Activity, Project, PublishActivity, Session]] + ] = Field( + default=None, + json_schema_extra={ + "linkml_meta": {"domain_of": ["CommonModel", "GenotypeInfo"]} + }, + ) + id: Optional[str] = Field( + default=None, + json_schema_extra={"linkml_meta": {"domain_of": ["DandiBaseModel"]}}, + ) + schemaKey: Literal["Asset"] = Field( + default="Asset", + json_schema_extra={ + "linkml_meta": { + "designates_type": True, + "domain_of": ["DandiBaseModel"], + "ifabsent": "string(DandiBaseModel)", + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "after validation function, >." 
+ ], + } + }, + ) + + @field_validator("contentUrl") + def pattern_contentUrl(cls, v): + pattern = re.compile(r"^(?i:http|https)://[^\s]+$") + if isinstance(v, list): + for element in v: + if isinstance(element, str) and not pattern.match(element): + err_msg = f"Invalid contentUrl format: {element}" + raise ValueError(err_msg) + elif isinstance(v, str) and not pattern.match(v): + err_msg = f"Invalid contentUrl format: {v}" + raise ValueError(err_msg) + return v + + @field_validator("identifier") + def pattern_identifier(cls, v): + pattern = re.compile( + r"^(?:urn:uuid:)?[0-9a-fA-F]{8}-?[0-9a-fA-F]{4}-?4[0-9a-fA-F]{3}-?[89abAB][0-9a-fA-F]{3}-?[0-9a-fA-F]{12}$" + ) + if isinstance(v, list): + for element in v: + if isinstance(element, str) and not pattern.match(element): + err_msg = f"Invalid identifier format: {element}" + raise ValueError(err_msg) + elif isinstance(v, str) and not pattern.match(v): + err_msg = f"Invalid identifier format: {v}" + raise ValueError(err_msg) + return v + + @field_validator("dataType") + def pattern_dataType(cls, v): + pattern = re.compile(r"^(?i:http|https)://[^\s]+$") + if isinstance(v, list): + for element in v: + if isinstance(element, str) and not pattern.match(element): + err_msg = f"Invalid dataType format: {element}" + raise ValueError(err_msg) + elif isinstance(v, str) and not pattern.match(v): + err_msg = f"Invalid dataType format: {v}" + raise ValueError(err_msg) + return v + + @field_validator("sameAs") + def pattern_sameAs(cls, v): + pattern = re.compile(r"^(?i:http|https)://[^\s]+$") + if isinstance(v, list): + for element in v: + if isinstance(element, str) and not pattern.match(element): + err_msg = f"Invalid sameAs format: {element}" + raise ValueError(err_msg) + elif isinstance(v, str) and not pattern.match(v): + err_msg = f"Invalid sameAs format: {v}" + raise ValueError(err_msg) + return v + + @field_validator("name") + def pattern_name(cls, v): + pattern = re.compile(r"^(?=.{,150}$)") + if isinstance(v, list): + for 
element in v: + if isinstance(element, str) and not pattern.match(element): + err_msg = f"Invalid name format: {element}" + raise ValueError(err_msg) + elif isinstance(v, str) and not pattern.match(v): + err_msg = f"Invalid name format: {v}" + raise ValueError(err_msg) + return v + + @field_validator("protocol") + def pattern_protocol(cls, v): + pattern = re.compile(r"^(?i:http|https)://[^\s]+$") + if isinstance(v, list): + for element in v: + if isinstance(element, str) and not pattern.match(element): + err_msg = f"Invalid protocol format: {element}" + raise ValueError(err_msg) + elif isinstance(v, str) and not pattern.match(v): + err_msg = f"Invalid protocol format: {v}" + raise ValueError(err_msg) + return v + + @field_validator("repository") + def pattern_repository(cls, v): + pattern = re.compile(r"^(?i:http|https)://[^\s]+$") + if isinstance(v, list): + for element in v: + if isinstance(element, str) and not pattern.match(element): + err_msg = f"Invalid repository format: {element}" + raise ValueError(err_msg) + elif isinstance(v, str) and not pattern.match(v): + err_msg = f"Invalid repository format: {v}" + raise ValueError(err_msg) + return v + + @field_validator("url") + def pattern_url(cls, v): + pattern = re.compile(r"^(?i:http|https)://[^\s]+$") + if isinstance(v, list): + for element in v: + if isinstance(element, str) and not pattern.match(element): + err_msg = f"Invalid url format: {element}" + raise ValueError(err_msg) + elif isinstance(v, str) and not pattern.match(v): + err_msg = f"Invalid url format: {v}" + raise ValueError(err_msg) + return v + + +class ContactPoint(DandiBaseModel): + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + { + "from_schema": "https://schema.dandiarchive.org/s/dandi/v0.7", + "notes": [ + "pydantic2linkml: Impossible to generate slot usage entry for the " + "schemaKey slot. 
The slot representation of the schemaKey field in " + "the ContactPoint Pydantic model has changes in value in meta " + "slots: ['ifabsent', 'notes', 'range'] ." + ], + "slot_usage": { + "url": { + "name": "url", + "notes": [ + "pydantic2linkml: Unable to translate the " + "logic contained in the wrap validation " + "function, .wrap_val " + "at 0xADDRESS>." + ], + } + }, + } + ) + + email: Optional[str] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": ["ContactPoint", "Contributor"], + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "after validation function, >." + ], + } + }, + ) + url: Optional[str] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": [ + "Agent", + "CommonModel", + "ContactPoint", + "Contributor", + "RelatedParticipant", + "Resource", + "Software", + ], + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "wrap validation function, .wrap_val at " + "0xADDRESS>." + ], + } + }, + ) + id: Optional[str] = Field( + default=None, + json_schema_extra={"linkml_meta": {"domain_of": ["DandiBaseModel"]}}, + ) + schemaKey: Literal["ContactPoint"] = Field( + default="ContactPoint", + json_schema_extra={ + "linkml_meta": { + "designates_type": True, + "domain_of": ["DandiBaseModel"], + "ifabsent": "string(DandiBaseModel)", + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "after validation function, >." 
+ ], + } + }, + ) + + @field_validator("url") + def pattern_url(cls, v): + pattern = re.compile(r"^(?i:http|https)://[^\s]+$") + if isinstance(v, list): + for element in v: + if isinstance(element, str) and not pattern.match(element): + err_msg = f"Invalid url format: {element}" + raise ValueError(err_msg) + elif isinstance(v, str) and not pattern.match(v): + err_msg = f"Invalid url format: {v}" + raise ValueError(err_msg) + return v + + +class Contributor(DandiBaseModel): + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + { + "from_schema": "https://schema.dandiarchive.org/s/dandi/v0.7", + "notes": [ + "pydantic2linkml: Impossible to generate slot usage entry for the " + "schemaKey slot. The slot representation of the schemaKey field in " + "the Contributor Pydantic model has changes in value in meta slots: " + "['ifabsent', 'notes', 'range'] ." + ], + "slot_usage": { + "identifier": { + "name": "identifier", + "range": "string", + "required": False, + }, + "name": {"name": "name", "required": False}, + "url": { + "name": "url", + "notes": [ + "pydantic2linkml: Unable to translate the " + "logic contained in the wrap validation " + "function, .wrap_val " + "at 0xADDRESS>." + ], + }, + }, + } + ) + + awardNumber: Optional[str] = Field( + default=None, json_schema_extra={"linkml_meta": {"domain_of": ["Contributor"]}} + ) + email: Optional[str] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": ["ContactPoint", "Contributor"], + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "after validation function, >." 
+ ], + } + }, + ) + identifier: Optional[str] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": [ + "Activity", + "Affiliation", + "Agent", + "Allele", + "Asset", + "BaseType", + "BioSample", + "Contributor", + "Dandiset", + "Equipment", + "EthicsApproval", + "Locus", + "Participant", + "RelatedParticipant", + "Resource", + "Software", + ] + } + }, + ) + includeInCitation: Optional[bool] = Field( + default=True, + json_schema_extra={ + "linkml_meta": {"domain_of": ["Contributor"], "ifabsent": "True"} + }, + ) + name: Optional[str] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": [ + "Activity", + "Affiliation", + "Agent", + "BaseType", + "CommonModel", + "Contributor", + "Equipment", + "RelatedParticipant", + "Resource", + "Software", + ] + } + }, + ) + roleName: Optional[list[RoleType]] = Field( + default=None, json_schema_extra={"linkml_meta": {"domain_of": ["Contributor"]}} + ) + url: Optional[str] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": [ + "Agent", + "CommonModel", + "ContactPoint", + "Contributor", + "RelatedParticipant", + "Resource", + "Software", + ], + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "wrap validation function, .wrap_val at " + "0xADDRESS>." + ], + } + }, + ) + id: Optional[str] = Field( + default=None, + json_schema_extra={"linkml_meta": {"domain_of": ["DandiBaseModel"]}}, + ) + schemaKey: Literal["Contributor"] = Field( + default="Contributor", + json_schema_extra={ + "linkml_meta": { + "designates_type": True, + "domain_of": ["DandiBaseModel"], + "ifabsent": "string(DandiBaseModel)", + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "after validation function, >." 
+ ], + } + }, + ) + + @field_validator("url") + def pattern_url(cls, v): + pattern = re.compile(r"^(?i:http|https)://[^\s]+$") + if isinstance(v, list): + for element in v: + if isinstance(element, str) and not pattern.match(element): + err_msg = f"Invalid url format: {element}" + raise ValueError(err_msg) + elif isinstance(v, str) and not pattern.match(v): + err_msg = f"Invalid url format: {v}" + raise ValueError(err_msg) + return v + + +class Dandiset(CommonModel): + """ + A body of structured information describing a DANDI dataset. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + { + "from_schema": "https://schema.dandiarchive.org/s/dandi/v0.7", + "notes": [ + "pydantic2linkml: Impossible to generate slot usage entry for the " + "contributor slot. The slot representation of the contributor field " + "in the Dandiset Pydantic model has changes in value in meta slots: " + "['required'] .", + "pydantic2linkml: Impossible to generate slot usage entry for the " + "description slot. The slot representation of the description field " + "in the Dandiset Pydantic model has changes in value in meta slots: " + "['required'] .", + "pydantic2linkml: Impossible to generate slot usage entry for the " + "id slot. The slot representation of the id field in the Dandiset " + "Pydantic model has changes in value in meta slots: ['required'] .", + "pydantic2linkml: Impossible to generate slot usage entry for the " + "license slot. The slot representation of the license field in the " + "Dandiset Pydantic model has changes in value in meta slots: " + "['required'] .", + "pydantic2linkml: Impossible to generate slot usage entry for the " + "name slot. The slot representation of the name field in the " + "Dandiset Pydantic model has changes in value in meta slots: " + "['required'] .", + "pydantic2linkml: Impossible to generate slot usage entry for the " + "schemaKey slot. 
The slot representation of the schemaKey field in " + "the Dandiset Pydantic model has changes in value in meta slots: " + "['ifabsent', 'notes', 'range'] .", + "pydantic2linkml: Impossible to generate slot usage entry for the " + "wasGeneratedBy slot. The slot representation of the wasGeneratedBy " + "field in the Dandiset Pydantic model has changes in value in meta " + "slots: ['range'] .", + ], + "slot_usage": { + "identifier": { + "name": "identifier", + "pattern": "^[A-Z][-A-Z]*:\\d{6}$", + "range": "string", + "required": True, + }, + "sameAs": { + "name": "sameAs", + "pattern": "^dandi://[A-Z][-A-Z]*/\\d{6}(@(draft|\\d+\\.\\d+\\.\\d+))?(/\\S+)?$", + "range": "string", + }, + }, + } + ) + + assetsSummary: AssetsSummary = Field( + default=..., json_schema_extra={"linkml_meta": {"domain_of": ["Dandiset"]}} + ) + citation: str = Field( + default=..., json_schema_extra={"linkml_meta": {"domain_of": ["Dandiset"]}} + ) + dateCreated: Optional[datetime] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": ["Dandiset"], + "notes": [ + "pydantic2linkml: Unable to express the microseconds precision " + "constraint of truncate. LinkML lacks direct support for this " + "restriction." + ], + } + }, + ) + dateModified: Optional[datetime] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": ["BareAsset", "Dandiset"], + "notes": [ + "pydantic2linkml: Unable to express the microseconds precision " + "constraint of truncate. LinkML lacks direct support for this " + "restriction." 
+ ], + } + }, + ) + identifier: str = Field( + default=..., + json_schema_extra={ + "linkml_meta": { + "domain_of": [ + "Activity", + "Affiliation", + "Agent", + "Allele", + "Asset", + "BaseType", + "BioSample", + "Contributor", + "Dandiset", + "Equipment", + "EthicsApproval", + "Locus", + "Participant", + "RelatedParticipant", + "Resource", + "Software", + ] + } + }, + ) + manifestLocation: list[str] = Field( + default=..., + min_length=1, + json_schema_extra={ + "linkml_meta": { + "domain_of": ["Dandiset"], + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "wrap validation function, .wrap_val at " + "0xADDRESS>." + ], + } + }, + ) + sameAs: Optional[list[str]] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": ["BareAsset", "BioSample", "Dandiset", "Participant"] + } + }, + ) + version: str = Field( + default=..., + json_schema_extra={"linkml_meta": {"domain_of": ["Dandiset", "Software"]}}, + ) + about: Optional[list[Union[Anatomy, Disorder, GenericType]]] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "any_of": [ + {"range": "Disorder"}, + {"range": "Anatomy"}, + {"range": "GenericType"}, + ], + "domain_of": ["CommonModel"], + } + }, + ) + access: Optional[list[AccessRequirements]] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": ["CommonModel"], + "notes": [ + "pydantic2linkml: Unable to express the default factory, at 0xADDRESS>, in LinkML.", + "pydantic2linkml: Unable to translate the logic contained in the " + "after validation function, .", + ], + } + }, + ) + acknowledgement: Optional[str] = Field( + default=None, json_schema_extra={"linkml_meta": {"domain_of": ["CommonModel"]}} + ) + contributor: Optional[list[Union[Organization, Person]]] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "any_of": [ + { + "notes": [ + "pydantic2linkml: Unable to translate the logic " + "contained in the after validation function, ." 
+ ], + "range": "Person", + }, + { + "notes": [ + "pydantic2linkml: Unable to translate the logic " + "contained in the after validation function, ." + ], + "range": "Organization", + }, + ], + "domain_of": ["CommonModel"], + } + }, + ) + description: Optional[str] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": [ + "AccessRequirements", + "Activity", + "CommonModel", + "Equipment", + ] + } + }, + ) + ethicsApproval: Optional[list[EthicsApproval]] = Field( + default=None, json_schema_extra={"linkml_meta": {"domain_of": ["CommonModel"]}} + ) + keywords: Optional[list[str]] = Field( + default=None, json_schema_extra={"linkml_meta": {"domain_of": ["CommonModel"]}} + ) + license: Optional[list[LicenseType]] = Field( + default=None, json_schema_extra={"linkml_meta": {"domain_of": ["CommonModel"]}} + ) + name: Optional[str] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": [ + "Activity", + "Affiliation", + "Agent", + "BaseType", + "CommonModel", + "Contributor", + "Equipment", + "RelatedParticipant", + "Resource", + "Software", + ], + "notes": [ + "pydantic2linkml: LinkML does not have direct support for max " + "length constraints. The max length constraint of 150 is " + "incorporated into the pattern of the slot." + ], + } + }, + ) + protocol: Optional[list[str]] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": ["CommonModel"], + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "wrap validation function, .wrap_val at " + "0xADDRESS>." + ], + } + }, + ) + relatedResource: Optional[list[Resource]] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": ["CommonModel"], + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "after validation function, ." 
+ ], + } + }, + ) + repository: Optional[str] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": ["CommonModel", "Resource"], + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "wrap validation function, .wrap_val at " + "0xADDRESS>." + ], + } + }, + ) + schemaVersion: Optional[str] = Field( + default="0.7.0", + json_schema_extra={ + "linkml_meta": {"domain_of": ["CommonModel"], "ifabsent": "string(0.7.0)"} + }, + ) + studyTarget: Optional[list[str]] = Field( + default=None, json_schema_extra={"linkml_meta": {"domain_of": ["CommonModel"]}} + ) + url: Optional[str] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": [ + "Agent", + "CommonModel", + "ContactPoint", + "Contributor", + "RelatedParticipant", + "Resource", + "Software", + ], + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "wrap validation function, .wrap_val at " + "0xADDRESS>." + ], + } + }, + ) + wasGeneratedBy: Optional[ + list[Union[Activity, Project, PublishActivity, Session]] + ] = Field( + default=None, + json_schema_extra={ + "linkml_meta": {"domain_of": ["CommonModel", "GenotypeInfo"]} + }, + ) + id: Optional[str] = Field( + default=None, + json_schema_extra={"linkml_meta": {"domain_of": ["DandiBaseModel"]}}, + ) + schemaKey: Literal["Dandiset"] = Field( + default="Dandiset", + json_schema_extra={ + "linkml_meta": { + "designates_type": True, + "domain_of": ["DandiBaseModel"], + "ifabsent": "string(DandiBaseModel)", + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "after validation function, >." 
+ ], + } + }, + ) + + @field_validator("identifier") + def pattern_identifier(cls, v): + pattern = re.compile(r"^[A-Z][-A-Z]*:\d{6}$") + if isinstance(v, list): + for element in v: + if isinstance(element, str) and not pattern.match(element): + err_msg = f"Invalid identifier format: {element}" + raise ValueError(err_msg) + elif isinstance(v, str) and not pattern.match(v): + err_msg = f"Invalid identifier format: {v}" + raise ValueError(err_msg) + return v + + @field_validator("manifestLocation") + def pattern_manifestLocation(cls, v): + pattern = re.compile(r"^(?i:http|https)://[^\s]+$") + if isinstance(v, list): + for element in v: + if isinstance(element, str) and not pattern.match(element): + err_msg = f"Invalid manifestLocation format: {element}" + raise ValueError(err_msg) + elif isinstance(v, str) and not pattern.match(v): + err_msg = f"Invalid manifestLocation format: {v}" + raise ValueError(err_msg) + return v + + @field_validator("sameAs") + def pattern_sameAs(cls, v): + pattern = re.compile( + r"^dandi://[A-Z][-A-Z]*/\d{6}(@(draft|\d+\.\d+\.\d+))?(/\S+)?$" + ) + if isinstance(v, list): + for element in v: + if isinstance(element, str) and not pattern.match(element): + err_msg = f"Invalid sameAs format: {element}" + raise ValueError(err_msg) + elif isinstance(v, str) and not pattern.match(v): + err_msg = f"Invalid sameAs format: {v}" + raise ValueError(err_msg) + return v + + @field_validator("name") + def pattern_name(cls, v): + pattern = re.compile(r"^(?=.{,150}$)") + if isinstance(v, list): + for element in v: + if isinstance(element, str) and not pattern.match(element): + err_msg = f"Invalid name format: {element}" + raise ValueError(err_msg) + elif isinstance(v, str) and not pattern.match(v): + err_msg = f"Invalid name format: {v}" + raise ValueError(err_msg) + return v + + @field_validator("protocol") + def pattern_protocol(cls, v): + pattern = re.compile(r"^(?i:http|https)://[^\s]+$") + if isinstance(v, list): + for element in v: + if 
isinstance(element, str) and not pattern.match(element): + err_msg = f"Invalid protocol format: {element}" + raise ValueError(err_msg) + elif isinstance(v, str) and not pattern.match(v): + err_msg = f"Invalid protocol format: {v}" + raise ValueError(err_msg) + return v + + @field_validator("repository") + def pattern_repository(cls, v): + pattern = re.compile(r"^(?i:http|https)://[^\s]+$") + if isinstance(v, list): + for element in v: + if isinstance(element, str) and not pattern.match(element): + err_msg = f"Invalid repository format: {element}" + raise ValueError(err_msg) + elif isinstance(v, str) and not pattern.match(v): + err_msg = f"Invalid repository format: {v}" + raise ValueError(err_msg) + return v + + @field_validator("url") + def pattern_url(cls, v): + pattern = re.compile(r"^(?i:http|https)://[^\s]+$") + if isinstance(v, list): + for element in v: + if isinstance(element, str) and not pattern.match(element): + err_msg = f"Invalid url format: {element}" + raise ValueError(err_msg) + elif isinstance(v, str) and not pattern.match(v): + err_msg = f"Invalid url format: {v}" + raise ValueError(err_msg) + return v + + +class Disorder(BaseType): + """ + Biolink, SNOMED, or other identifier for disorder studied + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + { + "from_schema": "https://schema.dandiarchive.org/s/dandi/v0.7", + "notes": [ + "pydantic2linkml: Impossible to generate slot usage entry for the " + "schemaKey slot. The slot representation of the schemaKey field in " + "the Disorder Pydantic model has changes in value in meta slots: " + "['ifabsent', 'notes', 'range'] ." + ], + } + ) + + dxdate: Optional[list[Union[date, datetime]]] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "any_of": [ + {"range": "date"}, + { + "notes": [ + "pydantic2linkml: Unable to express the microseconds " + "precision constraint of truncate. LinkML lacks direct " + "support for this restriction." 
+ ], + "range": "datetime", + }, + ], + "domain_of": ["Disorder"], + } + }, + ) + identifier: Optional[str] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "any_of": [ + { + "notes": [ + "pydantic2linkml: Unable to translate the logic " + "contained in the wrap validation function, .wrap_val " + "at 0xADDRESS>." + ], + "pattern": "^(?i:http|https)://[^\\s]+$", + "range": "uri", + }, + { + "pattern": "^[a-zA-Z0-9-]+:[a-zA-Z0-9-/\\._]+$", + "range": "string", + }, + ], + "domain_of": [ + "Activity", + "Affiliation", + "Agent", + "Allele", + "Asset", + "BaseType", + "BioSample", + "Contributor", + "Dandiset", + "Equipment", + "EthicsApproval", + "Locus", + "Participant", + "RelatedParticipant", + "Resource", + "Software", + ], + } + }, + ) + name: Optional[str] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": [ + "Activity", + "Affiliation", + "Agent", + "BaseType", + "CommonModel", + "Contributor", + "Equipment", + "RelatedParticipant", + "Resource", + "Software", + ], + "notes": [ + "pydantic2linkml: LinkML does not have direct support for max " + "length constraints. The max length constraint of 150 is " + "incorporated into the pattern of the slot." + ], + } + }, + ) + id: Optional[str] = Field( + default=None, + json_schema_extra={"linkml_meta": {"domain_of": ["DandiBaseModel"]}}, + ) + schemaKey: Literal["Disorder"] = Field( + default="Disorder", + json_schema_extra={ + "linkml_meta": { + "designates_type": True, + "domain_of": ["DandiBaseModel"], + "ifabsent": "string(DandiBaseModel)", + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "after validation function, >." 
+ ], + } + }, + ) + + @field_validator("name") + def pattern_name(cls, v): + pattern = re.compile(r"^(?=.{,150}$)") + if isinstance(v, list): + for element in v: + if isinstance(element, str) and not pattern.match(element): + err_msg = f"Invalid name format: {element}" + raise ValueError(err_msg) + elif isinstance(v, str) and not pattern.match(v): + err_msg = f"Invalid name format: {v}" + raise ValueError(err_msg) + return v + + +class Equipment(DandiBaseModel): + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + { + "from_schema": "https://schema.dandiarchive.org/s/dandi/v0.7", + "notes": [ + "pydantic2linkml: Impossible to generate slot usage entry for the " + "schemaKey slot. The slot representation of the schemaKey field in " + "the Equipment Pydantic model has changes in value in meta slots: " + "['ifabsent', 'notes', 'range'] ." + ], + "slot_usage": { + "identifier": { + "name": "identifier", + "range": "string", + "required": False, + }, + "name": { + "name": "name", + "notes": [ + "pydantic2linkml: LinkML does not have " + "direct support for max length constraints. " + "The max length constraint of 150 is " + "incorporated into the pattern of the " + "slot." 
+ ], + "pattern": "^(?=.{,150}$)", + "required": True, + }, + }, + } + ) + + description: Optional[str] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": [ + "AccessRequirements", + "Activity", + "CommonModel", + "Equipment", + ] + } + }, + ) + identifier: Optional[str] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": [ + "Activity", + "Affiliation", + "Agent", + "Allele", + "Asset", + "BaseType", + "BioSample", + "Contributor", + "Dandiset", + "Equipment", + "EthicsApproval", + "Locus", + "Participant", + "RelatedParticipant", + "Resource", + "Software", + ] + } + }, + ) + name: str = Field( + default=..., + json_schema_extra={ + "linkml_meta": { + "domain_of": [ + "Activity", + "Affiliation", + "Agent", + "BaseType", + "CommonModel", + "Contributor", + "Equipment", + "RelatedParticipant", + "Resource", + "Software", + ], + "notes": [ + "pydantic2linkml: LinkML does not have direct support for max " + "length constraints. The max length constraint of 150 is " + "incorporated into the pattern of the slot." + ], + } + }, + ) + id: Optional[str] = Field( + default=None, + json_schema_extra={"linkml_meta": {"domain_of": ["DandiBaseModel"]}}, + ) + schemaKey: Literal["Equipment"] = Field( + default="Equipment", + json_schema_extra={ + "linkml_meta": { + "designates_type": True, + "domain_of": ["DandiBaseModel"], + "ifabsent": "string(DandiBaseModel)", + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "after validation function, >." 
+ ], + } + }, + ) + + @field_validator("name") + def pattern_name(cls, v): + pattern = re.compile(r"^(?=.{,150}$)") + if isinstance(v, list): + for element in v: + if isinstance(element, str) and not pattern.match(element): + err_msg = f"Invalid name format: {element}" + raise ValueError(err_msg) + elif isinstance(v, str) and not pattern.match(v): + err_msg = f"Invalid name format: {v}" + raise ValueError(err_msg) + return v + + +class EthicsApproval(DandiBaseModel): + """ + Information about ethics committee approval for project + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + { + "from_schema": "https://schema.dandiarchive.org/s/dandi/v0.7", + "notes": [ + "pydantic2linkml: Impossible to generate slot usage entry for the " + "schemaKey slot. The slot representation of the schemaKey field in " + "the EthicsApproval Pydantic model has changes in value in meta " + "slots: ['ifabsent', 'notes', 'range'] ." + ], + "slot_usage": { + "identifier": { + "name": "identifier", + "range": "string", + "required": True, + } + }, + } + ) + + contactPoint: Optional[ContactPoint] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": ["AccessRequirements", "EthicsApproval", "Organization"] + } + }, + ) + identifier: str = Field( + default=..., + json_schema_extra={ + "linkml_meta": { + "domain_of": [ + "Activity", + "Affiliation", + "Agent", + "Allele", + "Asset", + "BaseType", + "BioSample", + "Contributor", + "Dandiset", + "Equipment", + "EthicsApproval", + "Locus", + "Participant", + "RelatedParticipant", + "Resource", + "Software", + ] + } + }, + ) + id: Optional[str] = Field( + default=None, + json_schema_extra={"linkml_meta": {"domain_of": ["DandiBaseModel"]}}, + ) + schemaKey: Literal["EthicsApproval"] = Field( + default="EthicsApproval", + json_schema_extra={ + "linkml_meta": { + "designates_type": True, + "domain_of": ["DandiBaseModel"], + "ifabsent": "string(DandiBaseModel)", + "notes": [ + "pydantic2linkml: Unable to translate the 
logic contained in the " + "after validation function, >." + ], + } + }, + ) + + +class GenericType(BaseType): + """ + An object to capture any type for about + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + { + "from_schema": "https://schema.dandiarchive.org/s/dandi/v0.7", + "notes": [ + "pydantic2linkml: Impossible to generate slot usage entry for the " + "schemaKey slot. The slot representation of the schemaKey field in " + "the GenericType Pydantic model has changes in value in meta slots: " + "['ifabsent', 'notes', 'range'] ." + ], + } + ) + + identifier: Optional[str] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "any_of": [ + { + "notes": [ + "pydantic2linkml: Unable to translate the logic " + "contained in the wrap validation function, .wrap_val " + "at 0xADDRESS>." + ], + "pattern": "^(?i:http|https)://[^\\s]+$", + "range": "uri", + }, + { + "pattern": "^[a-zA-Z0-9-]+:[a-zA-Z0-9-/\\._]+$", + "range": "string", + }, + ], + "domain_of": [ + "Activity", + "Affiliation", + "Agent", + "Allele", + "Asset", + "BaseType", + "BioSample", + "Contributor", + "Dandiset", + "Equipment", + "EthicsApproval", + "Locus", + "Participant", + "RelatedParticipant", + "Resource", + "Software", + ], + } + }, + ) + name: Optional[str] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": [ + "Activity", + "Affiliation", + "Agent", + "BaseType", + "CommonModel", + "Contributor", + "Equipment", + "RelatedParticipant", + "Resource", + "Software", + ], + "notes": [ + "pydantic2linkml: LinkML does not have direct support for max " + "length constraints. The max length constraint of 150 is " + "incorporated into the pattern of the slot." 
+ ], + } + }, + ) + id: Optional[str] = Field( + default=None, + json_schema_extra={"linkml_meta": {"domain_of": ["DandiBaseModel"]}}, + ) + schemaKey: Literal["GenericType"] = Field( + default="GenericType", + json_schema_extra={ + "linkml_meta": { + "designates_type": True, + "domain_of": ["DandiBaseModel"], + "ifabsent": "string(DandiBaseModel)", + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "after validation function, >." + ], + } + }, + ) + + @field_validator("name") + def pattern_name(cls, v): + pattern = re.compile(r"^(?=.{,150}$)") + if isinstance(v, list): + for element in v: + if isinstance(element, str) and not pattern.match(element): + err_msg = f"Invalid name format: {element}" + raise ValueError(err_msg) + elif isinstance(v, str) and not pattern.match(v): + err_msg = f"Invalid name format: {v}" + raise ValueError(err_msg) + return v + + +class GenotypeInfo(DandiBaseModel): + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + { + "from_schema": "https://schema.dandiarchive.org/s/dandi/v0.7", + "notes": [ + "pydantic2linkml: Impossible to generate slot usage entry for the " + "schemaKey slot. The slot representation of the schemaKey field in " + "the GenotypeInfo Pydantic model has changes in value in meta " + "slots: ['ifabsent', 'notes', 'range'] ." 
+ ], + "slot_usage": { + "wasGeneratedBy": {"name": "wasGeneratedBy", "range": "Session"} + }, + } + ) + + alleles: list[Allele] = Field( + default=..., json_schema_extra={"linkml_meta": {"domain_of": ["GenotypeInfo"]}} + ) + locus: Locus = Field( + default=..., json_schema_extra={"linkml_meta": {"domain_of": ["GenotypeInfo"]}} + ) + wasGeneratedBy: Optional[list[Session]] = Field( + default=None, + json_schema_extra={ + "linkml_meta": {"domain_of": ["CommonModel", "GenotypeInfo"]} + }, + ) + id: Optional[str] = Field( + default=None, + json_schema_extra={"linkml_meta": {"domain_of": ["DandiBaseModel"]}}, + ) + schemaKey: Literal["GenotypeInfo"] = Field( + default="GenotypeInfo", + json_schema_extra={ + "linkml_meta": { + "designates_type": True, + "domain_of": ["DandiBaseModel"], + "ifabsent": "string(DandiBaseModel)", + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "after validation function, >." + ], + } + }, + ) + + +class Locus(DandiBaseModel): + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + { + "from_schema": "https://schema.dandiarchive.org/s/dandi/v0.7", + "notes": [ + "pydantic2linkml: Impossible to generate slot usage entry for the " + "schemaKey slot. The slot representation of the schemaKey field in " + "the Locus Pydantic model has changes in value in meta slots: " + "['ifabsent', 'notes', 'range'] ." 
+ ], + "slot_usage": { + "identifier": { + "any_of": [ + {"range": "string"}, + {"multivalued": True, "range": "string"}, + ], + "name": "identifier", + "range": "Any", + "required": True, + } + }, + } + ) + + identifier: str = Field( + default=..., + json_schema_extra={ + "linkml_meta": { + "any_of": [ + {"range": "string"}, + {"multivalued": True, "range": "string"}, + ], + "domain_of": [ + "Activity", + "Affiliation", + "Agent", + "Allele", + "Asset", + "BaseType", + "BioSample", + "Contributor", + "Dandiset", + "Equipment", + "EthicsApproval", + "Locus", + "Participant", + "RelatedParticipant", + "Resource", + "Software", + ], + } + }, + ) + locusType: Optional[str] = Field( + default=None, json_schema_extra={"linkml_meta": {"domain_of": ["Locus"]}} + ) + id: Optional[str] = Field( + default=None, + json_schema_extra={"linkml_meta": {"domain_of": ["DandiBaseModel"]}}, + ) + schemaKey: Literal["Locus"] = Field( + default="Locus", + json_schema_extra={ + "linkml_meta": { + "designates_type": True, + "domain_of": ["DandiBaseModel"], + "ifabsent": "string(DandiBaseModel)", + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "after validation function, >." + ], + } + }, + ) + + +class MeasurementTechniqueType(BaseType): + """ + Identifier for measurement technique used + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + { + "from_schema": "https://schema.dandiarchive.org/s/dandi/v0.7", + "notes": [ + "pydantic2linkml: Impossible to generate slot usage entry for the " + "schemaKey slot. The slot representation of the schemaKey field in " + "the MeasurementTechniqueType Pydantic model has changes in value " + "in meta slots: ['ifabsent', 'notes', 'range'] ." + ], + } + ) + + identifier: Optional[str] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "any_of": [ + { + "notes": [ + "pydantic2linkml: Unable to translate the logic " + "contained in the wrap validation function, .wrap_val " + "at 0xADDRESS>." 
+ ], + "pattern": "^(?i:http|https)://[^\\s]+$", + "range": "uri", + }, + { + "pattern": "^[a-zA-Z0-9-]+:[a-zA-Z0-9-/\\._]+$", + "range": "string", + }, + ], + "domain_of": [ + "Activity", + "Affiliation", + "Agent", + "Allele", + "Asset", + "BaseType", + "BioSample", + "Contributor", + "Dandiset", + "Equipment", + "EthicsApproval", + "Locus", + "Participant", + "RelatedParticipant", + "Resource", + "Software", + ], + } + }, + ) + name: Optional[str] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": [ + "Activity", + "Affiliation", + "Agent", + "BaseType", + "CommonModel", + "Contributor", + "Equipment", + "RelatedParticipant", + "Resource", + "Software", + ], + "notes": [ + "pydantic2linkml: LinkML does not have direct support for max " + "length constraints. The max length constraint of 150 is " + "incorporated into the pattern of the slot." + ], + } + }, + ) + id: Optional[str] = Field( + default=None, + json_schema_extra={"linkml_meta": {"domain_of": ["DandiBaseModel"]}}, + ) + schemaKey: Literal["MeasurementTechniqueType"] = Field( + default="MeasurementTechniqueType", + json_schema_extra={ + "linkml_meta": { + "designates_type": True, + "domain_of": ["DandiBaseModel"], + "ifabsent": "string(DandiBaseModel)", + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "after validation function, >." 
+ ], + } + }, + ) + + @field_validator("name") + def pattern_name(cls, v): + pattern = re.compile(r"^(?=.{,150}$)") + if isinstance(v, list): + for element in v: + if isinstance(element, str) and not pattern.match(element): + err_msg = f"Invalid name format: {element}" + raise ValueError(err_msg) + elif isinstance(v, str) and not pattern.match(v): + err_msg = f"Invalid name format: {v}" + raise ValueError(err_msg) + return v + + +class Organization(Contributor): + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + { + "from_schema": "https://schema.dandiarchive.org/s/dandi/v0.7", + "notes": [ + "pydantic2linkml: Impossible to generate slot usage entry for the " + "includeInCitation slot. The slot representation of the " + "includeInCitation field in the Organization Pydantic model has " + "changes in value in meta slots: ['ifabsent'] .", + "pydantic2linkml: Impossible to generate slot usage entry for the " + "schemaKey slot. The slot representation of the schemaKey field in " + "the Organization Pydantic model has changes in value in meta " + "slots: ['any_of', 'ifabsent', 'notes'] .", + ], + "slot_usage": { + "contactPoint": {"multivalued": True, "name": "contactPoint"}, + "identifier": { + "name": "identifier", + "pattern": "^https://ror.org/[a-z0-9]+$", + }, + }, + } + ) + + contactPoint: Optional[list[ContactPoint]] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": ["AccessRequirements", "EthicsApproval", "Organization"] + } + }, + ) + awardNumber: Optional[str] = Field( + default=None, json_schema_extra={"linkml_meta": {"domain_of": ["Contributor"]}} + ) + email: Optional[str] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": ["ContactPoint", "Contributor"], + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "after validation function, >." 
+ ], + } + }, + ) + identifier: Optional[str] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": [ + "Activity", + "Affiliation", + "Agent", + "Allele", + "Asset", + "BaseType", + "BioSample", + "Contributor", + "Dandiset", + "Equipment", + "EthicsApproval", + "Locus", + "Participant", + "RelatedParticipant", + "Resource", + "Software", + ] + } + }, + ) + includeInCitation: Optional[bool] = Field( + default=True, + json_schema_extra={ + "linkml_meta": {"domain_of": ["Contributor"], "ifabsent": "True"} + }, + ) + name: Optional[str] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": [ + "Activity", + "Affiliation", + "Agent", + "BaseType", + "CommonModel", + "Contributor", + "Equipment", + "RelatedParticipant", + "Resource", + "Software", + ] + } + }, + ) + roleName: Optional[list[RoleType]] = Field( + default=None, json_schema_extra={"linkml_meta": {"domain_of": ["Contributor"]}} + ) + url: Optional[str] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": [ + "Agent", + "CommonModel", + "ContactPoint", + "Contributor", + "RelatedParticipant", + "Resource", + "Software", + ], + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "wrap validation function, .wrap_val at " + "0xADDRESS>." + ], + } + }, + ) + id: Optional[str] = Field( + default=None, + json_schema_extra={"linkml_meta": {"domain_of": ["DandiBaseModel"]}}, + ) + schemaKey: Literal["Organization"] = Field( + default="Organization", + json_schema_extra={ + "linkml_meta": { + "designates_type": True, + "domain_of": ["DandiBaseModel"], + "ifabsent": "string(DandiBaseModel)", + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "after validation function, >." 
+ ], + } + }, + ) + + @field_validator("identifier") + def pattern_identifier(cls, v): + pattern = re.compile(r"^https://ror.org/[a-z0-9]+$") + if isinstance(v, list): + for element in v: + if isinstance(element, str) and not pattern.match(element): + err_msg = f"Invalid identifier format: {element}" + raise ValueError(err_msg) + elif isinstance(v, str) and not pattern.match(v): + err_msg = f"Invalid identifier format: {v}" + raise ValueError(err_msg) + return v + + @field_validator("url") + def pattern_url(cls, v): + pattern = re.compile(r"^(?i:http|https)://[^\s]+$") + if isinstance(v, list): + for element in v: + if isinstance(element, str) and not pattern.match(element): + err_msg = f"Invalid url format: {element}" + raise ValueError(err_msg) + elif isinstance(v, str) and not pattern.match(v): + err_msg = f"Invalid url format: {v}" + raise ValueError(err_msg) + return v + + +class Participant(DandiBaseModel): + """ + Description about the Participant or Subject studied. The Participant or Subject can be any individual or synthesized Agent. The properties of the Participant or Subject refers to information at the timepoint when the Participant or Subject engaged in the production of data being described. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + { + "from_schema": "https://schema.dandiarchive.org/s/dandi/v0.7", + "notes": [ + "pydantic2linkml: Impossible to generate slot usage entry for the " + "schemaKey slot. The slot representation of the schemaKey field in " + "the Participant Pydantic model has changes in value in meta slots: " + "['ifabsent', 'notes', 'range'] ." 
+ ], + "slot_usage": { + "identifier": { + "name": "identifier", + "range": "string", + "required": True, + }, + "sameAs": {"name": "sameAs", "range": "string"}, + }, + } + ) + + age: Optional[PropertyValue] = Field( + default=None, json_schema_extra={"linkml_meta": {"domain_of": ["Participant"]}} + ) + altName: Optional[list[str]] = Field( + default=None, json_schema_extra={"linkml_meta": {"domain_of": ["Participant"]}} + ) + cellLine: Optional[str] = Field( + default=None, json_schema_extra={"linkml_meta": {"domain_of": ["Participant"]}} + ) + disorder: Optional[list[Disorder]] = Field( + default=None, json_schema_extra={"linkml_meta": {"domain_of": ["Participant"]}} + ) + genotype: Optional[Union[GenotypeInfo, str]] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "any_of": [ + {"multivalued": True, "range": "GenotypeInfo"}, + {"range": "string"}, + ], + "domain_of": ["Participant"], + } + }, + ) + identifier: str = Field( + default=..., + json_schema_extra={ + "linkml_meta": { + "domain_of": [ + "Activity", + "Affiliation", + "Agent", + "Allele", + "Asset", + "BaseType", + "BioSample", + "Contributor", + "Dandiset", + "Equipment", + "EthicsApproval", + "Locus", + "Participant", + "RelatedParticipant", + "Resource", + "Software", + ] + } + }, + ) + relatedParticipant: Optional[list[RelatedParticipant]] = Field( + default=None, json_schema_extra={"linkml_meta": {"domain_of": ["Participant"]}} + ) + sameAs: Optional[list[str]] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": ["BareAsset", "BioSample", "Dandiset", "Participant"] + } + }, + ) + sex: Optional[SexType] = Field( + default=None, json_schema_extra={"linkml_meta": {"domain_of": ["Participant"]}} + ) + species: Optional[SpeciesType] = Field( + default=None, + json_schema_extra={ + "linkml_meta": {"domain_of": ["AssetsSummary", "Participant"]} + }, + ) + strain: Optional[StrainType] = Field( + default=None, json_schema_extra={"linkml_meta": {"domain_of": 
["Participant"]}} + ) + vendor: Optional[Organization] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": ["Participant"], + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "after validation function, ." + ], + } + }, + ) + id: Optional[str] = Field( + default=None, + json_schema_extra={"linkml_meta": {"domain_of": ["DandiBaseModel"]}}, + ) + schemaKey: Literal["Participant"] = Field( + default="Participant", + json_schema_extra={ + "linkml_meta": { + "designates_type": True, + "domain_of": ["DandiBaseModel"], + "ifabsent": "string(DandiBaseModel)", + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "after validation function, >." + ], + } + }, + ) + + +class Person(Contributor): + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + { + "from_schema": "https://schema.dandiarchive.org/s/dandi/v0.7", + "notes": [ + "pydantic2linkml: Impossible to generate slot usage entry for the " + "name slot. The slot representation of the name field in the Person " + "Pydantic model has changes in value in meta slots: ['required'] .", + "pydantic2linkml: Impossible to generate slot usage entry for the " + "schemaKey slot. 
The slot representation of the schemaKey field in " + "the Person Pydantic model has changes in value in meta slots: " + "['any_of', 'ifabsent', 'notes'] .", + ], + "slot_usage": { + "identifier": { + "name": "identifier", + "pattern": "^\\d{4}-\\d{4}-\\d{4}-(\\d{3}X|\\d{4})$", + } + }, + } + ) + + affiliation: Optional[list[Affiliation]] = Field( + default=None, json_schema_extra={"linkml_meta": {"domain_of": ["Person"]}} + ) + awardNumber: Optional[str] = Field( + default=None, json_schema_extra={"linkml_meta": {"domain_of": ["Contributor"]}} + ) + email: Optional[str] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": ["ContactPoint", "Contributor"], + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "after validation function, >." + ], + } + }, + ) + identifier: Optional[str] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": [ + "Activity", + "Affiliation", + "Agent", + "Allele", + "Asset", + "BaseType", + "BioSample", + "Contributor", + "Dandiset", + "Equipment", + "EthicsApproval", + "Locus", + "Participant", + "RelatedParticipant", + "Resource", + "Software", + ] + } + }, + ) + includeInCitation: Optional[bool] = Field( + default=True, + json_schema_extra={ + "linkml_meta": {"domain_of": ["Contributor"], "ifabsent": "True"} + }, + ) + name: Optional[str] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": [ + "Activity", + "Affiliation", + "Agent", + "BaseType", + "CommonModel", + "Contributor", + "Equipment", + "RelatedParticipant", + "Resource", + "Software", + ] + } + }, + ) + roleName: Optional[list[RoleType]] = Field( + default=None, json_schema_extra={"linkml_meta": {"domain_of": ["Contributor"]}} + ) + url: Optional[str] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": [ + "Agent", + "CommonModel", + "ContactPoint", + "Contributor", + "RelatedParticipant", + "Resource", + "Software", + ], + "notes": 
[ + "pydantic2linkml: Unable to translate the logic contained in the " + "wrap validation function, .wrap_val at " + "0xADDRESS>." + ], + } + }, + ) + id: Optional[str] = Field( + default=None, + json_schema_extra={"linkml_meta": {"domain_of": ["DandiBaseModel"]}}, + ) + schemaKey: Literal["Person"] = Field( + default="Person", + json_schema_extra={ + "linkml_meta": { + "designates_type": True, + "domain_of": ["DandiBaseModel"], + "ifabsent": "string(DandiBaseModel)", + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "after validation function, >." + ], + } + }, + ) + + @field_validator("identifier") + def pattern_identifier(cls, v): + pattern = re.compile(r"^\d{4}-\d{4}-\d{4}-(\d{3}X|\d{4})$") + if isinstance(v, list): + for element in v: + if isinstance(element, str) and not pattern.match(element): + err_msg = f"Invalid identifier format: {element}" + raise ValueError(err_msg) + elif isinstance(v, str) and not pattern.match(v): + err_msg = f"Invalid identifier format: {v}" + raise ValueError(err_msg) + return v + + @field_validator("url") + def pattern_url(cls, v): + pattern = re.compile(r"^(?i:http|https)://[^\s]+$") + if isinstance(v, list): + for element in v: + if isinstance(element, str) and not pattern.match(element): + err_msg = f"Invalid url format: {element}" + raise ValueError(err_msg) + elif isinstance(v, str) and not pattern.match(v): + err_msg = f"Invalid url format: {v}" + raise ValueError(err_msg) + return v + + +class Project(Activity): + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + { + "from_schema": "https://schema.dandiarchive.org/s/dandi/v0.7", + "notes": [ + "pydantic2linkml: Impossible to generate slot usage entry for the " + "schemaKey slot. The slot representation of the schemaKey field in " + "the Project Pydantic model has changes in value in meta slots: " + "['any_of', 'ifabsent', 'notes'] ." 
+ ], + } + ) + + description: Optional[str] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": [ + "AccessRequirements", + "Activity", + "CommonModel", + "Equipment", + ] + } + }, + ) + endDate: Optional[datetime] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": ["Activity"], + "notes": [ + "pydantic2linkml: Unable to express the microseconds precision " + "constraint of truncate. LinkML lacks direct support for this " + "restriction." + ], + } + }, + ) + identifier: Optional[str] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": [ + "Activity", + "Affiliation", + "Agent", + "Allele", + "Asset", + "BaseType", + "BioSample", + "Contributor", + "Dandiset", + "Equipment", + "EthicsApproval", + "Locus", + "Participant", + "RelatedParticipant", + "Resource", + "Software", + ] + } + }, + ) + name: str = Field( + default=..., + json_schema_extra={ + "linkml_meta": { + "domain_of": [ + "Activity", + "Affiliation", + "Agent", + "BaseType", + "CommonModel", + "Contributor", + "Equipment", + "RelatedParticipant", + "Resource", + "Software", + ], + "notes": [ + "pydantic2linkml: LinkML does not have direct support for max " + "length constraints. The max length constraint of 150 is " + "incorporated into the pattern of the slot." + ], + } + }, + ) + startDate: Optional[datetime] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": ["Activity"], + "notes": [ + "pydantic2linkml: Unable to express the microseconds precision " + "constraint of truncate. LinkML lacks direct support for this " + "restriction." 
+ ], + } + }, + ) + used: Optional[list[Equipment]] = Field( + default=None, json_schema_extra={"linkml_meta": {"domain_of": ["Activity"]}} + ) + wasAssociatedWith: Optional[list[Union[Agent, Organization, Person, Software]]] = ( + Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "any_of": [ + { + "notes": [ + "pydantic2linkml: Unable to translate the logic " + "contained in the after validation function, ." + ], + "range": "Person", + }, + { + "notes": [ + "pydantic2linkml: Unable to translate the logic " + "contained in the after validation function, ." + ], + "range": "Organization", + }, + {"range": "Software"}, + {"range": "Agent"}, + ], + "domain_of": ["Activity"], + } + }, + ) + ) + id: Optional[str] = Field( + default=None, + json_schema_extra={"linkml_meta": {"domain_of": ["DandiBaseModel"]}}, + ) + schemaKey: Literal["Project"] = Field( + default="Project", + json_schema_extra={ + "linkml_meta": { + "designates_type": True, + "domain_of": ["DandiBaseModel"], + "ifabsent": "string(DandiBaseModel)", + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "after validation function, >." + ], + } + }, + ) + + @field_validator("name") + def pattern_name(cls, v): + pattern = re.compile(r"^(?=.{,150}$)") + if isinstance(v, list): + for element in v: + if isinstance(element, str) and not pattern.match(element): + err_msg = f"Invalid name format: {element}" + raise ValueError(err_msg) + elif isinstance(v, str) and not pattern.match(v): + err_msg = f"Invalid name format: {v}" + raise ValueError(err_msg) + return v + + +class PropertyValue(DandiBaseModel): + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + { + "from_schema": "https://schema.dandiarchive.org/s/dandi/v0.7", + "notes": [ + "pydantic2linkml: Impossible to generate slot usage entry for the " + "schemaKey slot. 
The slot representation of the schemaKey field in " + "the PropertyValue Pydantic model has changes in value in meta " + "slots: ['ifabsent', 'notes', 'range'] ." + ], + } + ) + + maxValue: Optional[float] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": ["PropertyValue"], + "notes": [ + "pydantic2linkml: LinkML does not have support for `'+inf'`, " + "`'-inf'`, and `'NaN'` values. Support for these values is not " + "translated." + ], + } + }, + ) + minValue: Optional[float] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": ["PropertyValue"], + "notes": [ + "pydantic2linkml: LinkML does not have support for `'+inf'`, " + "`'-inf'`, and `'NaN'` values. Support for these values is not " + "translated." + ], + } + }, + ) + propertyID: Optional[Union[IdentifierType, str]] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "any_of": [ + {"range": "IdentifierType"}, + { + "notes": [ + "pydantic2linkml: Unable to translate the logic " + "contained in the wrap validation function, .wrap_val " + "at 0xADDRESS>." + ], + "pattern": "^(?i:http|https)://[^\\s]+$", + "range": "uri", + }, + ], + "domain_of": ["PropertyValue"], + } + }, + ) + unitText: Optional[str] = Field( + default=None, + json_schema_extra={"linkml_meta": {"domain_of": ["PropertyValue"]}}, + ) + value: Optional[Any] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "any_of": [{"range": "Any"}, {"multivalued": True, "range": "Any"}], + "domain_of": ["PropertyValue"], + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "after validation function, >." 
+ ], + } + }, + ) + valueReference: Optional[PropertyValue] = Field( + default=None, + json_schema_extra={"linkml_meta": {"domain_of": ["PropertyValue"]}}, + ) + id: Optional[str] = Field( + default=None, + json_schema_extra={"linkml_meta": {"domain_of": ["DandiBaseModel"]}}, + ) + schemaKey: Literal["PropertyValue"] = Field( + default="PropertyValue", + json_schema_extra={ + "linkml_meta": { + "designates_type": True, + "domain_of": ["DandiBaseModel"], + "ifabsent": "string(DandiBaseModel)", + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "after validation function, >." + ], + } + }, + ) + + +class Publishable(DandiBaseModel): + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + { + "from_schema": "https://schema.dandiarchive.org/s/dandi/v0.7", + "notes": [ + "pydantic2linkml: Impossible to generate slot usage entry for the " + "schemaKey slot. The slot representation of the schemaKey field in " + "the Publishable Pydantic model has changes in value in meta slots: " + "['ifabsent', 'notes', 'range'] ." + ], + } + ) + + datePublished: datetime = Field( + default=..., + json_schema_extra={ + "linkml_meta": { + "domain_of": ["Publishable"], + "notes": [ + "pydantic2linkml: Unable to express the microseconds precision " + "constraint of truncate. LinkML lacks direct support for this " + "restriction." + ], + } + }, + ) + publishedBy: Union[PublishActivity, str] = Field( + default=..., + json_schema_extra={ + "linkml_meta": { + "any_of": [ + { + "notes": [ + "pydantic2linkml: Unable to translate the logic " + "contained in the wrap validation function, .wrap_val " + "at 0xADDRESS>." 
+ ], + "pattern": "^(?i:http|https)://[^\\s]+$", + "range": "uri", + }, + {"range": "PublishActivity"}, + ], + "domain_of": ["Publishable"], + } + }, + ) + id: Optional[str] = Field( + default=None, + json_schema_extra={"linkml_meta": {"domain_of": ["DandiBaseModel"]}}, + ) + schemaKey: Literal["Publishable"] = Field( + default="Publishable", + json_schema_extra={ + "linkml_meta": { + "designates_type": True, + "domain_of": ["DandiBaseModel"], + "ifabsent": "string(DandiBaseModel)", + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "after validation function, >." + ], + } + }, + ) + + +class PublishActivity(Activity): + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + { + "from_schema": "https://schema.dandiarchive.org/s/dandi/v0.7", + "notes": [ + "pydantic2linkml: Impossible to generate slot usage entry for the " + "schemaKey slot. The slot representation of the schemaKey field in " + "the PublishActivity Pydantic model has changes in value in meta " + "slots: ['any_of', 'ifabsent', 'notes'] ." + ], + } + ) + + description: Optional[str] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": [ + "AccessRequirements", + "Activity", + "CommonModel", + "Equipment", + ] + } + }, + ) + endDate: Optional[datetime] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": ["Activity"], + "notes": [ + "pydantic2linkml: Unable to express the microseconds precision " + "constraint of truncate. LinkML lacks direct support for this " + "restriction." 
+ ], + } + }, + ) + identifier: Optional[str] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": [ + "Activity", + "Affiliation", + "Agent", + "Allele", + "Asset", + "BaseType", + "BioSample", + "Contributor", + "Dandiset", + "Equipment", + "EthicsApproval", + "Locus", + "Participant", + "RelatedParticipant", + "Resource", + "Software", + ] + } + }, + ) + name: str = Field( + default=..., + json_schema_extra={ + "linkml_meta": { + "domain_of": [ + "Activity", + "Affiliation", + "Agent", + "BaseType", + "CommonModel", + "Contributor", + "Equipment", + "RelatedParticipant", + "Resource", + "Software", + ], + "notes": [ + "pydantic2linkml: LinkML does not have direct support for max " + "length constraints. The max length constraint of 150 is " + "incorporated into the pattern of the slot." + ], + } + }, + ) + startDate: Optional[datetime] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": ["Activity"], + "notes": [ + "pydantic2linkml: Unable to express the microseconds precision " + "constraint of truncate. LinkML lacks direct support for this " + "restriction." + ], + } + }, + ) + used: Optional[list[Equipment]] = Field( + default=None, json_schema_extra={"linkml_meta": {"domain_of": ["Activity"]}} + ) + wasAssociatedWith: Optional[list[Union[Agent, Organization, Person, Software]]] = ( + Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "any_of": [ + { + "notes": [ + "pydantic2linkml: Unable to translate the logic " + "contained in the after validation function, ." + ], + "range": "Person", + }, + { + "notes": [ + "pydantic2linkml: Unable to translate the logic " + "contained in the after validation function, ." 
+ ], + "range": "Organization", + }, + {"range": "Software"}, + {"range": "Agent"}, + ], + "domain_of": ["Activity"], + } + }, + ) + ) + id: Optional[str] = Field( + default=None, + json_schema_extra={"linkml_meta": {"domain_of": ["DandiBaseModel"]}}, + ) + schemaKey: Literal["PublishActivity"] = Field( + default="PublishActivity", + json_schema_extra={ + "linkml_meta": { + "designates_type": True, + "domain_of": ["DandiBaseModel"], + "ifabsent": "string(DandiBaseModel)", + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "after validation function, >." + ], + } + }, + ) + + @field_validator("name") + def pattern_name(cls, v): + pattern = re.compile(r"^(?=.{,150}$)") + if isinstance(v, list): + for element in v: + if isinstance(element, str) and not pattern.match(element): + err_msg = f"Invalid name format: {element}" + raise ValueError(err_msg) + elif isinstance(v, str) and not pattern.match(v): + err_msg = f"Invalid name format: {v}" + raise ValueError(err_msg) + return v + + +class PublishedAsset(Publishable, Asset): + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + { + "from_schema": "https://schema.dandiarchive.org/s/dandi/v0.7", + "mixins": ["Publishable"], + "notes": [ + "pydantic2linkml: Impossible to generate slot usage entry for the " + "schemaKey slot. The slot representation of the schemaKey field in " + "the PublishedAsset Pydantic model has changes in value in meta " + "slots: ['notes'] .", + "pydantic2linkml: Warning: LinkML does not support multiple " + "inheritance. 
Publishable is not specified as a parent, through the " + "`is_a` meta slot, but as a mixin.", + ], + "slot_usage": { + "id": { + "name": "id", + "pattern": "^dandiasset:[a-f0-9]{8}[-]*[a-f0-9]{4}[-]*[a-f0-9]{4}[-]*[a-f0-9]{4}[-]*[a-f0-9]{12}$", + } + }, + } + ) + + datePublished: datetime = Field( + default=..., + json_schema_extra={ + "linkml_meta": { + "domain_of": ["Publishable"], + "notes": [ + "pydantic2linkml: Unable to express the microseconds precision " + "constraint of truncate. LinkML lacks direct support for this " + "restriction." + ], + } + }, + ) + publishedBy: Union[PublishActivity, str] = Field( + default=..., + json_schema_extra={ + "linkml_meta": { + "any_of": [ + { + "notes": [ + "pydantic2linkml: Unable to translate the logic " + "contained in the wrap validation function, .wrap_val " + "at 0xADDRESS>." + ], + "pattern": "^(?i:http|https)://[^\\s]+$", + "range": "uri", + }, + {"range": "PublishActivity"}, + ], + "domain_of": ["Publishable"], + } + }, + ) + contentUrl: list[str] = Field( + default=..., + json_schema_extra={ + "linkml_meta": { + "domain_of": ["Asset"], + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "wrap validation function, .wrap_val at " + "0xADDRESS>." 
+ ], + } + }, + ) + identifier: str = Field( + default=..., + json_schema_extra={ + "linkml_meta": { + "domain_of": [ + "Activity", + "Affiliation", + "Agent", + "Allele", + "Asset", + "BaseType", + "BioSample", + "Contributor", + "Dandiset", + "Equipment", + "EthicsApproval", + "Locus", + "Participant", + "RelatedParticipant", + "Resource", + "Software", + ] + } + }, + ) + approach: Optional[list[ApproachType]] = Field( + default=None, + json_schema_extra={ + "linkml_meta": {"domain_of": ["AssetsSummary", "BareAsset"]} + }, + ) + blobDateModified: Optional[datetime] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": ["BareAsset"], + "notes": [ + "pydantic2linkml: Unable to express the microseconds precision " + "constraint of truncate. LinkML lacks direct support for this " + "restriction." + ], + } + }, + ) + contentSize: Union[int, str] = Field( + default=..., + json_schema_extra={ + "linkml_meta": { + "any_of": [ + {"pattern": "^\\s*(\\d*\\.?\\d+)\\s*(\\w+)?", "range": "string"}, + {"minimum_value": 0, "range": "integer"}, + ], + "domain_of": ["BareAsset"], + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "after validation function, >." + ], + } + }, + ) + dataType: Optional[str] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": ["BareAsset"], + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "wrap validation function, .wrap_val at " + "0xADDRESS>." + ], + } + }, + ) + dateModified: Optional[datetime] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": ["BareAsset", "Dandiset"], + "notes": [ + "pydantic2linkml: Unable to express the microseconds precision " + "constraint of truncate. LinkML lacks direct support for this " + "restriction." 
+ ], + } + }, + ) + digest: str = Field( + default=..., + json_schema_extra={ + "linkml_meta": { + "domain_of": ["BareAsset"], + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "after validation function, >.", + "pydantic2linkml: Warning: The translation is incomplete. `dict` " + "types are yet to be supported.", + ], + } + }, + ) + encodingFormat: str = Field( + default=..., + json_schema_extra={ + "linkml_meta": { + "any_of": [ + { + "notes": [ + "pydantic2linkml: Unable to translate the logic " + "contained in the wrap validation function, .wrap_val " + "at 0xADDRESS>." + ], + "pattern": "^(?i:http|https)://[^\\s]+$", + "range": "uri", + }, + {"range": "string"}, + ], + "domain_of": ["BareAsset"], + } + }, + ) + measurementTechnique: Optional[list[MeasurementTechniqueType]] = Field( + default=None, + json_schema_extra={ + "linkml_meta": {"domain_of": ["AssetsSummary", "BareAsset"]} + }, + ) + path: str = Field( + default=..., json_schema_extra={"linkml_meta": {"domain_of": ["BareAsset"]}} + ) + sameAs: Optional[list[str]] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": ["BareAsset", "BioSample", "Dandiset", "Participant"], + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "wrap validation function, .wrap_val at " + "0xADDRESS>." 
+ ], + } + }, + ) + variableMeasured: Optional[list[PropertyValue]] = Field( + default=None, + json_schema_extra={ + "linkml_meta": {"domain_of": ["AssetsSummary", "BareAsset"]} + }, + ) + wasAttributedTo: Optional[list[Participant]] = Field( + default=None, + json_schema_extra={"linkml_meta": {"domain_of": ["BareAsset", "BioSample"]}}, + ) + wasDerivedFrom: Optional[list[BioSample]] = Field( + default=None, + json_schema_extra={"linkml_meta": {"domain_of": ["BareAsset", "BioSample"]}}, + ) + about: Optional[list[Union[Anatomy, Disorder, GenericType]]] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "any_of": [ + {"range": "Disorder"}, + {"range": "Anatomy"}, + {"range": "GenericType"}, + ], + "domain_of": ["CommonModel"], + } + }, + ) + access: Optional[list[AccessRequirements]] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": ["CommonModel"], + "notes": [ + "pydantic2linkml: Unable to express the default factory, at 0xADDRESS>, in LinkML.", + "pydantic2linkml: Unable to translate the logic contained in the " + "after validation function, .", + ], + } + }, + ) + acknowledgement: Optional[str] = Field( + default=None, json_schema_extra={"linkml_meta": {"domain_of": ["CommonModel"]}} + ) + contributor: Optional[list[Union[Organization, Person]]] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "any_of": [ + { + "notes": [ + "pydantic2linkml: Unable to translate the logic " + "contained in the after validation function, ." + ], + "range": "Person", + }, + { + "notes": [ + "pydantic2linkml: Unable to translate the logic " + "contained in the after validation function, ." 
+ ], + "range": "Organization", + }, + ], + "domain_of": ["CommonModel"], + } + }, + ) + description: Optional[str] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": [ + "AccessRequirements", + "Activity", + "CommonModel", + "Equipment", + ] + } + }, + ) + ethicsApproval: Optional[list[EthicsApproval]] = Field( + default=None, json_schema_extra={"linkml_meta": {"domain_of": ["CommonModel"]}} + ) + keywords: Optional[list[str]] = Field( + default=None, json_schema_extra={"linkml_meta": {"domain_of": ["CommonModel"]}} + ) + license: Optional[list[LicenseType]] = Field( + default=None, json_schema_extra={"linkml_meta": {"domain_of": ["CommonModel"]}} + ) + name: Optional[str] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": [ + "Activity", + "Affiliation", + "Agent", + "BaseType", + "CommonModel", + "Contributor", + "Equipment", + "RelatedParticipant", + "Resource", + "Software", + ], + "notes": [ + "pydantic2linkml: LinkML does not have direct support for max " + "length constraints. The max length constraint of 150 is " + "incorporated into the pattern of the slot." + ], + } + }, + ) + protocol: Optional[list[str]] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": ["CommonModel"], + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "wrap validation function, .wrap_val at " + "0xADDRESS>." + ], + } + }, + ) + relatedResource: Optional[list[Resource]] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": ["CommonModel"], + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "after validation function, ." 
+ ], + } + }, + ) + repository: Optional[str] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": ["CommonModel", "Resource"], + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "wrap validation function, .wrap_val at " + "0xADDRESS>." + ], + } + }, + ) + schemaVersion: Optional[str] = Field( + default="0.7.0", + json_schema_extra={ + "linkml_meta": {"domain_of": ["CommonModel"], "ifabsent": "string(0.7.0)"} + }, + ) + studyTarget: Optional[list[str]] = Field( + default=None, json_schema_extra={"linkml_meta": {"domain_of": ["CommonModel"]}} + ) + url: Optional[str] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": [ + "Agent", + "CommonModel", + "ContactPoint", + "Contributor", + "RelatedParticipant", + "Resource", + "Software", + ], + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "wrap validation function, .wrap_val at " + "0xADDRESS>." + ], + } + }, + ) + wasGeneratedBy: Optional[ + list[Union[Activity, Project, PublishActivity, Session]] + ] = Field( + default=None, + json_schema_extra={ + "linkml_meta": {"domain_of": ["CommonModel", "GenotypeInfo"]} + }, + ) + id: Optional[str] = Field( + default=None, + json_schema_extra={"linkml_meta": {"domain_of": ["DandiBaseModel"]}}, + ) + schemaKey: Literal["PublishedAsset"] = Field( + default="PublishedAsset", + json_schema_extra={ + "linkml_meta": { + "designates_type": True, + "domain_of": ["DandiBaseModel"], + "ifabsent": "string(DandiBaseModel)", + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "after validation function, >." 
+ ], + } + }, + ) + + @field_validator("contentUrl") + def pattern_contentUrl(cls, v): + pattern = re.compile(r"^(?i:http|https)://[^\s]+$") + if isinstance(v, list): + for element in v: + if isinstance(element, str) and not pattern.match(element): + err_msg = f"Invalid contentUrl format: {element}" + raise ValueError(err_msg) + elif isinstance(v, str) and not pattern.match(v): + err_msg = f"Invalid contentUrl format: {v}" + raise ValueError(err_msg) + return v + + @field_validator("identifier") + def pattern_identifier(cls, v): + pattern = re.compile( + r"^(?:urn:uuid:)?[0-9a-fA-F]{8}-?[0-9a-fA-F]{4}-?4[0-9a-fA-F]{3}-?[89abAB][0-9a-fA-F]{3}-?[0-9a-fA-F]{12}$" + ) + if isinstance(v, list): + for element in v: + if isinstance(element, str) and not pattern.match(element): + err_msg = f"Invalid identifier format: {element}" + raise ValueError(err_msg) + elif isinstance(v, str) and not pattern.match(v): + err_msg = f"Invalid identifier format: {v}" + raise ValueError(err_msg) + return v + + @field_validator("dataType") + def pattern_dataType(cls, v): + pattern = re.compile(r"^(?i:http|https)://[^\s]+$") + if isinstance(v, list): + for element in v: + if isinstance(element, str) and not pattern.match(element): + err_msg = f"Invalid dataType format: {element}" + raise ValueError(err_msg) + elif isinstance(v, str) and not pattern.match(v): + err_msg = f"Invalid dataType format: {v}" + raise ValueError(err_msg) + return v + + @field_validator("sameAs") + def pattern_sameAs(cls, v): + pattern = re.compile(r"^(?i:http|https)://[^\s]+$") + if isinstance(v, list): + for element in v: + if isinstance(element, str) and not pattern.match(element): + err_msg = f"Invalid sameAs format: {element}" + raise ValueError(err_msg) + elif isinstance(v, str) and not pattern.match(v): + err_msg = f"Invalid sameAs format: {v}" + raise ValueError(err_msg) + return v + + @field_validator("name") + def pattern_name(cls, v): + pattern = re.compile(r"^(?=.{,150}$)") + if isinstance(v, list): + for 
element in v: + if isinstance(element, str) and not pattern.match(element): + err_msg = f"Invalid name format: {element}" + raise ValueError(err_msg) + elif isinstance(v, str) and not pattern.match(v): + err_msg = f"Invalid name format: {v}" + raise ValueError(err_msg) + return v + + @field_validator("protocol") + def pattern_protocol(cls, v): + pattern = re.compile(r"^(?i:http|https)://[^\s]+$") + if isinstance(v, list): + for element in v: + if isinstance(element, str) and not pattern.match(element): + err_msg = f"Invalid protocol format: {element}" + raise ValueError(err_msg) + elif isinstance(v, str) and not pattern.match(v): + err_msg = f"Invalid protocol format: {v}" + raise ValueError(err_msg) + return v + + @field_validator("repository") + def pattern_repository(cls, v): + pattern = re.compile(r"^(?i:http|https)://[^\s]+$") + if isinstance(v, list): + for element in v: + if isinstance(element, str) and not pattern.match(element): + err_msg = f"Invalid repository format: {element}" + raise ValueError(err_msg) + elif isinstance(v, str) and not pattern.match(v): + err_msg = f"Invalid repository format: {v}" + raise ValueError(err_msg) + return v + + @field_validator("url") + def pattern_url(cls, v): + pattern = re.compile(r"^(?i:http|https)://[^\s]+$") + if isinstance(v, list): + for element in v: + if isinstance(element, str) and not pattern.match(element): + err_msg = f"Invalid url format: {element}" + raise ValueError(err_msg) + elif isinstance(v, str) and not pattern.match(v): + err_msg = f"Invalid url format: {v}" + raise ValueError(err_msg) + return v + + @field_validator("id") + def pattern_id(cls, v): + pattern = re.compile( + r"^dandiasset:[a-f0-9]{8}[-]*[a-f0-9]{4}[-]*[a-f0-9]{4}[-]*[a-f0-9]{4}[-]*[a-f0-9]{12}$" + ) + if isinstance(v, list): + for element in v: + if isinstance(element, str) and not pattern.match(element): + err_msg = f"Invalid id format: {element}" + raise ValueError(err_msg) + elif isinstance(v, str) and not pattern.match(v): + 
err_msg = f"Invalid id format: {v}" + raise ValueError(err_msg) + return v + + +class PublishedDandiset(Publishable, Dandiset): + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + { + "from_schema": "https://schema.dandiarchive.org/s/dandi/v0.7", + "mixins": ["Publishable"], + "notes": [ + "pydantic2linkml: Impossible to generate slot usage entry for the " + "id slot. The slot representation of the id field in the " + "PublishedDandiset Pydantic model has changes in value in meta " + "slots: ['pattern'] .", + "pydantic2linkml: Impossible to generate slot usage entry for the " + "schemaKey slot. The slot representation of the schemaKey field in " + "the PublishedDandiset Pydantic model has changes in value in meta " + "slots: ['notes'] .", + "pydantic2linkml: Impossible to generate slot usage entry for the " + "url slot. The slot representation of the url field in the " + "PublishedDandiset Pydantic model has changes in value in meta " + "slots: ['notes', 'required'] .", + "pydantic2linkml: Warning: LinkML does not support multiple " + "inheritance. Publishable is not specified as a parent, through the " + "`is_a` meta slot, but as a mixin.", + ], + } + ) + + doi: Optional[str] = Field( + default="", + json_schema_extra={ + "linkml_meta": {"domain_of": ["PublishedDandiset"], "ifabsent": "string()"} + }, + ) + releaseNotes: Optional[str] = Field( + default=None, + json_schema_extra={"linkml_meta": {"domain_of": ["PublishedDandiset"]}}, + ) + datePublished: datetime = Field( + default=..., + json_schema_extra={ + "linkml_meta": { + "domain_of": ["Publishable"], + "notes": [ + "pydantic2linkml: Unable to express the microseconds precision " + "constraint of truncate. LinkML lacks direct support for this " + "restriction." 
+ ], + } + }, + ) + publishedBy: Union[PublishActivity, str] = Field( + default=..., + json_schema_extra={ + "linkml_meta": { + "any_of": [ + { + "notes": [ + "pydantic2linkml: Unable to translate the logic " + "contained in the wrap validation function, .wrap_val " + "at 0xADDRESS>." + ], + "pattern": "^(?i:http|https)://[^\\s]+$", + "range": "uri", + }, + {"range": "PublishActivity"}, + ], + "domain_of": ["Publishable"], + } + }, + ) + assetsSummary: AssetsSummary = Field( + default=..., json_schema_extra={"linkml_meta": {"domain_of": ["Dandiset"]}} + ) + citation: str = Field( + default=..., json_schema_extra={"linkml_meta": {"domain_of": ["Dandiset"]}} + ) + dateCreated: Optional[datetime] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": ["Dandiset"], + "notes": [ + "pydantic2linkml: Unable to express the microseconds precision " + "constraint of truncate. LinkML lacks direct support for this " + "restriction." + ], + } + }, + ) + dateModified: Optional[datetime] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": ["BareAsset", "Dandiset"], + "notes": [ + "pydantic2linkml: Unable to express the microseconds precision " + "constraint of truncate. LinkML lacks direct support for this " + "restriction." + ], + } + }, + ) + identifier: str = Field( + default=..., + json_schema_extra={ + "linkml_meta": { + "domain_of": [ + "Activity", + "Affiliation", + "Agent", + "Allele", + "Asset", + "BaseType", + "BioSample", + "Contributor", + "Dandiset", + "Equipment", + "EthicsApproval", + "Locus", + "Participant", + "RelatedParticipant", + "Resource", + "Software", + ] + } + }, + ) + manifestLocation: list[str] = Field( + default=..., + min_length=1, + json_schema_extra={ + "linkml_meta": { + "domain_of": ["Dandiset"], + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "wrap validation function, .wrap_val at " + "0xADDRESS>." 
+ ], + } + }, + ) + sameAs: Optional[list[str]] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": ["BareAsset", "BioSample", "Dandiset", "Participant"] + } + }, + ) + version: str = Field( + default=..., + json_schema_extra={"linkml_meta": {"domain_of": ["Dandiset", "Software"]}}, + ) + about: Optional[list[Union[Anatomy, Disorder, GenericType]]] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "any_of": [ + {"range": "Disorder"}, + {"range": "Anatomy"}, + {"range": "GenericType"}, + ], + "domain_of": ["CommonModel"], + } + }, + ) + access: Optional[list[AccessRequirements]] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": ["CommonModel"], + "notes": [ + "pydantic2linkml: Unable to express the default factory, at 0xADDRESS>, in LinkML.", + "pydantic2linkml: Unable to translate the logic contained in the " + "after validation function, .", + ], + } + }, + ) + acknowledgement: Optional[str] = Field( + default=None, json_schema_extra={"linkml_meta": {"domain_of": ["CommonModel"]}} + ) + contributor: Optional[list[Union[Organization, Person]]] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "any_of": [ + { + "notes": [ + "pydantic2linkml: Unable to translate the logic " + "contained in the after validation function, ." + ], + "range": "Person", + }, + { + "notes": [ + "pydantic2linkml: Unable to translate the logic " + "contained in the after validation function, ." 
+ ], + "range": "Organization", + }, + ], + "domain_of": ["CommonModel"], + } + }, + ) + description: Optional[str] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": [ + "AccessRequirements", + "Activity", + "CommonModel", + "Equipment", + ] + } + }, + ) + ethicsApproval: Optional[list[EthicsApproval]] = Field( + default=None, json_schema_extra={"linkml_meta": {"domain_of": ["CommonModel"]}} + ) + keywords: Optional[list[str]] = Field( + default=None, json_schema_extra={"linkml_meta": {"domain_of": ["CommonModel"]}} + ) + license: Optional[list[LicenseType]] = Field( + default=None, json_schema_extra={"linkml_meta": {"domain_of": ["CommonModel"]}} + ) + name: Optional[str] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": [ + "Activity", + "Affiliation", + "Agent", + "BaseType", + "CommonModel", + "Contributor", + "Equipment", + "RelatedParticipant", + "Resource", + "Software", + ], + "notes": [ + "pydantic2linkml: LinkML does not have direct support for max " + "length constraints. The max length constraint of 150 is " + "incorporated into the pattern of the slot." + ], + } + }, + ) + protocol: Optional[list[str]] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": ["CommonModel"], + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "wrap validation function, .wrap_val at " + "0xADDRESS>." + ], + } + }, + ) + relatedResource: Optional[list[Resource]] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": ["CommonModel"], + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "after validation function, ." 
+ ], + } + }, + ) + repository: Optional[str] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": ["CommonModel", "Resource"], + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "wrap validation function, .wrap_val at " + "0xADDRESS>." + ], + } + }, + ) + schemaVersion: Optional[str] = Field( + default="0.7.0", + json_schema_extra={ + "linkml_meta": {"domain_of": ["CommonModel"], "ifabsent": "string(0.7.0)"} + }, + ) + studyTarget: Optional[list[str]] = Field( + default=None, json_schema_extra={"linkml_meta": {"domain_of": ["CommonModel"]}} + ) + url: Optional[str] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": [ + "Agent", + "CommonModel", + "ContactPoint", + "Contributor", + "RelatedParticipant", + "Resource", + "Software", + ], + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "wrap validation function, .wrap_val at " + "0xADDRESS>." + ], + } + }, + ) + wasGeneratedBy: Optional[ + list[Union[Activity, Project, PublishActivity, Session]] + ] = Field( + default=None, + json_schema_extra={ + "linkml_meta": {"domain_of": ["CommonModel", "GenotypeInfo"]} + }, + ) + id: Optional[str] = Field( + default=None, + json_schema_extra={"linkml_meta": {"domain_of": ["DandiBaseModel"]}}, + ) + schemaKey: Literal["PublishedDandiset"] = Field( + default="PublishedDandiset", + json_schema_extra={ + "linkml_meta": { + "designates_type": True, + "domain_of": ["DandiBaseModel"], + "ifabsent": "string(DandiBaseModel)", + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "after validation function, >." 
+ ], + } + }, + ) + + @field_validator("doi") + def pattern_doi(cls, v): + pattern = re.compile(r"^(10\.\d{4,}/[a-z][-a-z]*\.\d{6}/\d+\.\d+\.\d+|)$") + if isinstance(v, list): + for element in v: + if isinstance(element, str) and not pattern.match(element): + err_msg = f"Invalid doi format: {element}" + raise ValueError(err_msg) + elif isinstance(v, str) and not pattern.match(v): + err_msg = f"Invalid doi format: {v}" + raise ValueError(err_msg) + return v + + @field_validator("identifier") + def pattern_identifier(cls, v): + pattern = re.compile(r"^[A-Z][-A-Z]*:\d{6}$") + if isinstance(v, list): + for element in v: + if isinstance(element, str) and not pattern.match(element): + err_msg = f"Invalid identifier format: {element}" + raise ValueError(err_msg) + elif isinstance(v, str) and not pattern.match(v): + err_msg = f"Invalid identifier format: {v}" + raise ValueError(err_msg) + return v + + @field_validator("manifestLocation") + def pattern_manifestLocation(cls, v): + pattern = re.compile(r"^(?i:http|https)://[^\s]+$") + if isinstance(v, list): + for element in v: + if isinstance(element, str) and not pattern.match(element): + err_msg = f"Invalid manifestLocation format: {element}" + raise ValueError(err_msg) + elif isinstance(v, str) and not pattern.match(v): + err_msg = f"Invalid manifestLocation format: {v}" + raise ValueError(err_msg) + return v + + @field_validator("sameAs") + def pattern_sameAs(cls, v): + pattern = re.compile( + r"^dandi://[A-Z][-A-Z]*/\d{6}(@(draft|\d+\.\d+\.\d+))?(/\S+)?$" + ) + if isinstance(v, list): + for element in v: + if isinstance(element, str) and not pattern.match(element): + err_msg = f"Invalid sameAs format: {element}" + raise ValueError(err_msg) + elif isinstance(v, str) and not pattern.match(v): + err_msg = f"Invalid sameAs format: {v}" + raise ValueError(err_msg) + return v + + @field_validator("name") + def pattern_name(cls, v): + pattern = re.compile(r"^(?=.{,150}$)") + if isinstance(v, list): + for element in v: + if 
isinstance(element, str) and not pattern.match(element): + err_msg = f"Invalid name format: {element}" + raise ValueError(err_msg) + elif isinstance(v, str) and not pattern.match(v): + err_msg = f"Invalid name format: {v}" + raise ValueError(err_msg) + return v + + @field_validator("protocol") + def pattern_protocol(cls, v): + pattern = re.compile(r"^(?i:http|https)://[^\s]+$") + if isinstance(v, list): + for element in v: + if isinstance(element, str) and not pattern.match(element): + err_msg = f"Invalid protocol format: {element}" + raise ValueError(err_msg) + elif isinstance(v, str) and not pattern.match(v): + err_msg = f"Invalid protocol format: {v}" + raise ValueError(err_msg) + return v + + @field_validator("repository") + def pattern_repository(cls, v): + pattern = re.compile(r"^(?i:http|https)://[^\s]+$") + if isinstance(v, list): + for element in v: + if isinstance(element, str) and not pattern.match(element): + err_msg = f"Invalid repository format: {element}" + raise ValueError(err_msg) + elif isinstance(v, str) and not pattern.match(v): + err_msg = f"Invalid repository format: {v}" + raise ValueError(err_msg) + return v + + @field_validator("url") + def pattern_url(cls, v): + pattern = re.compile(r"^(?i:http|https)://[^\s]+$") + if isinstance(v, list): + for element in v: + if isinstance(element, str) and not pattern.match(element): + err_msg = f"Invalid url format: {element}" + raise ValueError(err_msg) + elif isinstance(v, str) and not pattern.match(v): + err_msg = f"Invalid url format: {v}" + raise ValueError(err_msg) + return v + + +class RelatedParticipant(DandiBaseModel): + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + { + "from_schema": "https://schema.dandiarchive.org/s/dandi/v0.7", + "notes": [ + "pydantic2linkml: Impossible to generate slot usage entry for the " + "schemaKey slot. 
The slot representation of the schemaKey field in " + "the RelatedParticipant Pydantic model has changes in value in meta " + "slots: ['ifabsent', 'notes', 'range'] ." + ], + "slot_usage": { + "identifier": { + "name": "identifier", + "range": "string", + "required": False, + }, + "name": {"name": "name", "required": False}, + "relation": {"name": "relation", "range": "ParticipantRelationType"}, + "url": { + "name": "url", + "notes": [ + "pydantic2linkml: Unable to translate the " + "logic contained in the wrap validation " + "function, .wrap_val " + "at 0xADDRESS>." + ], + }, + }, + } + ) + + identifier: Optional[str] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": [ + "Activity", + "Affiliation", + "Agent", + "Allele", + "Asset", + "BaseType", + "BioSample", + "Contributor", + "Dandiset", + "Equipment", + "EthicsApproval", + "Locus", + "Participant", + "RelatedParticipant", + "Resource", + "Software", + ] + } + }, + ) + name: Optional[str] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": [ + "Activity", + "Affiliation", + "Agent", + "BaseType", + "CommonModel", + "Contributor", + "Equipment", + "RelatedParticipant", + "Resource", + "Software", + ] + } + }, + ) + relation: ParticipantRelationType = Field( + default=..., + json_schema_extra={ + "linkml_meta": {"domain_of": ["RelatedParticipant", "Resource"]} + }, + ) + url: Optional[str] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": [ + "Agent", + "CommonModel", + "ContactPoint", + "Contributor", + "RelatedParticipant", + "Resource", + "Software", + ], + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "wrap validation function, .wrap_val at " + "0xADDRESS>." 
+ ], + } + }, + ) + id: Optional[str] = Field( + default=None, + json_schema_extra={"linkml_meta": {"domain_of": ["DandiBaseModel"]}}, + ) + schemaKey: Literal["RelatedParticipant"] = Field( + default="RelatedParticipant", + json_schema_extra={ + "linkml_meta": { + "designates_type": True, + "domain_of": ["DandiBaseModel"], + "ifabsent": "string(DandiBaseModel)", + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "after validation function, >." + ], + } + }, + ) + + @field_validator("url") + def pattern_url(cls, v): + pattern = re.compile(r"^(?i:http|https)://[^\s]+$") + if isinstance(v, list): + for element in v: + if isinstance(element, str) and not pattern.match(element): + err_msg = f"Invalid url format: {element}" + raise ValueError(err_msg) + elif isinstance(v, str) and not pattern.match(v): + err_msg = f"Invalid url format: {v}" + raise ValueError(err_msg) + return v + + +class Resource(DandiBaseModel): + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + { + "from_schema": "https://schema.dandiarchive.org/s/dandi/v0.7", + "notes": [ + "pydantic2linkml: Impossible to generate slot usage entry for the " + "schemaKey slot. The slot representation of the schemaKey field in " + "the Resource Pydantic model has changes in value in meta slots: " + "['ifabsent', 'notes', 'range'] ." + ], + "slot_usage": { + "identifier": { + "name": "identifier", + "range": "string", + "required": False, + }, + "name": {"name": "name", "required": False}, + "relation": {"name": "relation", "range": "RelationType"}, + "repository": {"name": "repository", "range": "string"}, + "url": { + "name": "url", + "notes": [ + "pydantic2linkml: Unable to translate the " + "logic contained in the wrap validation " + "function, .wrap_val " + "at 0xADDRESS>." 
+ ], + }, + }, + } + ) + + identifier: Optional[str] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": [ + "Activity", + "Affiliation", + "Agent", + "Allele", + "Asset", + "BaseType", + "BioSample", + "Contributor", + "Dandiset", + "Equipment", + "EthicsApproval", + "Locus", + "Participant", + "RelatedParticipant", + "Resource", + "Software", + ] + } + }, + ) + name: Optional[str] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": [ + "Activity", + "Affiliation", + "Agent", + "BaseType", + "CommonModel", + "Contributor", + "Equipment", + "RelatedParticipant", + "Resource", + "Software", + ] + } + }, + ) + relation: RelationType = Field( + default=..., + json_schema_extra={ + "linkml_meta": {"domain_of": ["RelatedParticipant", "Resource"]} + }, + ) + repository: Optional[str] = Field( + default=None, + json_schema_extra={"linkml_meta": {"domain_of": ["CommonModel", "Resource"]}}, + ) + resourceType: Optional[ResourceType] = Field( + default=None, json_schema_extra={"linkml_meta": {"domain_of": ["Resource"]}} + ) + url: Optional[str] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": [ + "Agent", + "CommonModel", + "ContactPoint", + "Contributor", + "RelatedParticipant", + "Resource", + "Software", + ], + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "wrap validation function, .wrap_val at " + "0xADDRESS>." + ], + } + }, + ) + id: Optional[str] = Field( + default=None, + json_schema_extra={"linkml_meta": {"domain_of": ["DandiBaseModel"]}}, + ) + schemaKey: Literal["Resource"] = Field( + default="Resource", + json_schema_extra={ + "linkml_meta": { + "designates_type": True, + "domain_of": ["DandiBaseModel"], + "ifabsent": "string(DandiBaseModel)", + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "after validation function, >." 
+ ], + } + }, + ) + + @field_validator("url") + def pattern_url(cls, v): + pattern = re.compile(r"^(?i:http|https)://[^\s]+$") + if isinstance(v, list): + for element in v: + if isinstance(element, str) and not pattern.match(element): + err_msg = f"Invalid url format: {element}" + raise ValueError(err_msg) + elif isinstance(v, str) and not pattern.match(v): + err_msg = f"Invalid url format: {v}" + raise ValueError(err_msg) + return v + + +class SampleType(BaseType): + """ + OBI based identifier for the sample type used + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + { + "from_schema": "https://schema.dandiarchive.org/s/dandi/v0.7", + "notes": [ + "pydantic2linkml: Impossible to generate slot usage entry for the " + "schemaKey slot. The slot representation of the schemaKey field in " + "the SampleType Pydantic model has changes in value in meta slots: " + "['ifabsent', 'notes', 'range'] ." + ], + } + ) + + identifier: Optional[str] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "any_of": [ + { + "notes": [ + "pydantic2linkml: Unable to translate the logic " + "contained in the wrap validation function, .wrap_val " + "at 0xADDRESS>." + ], + "pattern": "^(?i:http|https)://[^\\s]+$", + "range": "uri", + }, + { + "pattern": "^[a-zA-Z0-9-]+:[a-zA-Z0-9-/\\._]+$", + "range": "string", + }, + ], + "domain_of": [ + "Activity", + "Affiliation", + "Agent", + "Allele", + "Asset", + "BaseType", + "BioSample", + "Contributor", + "Dandiset", + "Equipment", + "EthicsApproval", + "Locus", + "Participant", + "RelatedParticipant", + "Resource", + "Software", + ], + } + }, + ) + name: Optional[str] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": [ + "Activity", + "Affiliation", + "Agent", + "BaseType", + "CommonModel", + "Contributor", + "Equipment", + "RelatedParticipant", + "Resource", + "Software", + ], + "notes": [ + "pydantic2linkml: LinkML does not have direct support for max " + "length constraints. 
The max length constraint of 150 is " + "incorporated into the pattern of the slot." + ], + } + }, + ) + id: Optional[str] = Field( + default=None, + json_schema_extra={"linkml_meta": {"domain_of": ["DandiBaseModel"]}}, + ) + schemaKey: Literal["SampleType"] = Field( + default="SampleType", + json_schema_extra={ + "linkml_meta": { + "designates_type": True, + "domain_of": ["DandiBaseModel"], + "ifabsent": "string(DandiBaseModel)", + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "after validation function, >." + ], + } + }, + ) + + @field_validator("name") + def pattern_name(cls, v): + pattern = re.compile(r"^(?=.{,150}$)") + if isinstance(v, list): + for element in v: + if isinstance(element, str) and not pattern.match(element): + err_msg = f"Invalid name format: {element}" + raise ValueError(err_msg) + elif isinstance(v, str) and not pattern.match(v): + err_msg = f"Invalid name format: {v}" + raise ValueError(err_msg) + return v + + +class Session(Activity): + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + { + "from_schema": "https://schema.dandiarchive.org/s/dandi/v0.7", + "notes": [ + "pydantic2linkml: Impossible to generate slot usage entry for the " + "schemaKey slot. The slot representation of the schemaKey field in " + "the Session Pydantic model has changes in value in meta slots: " + "['any_of', 'ifabsent', 'notes'] ." + ], + } + ) + + description: Optional[str] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": [ + "AccessRequirements", + "Activity", + "CommonModel", + "Equipment", + ] + } + }, + ) + endDate: Optional[datetime] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": ["Activity"], + "notes": [ + "pydantic2linkml: Unable to express the microseconds precision " + "constraint of truncate. LinkML lacks direct support for this " + "restriction." 
+ ], + } + }, + ) + identifier: Optional[str] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": [ + "Activity", + "Affiliation", + "Agent", + "Allele", + "Asset", + "BaseType", + "BioSample", + "Contributor", + "Dandiset", + "Equipment", + "EthicsApproval", + "Locus", + "Participant", + "RelatedParticipant", + "Resource", + "Software", + ] + } + }, + ) + name: str = Field( + default=..., + json_schema_extra={ + "linkml_meta": { + "domain_of": [ + "Activity", + "Affiliation", + "Agent", + "BaseType", + "CommonModel", + "Contributor", + "Equipment", + "RelatedParticipant", + "Resource", + "Software", + ], + "notes": [ + "pydantic2linkml: LinkML does not have direct support for max " + "length constraints. The max length constraint of 150 is " + "incorporated into the pattern of the slot." + ], + } + }, + ) + startDate: Optional[datetime] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": ["Activity"], + "notes": [ + "pydantic2linkml: Unable to express the microseconds precision " + "constraint of truncate. LinkML lacks direct support for this " + "restriction." + ], + } + }, + ) + used: Optional[list[Equipment]] = Field( + default=None, json_schema_extra={"linkml_meta": {"domain_of": ["Activity"]}} + ) + wasAssociatedWith: Optional[list[Union[Agent, Organization, Person, Software]]] = ( + Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "any_of": [ + { + "notes": [ + "pydantic2linkml: Unable to translate the logic " + "contained in the after validation function, ." + ], + "range": "Person", + }, + { + "notes": [ + "pydantic2linkml: Unable to translate the logic " + "contained in the after validation function, ." 
+ ], + "range": "Organization", + }, + {"range": "Software"}, + {"range": "Agent"}, + ], + "domain_of": ["Activity"], + } + }, + ) + ) + id: Optional[str] = Field( + default=None, + json_schema_extra={"linkml_meta": {"domain_of": ["DandiBaseModel"]}}, + ) + schemaKey: Literal["Session"] = Field( + default="Session", + json_schema_extra={ + "linkml_meta": { + "designates_type": True, + "domain_of": ["DandiBaseModel"], + "ifabsent": "string(DandiBaseModel)", + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "after validation function, >." + ], + } + }, + ) + + @field_validator("name") + def pattern_name(cls, v): + pattern = re.compile(r"^(?=.{,150}$)") + if isinstance(v, list): + for element in v: + if isinstance(element, str) and not pattern.match(element): + err_msg = f"Invalid name format: {element}" + raise ValueError(err_msg) + elif isinstance(v, str) and not pattern.match(v): + err_msg = f"Invalid name format: {v}" + raise ValueError(err_msg) + return v + + +class SexType(BaseType): + """ + Identifier for the sex of the sample + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + { + "from_schema": "https://schema.dandiarchive.org/s/dandi/v0.7", + "notes": [ + "pydantic2linkml: Impossible to generate slot usage entry for the " + "schemaKey slot. The slot representation of the schemaKey field in " + "the SexType Pydantic model has changes in value in meta slots: " + "['ifabsent', 'notes', 'range'] ." + ], + } + ) + + identifier: Optional[str] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "any_of": [ + { + "notes": [ + "pydantic2linkml: Unable to translate the logic " + "contained in the wrap validation function, .wrap_val " + "at 0xADDRESS>." 
+ ], + "pattern": "^(?i:http|https)://[^\\s]+$", + "range": "uri", + }, + { + "pattern": "^[a-zA-Z0-9-]+:[a-zA-Z0-9-/\\._]+$", + "range": "string", + }, + ], + "domain_of": [ + "Activity", + "Affiliation", + "Agent", + "Allele", + "Asset", + "BaseType", + "BioSample", + "Contributor", + "Dandiset", + "Equipment", + "EthicsApproval", + "Locus", + "Participant", + "RelatedParticipant", + "Resource", + "Software", + ], + } + }, + ) + name: Optional[str] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": [ + "Activity", + "Affiliation", + "Agent", + "BaseType", + "CommonModel", + "Contributor", + "Equipment", + "RelatedParticipant", + "Resource", + "Software", + ], + "notes": [ + "pydantic2linkml: LinkML does not have direct support for max " + "length constraints. The max length constraint of 150 is " + "incorporated into the pattern of the slot." + ], + } + }, + ) + id: Optional[str] = Field( + default=None, + json_schema_extra={"linkml_meta": {"domain_of": ["DandiBaseModel"]}}, + ) + schemaKey: Literal["SexType"] = Field( + default="SexType", + json_schema_extra={ + "linkml_meta": { + "designates_type": True, + "domain_of": ["DandiBaseModel"], + "ifabsent": "string(DandiBaseModel)", + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "after validation function, >." 
+ ], + } + }, + ) + + @field_validator("name") + def pattern_name(cls, v): + pattern = re.compile(r"^(?=.{,150}$)") + if isinstance(v, list): + for element in v: + if isinstance(element, str) and not pattern.match(element): + err_msg = f"Invalid name format: {element}" + raise ValueError(err_msg) + elif isinstance(v, str) and not pattern.match(v): + err_msg = f"Invalid name format: {v}" + raise ValueError(err_msg) + return v + + +class Software(DandiBaseModel): + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + { + "from_schema": "https://schema.dandiarchive.org/s/dandi/v0.7", + "notes": [ + "pydantic2linkml: Impossible to generate slot usage entry for the " + "schemaKey slot. The slot representation of the schemaKey field in " + "the Software Pydantic model has changes in value in meta slots: " + "['ifabsent', 'notes', 'range'] ." + ], + "slot_usage": { + "identifier": { + "name": "identifier", + "pattern": "^RRID:.*", + "range": "string", + "required": False, + }, + "name": {"name": "name", "required": True}, + "url": { + "name": "url", + "notes": [ + "pydantic2linkml: Unable to translate the " + "logic contained in the wrap validation " + "function, .wrap_val " + "at 0xADDRESS>." 
+ ], + }, + }, + } + ) + + identifier: Optional[str] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": [ + "Activity", + "Affiliation", + "Agent", + "Allele", + "Asset", + "BaseType", + "BioSample", + "Contributor", + "Dandiset", + "Equipment", + "EthicsApproval", + "Locus", + "Participant", + "RelatedParticipant", + "Resource", + "Software", + ] + } + }, + ) + name: str = Field( + default=..., + json_schema_extra={ + "linkml_meta": { + "domain_of": [ + "Activity", + "Affiliation", + "Agent", + "BaseType", + "CommonModel", + "Contributor", + "Equipment", + "RelatedParticipant", + "Resource", + "Software", + ] + } + }, + ) + url: Optional[str] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": [ + "Agent", + "CommonModel", + "ContactPoint", + "Contributor", + "RelatedParticipant", + "Resource", + "Software", + ], + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "wrap validation function, .wrap_val at " + "0xADDRESS>." + ], + } + }, + ) + version: str = Field( + default=..., + json_schema_extra={"linkml_meta": {"domain_of": ["Dandiset", "Software"]}}, + ) + id: Optional[str] = Field( + default=None, + json_schema_extra={"linkml_meta": {"domain_of": ["DandiBaseModel"]}}, + ) + schemaKey: Literal["Software"] = Field( + default="Software", + json_schema_extra={ + "linkml_meta": { + "designates_type": True, + "domain_of": ["DandiBaseModel"], + "ifabsent": "string(DandiBaseModel)", + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "after validation function, >." 
+ ], + } + }, + ) + + @field_validator("identifier") + def pattern_identifier(cls, v): + pattern = re.compile(r"^RRID:.*") + if isinstance(v, list): + for element in v: + if isinstance(element, str) and not pattern.match(element): + err_msg = f"Invalid identifier format: {element}" + raise ValueError(err_msg) + elif isinstance(v, str) and not pattern.match(v): + err_msg = f"Invalid identifier format: {v}" + raise ValueError(err_msg) + return v + + @field_validator("url") + def pattern_url(cls, v): + pattern = re.compile(r"^(?i:http|https)://[^\s]+$") + if isinstance(v, list): + for element in v: + if isinstance(element, str) and not pattern.match(element): + err_msg = f"Invalid url format: {element}" + raise ValueError(err_msg) + elif isinstance(v, str) and not pattern.match(v): + err_msg = f"Invalid url format: {v}" + raise ValueError(err_msg) + return v + + +class SpeciesType(BaseType): + """ + Identifier for species of the sample + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + { + "from_schema": "https://schema.dandiarchive.org/s/dandi/v0.7", + "notes": [ + "pydantic2linkml: Impossible to generate slot usage entry for the " + "schemaKey slot. The slot representation of the schemaKey field in " + "the SpeciesType Pydantic model has changes in value in meta slots: " + "['ifabsent', 'notes', 'range'] ." + ], + } + ) + + identifier: Optional[str] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "any_of": [ + { + "notes": [ + "pydantic2linkml: Unable to translate the logic " + "contained in the wrap validation function, .wrap_val " + "at 0xADDRESS>." 
+ ], + "pattern": "^(?i:http|https)://[^\\s]+$", + "range": "uri", + }, + { + "pattern": "^[a-zA-Z0-9-]+:[a-zA-Z0-9-/\\._]+$", + "range": "string", + }, + ], + "domain_of": [ + "Activity", + "Affiliation", + "Agent", + "Allele", + "Asset", + "BaseType", + "BioSample", + "Contributor", + "Dandiset", + "Equipment", + "EthicsApproval", + "Locus", + "Participant", + "RelatedParticipant", + "Resource", + "Software", + ], + } + }, + ) + name: Optional[str] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": [ + "Activity", + "Affiliation", + "Agent", + "BaseType", + "CommonModel", + "Contributor", + "Equipment", + "RelatedParticipant", + "Resource", + "Software", + ], + "notes": [ + "pydantic2linkml: LinkML does not have direct support for max " + "length constraints. The max length constraint of 150 is " + "incorporated into the pattern of the slot." + ], + } + }, + ) + id: Optional[str] = Field( + default=None, + json_schema_extra={"linkml_meta": {"domain_of": ["DandiBaseModel"]}}, + ) + schemaKey: Literal["SpeciesType"] = Field( + default="SpeciesType", + json_schema_extra={ + "linkml_meta": { + "designates_type": True, + "domain_of": ["DandiBaseModel"], + "ifabsent": "string(DandiBaseModel)", + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "after validation function, >." 
+ ], + } + }, + ) + + @field_validator("name") + def pattern_name(cls, v): + pattern = re.compile(r"^(?=.{,150}$)") + if isinstance(v, list): + for element in v: + if isinstance(element, str) and not pattern.match(element): + err_msg = f"Invalid name format: {element}" + raise ValueError(err_msg) + elif isinstance(v, str) and not pattern.match(v): + err_msg = f"Invalid name format: {v}" + raise ValueError(err_msg) + return v + + +class StandardsType(BaseType): + """ + Identifier for data standard used + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + { + "from_schema": "https://schema.dandiarchive.org/s/dandi/v0.7", + "notes": [ + "pydantic2linkml: Impossible to generate slot usage entry for the " + "schemaKey slot. The slot representation of the schemaKey field in " + "the StandardsType Pydantic model has changes in value in meta " + "slots: ['ifabsent', 'notes', 'range'] ." + ], + } + ) + + identifier: Optional[str] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "any_of": [ + { + "notes": [ + "pydantic2linkml: Unable to translate the logic " + "contained in the wrap validation function, .wrap_val " + "at 0xADDRESS>." + ], + "pattern": "^(?i:http|https)://[^\\s]+$", + "range": "uri", + }, + { + "pattern": "^[a-zA-Z0-9-]+:[a-zA-Z0-9-/\\._]+$", + "range": "string", + }, + ], + "domain_of": [ + "Activity", + "Affiliation", + "Agent", + "Allele", + "Asset", + "BaseType", + "BioSample", + "Contributor", + "Dandiset", + "Equipment", + "EthicsApproval", + "Locus", + "Participant", + "RelatedParticipant", + "Resource", + "Software", + ], + } + }, + ) + name: Optional[str] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": [ + "Activity", + "Affiliation", + "Agent", + "BaseType", + "CommonModel", + "Contributor", + "Equipment", + "RelatedParticipant", + "Resource", + "Software", + ], + "notes": [ + "pydantic2linkml: LinkML does not have direct support for max " + "length constraints. 
The max length constraint of 150 is " + "incorporated into the pattern of the slot." + ], + } + }, + ) + id: Optional[str] = Field( + default=None, + json_schema_extra={"linkml_meta": {"domain_of": ["DandiBaseModel"]}}, + ) + schemaKey: Literal["StandardsType"] = Field( + default="StandardsType", + json_schema_extra={ + "linkml_meta": { + "designates_type": True, + "domain_of": ["DandiBaseModel"], + "ifabsent": "string(DandiBaseModel)", + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "after validation function, >." + ], + } + }, + ) + + @field_validator("name") + def pattern_name(cls, v): + pattern = re.compile(r"^(?=.{,150}$)") + if isinstance(v, list): + for element in v: + if isinstance(element, str) and not pattern.match(element): + err_msg = f"Invalid name format: {element}" + raise ValueError(err_msg) + elif isinstance(v, str) and not pattern.match(v): + err_msg = f"Invalid name format: {v}" + raise ValueError(err_msg) + return v + + +class StrainType(BaseType): + """ + Identifier for the strain of the sample + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + { + "from_schema": "https://schema.dandiarchive.org/s/dandi/v0.7", + "notes": [ + "pydantic2linkml: Impossible to generate slot usage entry for the " + "schemaKey slot. The slot representation of the schemaKey field in " + "the StrainType Pydantic model has changes in value in meta slots: " + "['ifabsent', 'notes', 'range'] ." + ], + } + ) + + identifier: Optional[str] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "any_of": [ + { + "notes": [ + "pydantic2linkml: Unable to translate the logic " + "contained in the wrap validation function, .wrap_val " + "at 0xADDRESS>." 
+ ], + "pattern": "^(?i:http|https)://[^\\s]+$", + "range": "uri", + }, + { + "pattern": "^[a-zA-Z0-9-]+:[a-zA-Z0-9-/\\._]+$", + "range": "string", + }, + ], + "domain_of": [ + "Activity", + "Affiliation", + "Agent", + "Allele", + "Asset", + "BaseType", + "BioSample", + "Contributor", + "Dandiset", + "Equipment", + "EthicsApproval", + "Locus", + "Participant", + "RelatedParticipant", + "Resource", + "Software", + ], + } + }, + ) + name: Optional[str] = Field( + default=None, + json_schema_extra={ + "linkml_meta": { + "domain_of": [ + "Activity", + "Affiliation", + "Agent", + "BaseType", + "CommonModel", + "Contributor", + "Equipment", + "RelatedParticipant", + "Resource", + "Software", + ], + "notes": [ + "pydantic2linkml: LinkML does not have direct support for max " + "length constraints. The max length constraint of 150 is " + "incorporated into the pattern of the slot." + ], + } + }, + ) + id: Optional[str] = Field( + default=None, + json_schema_extra={"linkml_meta": {"domain_of": ["DandiBaseModel"]}}, + ) + schemaKey: Literal["StrainType"] = Field( + default="StrainType", + json_schema_extra={ + "linkml_meta": { + "designates_type": True, + "domain_of": ["DandiBaseModel"], + "ifabsent": "string(DandiBaseModel)", + "notes": [ + "pydantic2linkml: Unable to translate the logic contained in the " + "after validation function, >." 
+ ], + } + }, + ) + + @field_validator("name") + def pattern_name(cls, v): + pattern = re.compile(r"^(?=.{,150}$)") + if isinstance(v, list): + for element in v: + if isinstance(element, str) and not pattern.match(element): + err_msg = f"Invalid name format: {element}" + raise ValueError(err_msg) + elif isinstance(v, str) and not pattern.match(v): + err_msg = f"Invalid name format: {v}" + raise ValueError(err_msg) + return v + + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model +DandiBaseModel.model_rebuild() +AccessRequirements.model_rebuild() +Activity.model_rebuild() +Affiliation.model_rebuild() +Agent.model_rebuild() +Allele.model_rebuild() +AssetsSummary.model_rebuild() +BaseType.model_rebuild() +Anatomy.model_rebuild() +ApproachType.model_rebuild() +AssayType.model_rebuild() +BioSample.model_rebuild() +CommonModel.model_rebuild() +BareAsset.model_rebuild() +Asset.model_rebuild() +ContactPoint.model_rebuild() +Contributor.model_rebuild() +Dandiset.model_rebuild() +Disorder.model_rebuild() +Equipment.model_rebuild() +EthicsApproval.model_rebuild() +GenericType.model_rebuild() +GenotypeInfo.model_rebuild() +Locus.model_rebuild() +MeasurementTechniqueType.model_rebuild() +Organization.model_rebuild() +Participant.model_rebuild() +Person.model_rebuild() +Project.model_rebuild() +PropertyValue.model_rebuild() +Publishable.model_rebuild() +PublishActivity.model_rebuild() +PublishedAsset.model_rebuild() +PublishedDandiset.model_rebuild() +RelatedParticipant.model_rebuild() +Resource.model_rebuild() +SampleType.model_rebuild() +Session.model_rebuild() +SexType.model_rebuild() +Software.model_rebuild() +SpeciesType.model_rebuild() +StandardsType.model_rebuild() +StrainType.model_rebuild() diff --git a/dandischema/models_orig.py b/dandischema/models_orig.py new file mode 100644 index 00000000..9e391510 --- /dev/null +++ b/dandischema/models_orig.py @@ -0,0 +1,1990 @@ +from __future__ import annotations + +from datetime import 
date, datetime +from enum import Enum +import re +from typing import ( + TYPE_CHECKING, + Annotated, + Any, + Dict, + List, + Literal, + Optional, + Sequence, + Type, + TypeVar, + Union, +) +from warnings import warn + +from pydantic import ( + UUID4, + AnyHttpUrl, + BaseModel, + ConfigDict, + EmailStr, + Field, + GetJsonSchemaHandler, + SerializerFunctionWrapHandler, + StringConstraints, + ValidationInfo, + field_serializer, + field_validator, + model_validator, +) +from pydantic.json_schema import JsonSchemaValue +from pydantic_core import CoreSchema +from zarr_checksum.checksum import InvalidZarrChecksum, ZarrDirectoryDigest + +from dandischema.conf import ( + DEFAULT_INSTANCE_NAME, + UNVENDORED_DOI_PREFIX_PATTERN, + UNVENDORED_ID_PATTERN, + get_instance_config, +) + +from .consts import DANDI_SCHEMA_VERSION +from .digests.dandietag import DandiETag +from .types import ByteSizeJsonSchema +from .utils import name2title + +try: + from anys import AnyBase +except ImportError: + _has_anys = False +else: + _has_anys = True + +# Load needed configurations into constants +_INSTANCE_CONFIG = get_instance_config() + +# Regex pattern for the prefix of identifiers +ID_PATTERN = ( + _INSTANCE_CONFIG.instance_name + if _INSTANCE_CONFIG.instance_name != DEFAULT_INSTANCE_NAME + else UNVENDORED_ID_PATTERN +) + +# The pattern that a DOI prefix of a dandiset must conform to +DOI_PREFIX_PATTERN = ( + re.escape(_INSTANCE_CONFIG.doi_prefix) + if _INSTANCE_CONFIG.doi_prefix is not None + else UNVENDORED_DOI_PREFIX_PATTERN +) + +# The pattern of the DANDI instance URL +DANDI_INSTANCE_URL_PATTERN = ( + ".*" + if _INSTANCE_CONFIG.instance_url is None + else re.escape(str(_INSTANCE_CONFIG.instance_url).rstrip("/")) +) + +NAME_PATTERN = r"^([\w\s\-\.']+),\s+([\w\s\-\.']+)$" +UUID_PATTERN = ( + "[a-f0-9]{8}[-]*[a-f0-9]{4}[-]*" "[a-f0-9]{4}[-]*[a-f0-9]{4}[-]*[a-f0-9]{12}$" +) +ASSET_UUID_PATTERN = r"^dandiasset:" + UUID_PATTERN +VERSION_NUM_PATTERN = r"\d+\.\d+\.\d+" +VERSION_PATTERN = 
rf"\d{{6}}/{VERSION_NUM_PATTERN}" +_INNER_DANDI_DOI_PATTERN = ( + rf"{DOI_PREFIX_PATTERN}/{ID_PATTERN.lower()}\.{VERSION_PATTERN}" +) +DANDI_DOI_PATTERN = ( + rf"^{_INNER_DANDI_DOI_PATTERN}$" + if _INSTANCE_CONFIG.doi_prefix is not None + else rf"^({_INNER_DANDI_DOI_PATTERN}|)$" # This matches an empty string as well +) +DANDI_PUBID_PATTERN = rf"^{ID_PATTERN}:{VERSION_PATTERN}$" +DANDI_NSKEY = "dandi" # Namespace for DANDI ontology + +PUBLISHED_VERSION_URL_PATTERN = ( + rf"^{DANDI_INSTANCE_URL_PATTERN}/dandiset/{VERSION_PATTERN}$" +) +MD5_PATTERN = r"[0-9a-f]{32}" +SHA256_PATTERN = r"[0-9a-f]{64}" + +M = TypeVar("M", bound=BaseModel) + + +def diff_models(model1: M, model2: M) -> None: + """Perform a field-wise diff""" + for field in model1.model_fields: + if getattr(model1, field) != getattr(model2, field): + print(f"{field} is different") + + +if TYPE_CHECKING: + # This is just a placeholder for static type checking + class LicenseType(Enum): + ... # fmt: skip + +else: + LicenseType = Enum( + "LicenseType", + [(license_.name, license_.value) for license_ in _INSTANCE_CONFIG.licenses], + ) + r""" + An enumeration of supported licenses + + The value of each member is a string that matches the regex pattern of + `^([^:\s]+):(\S+)$` in which the first group matches the license scheme such + as `"spdx"`, and the second group matches the license identifier such as + `"CC-BY-4.0"`. 
+ """ + + +class AccessType(Enum): + """An enumeration of access status options""" + + #: The dandiset is openly accessible + OpenAccess = f"{DANDI_NSKEY}:OpenAccess" + + #: The dandiset is embargoed + EmbargoedAccess = f"{DANDI_NSKEY}:EmbargoedAccess" + + """ + Uncomment when restricted access is implemented: + #: The dandiset is restricted + RestrictedAccess = f"{DANDI_NSKEY}:RestrictedAccess" + """ + + +class DigestType(Enum): + """An enumeration of checksum types""" + + #: MD5 checksum + md5 = f"{DANDI_NSKEY}:md5" + + #: SHA1 checksum + sha1 = f"{DANDI_NSKEY}:sha1" + + #: SHA2-256 checksum + sha2_256 = f"{DANDI_NSKEY}:sha2-256" + + #: SHA3-256 checksum + sha3_256 = f"{DANDI_NSKEY}:sha3-256" + + #: BLAKE2B-256 checksum + blake2b_256 = f"{DANDI_NSKEY}:blake2b-256" + + #: BLAKE3-256 checksum + blake3 = f"{DANDI_NSKEY}:blake3" + + #: S3-style ETag + dandi_etag = f"{DANDI_NSKEY}:dandi-etag" + + #: DANDI Zarr checksum + dandi_zarr_checksum = f"{DANDI_NSKEY}:dandi-zarr-checksum" + + +class IdentifierType(Enum): + """An enumeration of identifiers""" + + doi = f"{DANDI_NSKEY}:doi" + orcid = f"{DANDI_NSKEY}:orcid" + ror = f"{DANDI_NSKEY}:ror" + dandi = f"{DANDI_NSKEY}:dandi" + rrid = f"{DANDI_NSKEY}:rrid" + + +class RelationType(Enum): + """An enumeration of resource relations""" + + #: Indicates that B includes A in a citation + IsCitedBy = "dcite:IsCitedBy" + + #: Indicates that A includes B in a citation + Cites = "dcite:Cites" + + #: Indicates that A is a supplement to B + IsSupplementTo = "dcite:IsSupplementTo" + + #: Indicates that B is a supplement to A + IsSupplementedBy = "dcite:IsSupplementedBy" + + #: Indicates A is continued by the work B + IsContinuedBy = "dcite:IsContinuedBy" + + #: Indicates A is a continuation of the work B + Continues = "dcite:Continues" + + #: Indicates A describes B + Describes = "dcite:Describes" + + #: Indicates A is described by B + IsDescribedBy = "dcite:IsDescribedBy" + + #: Indicates resource A has additional metadata B + 
HasMetadata = "dcite:HasMetadata" + + #: Indicates additional metadata A for a resource B + IsMetadataFor = "dcite:IsMetadataFor" + + #: Indicates A has a version (B) + HasVersion = "dcite:HasVersion" + + #: Indicates A is a version of B + IsVersionOf = "dcite:IsVersionOf" + + #: Indicates A is a new edition of B + IsNewVersionOf = "dcite:IsNewVersionOf" + + #: Indicates A is a previous edition of B + IsPreviousVersionOf = "dcite:IsPreviousVersionOf" + + #: Indicates A is a portion of B + IsPartOf = "dcite:IsPartOf" + + #: Indicates A includes the part B + HasPart = "dcite:HasPart" + + #: Indicates A is used as a source of information by B + IsReferencedBy = "dcite:IsReferencedBy" + + #: Indicates B is used as a source of information for A + References = "dcite:References" + + #: Indicates B is documentation about/explaining A + IsDocumentedBy = "dcite:IsDocumentedBy" + + #: Indicates A is documentation about B + Documents = "dcite:Documents" + + #: Indicates B is used to compile or create A + IsCompiledBy = "dcite:IsCompiledBy" + + #: Indicates B is the result of a compile or creation event using A + Compiles = "dcite:Compiles" + + #: Indicates A is a variant or different form of B + IsVariantFormOf = "dcite:IsVariantFormOf" + + #: Indicates A is the original form of B + IsOriginalFormOf = "dcite:IsOriginalFormOf" + + #: Indicates that A is identical to B + IsIdenticalTo = "dcite:IsIdenticalTo" + + #: Indicates that A is reviewed by B + IsReviewedBy = "dcite:IsReviewedBy" + + #: Indicates that A is a review of B + Reviews = "dcite:Reviews" + + #: Indicates B is a source upon which A is based + IsDerivedFrom = "dcite:IsDerivedFrom" + + #: Indicates A is a source upon which B is based + IsSourceOf = "dcite:IsSourceOf" + + #: Indicates A is required by B + IsRequiredBy = "dcite:IsRequiredBy" + + #: Indicates A requires B + Requires = "dcite:Requires" + + #: Indicates A replaces B + Obsoletes = "dcite:Obsoletes" + + #: Indicates A is replaced by B + IsObsoletedBy = 
"dcite:IsObsoletedBy" + + #: Indicates A is published in B + IsPublishedIn = "dcite:IsPublishedIn" + + +class ParticipantRelationType(Enum): + """An enumeration of participant relations""" + + #: Indicates that A is a child of B + isChildOf = f"{DANDI_NSKEY}:isChildOf" + + #: Indicates that A is a parent of B + isParentOf = f"{DANDI_NSKEY}:isParentOf" + + #: Indicates that A is a sibling of B + isSiblingOf = f"{DANDI_NSKEY}:isSiblingOf" + + #: Indicates that A is a monozygotic twin of B + isMonozygoticTwinOf = f"{DANDI_NSKEY}:isMonozygoticTwinOf" + + #: Indicates that A is a dizygotic twin of B + isDizygoticTwinOf = f"{DANDI_NSKEY}:isDizygoticTwinOf" + + +class RoleType(Enum): + """An enumeration of roles""" + + #: Author + Author = "dcite:Author" + + #: Conceptualization + Conceptualization = "dcite:Conceptualization" + + #: Contact Person + ContactPerson = "dcite:ContactPerson" + + #: Data Collector + DataCollector = "dcite:DataCollector" + + #: Data Curator + DataCurator = "dcite:DataCurator" + + #: Data Manager + DataManager = "dcite:DataManager" + + #: Formal Analysis + FormalAnalysis = "dcite:FormalAnalysis" + + #: Funding Acquisition + FundingAcquisition = "dcite:FundingAcquisition" + + #: Investigation + Investigation = "dcite:Investigation" + + #: Maintainer + Maintainer = "dcite:Maintainer" + + #: Methodology + Methodology = "dcite:Methodology" + + #: Producer + Producer = "dcite:Producer" + + #: Project Leader + ProjectLeader = "dcite:ProjectLeader" + + #: Project Manager + ProjectManager = "dcite:ProjectManager" + + #: Project Member + ProjectMember = "dcite:ProjectMember" + + #: Project Administration + ProjectAdministration = "dcite:ProjectAdministration" + + #: Researcher + Researcher = "dcite:Researcher" + + #: Resources + Resources = "dcite:Resources" + + #: Software + Software = "dcite:Software" + + #: Supervision + Supervision = "dcite:Supervision" + + #: Validation + Validation = "dcite:Validation" + + #: Visualization + Visualization = 
"dcite:Visualization" + + #: Funder + Funder = "dcite:Funder" + + #: Sponsor + Sponsor = "dcite:Sponsor" + + #: Participant in a study + StudyParticipant = "dcite:StudyParticipant" + + #: Affiliated with an entity + Affiliation = "dcite:Affiliation" + + #: Approved ethics protocol + EthicsApproval = "dcite:EthicsApproval" + + #: Other + Other = "dcite:Other" + + +class ResourceType(Enum): + """An enumeration of resource types""" + + #: Audiovisual: A series of visual representations imparting an impression of motion + # when shown in succession. May or may not include sound. + Audiovisual = "dcite:Audiovisual" + + #: Book: A medium for recording information in the form of writing or images, + # typically composed of many pages bound together and protected by a cover. + Book = "dcite:Book" + + #: BookChapter: One of the main divisions of a book. + BookChapter = "dcite:BookChapter" + + #: Collection: An aggregation of resources, which may encompass collections of one + # resourceType as well as those of mixed types. A collection is described as a + # group; its parts may also be separately described. + Collection = "dcite:Collection" + + #: ComputationalNotebook: A virtual notebook environment used for literate + # programming. + ComputationalNotebook = "dcite:ComputationalNotebook" + + #: ConferencePaper: Article that is written with the goal of being accepted to a + # conference. + ConferencePaper = "dcite:ConferencePaper" + + #: ConferenceProceeding: Collection of academic papers published in the context of + # an academic conference. + ConferenceProceeding = "dcite:ConferenceProceeding" + + #: DataPaper: A factual and objective publication with a focused intent to identify + # and describe specific data, sets of data, or data collections to facilitate + # discoverability. + DataPaper = "dcite:DataPaper" + + #: Dataset: Data encoded in a defined structure. 
+ Dataset = "dcite:Dataset" + + #: Dissertation: A written essay, treatise, or thesis, especially one written by a + # candidate for the degree of Doctor of Philosophy. + Dissertation = "dcite:Dissertation" + + #: Event: A non-persistent, time-based occurrence. + Event = "dcite:Event" + + #: Image: A visual representation other than text. + Image = "dcite:Image" + + #: Instrument: A device, tool or apparatus used to obtain, measure and/or analyze + # data. + Instrument = "dcite:Instrument" + + #: InteractiveResource: A resource requiring interaction from the user to be + # understood, executed, or experienced. + InteractiveResource = "dcite:InteractiveResource" + + #: Journal: A scholarly publication consisting of articles that is published + # regularly throughout the year. + Journal = "dcite:Journal" + + #: JournalArticle: A written composition on a topic of interest, which forms a + # separate part of a journal. + JournalArticle = "dcite:JournalArticle" + + #: Model: An abstract, conceptual, graphical, mathematical or visualization model + # that represents empirical objects, phenomena, or physical processes. + Model = "dcite:Model" + + #: OutputManagementPlan: A formal document that outlines how research outputs are to + # be handled both during a research project and after the project is completed. + OutputManagementPlan = "dcite:OutputManagementPlan" + + #: PeerReview: Evaluation of scientific, academic, or professional work by others + # working in the same field. + PeerReview = "dcite:PeerReview" + + #: PhysicalObject: A physical object or substance. + PhysicalObject = "dcite:PhysicalObject" + + #: Preprint: A version of a scholarly or scientific paper that precedes formal peer + # review and publication in a peer-reviewed scholarly or scientific journal. + Preprint = "dcite:Preprint" + + #: Report: A document that presents information in an organized format for a + # specific audience and purpose. 
+ Report = "dcite:Report" + + #: Service: An organized system of apparatus, appliances, staff, etc., for supplying + # some function(s) required by end users. + Service = "dcite:Service" + + #: Software: A computer program other than a computational notebook, in either + # source code (text) or compiled form. Use this type for general software components + # supporting scholarly research. Use the “ComputationalNotebook” value for virtual + # notebooks. + Software = "dcite:Software" + + #: Sound: A resource primarily intended to be heard. + Sound = "dcite:Sound" + + #: Standard: Something established by authority, custom, or general consent as a + # model, example, or point of reference. + Standard = "dcite:Standard" + + #: StudyRegistration: A detailed, time-stamped description of a research plan, often + # openly shared in a registry or published in a journal before the study is + # conducted to lend accountability and transparency in the hypothesis generating and + # testing process. + StudyRegistration = "dcite:StudyRegistration" + + #: Text: A resource consisting primarily of words for reading that is not covered by + # any other textual resource type in this list. + Text = "dcite:Text" + + #: Workflow: A structured series of steps which can be executed to produce a final + # outcome, allowing users a means to specify and enact their work in a more + # reproducible manner. + Workflow = "dcite:Workflow" + + #: Other: A resource that does not fit into any of the other categories. 
+ Other = "dcite:Other" + + +class AgeReferenceType(Enum): + """An enumeration of age reference""" + + #: Age since Birth + BirthReference = f"{DANDI_NSKEY}:BirthReference" + + #: Age of a pregnancy (https://en.wikipedia.org/wiki/Gestational_age) + GestationalReference = f"{DANDI_NSKEY}:GestationalReference" + + +class DandiBaseModel(BaseModel): + id: Optional[str] = Field( + default=None, + description="Uniform resource identifier", + json_schema_extra={"readOnly": True}, + ) + schemaKey: str = Field( + "DandiBaseModel", validate_default=True, json_schema_extra={"readOnly": True} + ) + + def json_dict(self) -> dict: + """ + Recursively convert the instance to a `dict` of JSONable values, + including converting enum values to strings. `None` fields + are omitted. + """ + warn( + "`DandiBaseModel.json_dict()` is deprecated. Use " + "`pydantic.BaseModel.model_dump(mode='json', exclude_none=True)` instead.", + DeprecationWarning, + stacklevel=2, + ) + return self.model_dump(mode="json", exclude_none=True) + + if _has_anys: + + @field_serializer("*", mode="wrap") + def preserve_anys_values( + self, value: Any, handler: SerializerFunctionWrapHandler + ) -> Any: + return value if isinstance(value, AnyBase) else handler(value) + + @field_validator("schemaKey") + @classmethod + def ensure_schemakey(cls, val: str) -> str: + tempval = val + if "Published" in cls.__name__: + tempval = "Published" + tempval + elif "BareAsset" == cls.__name__: + tempval = "Bare" + tempval + if tempval != cls.__name__: + raise ValueError( + f"schemaKey {tempval} does not match classname {cls.__name__}" + ) + return val + + @classmethod + def unvalidated(__pydantic_cls__: Type[M], **data: Any) -> M: + """Allow model to be returned without validation""" + + warn( + "`DandiBaseModel.unvalidated()` is deprecated. 
" + "Use `pydantic.BaseModel.model_construct()` instead.", + DeprecationWarning, + stacklevel=2, + ) + + return __pydantic_cls__.model_construct(**data) + + @classmethod + def to_dictrepr(__pydantic_cls__: Type["DandiBaseModel"]) -> str: + return ( + __pydantic_cls__.model_construct() + .__repr__() + .replace(__pydantic_cls__.__name__, "dict") + ) + + @classmethod + def __get_pydantic_json_schema__( + cls, + core_schema_: CoreSchema, + handler: GetJsonSchemaHandler, + ) -> JsonSchemaValue: + schema = handler(core_schema_) + schema = handler.resolve_ref_schema(schema) + + if schema["title"] == "PropertyValue": + schema["required"] = sorted({"value"}.union(schema.get("required", []))) + schema["title"] = name2title(schema["title"]) + if schema["type"] == "object": + schema["required"] = sorted({"schemaKey"}.union(schema.get("required", []))) + for prop, value in schema.get("properties", {}).items(): + if schema["title"] == "Person": + if prop == "name": + # JSON schema doesn't support validating unicode + # characters using the \w pattern, but Python does. So + # we are dropping the regex pattern for the schema. 
+ del value["pattern"] + if value.get("title") is None or value["title"] == prop.title(): + value["title"] = name2title(prop) + if re.match("\\^https?://", value.get("pattern", "")): + # triggers only for ROR in identifier + value["format"] = "uri" + if value.get("format", None) == "uri": + value["maxLength"] = 1000 + allOf = value.get("allOf") + anyOf = value.get("anyOf") + items = value.get("items") + if allOf is not None: + if len(allOf) == 1 and "$ref" in allOf[0]: + value["$ref"] = allOf[0]["$ref"] + del value["allOf"] + elif len(allOf) > 1: + value["oneOf"] = value["allOf"] + value["type"] = "object" + del value["allOf"] + if anyOf is not None: + if len(anyOf) > 1 and any(["$ref" in val for val in anyOf]): + value["type"] = "object" + if items is not None: + anyOf = items.get("anyOf") + if ( + anyOf is not None + and len(anyOf) > 1 + and any(["$ref" in val for val in anyOf]) + ): + value["items"]["type"] = "object" + # In pydantic 1.8+ all Literals are mapped on to enum + # This presently breaks the schema editor UI. Revert + # to const when generating the schema. 
+ # Note: this no longer happens with custom metaclass + if prop == "schemaKey": + if "enum" in value and len(value["enum"]) == 1: + value["const"] = value["enum"][0] + del value["enum"] + else: + value["const"] = value["default"] + if "readOnly" in value: + del value["readOnly"] + + return schema + + +class PropertyValue(DandiBaseModel): + maxValue: Optional[float] = Field(None, json_schema_extra={"nskey": "schema"}) + minValue: Optional[float] = Field(None, json_schema_extra={"nskey": "schema"}) + unitText: Optional[str] = Field(None, json_schema_extra={"nskey": "schema"}) + value: Union[Any, List[Any]] = Field( + None, + validate_default=True, + json_schema_extra={"nskey": "schema"}, + description="The value associated with this property.", + ) + valueReference: Optional["PropertyValue"] = Field( + None, json_schema_extra={"nskey": "schema"} + ) # Note: recursive (circular or not) + propertyID: Optional[Union[IdentifierType, AnyHttpUrl]] = Field( + None, + description="A commonly used identifier for " + "the characteristic represented by the property. " + "For example, a known prefix like DOI or a full URL.", + json_schema_extra={"nskey": "schema"}, + ) + schemaKey: Literal["PropertyValue"] = Field( + "PropertyValue", validate_default=True, json_schema_extra={"readOnly": True} + ) + + @field_validator("value") + @classmethod + def ensure_value(cls, val: Union[Any, List[Any]]) -> Union[Any, List[Any]]: + if not val: + raise ValueError( + "The value field of a PropertyValue cannot be None or empty." 
# This is mostly not needed at all since self-referencing models
# are automatically resolved by Pydantic in a pretty consistent way even in Pydantic V1
# https://docs.pydantic.dev/1.10/usage/postponed_annotations/#self-referencing-models
# and continue to be so in Pydantic V2
# https://docs.pydantic.dev/latest/concepts/postponed_annotations/#self-referencing-or-recursive-models
PropertyValue.model_rebuild()

# Semantic aliases for ``str``; they document intent (ORCID iDs, ROR IDs,
# RRIDs, ...) without adding validation of their own.
Identifier = str
ORCID = str
RORID = str
DANDI = str
RRID = str


class BaseType(DandiBaseModel):
    """Base class for enumerated types"""

    # Either a full URL or a compact "prefix:local" CURIE; the URL branch is
    # tried first (union_mode="left_to_right").
    identifier: Optional[
        Annotated[
            Union[
                AnyHttpUrl,
                Annotated[
                    str, StringConstraints(pattern=r"^[a-zA-Z0-9-]+:[a-zA-Z0-9-/\._]+$")
                ],
            ],
            Field(union_mode="left_to_right"),
        ]
    ] = Field(
        None,
        description="The identifier can be any url or a compact URI, preferably"
        " supported by identifiers.org.",
        json_schema_extra={"nskey": "schema"},
    )
    name: Optional[str] = Field(
        None,
        description="The name of the item.",
        max_length=150,
        json_schema_extra={"nskey": "schema"},
    )
    schemaKey: str = Field(
        "BaseType", validate_default=True, json_schema_extra={"readOnly": True}
    )
    _ldmeta = {"rdfs:subClassOf": ["prov:Entity", "schema:Thing"], "nskey": DANDI_NSKEY}

    @classmethod
    def __get_pydantic_json_schema__(
        cls,
        core_schema_: CoreSchema,
        handler: GetJsonSchemaHandler,
    ) -> JsonSchemaValue:
        schema = super().__get_pydantic_json_schema__(core_schema_, handler)

        for prop, value in schema.get("properties", {}).items():
            # This check removes the anyOf field from the identifier property
            # in the schema generation. This relates to a UI issue where two
            # basic properties, in this case "string", is dropped from the UI.
            if prop == "identifier":
                for option in value.pop("anyOf", []):
                    if option.get("format", "") == "uri":
                        value.update(**option)
                        value["maxLength"] = 1000

        return schema


class AssayType(BaseType):
    """OBI based identifier for the assay(s) used"""

    schemaKey: Literal["AssayType"] = Field(
        "AssayType", validate_default=True, json_schema_extra={"readOnly": True}
    )


class SampleType(BaseType):
    """OBI based identifier for the sample type used"""

    schemaKey: Literal["SampleType"] = Field(
        "SampleType", validate_default=True, json_schema_extra={"readOnly": True}
    )


class Anatomy(BaseType):
    """UBERON or other identifier for anatomical part studied"""

    schemaKey: Literal["Anatomy"] = Field(
        "Anatomy", validate_default=True, json_schema_extra={"readOnly": True}
    )


class StrainType(BaseType):
    """Identifier for the strain of the sample"""

    schemaKey: Literal["StrainType"] = Field(
        "StrainType", validate_default=True, json_schema_extra={"readOnly": True}
    )


class SexType(BaseType):
    """Identifier for the sex of the sample"""

    schemaKey: Literal["SexType"] = Field(
        "SexType", validate_default=True, json_schema_extra={"readOnly": True}
    )


class SpeciesType(BaseType):
    """Identifier for species of the sample"""

    schemaKey: Literal["SpeciesType"] = Field(
        "SpeciesType", validate_default=True, json_schema_extra={"readOnly": True}
    )


class Disorder(BaseType):
    """Biolink, SNOMED, or other identifier for disorder studied"""

    dxdate: Optional[List[Union[date, datetime]]] = Field(
        None,
        title="Dates of diagnosis",
        description="Dates of diagnosis",
        json_schema_extra={"nskey": DANDI_NSKEY, "rangeIncludes": "schema:Date"},
    )
    schemaKey: Literal["Disorder"] = Field(
        "Disorder", validate_default=True, json_schema_extra={"readOnly": True}
    )


class GenericType(BaseType):
    """An object to capture any type for about"""

    schemaKey: Literal["GenericType"] = Field(
        "GenericType", validate_default=True, json_schema_extra={"readOnly": True}
    )
class ApproachType(BaseType):
    """Identifier for approach used"""

    schemaKey: Literal["ApproachType"] = Field(
        "ApproachType", validate_default=True, json_schema_extra={"readOnly": True}
    )


class MeasurementTechniqueType(BaseType):
    """Identifier for measurement technique used"""

    schemaKey: Literal["MeasurementTechniqueType"] = Field(
        "MeasurementTechniqueType",
        validate_default=True,
        json_schema_extra={"readOnly": True},
    )


class StandardsType(BaseType):
    """Identifier for data standard used"""

    schemaKey: Literal["StandardsType"] = Field(
        "StandardsType", validate_default=True, json_schema_extra={"readOnly": True}
    )


# Pre-built JSONable dicts for the data standards DANDI recognizes; consumers
# can use these directly in AssetsSummary.dataStandard entries.
nwb_standard = StandardsType(
    name="Neurodata Without Borders (NWB)",
    identifier="RRID:SCR_015242",
).model_dump(mode="json", exclude_none=True)

bids_standard = StandardsType(
    name="Brain Imaging Data Structure (BIDS)",
    identifier="RRID:SCR_016124",
).model_dump(mode="json", exclude_none=True)

ome_ngff_standard = StandardsType(
    name="OME/NGFF Standard",
    identifier="DOI:10.25504/FAIRsharing.9af712",
).model_dump(mode="json", exclude_none=True)


class ContactPoint(DandiBaseModel):
    """How to reach a person or organization (email and/or web page)."""

    email: Optional[EmailStr] = Field(
        None,
        description="Email address of contact.",
        json_schema_extra={"nskey": "schema"},
    )
    url: Optional[AnyHttpUrl] = Field(
        None,
        description="A Web page to find information on how to contact.",
        json_schema_extra={"nskey": "schema"},
    )
    schemaKey: Literal["ContactPoint"] = Field(
        "ContactPoint", validate_default=True, json_schema_extra={"readOnly": True}
    )

    _ldmeta = {"nskey": "schema"}
class Contributor(DandiBaseModel):
    """Base class for a person or organization contributing to a dandiset."""

    identifier: Optional[Identifier] = Field(
        None,
        title="A common identifier",
        description="Use a common identifier such as ORCID (orcid.org) for "
        "people or ROR (ror.org) for institutions.",
        json_schema_extra={"nskey": "schema"},
    )
    name: Optional[str] = Field(None, json_schema_extra={"nskey": "schema"})
    email: Optional[EmailStr] = Field(None, json_schema_extra={"nskey": "schema"})
    url: Optional[AnyHttpUrl] = Field(None, json_schema_extra={"nskey": "schema"})
    roleName: Optional[List[RoleType]] = Field(
        None,
        title="Role",
        description="Role(s) of the contributor. Multiple roles can be selected.",
        json_schema_extra={"nskey": "schema"},
    )
    includeInCitation: bool = Field(
        True,
        title="Include contributor in citation",
        description="A flag to indicate whether a contributor should be included "
        "when generating a citation for the item.",
        json_schema_extra={"nskey": DANDI_NSKEY},
    )
    awardNumber: Optional[Identifier] = Field(
        None,
        title="Identifier for an award",
        description="Identifier associated with a sponsored or gift award.",
        json_schema_extra={"nskey": DANDI_NSKEY},
    )
    schemaKey: Literal["Contributor", "Organization", "Person"] = Field(
        "Contributor", validate_default=True, json_schema_extra={"readOnly": True}
    )

    @model_validator(mode="after")
    def ensure_contact_person_has_email(self) -> Contributor:
        # A contact person is the public point of contact for the item, so an
        # email address is required whenever the ContactPerson role is present.
        role_names = self.roleName

        if role_names is not None and RoleType.ContactPerson in role_names:
            if self.email is None:
                raise ValueError("Contact person must have an email address.")

        return self
organization", + json_schema_extra={"nskey": "schema"}, + ) + schemaKey: Literal["Organization"] = Field( + "Organization", validate_default=True, json_schema_extra={"readOnly": True} + ) + _ldmeta = { + "rdfs:subClassOf": ["schema:Organization", "prov:Organization"], + "nskey": DANDI_NSKEY, + } + + +class Affiliation(DandiBaseModel): + identifier: Optional[RORID] = Field( + None, + title="A ror.org identifier", + description="Use an ror.org identifier for institutions.", + pattern=r"^https://ror.org/[a-z0-9]+$", + json_schema_extra={"nskey": "schema"}, + ) + name: str = Field( + json_schema_extra={"nskey": "schema"}, description="Name of organization" + ) + schemaKey: Literal["Affiliation"] = Field( + "Affiliation", validate_default=True, json_schema_extra={"readOnly": True} + ) + + _ldmeta = { + "rdfs:subClassOf": ["schema:Organization", "prov:Organization"], + "nskey": DANDI_NSKEY, + } + + +class Person(Contributor): + identifier: Optional[ORCID] = Field( + None, + title="An ORCID identifier", + description="An ORCID (orcid.org) identifier for an individual.", + pattern=r"^\d{4}-\d{4}-\d{4}-(\d{3}X|\d{4})$", + json_schema_extra={"nskey": "schema"}, + ) + name: str = Field( + title="Use Last, First. 
class Person(Contributor):
    """A person contributing to a dandiset."""

    identifier: Optional[ORCID] = Field(
        None,
        title="An ORCID identifier",
        description="An ORCID (orcid.org) identifier for an individual.",
        pattern=r"^\d{4}-\d{4}-\d{4}-(\d{3}X|\d{4})$",
        json_schema_extra={"nskey": "schema"},
    )
    # NAME_PATTERN enforces "Lastname, Firstname" form (module-level constant).
    name: str = Field(
        title="Use Last, First. Example: Lovelace, Augusta Ada",
        description="Use the format: familyname, given names ...",
        pattern=NAME_PATTERN,
        json_schema_extra={"nskey": "schema"},
    )
    affiliation: Optional[List[Affiliation]] = Field(
        None,
        description="An organization that this person is affiliated with.",
        json_schema_extra={"nskey": "schema"},
    )
    schemaKey: Literal["Person"] = Field(
        "Person", validate_default=True, json_schema_extra={"readOnly": True}
    )

    _ldmeta = {
        "rdfs:subClassOf": ["schema:Person", "prov:Person"],
        "nskey": DANDI_NSKEY,
    }


class Software(DandiBaseModel):
    """A software application used in producing or processing the data."""

    identifier: Optional[RRID] = Field(
        None,
        pattern=r"^RRID:.*",
        title="Research resource identifier",
        description="RRID of the software from scicrunch.org.",
        json_schema_extra={"nskey": "schema"},
    )
    name: str = Field(json_schema_extra={"nskey": "schema"})
    version: str = Field(json_schema_extra={"nskey": "schema"})
    url: Optional[AnyHttpUrl] = Field(
        None,
        description="Web page for the software.",
        json_schema_extra={"nskey": "schema"},
    )
    schemaKey: Literal["Software"] = Field(
        "Software", validate_default=True, json_schema_extra={"readOnly": True}
    )

    _ldmeta = {
        "rdfs:subClassOf": ["schema:SoftwareApplication", "prov:Software"],
        "nskey": DANDI_NSKEY,
    }


class Agent(DandiBaseModel):
    """A generic provenance agent (neither a Person nor an Organization)."""

    identifier: Optional[Identifier] = Field(
        None,
        title="Identifier",
        description="Identifier for an agent.",
        json_schema_extra={"nskey": "schema"},
    )
    name: str = Field(
        json_schema_extra={"nskey": "schema"},
    )
    url: Optional[AnyHttpUrl] = Field(None, json_schema_extra={"nskey": "schema"})
    schemaKey: Literal["Agent"] = Field(
        "Agent", validate_default=True, json_schema_extra={"readOnly": True}
    )

    _ldmeta = {
        "rdfs:subClassOf": ["prov:Agent"],
        "nskey": DANDI_NSKEY,
    }
json_schema_extra={"nskey": "schema"}, + title="Approved protocol identifier", + description="Approved Protocol identifier, often a number or alphanumeric string.", + ) + contactPoint: Optional[ContactPoint] = Field( + None, + description="Information about the ethics approval committee.", + json_schema_extra={"nskey": "schema"}, + ) + schemaKey: Literal["EthicsApproval"] = Field( + "EthicsApproval", validate_default=True, json_schema_extra={"readOnly": True} + ) + + _ldmeta = {"rdfs:subClassOf": ["schema:Thing", "prov:Entity"], "nskey": DANDI_NSKEY} + + +class Resource(DandiBaseModel): + identifier: Optional[Identifier] = Field( + None, json_schema_extra={"nskey": "schema"} + ) + name: Optional[str] = Field( + None, title="A title of the resource", json_schema_extra={"nskey": "schema"} + ) + url: Optional[AnyHttpUrl] = Field( + None, title="URL of the resource", json_schema_extra={"nskey": "schema"} + ) + repository: Optional[str] = Field( + None, + title="Name of the repository", + description="Name of the repository in which the resource is housed.", + json_schema_extra={"nskey": DANDI_NSKEY}, + ) + relation: RelationType = Field( + title="Resource relation", + description="Indicates how the resource is related to the dataset. 
" + "This relation should satisfy: dandiset resource.", + json_schema_extra={"nskey": DANDI_NSKEY}, + ) + resourceType: Optional[ResourceType] = Field( + default=None, + title="Resource type", + description="The type of resource.", + json_schema_extra={"nskey": DANDI_NSKEY}, + ) + + schemaKey: Literal["Resource"] = Field( + "Resource", validate_default=True, json_schema_extra={"readOnly": True} + ) + + _ldmeta = { + "rdfs:subClassOf": ["schema:CreativeWork", "prov:Entity"], + "rdfs:comment": "A resource related to the project (e.g., another " + "dataset, publication, Webpage)", + "nskey": DANDI_NSKEY, + } + + @model_validator(mode="after") + def identifier_or_url(self) -> "Resource": + identifier, url = self.identifier, self.url + if identifier is None and url is None: + raise ValueError("Both identifier and url cannot be None") + return self + + +class AccessRequirements(DandiBaseModel): + """Information about access options for the dataset""" + + status: AccessType = Field( + title="Access status", + description="The access status of the item.", + json_schema_extra={"nskey": DANDI_NSKEY}, + ) + contactPoint: Optional[ContactPoint] = Field( + None, + description="Who or where to look for information about access.", + json_schema_extra={"nskey": "schema"}, + ) + description: Optional[str] = Field( + None, + description="Information about access requirements when embargoed or restricted", + json_schema_extra={"nskey": "schema"}, + ) + embargoedUntil: Optional[date] = Field( + None, + title="Embargo end date", + description="Date on which embargo ends.", + json_schema_extra={ + "readOnly": True, + "nskey": DANDI_NSKEY, + "rangeIncludes": "schema:Date", + }, + ) + schemaKey: Literal["AccessRequirements"] = Field( + "AccessRequirements", + validate_default=True, + json_schema_extra={"readOnly": True}, + ) + + _ldmeta = {"rdfs:subClassOf": ["schema:Thing", "prov:Entity"], "nskey": DANDI_NSKEY} + + @model_validator(mode="after") + def open_or_embargoed(self) -> 
"AccessRequirements": + status, embargoed = self.status, self.embargoedUntil + if status == AccessType.EmbargoedAccess and embargoed is None: + raise ValueError( + "An embargo end date is required for NIH awards to be in " + "compliance with NIH resource sharing policy." + ) + return self + + +class AssetsSummary(DandiBaseModel): + """Summary over assets contained in a dandiset (published or not)""" + + # stats which are not stats + numberOfBytes: int = Field( + json_schema_extra={"readOnly": True, "sameas": "schema:contentSize"} + ) + numberOfFiles: int = Field(json_schema_extra={"readOnly": True}) # universe + numberOfSubjects: Optional[int] = Field( + None, json_schema_extra={"readOnly": True} + ) # NWB + BIDS + numberOfSamples: Optional[int] = Field( + None, json_schema_extra={"readOnly": True} + ) # more of NWB + numberOfCells: Optional[int] = Field(None, json_schema_extra={"readOnly": True}) + + dataStandard: Optional[List[StandardsType]] = Field( + None, json_schema_extra={"readOnly": True} + ) + # Web UI: icons per each modality? + approach: Optional[List[ApproachType]] = Field( + None, json_schema_extra={"readOnly": True} + ) + # Web UI: could be an icon with number, which if hovered on show a list? 
+ measurementTechnique: Optional[List[MeasurementTechniqueType]] = Field( + None, json_schema_extra={"readOnly": True, "nskey": "schema"} + ) + variableMeasured: Optional[List[str]] = Field( + None, json_schema_extra={"readOnly": True, "nskey": "schema"} + ) + + species: Optional[List[SpeciesType]] = Field( + None, json_schema_extra={"readOnly": True} + ) + schemaKey: Literal["AssetsSummary"] = Field( + "AssetsSummary", validate_default=True, json_schema_extra={"readOnly": True} + ) + + _ldmeta = { + "rdfs:subClassOf": ["schema:CreativeWork", "prov:Entity"], + "nskey": DANDI_NSKEY, + } + + +class Equipment(DandiBaseModel): + identifier: Optional[Identifier] = Field( + None, json_schema_extra={"nskey": "schema"} + ) + name: str = Field( + title="Title", + description="A name for the equipment.", + max_length=150, + json_schema_extra={"nskey": "schema"}, + ) + description: Optional[str] = Field( + None, + description="The description of the equipment.", + json_schema_extra={"nskey": "schema"}, + ) + schemaKey: Literal["Equipment"] = Field( + "Equipment", validate_default=True, json_schema_extra={"readOnly": True} + ) + + _ldmeta = { + "rdfs:subClassOf": ["schema:CreativeWork", "prov:Entity"], + "nskey": DANDI_NSKEY, + } + + +class Activity(DandiBaseModel): + """Information about the Project activity""" + + identifier: Optional[Identifier] = Field( + None, json_schema_extra={"nskey": "schema"} + ) + name: str = Field( + title="Title", + description="The name of the activity.", + max_length=150, + json_schema_extra={"nskey": "schema"}, + ) + description: Optional[str] = Field( + None, + description="The description of the activity.", + json_schema_extra={"nskey": "schema"}, + ) + startDate: Optional[datetime] = Field(None, json_schema_extra={"nskey": "schema"}) + endDate: Optional[datetime] = Field(None, json_schema_extra={"nskey": "schema"}) + + # isPartOf: Optional["Activity"] = Field(None, json_schema_extra={"nskey": "schema"}) + # hasPart: Optional["Activity"] = 
class Activity(DandiBaseModel):
    """Information about the Project activity"""

    identifier: Optional[Identifier] = Field(
        None, json_schema_extra={"nskey": "schema"}
    )
    name: str = Field(
        title="Title",
        description="The name of the activity.",
        max_length=150,
        json_schema_extra={"nskey": "schema"},
    )
    description: Optional[str] = Field(
        None,
        description="The description of the activity.",
        json_schema_extra={"nskey": "schema"},
    )
    startDate: Optional[datetime] = Field(None, json_schema_extra={"nskey": "schema"})
    endDate: Optional[datetime] = Field(None, json_schema_extra={"nskey": "schema"})

    # isPartOf: Optional["Activity"] = Field(None, json_schema_extra={"nskey": "schema"})
    # hasPart: Optional["Activity"] = Field(None, json_schema_extra={"nskey": "schema"})
    wasAssociatedWith: Optional[List[Union[Person, Organization, Software, Agent],]] = (
        Field(None, json_schema_extra={"nskey": "prov"})
    )
    used: Optional[List[Equipment]] = Field(
        None,
        description="A listing of equipment used for the activity.",
        json_schema_extra={"nskey": "prov"},
    )
    schemaKey: Literal["Activity", "Project", "Session", "PublishActivity"] = Field(
        "Activity", validate_default=True, json_schema_extra={"readOnly": True}
    )

    _ldmeta = {
        "rdfs:subClassOf": ["prov:Activity", "schema:Thing"],
        "nskey": DANDI_NSKEY,
    }


class Project(Activity):
    """An Activity representing the project that generated the data."""

    name: str = Field(
        title="Name of project",
        description="The name of the project that generated this Dandiset or asset.",
        max_length=150,
        json_schema_extra={"nskey": "schema"},
    )
    description: Optional[str] = Field(
        None,
        description="A brief description of the project.",
        json_schema_extra={"nskey": "schema"},
    )
    schemaKey: Literal["Project"] = Field(
        "Project", validate_default=True, json_schema_extra={"readOnly": True}
    )


class Session(Activity):
    """An Activity representing a logical data-collection session."""

    name: str = Field(
        title="Name of session",
        description="The name of the logical session associated with the asset.",
        max_length=150,
        json_schema_extra={"nskey": "schema"},
    )
    description: Optional[str] = Field(
        None,
        description="A brief description of the session.",
        json_schema_extra={"nskey": "schema"},
    )
    schemaKey: Literal["Session"] = Field(
        "Session", validate_default=True, json_schema_extra={"readOnly": True}
    )


class PublishActivity(Activity):
    """An Activity representing the act of publishing a dandiset or asset."""

    schemaKey: Literal["PublishActivity"] = Field(
        "PublishActivity", validate_default=True, json_schema_extra={"readOnly": True}
    )
Literal["Locus"] = Field( + "Locus", validate_default=True, json_schema_extra={"readOnly": True} + ) + _ldmeta = {"nskey": DANDI_NSKEY} + + +class Allele(DandiBaseModel): + identifier: Union[Identifier, List[Identifier]] = Field( + description="Identifier for genotyping allele.", + json_schema_extra={"nskey": "schema"}, + ) + alleleSymbol: Optional[str] = Field(None) + alleleType: Optional[str] = Field(None) + schemaKey: Literal["Allele"] = Field( + "Allele", validate_default=True, json_schema_extra={"readOnly": True} + ) + _ldmeta = {"nskey": DANDI_NSKEY} + + +class GenotypeInfo(DandiBaseModel): + locus: Locus = Field(description="Locus at which information was extracted.") + alleles: List[Allele] = Field(description="Information about alleles at the locus.") + wasGeneratedBy: Optional[List["Session"]] = Field( + None, + description="Information about session activity used to determine genotype.", + json_schema_extra={"nskey": "prov"}, + ) + schemaKey: Literal["GenotypeInfo"] = Field( + "GenotypeInfo", validate_default=True, json_schema_extra={"readOnly": True} + ) + _ldmeta = {"nskey": DANDI_NSKEY} + + +class RelatedParticipant(DandiBaseModel): + identifier: Optional[Identifier] = Field( + None, json_schema_extra={"nskey": "schema"} + ) + name: Optional[str] = Field( + None, + title="Name of the participant or subject", + json_schema_extra={"nskey": "schema"}, + ) + url: Optional[AnyHttpUrl] = Field( + None, + title="URL of the related participant or subject", + json_schema_extra={"nskey": "schema"}, + ) + relation: ParticipantRelationType = Field( + title="Participant or subject relation", + description="Indicates how the current participant or subject is related " + "to the other participant or subject. 
class RelatedParticipant(DandiBaseModel):
    """Another participant/subject related to the current one (e.g., parent)."""

    identifier: Optional[Identifier] = Field(
        None, json_schema_extra={"nskey": "schema"}
    )
    name: Optional[str] = Field(
        None,
        title="Name of the participant or subject",
        json_schema_extra={"nskey": "schema"},
    )
    url: Optional[AnyHttpUrl] = Field(
        None,
        title="URL of the related participant or subject",
        json_schema_extra={"nskey": "schema"},
    )
    relation: ParticipantRelationType = Field(
        title="Participant or subject relation",
        description="Indicates how the current participant or subject is related "
        "to the other participant or subject. This relation should "
        "satisfy: Participant/Subject <relation> relatedParticipant/Subject.",
        json_schema_extra={"nskey": DANDI_NSKEY},
    )
    schemaKey: Literal["RelatedParticipant"] = Field(
        "RelatedParticipant",
        validate_default=True,
        json_schema_extra={"readOnly": True},
    )

    _ldmeta = {
        "rdfs:subClassOf": ["schema:CreativeWork", "prov:Entity"],
        "rdfs:comment": "Another participant or subject related to the current "
        "participant or subject (e.g., another parent, sibling, child).",
        "nskey": DANDI_NSKEY,
    }
from OBI)", + json_schema_extra={"nskey": DANDI_NSKEY}, + ) + genotype: Optional[Union[List[GenotypeInfo], Identifier]] = Field( + None, + description="Genotype descriptor of participant or subject if available", + json_schema_extra={"nskey": DANDI_NSKEY}, + ) + species: Optional[SpeciesType] = Field( + None, + description="An identifier indicating the taxonomic classification of " + "the participant or subject.", + json_schema_extra={"nskey": DANDI_NSKEY}, + ) + disorder: Optional[List[Disorder]] = Field( + None, + description="Any current diagnosed disease or disorder associated with " + "the participant or subject.", + json_schema_extra={"nskey": DANDI_NSKEY}, + ) + + relatedParticipant: Optional[List[RelatedParticipant]] = Field( + None, + description="Information about related participants or subjects in a " + "study or across studies.", + json_schema_extra={"nskey": DANDI_NSKEY}, + ) + sameAs: Optional[List[Identifier]] = Field( + None, + description="An identifier to link participants or subjects across datasets.", + json_schema_extra={"nskey": "schema"}, + ) + schemaKey: Literal["Participant"] = Field( + "Participant", validate_default=True, json_schema_extra={"readOnly": True} + ) + + _ldmeta = { + "rdfs:subClassOf": ["prov:Agent"], + "rdfs:label": "Information about the participant or subject.", + "nskey": DANDI_NSKEY, + } + + +class BioSample(DandiBaseModel): + """Description of the sample that was studied""" + + identifier: Identifier = Field(json_schema_extra={"nskey": "schema"}) + sampleType: SampleType = Field( + description="Identifier for the sample characteristics (e.g., from OBI, Encode).", + json_schema_extra={"nskey": DANDI_NSKEY}, + ) + assayType: Optional[List[AssayType]] = Field( + None, + description="Identifier for the assay(s) used (e.g., OBI).", + json_schema_extra={"nskey": DANDI_NSKEY}, + ) + anatomy: Optional[List[Anatomy]] = Field( + None, + description="Identifier for what organ the sample belongs " + "to. 
Use the most specific descriptor from sources such as UBERON.", + json_schema_extra={"nskey": DANDI_NSKEY}, + ) + + wasDerivedFrom: Optional[List["BioSample"]] = Field( + None, + description="Describes the hierarchy of sample derivation or aggregation.", + json_schema_extra={"nskey": "prov"}, + ) + wasAttributedTo: Optional[List[Participant]] = Field( + None, + description="Participant(s) or Subject(s) associated with this sample.", + json_schema_extra={"nskey": "prov"}, + ) + sameAs: Optional[List[Identifier]] = Field( + None, json_schema_extra={"nskey": "schema"} + ) + hasMember: Optional[List[Identifier]] = Field( + None, json_schema_extra={"nskey": "prov"} + ) + schemaKey: Literal["BioSample"] = Field( + "BioSample", validate_default=True, json_schema_extra={"readOnly": True} + ) + + _ldmeta = { + "rdfs:subClassOf": ["schema:Thing", "prov:Entity"], + "rdfs:label": "Information about the biosample.", + "nskey": DANDI_NSKEY, + } + + +# This is mostly not needed at all since self-referencing models +# are automatically resolved by Pydantic in a pretty consistent way even in Pydantic V1 +# https://docs.pydantic.dev/1.10/usage/postponed_annotations/#self-referencing-models +# and continue to be so in Pydantic V2 +# https://docs.pydantic.dev/latest/concepts/postponed_annotations/#self-referencing-or-recursive-models +BioSample.model_rebuild() + + +class CommonModel(DandiBaseModel): + schemaVersion: str = Field( + default=DANDI_SCHEMA_VERSION, + json_schema_extra={"readOnly": True, "nskey": "schema"}, + ) + name: Optional[str] = Field( + None, + title="Title", + description="The name of the item.", + max_length=150, + json_schema_extra={"nskey": "schema"}, + ) + description: Optional[str] = Field( + None, + description="A description of the item.", + json_schema_extra={"nskey": "schema"}, + ) + contributor: Optional[List[Union[Person, Organization]]] = Field( + None, + title="Contributors", + description="Contributors to this item: persons or organizations.", + 
json_schema_extra={"nskey": "schema"}, + ) + about: Optional[List[Union[Disorder, Anatomy, GenericType]]] = Field( + None, + title="Subject matter of the dataset", + description="The subject matter of the content, such as disorders, brain anatomy.", + json_schema_extra={"nskey": "schema"}, + ) + studyTarget: Optional[List[str]] = Field( + None, + description="Objectives or specific questions of the study.", + json_schema_extra={"nskey": DANDI_NSKEY}, + ) + license: Optional[List[LicenseType]] = Field( + None, + description="Licenses associated with the item. DANDI only supports a " + "subset of Creative Commons Licenses (creativecommons.org) " + "applicable to datasets.", + json_schema_extra={"nskey": "schema"}, + ) + protocol: Optional[List[AnyHttpUrl]] = Field( + None, + description="A list of persistent URLs describing the protocol (e.g. " + "protocols.io, or other DOIs).", + json_schema_extra={"nskey": DANDI_NSKEY}, + ) + ethicsApproval: Optional[List[EthicsApproval]] = Field( + None, title="Ethics approvals", json_schema_extra={"nskey": DANDI_NSKEY} + ) + keywords: Optional[List[str]] = Field( + None, + description="Keywords used to describe this content.", + json_schema_extra={"nskey": "schema"}, + ) + acknowledgement: Optional[str] = Field( + None, + description="Any acknowledgments not covered by contributors or external resources.", + json_schema_extra={"nskey": DANDI_NSKEY}, + ) + + # Linking to this dandiset or the larger thing + access: List[AccessRequirements] = Field( + title="Access information", + default_factory=lambda: [AccessRequirements(status=AccessType.OpenAccess)], + json_schema_extra={"nskey": DANDI_NSKEY, "readOnly": True}, + ) + url: Optional[AnyHttpUrl] = Field( + None, + description="permalink to the item", + json_schema_extra={"readOnly": True, "nskey": "schema"}, + ) + repository: Optional[AnyHttpUrl] = Field( + default=_INSTANCE_CONFIG.instance_url, + description="location of the item", + json_schema_extra={"nskey": DANDI_NSKEY, 
"readOnly": True}, + ) + relatedResource: Optional[List[Resource]] = Field( + None, json_schema_extra={"nskey": DANDI_NSKEY} + ) + + wasGeneratedBy: Optional[Sequence[Activity]] = Field( + None, json_schema_extra={"nskey": "prov"} + ) + schemaKey: str = Field( + "CommonModel", validate_default=True, json_schema_extra={"readOnly": True} + ) + + +class Dandiset(CommonModel): + """A body of structured information describing a DANDI dataset.""" + + model_config = ConfigDict(extra="allow") + + @field_validator("contributor") + @classmethod + def contributor_musthave_contact( + cls, values: List[Union[Person, Organization]] + ) -> List[Union[Person, Organization]]: + contacts = [] + for val in values: + if val.roleName and RoleType.ContactPerson in val.roleName: + contacts.append(val) + if len(contacts) == 0: + raise ValueError("At least one contributor must have role ContactPerson") + return values + + id: str = Field( + description="Uniform resource identifier", + pattern=( + rf"^({ID_PATTERN}|{ID_PATTERN.lower()}):\d{{6}}(/(draft|{VERSION_NUM_PATTERN}))$" + ), + json_schema_extra={"readOnly": True}, + ) + + identifier: DANDI = Field( + title="Dandiset identifier", + description="A Dandiset identifier that can be resolved by identifiers.org.", + pattern=rf"^{ID_PATTERN}:\d{{6}}$", + json_schema_extra={"readOnly": True, "nskey": "schema"}, + ) + + sameAs: Annotated[ + Optional[ + list[ + Annotated[ + str, + StringConstraints( + pattern=( + rf"^dandi://{UNVENDORED_ID_PATTERN}/\d{{6}}" + rf"(@(draft|{VERSION_NUM_PATTERN}))?(/\S+)?$" + ) + ), + ] + ] + ], + Field( + default=None, + description="Known DANDI URLs of the Dandiset at other DANDI instances.", + json_schema_extra={"nskey": "schema"}, + ), + ] + + name: str = Field( + title="Dandiset title", + description="A title associated with the Dandiset.", + max_length=150, + json_schema_extra={"nskey": "schema"}, + ) + description: str = Field( + description="A description of the Dandiset", + max_length=10000, + 
json_schema_extra={"nskey": "schema"}, + ) + contributor: List[Union[Person, Organization]] = Field( + title="Dandiset contributors", + description="People or Organizations that have contributed to this Dandiset.", + json_schema_extra={"nskey": "schema"}, + min_length=1, + ) + dateCreated: Optional[datetime] = Field( + None, + json_schema_extra={"nskey": "schema", "readOnly": True}, + title="Dandiset creation date and time.", + ) + dateModified: Optional[datetime] = Field( + None, + json_schema_extra={"nskey": "schema", "readOnly": True}, + title="Last modification date and time.", + ) + + license: List[LicenseType] = Field( + min_length=1, + description="Licenses associated with the item. DANDI only supports a " + "subset of Creative Commons Licenses (creativecommons.org) " + "applicable to datasets.", + json_schema_extra={"nskey": "schema"}, + ) + + citation: str = Field(json_schema_extra={"readOnly": True, "nskey": "schema"}) + + # From assets + assetsSummary: AssetsSummary = Field( + json_schema_extra={"nskey": DANDI_NSKEY, "readOnly": True} + ) + + # From server (requested by users even for drafts) + manifestLocation: List[AnyHttpUrl] = Field( + min_length=1, json_schema_extra={"nskey": DANDI_NSKEY, "readOnly": True} + ) + + version: str = Field(json_schema_extra={"nskey": "schema", "readOnly": True}) + + wasGeneratedBy: Optional[Sequence[Project]] = Field( + None, + title="Associated projects", + description="Project(s) that generated this Dandiset.", + json_schema_extra={"nskey": "prov"}, + ) + + schemaKey: Literal["Dandiset"] = Field( + "Dandiset", validate_default=True, json_schema_extra={"readOnly": True} + ) + + _ldmeta = { + "rdfs:subClassOf": ["schema:Dataset", "prov:Entity"], + "rdfs:label": "Information about the dataset", + "nskey": DANDI_NSKEY, + } + + +class BareAsset(CommonModel): + """Metadata used to describe an asset anywhere (local or server). 
+ + Derived from C2M2 (Level 0 and 1) and schema.org + """ + + contentSize: ByteSizeJsonSchema = Field(json_schema_extra={"nskey": "schema"}) + encodingFormat: Union[AnyHttpUrl, str] = Field( + title="File encoding format", json_schema_extra={"nskey": "schema"} + ) + digest: Dict[DigestType, str] = Field( + title="A map of dandi digests to their values", + json_schema_extra={"nskey": DANDI_NSKEY}, + ) + path: str = Field(json_schema_extra={"nskey": DANDI_NSKEY}) + + dateModified: Optional[datetime] = Field( + None, + json_schema_extra={"nskey": "schema"}, + title="Asset (file or metadata) modification date and time", + ) + blobDateModified: Optional[datetime] = Field( + None, + json_schema_extra={"nskey": DANDI_NSKEY}, + title="Asset file modification date and time.", + ) + # overload to restrict with max_items=1 + access: List[AccessRequirements] = Field( + title="Access information", + default_factory=lambda: [AccessRequirements(status=AccessType.OpenAccess)], + json_schema_extra={"nskey": DANDI_NSKEY}, + max_length=1, + ) + + # this is from C2M2 level 1 - using EDAM vocabularies - in our case we would + # need to come up with things for neurophys + # TODO: waiting on input + dataType: Optional[AnyHttpUrl] = Field( + None, json_schema_extra={"nskey": DANDI_NSKEY} + ) + + sameAs: Optional[List[AnyHttpUrl]] = Field( + None, json_schema_extra={"nskey": "schema"} + ) + + # TODO + approach: Optional[List[ApproachType]] = Field( + None, json_schema_extra={"readOnly": True, "nskey": DANDI_NSKEY} + ) + measurementTechnique: Optional[List[MeasurementTechniqueType]] = Field( + None, json_schema_extra={"readOnly": True, "nskey": "schema"} + ) + variableMeasured: Optional[List[PropertyValue]] = Field( + None, json_schema_extra={"readOnly": True, "nskey": "schema"} + ) + + wasDerivedFrom: Optional[List[BioSample]] = Field( + None, json_schema_extra={"nskey": "prov"} + ) + wasAttributedTo: Optional[List[Participant]] = Field( + None, + description="Associated participant(s) or 
subject(s).", + json_schema_extra={"nskey": "prov"}, + ) + wasGeneratedBy: Optional[List[Union[Session, Project, Activity]]] = Field( + None, + title="Name of the session, project or activity.", + description="Describe the session, project or activity that generated this asset.", + json_schema_extra={"nskey": "prov"}, + ) + + # Bare asset is to be just Asset. + schemaKey: Literal["Asset"] = Field( + "Asset", validate_default=True, json_schema_extra={"readOnly": True} + ) + + _ldmeta = { + "rdfs:subClassOf": ["schema:CreativeWork", "prov:Entity"], + "rdfs:label": "Information about the asset", + "nskey": DANDI_NSKEY, + } + + @field_validator("digest") + @classmethod + def digest_check( + cls, v: Dict[DigestType, str], info: ValidationInfo + ) -> Dict[DigestType, str]: + values = info.data + if values.get("encodingFormat") == "application/x-zarr": + if DigestType.dandi_zarr_checksum not in v: + raise ValueError("A zarr asset must have a zarr checksum.") + if v.get(DigestType.dandi_etag): + raise ValueError("Digest cannot have both etag and zarr checksums.") + digest = v[DigestType.dandi_zarr_checksum] + try: + chksum = ZarrDirectoryDigest.parse(digest) + except InvalidZarrChecksum: + raise ValueError( + "Digest must have an appropriate dandi-zarr-checksum value." + f" Got {digest}" + ) + zarr_size = chksum.size + content_size = values.get("contentSize") + if content_size != zarr_size: + raise ValueError( + f"contentSize {content_size} is not equal to the checksum size {zarr_size}." + ) + else: + if DigestType.dandi_etag not in v: + raise ValueError("A non-zarr asset must have a dandi-etag.") + if v.get(DigestType.dandi_zarr_checksum): + raise ValueError("Digest cannot have both etag and zarr checksums.") + digest = v[DigestType.dandi_etag] + if not re.fullmatch(DandiETag.REGEX, digest): + raise ValueError( + f"Digest must have an appropriate dandi-etag value. 
" + f"Got {digest}" + ) + return v + + +class Asset(BareAsset): + """Metadata used to describe an asset on the server.""" + + # all of the following are set by server + id: str = Field( + json_schema_extra={"readOnly": True}, description="Uniform resource identifier." + ) + identifier: UUID4 = Field(json_schema_extra={"readOnly": True, "nskey": "schema"}) + contentUrl: List[AnyHttpUrl] = Field( + json_schema_extra={"readOnly": True, "nskey": "schema"} + ) + + +class Publishable(DandiBaseModel): + publishedBy: Union[AnyHttpUrl, PublishActivity] = Field( + description="The URL should contain the provenance of the publishing process.", + json_schema_extra={"readOnly": True, "nskey": DANDI_NSKEY}, + ) + datePublished: datetime = Field( + json_schema_extra={"readOnly": True, "nskey": "schema"} + ) + schemaKey: Literal["Publishable", "Dandiset", "Asset"] = Field( + "Publishable", validate_default=True, json_schema_extra={"readOnly": True} + ) + + +_doi_field_kwargs: dict[str, Any] = { + "title": "DOI", + "pattern": DANDI_DOI_PATTERN, + "json_schema_extra": {"readOnly": True, "nskey": DANDI_NSKEY}, +} +if _INSTANCE_CONFIG.doi_prefix is None: + _doi_field_kwargs["default"] = "" + + +class PublishedDandiset(Dandiset, Publishable): + id: str = Field( + description="Uniform resource identifier.", + pattern=DANDI_PUBID_PATTERN, + json_schema_extra={"readOnly": True}, + ) + doi: str = Field(**_doi_field_kwargs) + """ + The DOI of the published Dandiset + + The value of the empty string indicates that there is no DOI for the published + Dandiset. 
+ """ + + url: AnyHttpUrl = Field( + description="Permalink to the Dandiset.", + json_schema_extra={"readOnly": True, "nskey": "schema"}, + ) + releaseNotes: Optional[str] = Field( + None, + description="The description of the release", + json_schema_extra={"readOnly": True, "nskey": "schema"}, + ) + + schemaKey: Literal["Dandiset"] = Field( + "Dandiset", validate_default=True, json_schema_extra={"readOnly": True} + ) + + @field_validator("assetsSummary") + @classmethod + def check_filesbytes(cls, values: AssetsSummary) -> AssetsSummary: + if values.numberOfBytes == 0 or values.numberOfFiles == 0: + raise ValueError( + "A Dandiset containing no files or zero bytes is not publishable" + ) + return values + + @field_validator("url") + @classmethod + def check_url(cls, url: AnyHttpUrl) -> AnyHttpUrl: + if not re.match(PUBLISHED_VERSION_URL_PATTERN, str(url)): + raise ValueError( + f'string does not match regex "{PUBLISHED_VERSION_URL_PATTERN}"' + ) + return url + + +class PublishedAsset(Asset, Publishable): + id: str = Field( + description="Uniform resource identifier.", + pattern=ASSET_UUID_PATTERN, + json_schema_extra={"readOnly": True}, + ) + + schemaKey: Literal["Asset"] = Field( + "Asset", validate_default=True, json_schema_extra={"readOnly": True} + ) + + @field_validator("digest") + @classmethod + def digest_sha256check( + cls, v: Dict[DigestType, str], info: ValidationInfo + ) -> Dict[DigestType, str]: + values = info.data + if values.get("encodingFormat") != "application/x-zarr": + if DigestType.sha2_256 not in v: + raise ValueError("A non-zarr asset must have a sha2_256.") + digest = v[DigestType.sha2_256] + if not re.fullmatch(SHA256_PATTERN, digest): + raise ValueError( + f"Digest must have an appropriate sha2_256 value. Got {digest}" + ) + return v + + +def get_schema_version() -> str: + return DANDI_SCHEMA_VERSION