diff --git a/docker/dockbuild.sh b/docker/dockbuild.sh index 29e62f7..a1b34b5 100755 --- a/docker/dockbuild.sh +++ b/docker/dockbuild.sh @@ -10,10 +10,10 @@ execdir=`dirname $0` codedir=`(cd $execdir/.. > /dev/null 2>&1; pwd)` set -e -## These are set by default via _run.sh; if necessary, uncomment and customize +## These are set by default via _dockbuild.sh; if necessary, uncomment and customize # PACKAGE_NAME=oar-metadata -# + ## list the names of the image directories (each containing a Dockerfile) for ## containers to be built. List them in dependency order (where a latter one ## depends the former ones). diff --git a/docker/ejsonschema/Dockerfile b/docker/ejsonschema/Dockerfile index 3070624..05ff5c1 100644 --- a/docker/ejsonschema/Dockerfile +++ b/docker/ejsonschema/Dockerfile @@ -1,10 +1,18 @@ FROM oar-metadata/jqfromsrc:latest -RUN apt-get update && apt-get install -y python python-pip python-dev unzip \ - uwsgi uwsgi-plugin-python python-yaml -RUN pip install 'pip==20.3.4' 'setuptools==44.0.0' -RUN pip install json-spec jsonmerge==1.3.0 jsonschema==2.6.0 requests pynoid \ - pytest==4.6.5 filelock crossrefapi +RUN apt-get update && apt-get install -y unzip uwsgi uwsgi-src \ + uuid-dev libcap-dev libpcre3-dev python3-distutils +RUN PYTHON=python3.8 uwsgi --build-plugin "/usr/src/uwsgi/plugins/python python38" && \ + mv python38_plugin.so /usr/lib/uwsgi/plugins/python38_plugin.so && \ + chmod 644 /usr/lib/uwsgi/plugins/python38_plugin.so + +RUN update-alternatives --install /usr/lib/uwsgi/plugins/python3_plugin.so \ + python_plugin.so /usr/lib/uwsgi/plugins/python38_plugin.so 1 + +RUN python -m pip install setuptools --upgrade +RUN python -m pip install json-spec jsonschema==2.4.0 requests \ + pytest==4.6.5 filelock crossrefapi pyyaml +RUN python -m pip install --no-dependencies jsonmerge==1.3.0 WORKDIR /root @@ -12,7 +20,13 @@ RUN curl -L -o ejsonschema.zip \ https://github.com/usnistgov/ejsonschema/archive/master.zip && \ unzip ejsonschema.zip && \ cd 
ejsonschema-master && \ - python setup.py install + python setup.py install --install-purelib=/usr/local/lib/python3.8/dist-packages + +RUN curl -L -o pynoid.zip \ + https://github.com/RayPlante/pynoid/archive/master.zip && \ + unzip pynoid.zip && \ + cd pynoid-master && \ + python setup.py install --install-purelib=/usr/local/lib/python3.8/dist-packages CMD ["bash"] diff --git a/docker/jqfromsrc/Dockerfile b/docker/jqfromsrc/Dockerfile index 12aec58..5b2107e 100644 --- a/docker/jqfromsrc/Dockerfile +++ b/docker/jqfromsrc/Dockerfile @@ -3,14 +3,16 @@ From oar-metadata/pymongo RUN apt-get update && \ apt-get install -y libonig-dev curl build-essential libtool zip \ unzip autoconf git +RUN pip install pipenv WORKDIR /root RUN git clone http://github.com/stedolan/jq.git jq-dev && \ cd jq-dev && \ - git checkout 80052e5275ae8c45b20411eecdd49c945a64a412 && \ + git checkout a9f97e9e61a910a374a5d768244e8ad63f407d3e && \ git submodule update --init && \ autoreconf -fi && \ - ./configure --with-oniguruma=builtin --disable-docs && \ + (cd docs && pipenv install) && \ + ./configure --with-oniguruma=builtin && \ make -j8 && \ make check-TESTS && \ make install diff --git a/docker/mdtests/Dockerfile b/docker/mdtests/Dockerfile index f1ac1b5..6db5107 100644 --- a/docker/mdtests/Dockerfile +++ b/docker/mdtests/Dockerfile @@ -10,9 +10,12 @@ RUN set -ex; \ "https://github.com/tianon/gosu/releases/download/$GOSU_VERSION/gosu-$arch"; \ wget -O /usr/local/bin/gosu.asc \ "https://github.com/tianon/gosu/releases/download/$GOSU_VERSION/gosu-$arch.asc";\ - verify-asc.sh /usr/local/bin/gosu /usr/local/bin/gosu.asc \ - B42F6819007F00F88E364FD4036A9C25BF357DD4; \ - rm /usr/local/bin/gosu.asc; \ + export GNUPGHOME="$(mktemp -d)"; \ + echo "disable-ipv6" >> "$GNUPGHOME/dirmngr.conf"; \ + gpg --batch --keyserver hkps://keys.openpgp.org \ + --recv-keys B42F6819007F00F88E364FD4036A9C25BF357DD4; \ + gpg --batch --verify /usr/local/bin/gosu.asc /usr/local/bin/gosu; \ + rm -r 
/usr/local/bin/gosu.asc; \ chmod +x /usr/local/bin/gosu; \ gosu nobody true diff --git a/docker/mdtests/mdtests.sh b/docker/mdtests/mdtests.sh index dffe077..2d503fd 100755 --- a/docker/mdtests/mdtests.sh +++ b/docker/mdtests/mdtests.sh @@ -8,7 +8,7 @@ function launch_uwsgi { mkdir docker/_docker_ingest_archive echo starting uwsgi... - uwsgi --daemonize docker/_docker_uwsgi.log --plugin python \ + uwsgi --daemonize docker/_docker_uwsgi.log --plugin python3 \ --http-socket :9090 --wsgi-file scripts/ingest-uwsgi.py \ --set-ph oar_config_file=docker/mdtests/ingest_conf.yml \ --pidfile /tmp/ingest.pid diff --git a/docker/pymongo/Dockerfile b/docker/pymongo/Dockerfile index 4397197..57b77c6 100644 --- a/docker/pymongo/Dockerfile +++ b/docker/pymongo/Dockerfile @@ -1,11 +1,18 @@ -FROM mongo:4.2 +# This provides the base support for Python 3.8 and MongoDB 4.4 + +FROM mongo:4.4 # VOLUME /data MAINTAINER Ray Plante COPY mongod.conf /etc/mongod.conf COPY mongod_ctl.sh /usr/local/bin -RUN sed -e '/jessie-updates/ s/^deb/#deb/' /etc/apt/sources.list \ - > /tmp/sources.list && mv /tmp/sources.list /etc/apt/sources.list -RUN apt-get update && apt-get install -y python python-pip python-dev -RUN pip install pymongo +RUN apt-get update && apt-get install -y ca-certificates locales python3.8 python3-pip python3.8-dev +RUN update-alternatives --install /usr/bin/python python /usr/bin/python3.8 1; \ + update-alternatives --install /usr/bin/python3 python3 /usr/bin/python3.8 1; \ + update-alternatives --install /usr/bin/pip pip /usr/bin/pip3 1 +RUN locale-gen en_US.UTF-8 +ENV LANG en_US.UTF-8 +ENV LANGUAGE en_US:en +ENV LC_ALL en_US.UTF-8 +RUN python -m pip install pymongo diff --git a/etc/merge/pdp0/Component-schema.json b/etc/merge/pdp0/Component-schema.json new file mode 100644 index 0000000..5b38800 --- /dev/null +++ b/etc/merge/pdp0/Component-schema.json @@ -0,0 +1,5 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "id": 
"https://www.nist.gov/od/dm/nerdm-schema/v0.6/Component#", + "$ref": "https://www.nist.gov/od/dm/nerdm-schema/v0.6#/definitions/Component" +} diff --git a/etc/merge/pdp0/DataFile-schema.json b/etc/merge/pdp0/DataFile-schema.json new file mode 100644 index 0000000..acb9363 --- /dev/null +++ b/etc/merge/pdp0/DataFile-schema.json @@ -0,0 +1,5 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "id": "https://www.nist.gov/od/dm/nerdm-schema/v0.1/DataFile#", + "$ref": "https://www.nist.gov/od/dm/nerdm-schema/v0.1#/definitions/Component" +} diff --git a/etc/merge/pdp0/README.md b/etc/merge/pdp0/README.md new file mode 100644 index 0000000..b03f591 --- /dev/null +++ b/etc/merge/pdp0/README.md @@ -0,0 +1,12 @@ +This directory contains annotated schemas used to merge NERDm +documents. Specifically, it contains schemas for the **pdp0** +conventions for merging metadata annotations with the base metadata as +done in the PDP implementation of SIP publishing. In the PDP +implementation, annotations are used to hold metadata values fixed by +the convention, preventing SIP-providing clients from updating them. + +Because of limitations of jsonmerge (including its lack of awareness +of ejsonschema conventions), the core NERDm schema is represented by +`nerdm-amalgamated-schema.json` which folds different resource and +and component types together. 
+ diff --git a/etc/merge/pdp0/Resource-schema.json b/etc/merge/pdp0/Resource-schema.json new file mode 100644 index 0000000..c3a6e1d --- /dev/null +++ b/etc/merge/pdp0/Resource-schema.json @@ -0,0 +1,5 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "id": "https://www.nist.gov/od/dm/nerdm-schema/v0.6/Resource#", + "$ref": "https://www.nist.gov/od/dm/nerdm-schema/v0.6#/definitions/Resource" +} diff --git a/etc/merge/pdp0/nerdm-amalgamated-schema.json b/etc/merge/pdp0/nerdm-amalgamated-schema.json new file mode 100644 index 0000000..33c6d1f --- /dev/null +++ b/etc/merge/pdp0/nerdm-amalgamated-schema.json @@ -0,0 +1,984 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "$extensionSchemas": ["https://www.nist.gov/od/dm/enhanced-json-schema/v0.1#"], + "id": "https://www.nist.gov/od/dm/nerdm-schema/v0.6#", + "rev": "wd1", + "title": "The JSON Schema for the NIST Extended Resource Data model (NERDm)", + "description": "A JSON Schema specfying the core NERDm classes", + "definitions": { + + "Resource": { + "description": "a resource (e.g. data collection, service, website or tool) that can participate in a data-driven application", + "properties": { + + "title": { + "title": "Title", + "description": "Human-readable, descriptive name of the resource", + "notes": [ + "Acronyms should be avoided" + ], + "type": "string", + "mergeStrategy": "preferHead" + }, + + "description": { + "title": "Description", + "description": "Human-readable description (e.g., an abstract) of the resource", + "notes": [ + "Each element in the array should be considered a separate paragraph" + ], + "type": "array", + "items": { "type": "string" }, + "minItems": 1, + "mergeStrategy": "preferHead" + }, + + "keyword": { + "title": "Tags", + "description": "Tags (or keywords) help users discover your dataset; please include terms that would be used by technical and non-technical users.", + "notes": [ + "Surround each keyword with quotes. Separate keywords with commas. 
Avoid duplicate keywords in the same record." + ], + "type": "array", + "items": { "type": "string", "minLength": 1 }, + "mergeStrategy": "preferHead" + }, + + "topic": { + "description": "Identified tags referring to things or concepts that this resource addresses or speaks to", + "type": "array", + "items": { "$ref": "#/definitions/Topic" }, + "mergeStrategy": "topicArray" + }, + + "modified": { + "description": "Most recent date on which the dataset was changed, updated or modified.", + "notes": [ + "Dates should be ISO 8601 of highest resolution. In other words, as much of YYYY-MM-DDThh:mm:ss.sTZD as is relevant to this dataset. If there is a need to reflect that the dataset is continually updated, ISO 8601 formatting can account for this with repeating intervals. For instance, R/P1D for daily, R/P2W for every two weeks, and R/PT5M for every five minutes." + ], + "$ref": "#/definitions/ISO8601DateRange", + "mergeStrategy": "keepBase" + }, + + "issued": { + "title": "Release Date", + "description": "Date of formal issuance of the resource", + "anyOf": [ + { + "type": "string", + "pattern": "^([\\+-]?\\d{4}(?!\\d{2}\\b))((-?)((0[1-9]|1[0-2])(\\3([12]\\d|0[1-9]|3[01]))?|W([0-4]\\d|5[0-2])(-?[1-7])?|(00[1-9]|0[1-9]\\d|[12]\\d{2}|3([0-5]\\d|6[1-6])))([T\\s]((([01]\\d|2[0-3])((:?)[0-5]\\d)?|24\\:?00)([\\.,]\\d+(?!:))?)?(\\17[0-5]\\d([\\.,]\\d+)?)?([zZ]|([\\+-])([01]\\d|2[0-3]):?([0-5]\\d)?)?)?)?$" + }, + { + "type": "null" + } + ], + "mergeStrategy": "preferHead" + }, + + "publisher": { + "description": "The publishing entity and optionally their parent organization(s).", + "notes": [ + "This is a container for a publisher object which groups together the fields: name and subOrganization. The subOrganization field can also contain a publisher object which allows one to describe an organization’s hierarchy. Where greater specificity is desired, include as many levels of publisher as is useful, in ascending order, using the below format." 
+ ], + "$ref": "#/definitions/Organization", + "mergeStrategy": "preferHead" + }, + + "contactPoint": { + "description": "Contact information for getting more information about this resource", + "notes": [ + "This should include at least a name and an email address", + "The information can reflect either a person or a group (such as a help desk)" + ], + "$ref": "#/definitions/ContactInfo", + "mergeStrategy": "preferBase" + }, + + "accessLevel": { + "title": "Public Access Level", + "description": "The degree to which this dataset could be made publicly-available, regardless of whether it has been made available", + "notes": [ + ], + "type": "string", + "enum": [ "public", "restricted public", "non-public" ], + "valueDocumentation": { + "public": { + "description": "Data asset is or could be made publicly available to all without restrictions" + }, + "restricted public": { + "description": "Data asset is available under certain use restrictions" + }, + "non-public": { + "description": "Data asset is not available to members of the public" + } + }, + "mergeStrategy": "preferHead" + }, + + "license": { + "title": "License", + "description": "A pointer to the primary license or non-license (i.e. Public Domain) statement with which the dataset or API has been published", + "notes": [ + "Software and data developed primarily by federal employees must be considered in the public domain; software primarily developed by contract can be assigned a license, including an open source license.", + "By default, NIST-produced data and software should point to http://www.nist.gov/data/license.cfm", + "See Open Licenses (https://project-open-data.cio.gov/open-licenses/) for more information." 
+ ], + "anyOf": [ + { + "type": "string", + "format": "uri" + }, + { + "type": "null" + } + ], + "mergeStrategy": "preferHead" + }, + + "rights": { + "title": "Rights", + "description": "information regarding access or restrictions based on privacy, security, or other policies", + "notes": [ + "This should also provide an explanation for the selected \"accessLevel\" including instructions for how to access a restricted file, if applicable, or explanation for why a \"non-public\" or \"restricted public\" data assetis not \"public,\" if applicable.", + "Text must be 255 or fewer characters." + ], + "anyOf": [ + { + "type": "string", + "minLength": 1, + "maxLength": 255 + }, + { + "type": "null" + } + ], + "mergeStrategy": "preferHead" + }, + + "components": { + "description": "a listing of the various component resources, tools, and distributions of this resource", + "notes": [ + "Records for the resources referenced in this list should specify this resource in its isPartOf field.", + "The @type property will indicate which type of component it is. The first value in the @type list should be considered the primary or most important classification.", + "If an item's @type includes dcat:Distribution, the item should be convertable to a distribution in the POD schema.", + "The order should be considered meaningful (at least for components of the same type). The meaning or intention behind the order can depend on the type; however, generally, display of the components of a common type should preserve the order. For clarity then, it is recommended that items of the same primary type should be grouped together in the list." 
+ ], + "type": "array", + "items": { "$ref": "#/definitions/Component" }, + "minLength": 1, + "uniqueItems": true, + "mergeStrategy": "arrayMergeById", + "mergeOptions": { "idRef": "/@id" } + }, + + "conformsTo": { + "title": "Standard", + "description": "URI used to identify a standardized specification the resource conforms to", + "anyOf": [ + { + "type": "string", + "format": "uri" + }, + { + "type": "null" + } + ], + "mergeStrategy": "preferBase" + }, + + "isPartOf": { + "title": "Collection", + "description": "The collection of which the dataset is a subset", + "$ref": "#/definitions/ResourceReference", + "mergeStrategy": "objectMerge" + }, + + "language": { + "title": "Language", + "description": "The primary language used in the dataset", + "anyOf": [ + { + "type": "array", + "items": { + "type": "string", + "pattern": "^(((([A-Za-z]{2,3}(-([A-Za-z]{3}(-[A-Za-z]{3}){0,2}))?)|[A-Za-z]{4}|[A-Za-z]{5,8})(-([A-Za-z]{4}))?(-([A-Za-z]{2}|[0-9]{3}))?(-([A-Za-z0-9]{5,8}|[0-9][A-Za-z0-9]{3}))*(-([0-9A-WY-Za-wy-z](-[A-Za-z0-9]{2,8})+))*(-(x(-[A-Za-z0-9]{1,8})+))?)|(x(-[A-Za-z0-9]{1,8})+)|((en-GB-oed|i-ami|i-bnn|i-default|i-enochian|i-hak|i-klingon|i-lux|i-mingo|i-navajo|i-pwn|i-tao|i-tay|i-tsu|sgn-BE-FR|sgn-BE-NL|sgn-CH-DE)|(art-lojban|cel-gaulish|no-bok|no-nyn|zh-guoyu|zh-hakka|zh-min|zh-min-nan|zh-xiang)))$" + } + }, + { + "type": "null" + } + ], + "mergeStrategy": "preferHead" + }, + + "landingPage": { + "title": "Homepage URL", + "description": "a URL to a human-friendly web page that represents the home or gateway to the resources that are part of this dataset.", + "anyOf": [ + { + "type": "string", + "format": "uri" + }, + { + "type": "null" + } + ], + "mergeStrategy": "preferHead" + }, + + "references": { + "title": "Related Documents", + "description": "Related documents such as technical information about a dataset, developer documentation, etc.", + "type": "array", + "items": { "$ref": "#/definitions/DocumentReference" }, + "mergeStrategy": "preferBase", + 
"mergeOptions": { "idRef": "/@id" } + }, + + "theme": { + "title": "Category", + "description": "Main thematic category of the dataset.", + "notes": [ + "Could include ISO Topic Categories (http://www.isotopicmaps.org/)" + ], + "anyOf": [ + { + "type": "array", + "items": { + "type": "string", + "minLength": 1 + }, + "minItems": 1, + "uniqueItems": true + }, + { + "type": "null" + } + ], + "mergeStrategy": "preferHead" + }, + + "@id": { + "description": "A (primary) unique identifier for the resource", + "notes": [ + "It is expected that this field will contain a type of identifier that is of a uniform type across all resource descriptions in the system", + "This identifier should, by default, resolve to the metadata record (or some rendering of it) rather than to the resource itself (e.g. via its landing page)." + ], + "type": "string", + "minLength": 5, + "mergeStrategy": "overwrite" + }, + + "doi": { + "description": "A Digital Object Identifier (DOI) in non-resolving form.", + "notes": [ + "The DOI value should not have the resolver URI base, but should have the form 'doi:NNNNN/XXXXXX'", + "It is expected that this will primarily be Datacite DOIs" + ], + "type": "string", + "pattern": "^doi:[0-9]+\\.[0-9]+/.*$", + "minLength": 5, + "mergeStrategy": "overwrite" + }, + + "ediid": { + "description": "the NIST EDI identifier assigned to the resource", + "type": "string", + "mergeStrategy": "overwrite" + }, + + "abbrev": { + "description": "an abbreviated form of the resource's title", + "notes": [ + "this can be used as a label for a compact display or text for a link to this resource" + ], + "type": "array", + "items": { + "type": "string", + "maxLength": 24 + }, + "mergeStrategy": "preferBase" + }, + + "@type": { + "description": "the linked-data class types for this resource", + "notes": [ + "The value must always be given as an array, even when only one type is provided.", + "Multiple values indicate that the Resource can be considered of multiple types", + "If 
this resource is not to be considered a subtype of the nrd:Resource, its value should be 'nrd:Resource'." + + ], + "type": "array", + "items": { "type": "string" } , + "mergeStrategy": "preferBase" + }, + + "accrualPeriodicity": { + "title": "Frequency", + "description": "Frequency with which dataset is published.", + "anyOf": [ + { + "enum": [ + "irregular" + ], + "valueDocumentation": { + "irregular": { + "description": "the data is updated or republished on an irregular schedule" + } + } + }, + { + "type": "string", + "pattern": "^R\\/P(?:\\d+(?:\\.\\d+)?Y)?(?:\\d+(?:\\.\\d+)?M)?(?:\\d+(?:\\.\\d+)?W)?(?:\\d+(?:\\.\\d+)?D)?(?:T(?:\\d+(?:\\.\\d+)?H)?(?:\\d+(?:\\.\\d+)?M)?(?:\\d+(?:\\.\\d+)?S)?)?$" + }, + { + "type": "null" + } + ], + "mergeStrategy": "preferBase" + }, + + "bureauCode": { + "title": "Bureau Code", + "description": "an identifier provided by the OMB Circular A-11, Appendix C that identifies the originating federal agency", + "notes": [ + "OMB Circular A-11, Appendix C is available via http://www.whitehouse.gov/sites/default/files/omb/assets/a11_current_year/app_c.pdf", + "A machine-readable listing of the defined codes is available via https://project-open-data.cio.gov/data/omb_bureau_codes.csv", + "Codes have the format of 015:01" + ], + "type": "array", + "items": { + "type": "string", + "pattern": "^[0-9]{3}:[0-9]{2}$" + }, + "uniqueItems": true, + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Bureau Code", + "referenceProperty": "pod:bureauCode" + }, + "mergeStrategy": "preferHead" + }, + + "programCode": { + "title": "Program Code", + "description": "an identifier provided by the Federal Program Inventory that identifies the primary program related to this data asset", + "notes": [ + "A machine-readable listing of the defined codes is available via https://www.performance.gov/sites/default/files/files/FederalProgramInventory_FY13_MachineReadable_091613.xls", + "Codes have the format of 015:001" + ], + "type": 
"array", + "items": { + "type": "string", + "pattern": "^[0-9]{3}:[0-9]{3}$" + }, + "uniqueItems": true, + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Program Code", + "referenceProperty": "pod:programCode" + }, + "mergeStrategy": "preferHead" + }, + + "dataQuality": { + "title": "Data Quality", + "description": "Whether the dataset meets the agency's Information Quality Guidelines", + "anyOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ], + "mergeStrategy": "preferBase" + }, + + "primaryITInvestmentUII": { + "title": "Primary IT Investment UII", + "description": "The IT Unique Investment Identifier (UII) that is associated with this dataset", + "anyOf": [ + { + "type": "string", + "pattern": "[0-9]{3}-[0-9]{9}" + }, + { + "type": "null" + } + ], + "mergeStrategy": "preferHead" + }, + + "systemOfRecords": { + "title": "System of Records", + "description": "The URL to the System of Records Notice related to this dataset if is so designated under the Privacy Act of 1974", + "anyOf": [ + { + "type": "string", + "minLength": 1 + }, + { + "type": "null" + } + ], + "mergeStrategy": "overwrite" + } + }, + + "required": [ + "title", "description", "landingPage", "publisher", + "contactPoint", "accessLevel" + ] + }, + + "DocumentReference": { + "description": "descriptive reference to a document", + "type": "object", + "properties": { + "@type": { + "type": "string", + "enum": [ "deo:BibliographicReference" ] + }, + + "location": { + "description": "the URL for accessing the document", + "notes": [ + "It is assumed that the document can be viewed via a web browser (e.g. HTML, PDF, etc.)" + ], + "type": "string", + "format": "uri" + }, + + "label": { + "description": "a recommended label or title to display as the text for a link to the document", + "notes": [ + "This is intended to be briefer than a citation." 
+ ], + "type": "string" + }, + + "citation": { + "description": "a full formated citation string for the reference, appropriate for inclusion in a bibliography", + "type": "string" + }, + + "refid": { + "description": "An persistent ID for the referenced document.", + "notes": [ + "The ID should be given in a form (e.g. with a scheme prefix) such that its type is obvious. Use 'doi:' for Cross-ref or Datacite IDs.", + "location may use the URL form of the ID." + ], + "type": "string" + }, + + "refType": { + "description": "the type of relationship that this document has with the resource", + "notes": [ + "This is equivalent to the Datacite relationType in that the term is a predicate that connects the resource as the subject to the referenced document as the object (e.g. resource IsDocumentedBy referenced-doc)", + "The DCiteDocumentReference type sets DataCite terms as controlled vocabulary" + ], + "type": "string" + } + }, + "required": [ "@type", "location" ] + }, + + "DCiteDocumentReference": { + "description": "a descriptive reference to a document with a controlled vocabulary for its reference type (refType)", + "notes": [ + "This object is intended to point to human-readable documents", + "Use 'isDocumentedBy' to indicate documents that provide the most comprehensive explanation of the contents of the resource. List these documents in order of importance (as the first one will be exported as the 'describedBy' document when converted to the POD schema).", + "Use 'isSourceOf' if the document provides analysis and interpretation of the resource. In particular, journal articles that are co-published with this resource should be listed with this type. It is recommended that these documents be listed either in order of publication date or importance.", + "Documents may be listed more than once having different types, namely both 'isDocumentedBy' and 'isSourceOf'; however, it is recommended that such multiple classifications should be minimized." 
+ ], + "allOf": [ + { "$ref": "#/definitions/DocumentReference" }, + { + "properties": { + "refType": { + "type": "string", + "enum": [ "IsDocumentedBy", "IsSupplementedBy", + "IsCitedBy", "Cites", "IsReviewedBy", + "IsReferencedBy", "References", + "IsSourceOf", "IsDerivedFrom" ], + "valueDocumentation": { + "IsDocumentedBy": { + "description": "The referenced document provides documentation of the resource.", + "notes": [ + ] + }, + "IsSupplementedBy": { + "description": "The referenced document is a supplement to the resource.", + "notes": [ + ] + }, + "IsCitedBy": { + "description": "The referenced document cites the resource in some way.", + "notes": [ + "This relationship indicates is lighter than IsReferenceBy: the referenced document may discuss this resource without drawing on and using data or information from this resource." + ] + }, + "Cites": { + "description": "This resource cites the referenced document.", + "notes": [ + "Like IsCitedBy, this relationship indicates is lighter than References: the referenced document may discuss this resource without drawing on and using data or information from this resource." + ] + }, + "IsReviewedBy": { + "description": "The referenced document reviews this resource.", + "notes": [ + "This is a lighter relationship than the resource property, describedBy; the latter refers to a document that is the primary, detailed description and/or analysis of this resource" + ] + }, + "IsReferencedBy": { + "description": "The resource is used as a source of information by the referenced document.", + "notes": [ + ] + }, + "References": { + "description": "The referenced document is used as a source of information by the resource.", + "notes": [ + ] + }, + "IsSourceOf": { + "description": "The resource is the source of upon which the referenced document is based.", + "notes": [ + "In other words, the referenced document is derived from the resource." 
+ ] + }, + "IsDerivedFrom": { + "description": "The referenced document is the source upon which the resource is based.", + "notes": [ + "In other words, the resource is derived from the referenced document." + ] + } + } + } + }, + "required": [ "refType" ] + } + ] + }, + + "ResourceReference": { + "description": "a reference to another resource that may have an associated ID", + "notes": [ + "While providing a resType property is recommended, it is required if the proxyFor ID is given." + ], + "type": "object", + "properties": { + "title": { + "description": "the name of the resource being referenced", + "notes": [ + "This value is intended for display.", + "This can be, but is not required to be, the same title given in the metadata record describing the resource" + ], + "type": "string", + "minLength": 1 + }, + "proxyFor": { + "description": "a local identifier representing this resource", + "notes": [ + "This identifier is expected to point to an up-to-date description of the resource as known to the local system. The properties associated with that identifier may be different those given in the current record." + ], + "type": "string", + "format": "uri" + }, + "resourceType": { + "description": "the linked-data class types for the referenced resource", + "notes": [ + "The value must always be given as an array, even when only one type is provided.", + "Multiple values indicate that the Resource can be considered of multiple types; it is recommended that the primary type (usually the most specific) should be given first.", + "If this resource is not to be considered a subtype of the nrd:Resource, its value should be 'nrd:Resource'." 
+ + ], + "type": "array", + "items": { "type": "string" } + + } + }, + "required": [ "title" ], + "dependencies": { + "proxyFor": { + "required": [ "resType" ] + } + } + }, + + "Component": { + "description": "a description of a component of a resource", + "type": "object", + "notes": [ + "Include a $extensionSchema property to validate this description against a Component sub-type" + ], + "properties": { + "@id": { + "description": "a (relative) identifier for the distribution", + "notes": [ + "Though not required by this schema, providing an identifier for a component is critical for merging information from different sources.", + "A relative identifier requires that @base be set to the Resource identifier in the @context." + ], + "type": "string", + "mergeStrategy": "overwrite" + }, + "@type": { + "description": "the types of components that this component can be classified as", + "notes": [ + "the first value should be considered its primary type. This is usually a subtype of Component." 
+ ], + "type": "array", + "items": { "type": "string" }, + "mergeStrategy": "preferBase", + "mergeOptions": { + "incompatible": [ + [ "nrdp:DataFile", "nrdp:Subcollection", + "nrd:Hidden", "nrdp:AccessPage", + "nrdp:SearchPage" ] + ] + } + }, + "title": { + "description": "a descriptive title for the component", + "type": "string", + "mergeStrategy": "preferBase" + }, + "description": { + "description": "a description of the nature and contents of the component, including the role it plays as part of the resource", + "type": "string", + "mergeStrategy": "preferBase" + }, + "topic": { + "description": "Identified tags referring to things or concepts that this component addresses or speaks to", + "type": "array", + "items": { "$ref": "#/definitions/Topic" }, + "mergeStrategy": "topicArray" + }, + "conformsTo": { + "title": "Standard", + "description": "URI used to identify a standardized specification the component conforms to", + "anyOf": [ + { + "type": "string", + "format": "uri" + }, + { + "type": "null" + } + ], + "mergeStrategy": "preferBase" + }, + + "filepath": { + "description": "a name for the data file reflecting its hierarchical location in the data source collection", + "notes": [ + "Forward slashes delimit the hierarchical names in the path", + "If there are no slashes in this value, this file is assumed to be at the top of the file hierarchy", + "The base name of this value (i.e. the last field in the path) can be used as the default filename to give to the file if downloaded.", + "The component title may have the same value." 
+ ], + "type": "string", + "mergeStrategy": "keepBase" + }, + + "downloadURL": { + "title": "Download URL", + "description": "URL providing direct access to a downloadable file of a dataset", + "type": "string", + "format": "uri", + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Download URL", + "referenceProperty": "dcat:downloadURL" + }, + "mergeStrategy": "keepBase" + }, + + "mediaType": { + "title": "Media Type", + "description": "The machine-readable file format (IANA Media Type or MIME Type) of the file’s downloadURL", + "anyOf": [ + { + "pattern": "^[-\\w]+/[-\\w]+(\\.[-\\w]+)*([+][-\\w]+)?$", + "type": "string" + }, + { + "type": "null" + } + ], + "mergeStrategy": "keepBase" + }, + + "format": { + "title": "Format", + "description": "A human-readable description of the file format of a distribution", + "$ref": "#/definitions/Format", + "mergeStrategy": "keepBase" + }, + + "describedBy": { + "title": "Data Dictionary", + "description": "URL to the data dictionary for the distribution found at the downloadURL", + "anyOf": [ + { + "type": "string", + "format": "uri" + }, + { + "type": "null" + } + ], + "mergeStrategy": "preferBase" + }, + + "describedByType": { + "title": "Data Dictionary Type", + "description": "The machine-readable file format (IANA Media Type or MIME Type) of the file’s describedBy URL", + "anyOf": [ + { + "pattern": "^[-\\w]+/[-\\w]+(\\.[-\\w]+)*([+][-\\w]+)?$", + "type": "string" + }, + { + "type": "null" + } + ], + "mergeStrategy": "preferBase" + }, + + "accessURL": { + "description": "the URL for accessing this indirect access to the resource", + "type": "string", + "format": "uri", + "mergeStrategy": "keepBase" + } + + + } + }, + + "Format": { + "description": "a description of a file format that a file employs", + "type": "object", + "properties": { + "description": { + "description": "a human-readable description of the format", + "type": "string" + }, + + "scheme": { + "description": "a URI that identifies 
the format type registry or identification system that the value is defined in.", + "type": "string", + "format": "uri", + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Schema", + "referenceProperty": "vold:vocabulary" + } + }, + + "@id": { + "description": "the unique identifier for the format", + "type": "string", + "format": "uri" + }, + + "tag": { + "description": "a short, display-able token or abbreviation for the format", + "notes": [ + "As a token, it is intended that applications can search for this value and find all files having the same format. Thus, regardless of whether the @id field is provided, all references to the same format should use the same tag value." + ], + "type": "string" + } + } + }, + + "IncludedResource": { + "description": "A reference to another resource (which has its own record) that is a part of this resource", + "notes": [ + "Include 'nrd:IncludedResource' as a value to @type", + "The title should be (but is not required to be) the title provided in the included resource's metadata record." 
+ ], + "allOf": [ + { "$ref": "#/definitions/Component" }, + { "$ref": "#/definitions/ResourceReference" } + ] + }, + + "ISO8601DateRange": { + "title": "Last Update", + "description": "a single date-time or a date-time range", + "anyOf": [ + { + "type": "string", + "pattern": "^([\\+-]?\\d{4}(?!\\d{2}\\b))((-?)((0[1-9]|1[0-2])(\\3([12]\\d|0[1-9]|3[01]))?|W([0-4]\\d|5[0-2])(-?[1-7])?|(00[1-9]|0[1-9]\\d|[12]\\d{2}|3([0-5]\\d|6[1-6])))([T\\s]((([01]\\d|2[0-3])((:?)[0-5]\\d)?|24\\:?00)([\\.,]\\d+(?!:))?)?(\\17[0-5]\\d([\\.,]\\d+)?)?([zZ]|([\\+-])([01]\\d|2[0-3]):?([0-5]\\d)?)?)?)?$" + }, + { + "type": "string", + "pattern": "^(R\\d*\\/)?P(?:\\d+(?:\\.\\d+)?Y)?(?:\\d+(?:\\.\\d+)?M)?(?:\\d+(?:\\.\\d+)?W)?(?:\\d+(?:\\.\\d+)?D)?(?:T(?:\\d+(?:\\.\\d+)?H)?(?:\\d+(?:\\.\\d+)?M)?(?:\\d+(?:\\.\\d+)?S)?)?$" + }, + { + "type": "string", + "pattern": "^(R\\d*\\/)?([\\+-]?\\d{4}(?!\\d{2}\\b))((-?)((0[1-9]|1[0-2])(\\4([12]\\d|0[1-9]|3[01]))?|W([0-4]\\d|5[0-2])(-?[1-7])?|(00[1-9]|0[1-9]\\d|[12]\\d{2}|3([0-5]\\d|6[1-6])))([T\\s]((([01]\\d|2[0-3])((:?)[0-5]\\d)?|24\\:?00)([\\.,]\\d+(?!:))?)?(\\18[0-5]\\d([\\.,]\\d+)?)?([zZ]|([\\+-])([01]\\d|2[0-3]):?([0-5]\\d)?)?)?)?(\\/)P(?:\\d+(?:\\.\\d+)?Y)?(?:\\d+(?:\\.\\d+)?M)?(?:\\d+(?:\\.\\d+)?W)?(?:\\d+(?:\\.\\d+)?D)?(?:T(?:\\d+(?:\\.\\d+)?H)?(?:\\d+(?:\\.\\d+)?M)?(?:\\d+(?:\\.\\d+)?S)?)?$" + } + ] + }, + + "Topic": { + "description": "a container for an identified concept term or proper thing", + "notes": [ + "A concept term refers to a subject or keyword term, like 'magnetism' while a proper thing is a particular instance of a concept that has a name, like the planet 'Saturn' or the person called 'Abraham Lincoln'", + "The meaning of concept is that given by the OWL ontology (owl:Concept); the meaning of thing is that given by the SKOS ontology (skos:Thing). See also the FOAF ontology." 
+ ], + "type": "object", + "properties": { + "@type": { + "description": "a label indicating whether the value refers to a concept or a thing", + "type": "string", + "enum": [ "Concept", "Thing" ], + "valueDocumentation": { + "Concept": { + "description": "label indicating that the value refers to a concept (as in owl:Concept)" + }, + "Thing": { + "description": "label indicating that the value refers to a named person, place, or thing (as in skos:Thing)" + } + } + }, + + "scheme": { + "description": "a URI that identifies the controlled vocabulary, registry, or identifier system that the value is defined in.", + "type": "string", + "format": "uri", + "asOnotology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Schema", + "referenceProperty": "vold:vocabulary" + } + }, + + "@id": { + "description": "the unique identifier identifying the concept or thing", + "type": "string", + "format": "uri" + }, + + "tag": { + "description": "a short, display-able token that locally represents the concept or thing", + "notes": [ + "As a token, it is intended that applications can search for this value and find all resources that are talking about the same thing. Thus, regardless of whether the @id field is provided, all references to the same concept or thing should use the same tag value." + ], + "type": "string" + } + }, + "required": [ "@type", "tag" ] + }, + + "Organization": { + "description": "a named organization that may be part of a larger organization", + "type": "object", + "properties": { + "@type": { + "title": "Metadata Context", + "description": "IRI for the JSON-LD data type. 
This should be org:Organization for each publisher", + "type": "string", + "enum": [ "org:Organization" ] + }, + "name": { + "title": "Publisher Name", + "description": "The plain-text name of the organization", + "type": "string", + "minLength": 1 + } + }, + "required": [ "name" ] + }, + + "ContactInfo": { + "description": "Information describing various ways to contact an entity", + "notes": [ + ], + "properties": { + "@type": { + "type": "string", + "enum": [ "vcard:Contact" ] + }, + "fn": { + "title": "Contact Name", + "description": "full name of the contact person, role, or organization", + "type": "string", + "minLength": 1 + }, + + "hasEmail": { + "title": "Email", + "description": "The email address of the resource contact", + "type": "string", + "pattern": "^[\\w\\_\\~\\!\\$\\&\\'\\(\\)\\*\\+\\,\\;\\=\\:.-]+@[\\w.-]+\\.[\\w.-]+?$" + }, + + "postalAddress": { + "description": "the contact mailing address", + "notes": [ + ], + "$ref": "#/definitions/PostalAddress" + }, + + "phoneNumber": { + "description": "the contact telephone number", + "notes": [ "Complete international dialing codes should be given, e.g. '+1-410-338-1234'" ], + "type" : "string" + }, + + "timezone": { + "description": "the time zone where the contact typically operates", + "type" : "string", + "pattern": "^[-+][0-9]{4}$" + }, + + "proxyFor": { + "description": "a local identifier representing this person", + "notes": [ + "This identifier is expected to point to an up-to-date description of the person as known to the local system. The properties associated with that identifier may be different those given in the current record." 
+ ], + "type": "string", + "format": "uri" + } + + } + + }, + + "PostalAddress": { + "description": "a line-delimited listing of a postal address", + "type": "array", + "items": { "type": "string", "minLength": 1 } + }, + + "Identifier": { + "description": "a complete description of an identifier, including the scheme that it adheres to", + "title": "Identifier", + "properties": { + "scheme": { + "description": "a label indicating the system that the identifier adheres to", + "notes": [ + "this label may imply a particular resolver to use" + ], + "type": "string" + }, + + "value": { + "description": "the value of the identifier", + "notes": [ + "if no scheme is provided, a URI form of the identifier should be given" + ], + "type": "string" + } + + }, + "required": [ "value" ] + } + }, + + "$ref": "#/definitions/Resource" +} diff --git a/etc/merge/pdp0/nerdm-pub-schema.json b/etc/merge/pdp0/nerdm-pub-schema.json new file mode 100644 index 0000000..0437ceb --- /dev/null +++ b/etc/merge/pdp0/nerdm-pub-schema.json @@ -0,0 +1,695 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "$extensionSchemas": ["https://data.nist.gov/od/dm/enhanced-json-schema/v0.1#"], + "id": "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.1#", + "rev": "wd2", + "title": "The NERDm extension metadata for Public Data", + "description": "These classes extend the base NERDm schema to different types of published data", + "definitions": { + + "PublicDataResource": { + "description": "a resource that can/should have a record in NIST's public data listing (PDL)", + "notes": [ + "This must be convertible to a compliant and complete POD record; thus, this class adds all remaining POD elements missing from the core" + ], + "allOf": [ + { "$ref": "https://data.nist.gov/od/dm/nerdm-schema/v0.1#/definitions/Resource"}, + { + "type": "object", + "properties": { + + "accrualPeriodicity": { + "title": "Frequency", + "description": "Frequency with which dataset is published.", + "anyOf": [ + { + 
 "enum": [ + "irregular" + ], + "valueDocumentation": { + "irregular": { + "description": "the data is updated or republished on an irregular schedule" + } + } + }, + { + "type": "string", + "pattern": "^R\\/P(?:\\d+(?:\\.\\d+)?Y)?(?:\\d+(?:\\.\\d+)?M)?(?:\\d+(?:\\.\\d+)?W)?(?:\\d+(?:\\.\\d+)?D)?(?:T(?:\\d+(?:\\.\\d+)?H)?(?:\\d+(?:\\.\\d+)?M)?(?:\\d+(?:\\.\\d+)?S)?)?$" + }, + { + "type": "null" + } + ], + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Frequency", + "referenceProperty": "dc:accrualPeriodicity" + } + }, + + "bureauCode": { + "title": "Bureau Code", + "description": "an identifier provided by the OMB Circular A-11, Appendix C that identifies the originating federal agency", + "notes": [ + "OMB Circular A-11, Appendix C is available via http://www.whitehouse.gov/sites/default/files/omb/assets/a11_current_year/app_c.pdf", + "A machine-readable listing of the defined codes is available via https://project-open-data.cio.gov/data/omb_bureau_codes.csv", + "Codes have the format of 015:01" + ], + "type": "array", + "items": { + "type": "string", + "pattern": "^[0-9]{3}:[0-9]{2}$" + }, + "uniqueItems": true, + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Bureau Code", + "referenceProperty": "pod:bureauCode" + } + }, + + "programCode": { + "title": "Program Code", + "description": "an identifier provided by the Federal Program Inventory that identifies the primary program related to this data asset", + "notes": [ + "A machine-readable listing of the defined codes is available via https://www.performance.gov/sites/default/files/files/FederalProgramInventory_FY13_MachineReadable_091613.xls", + "Codes have the format of 015:001" + ], + "type": "array", + "items": { + "type": "string", + "pattern": "^[0-9]{3}:[0-9]{3}$" + }, + "uniqueItems": true, + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Program Code", + "referenceProperty": "pod:programCode" + } + }, + + "dataQuality": { + 
"title": "Data Quality", + "description": "Whether the dataset meets the agency's Information Quality Guidelines", + "anyOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ], + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Data Quality", + "referenceProperty": "pod:dataQuality" + } + }, + + "primaryITInvestmentUII": { + "title": "Primary IT Investment UII", + "description": "The IT Unique Investment Identifier (UII) that is associated with this dataset", + "anyOf": [ + { + "type": "string", + "pattern": "[0-9]{3}-[0-9]{9}" + }, + { + "type": "null" + } + ], + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Primary IT Investment UII", + "referenceProperty": "pod:primaryITInvestmentUII" + } + }, + + "systemOfRecords": { + "title": "System of Records", + "description": "The URL to the System of Records Notice related to this dataset if is so designated under the Privacy Act of 1974", + "anyOf": [ + { + "type": "string", + "minLength": 1 + }, + { + "type": "null" + } + ] + } + + }, + "required": [ "bureauCode", "programCode", "ediid" ] + } + ] + }, + + "DownloadableFile": { + "description": "a description of a downloadable, finite stream of data", + "allOf": [ + { "$ref": "https://data.nist.gov/od/dm/nerdm-schema/v0.1#/definitions/Component" }, + { + "properties": { + + "filepath": { + "description": "a name for the data file reflecting its hierarchical location in the data source collection", + "notes": [ + "Forward slashes delimit the hierarchical names in the path", + "If there are no slashes in this value, this file is assumed to be at the top of the file hierarchy", + "The base name of this value (i.e. the last field in the path) can be used as the default filename to give to the file if downloaded.", + "The component title may have the same value." 
+ ], + "type": "string" + }, + + "downloadURL": { + "title": "Download URL", + "description": "URL providing direct access to a downloadable file of a dataset", + "type": "string", + "format": "uri", + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Download URL", + "referenceProperty": "dcat:downloadURL" + } + }, + + "mediaType": { + "title": "Media Type", + "description": "The machine-readable file format (IANA Media Type or MIME Type) of the file’s downloadURL", + "anyOf": [ + { + "pattern": "^[-\\w]+/[-\\w]+(\\.[-\\w]+)*([+][-\\w]+)?$", + "type": "string" + }, + { + "type": "null" + } + ], + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Media Type", + "referenceProperty": "dcat:mediaType" + } + }, + + "format": { + "title": "Format", + "description": "A human-readable description of the file format of a distribution", + "$ref": "#/definitions/Format", + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Format", + "referenceProperty": "dc:format" + } + }, + + "checksum": { + "title": "Checksum", + "description": "a checksum for the file", + "$ref": "#/definitions/Checksum" + }, + + "size": { + "description": "the size of the file in bytes", + "type": "integer", + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "file size", + "referenceProperty": "schema:fileSize" + } + } + + }, + "required": [ "filepath" ], + + "dependencies": { + "downloadURL": { + "properties": { + "mediaType": { + "type": "string", + "pattern": "^[-\\w]+/[-\\w]+(\\.[-\\w]+)*([+][-\\w]+)?$" + } + }, + "required": [ "mediaType" ] + } + } + } + ] + }, + + "DataFile": { + "description": "a description of a downloadable file that was provided by the authors (as opposed to a system or checksum file produced by the publication system).", + "allOf": [ + { "$ref": "#/definitions/DownloadableFile" }, + { + "properties": { + "describedBy": { + "title": "Data Dictionary", + "description": "URL to the 
data dictionary for the distribution found at the downloadURL", + "anyOf": [ + { + "type": "string", + "format": "uri" + }, + { + "type": "null" + } + ], + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Data Dictionary", + "referenceProperty": "http://www.w3.org/2007/05/powder-s#describedby" + } + }, + + "describedByType": { + "title": "Data Dictionary Type", + "description": "The machine-readable file format (IANA Media Type or MIME Type) of the file’s describedBy URL", + "anyOf": [ + { + "pattern": "^[-\\w]+/[-\\w]+(\\.[-\\w]+)*([+][-\\w]+)?$", + "type": "string" + }, + { + "type": "null" + } + ], + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Data Dictionary Type", + "referenceProperty": "pod:describedByType" + } + } + } + } + ] + }, + + "ChecksumFile": { + "description": "a downloadable file that contains the checksum value for a DataFile.", + "allOf": [ + { "$ref": "#/definitions/DownloadableFile" }, + { + "properties": { + "algorithm": { + "description": "the algorithm used to produce the checksum hash", + "$ref": "https://data.nist.gov/od/dm/nerdm-schema/v0.1#/definitions/Topic" + }, + + "valid": { + "type": "boolean", + "description": "A flag, if True, indicating the the hash value contained in this ChecksumFile is confirmed to be correct for its associated data file." 
+ }, + + "describes": { + "type": "string", + "format": "uri-reference", + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Checksum for", + "referenceProperty": "ov:describes" + } + } + } + } + ] + }, + + "Checksum": { + "description": "a checksum with its algorithm noted", + "type": "object", + "properties": { + "algorithm": { + "description": "the algorithm used to produce the checksum hash", + "$ref": "https://data.nist.gov/od/dm/nerdm-schema/v0.1#/definitions/Topic" + }, + "hash": { + "description": "the checksum value", + "type": "string", + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "checksum", + "referenceProperty": "dataid:checksum" + } + } + }, + "required": [ "hash" ] + }, + + "Subcollection": { + "description": "A grouping of components within a named subcollection of the resource", + "notes": [ + "This Component subtype implements hierarchical resources; a subcollection is equivalent to a directory that can contain other components, including other subcollections." + ], + "allOf": [ + { "$ref": "https://data.nist.gov/od/dm/nerdm-schema/v0.1#/definitions/Component" }, + { + "properties": { + + "filepath": { + "description": "a name for the data file reflecting its hierarchical location in the data source collection", + "notes": [ + "Forward slashes delimit the hierarchical names in the path", + "If there are no slashes in this value, this file is assumed to be at the top of the file hierarchy", + "The base name of this value (i.e. the last field in the path) can be used as the default filename to give to the file if downloaded.", + "The component title may have the same value." 
+ ], + "type": "string" + }, + + "contains": { + "description": "a listing of resource components that are directly part of this subcollection", + "notes": [ + "Each item is a URI identifier (possibly abbreviated)" + ], + "type": "array", + "items": { + "type": "string" + }, + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "contains", + "referenceProperty": "ldp:contains" + } + }, + + "hasParent": { + "description": "The identifier for the parent collection that contains this subcollection", + "type": "string" + } + + }, + "required": [ "filepath" ] + } + ], + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Subcollection", + "referenceProperty": "fedora:Container" + } + }, + + "AccessPage": { + "description": "a web page that provides indirect access to the resource", + "notes": [ + "This type should not be used to capture a resource's home page as this would be redundant with the landingPage resource property." + ], + "allOf": [ + { "$ref": "https://data.nist.gov/od/dm/nerdm-schema/v0.1#/definitions/Component" }, + { + "properties": { + "accessURL": { + "description": "the URL for accessing this indirect access to the resource", + "type": "string", + "format": "uri" + }, + + "format": { + "title": "Format", + "description": "A human-readable description of the file format of a distribution", + "$ref": "#/definitions/Format", + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Format", + "referenceProperty": "dc:format" + } + } + }, + "required": [ "accessURL" ] + } + ] + }, + + "SearchPage": { + "description": "a web page that can be used to search the contents of the resource", + "notes": [ + "Provide this component even if the accessURL is the same as the landing page; this indicates that the landing page provides a search tool in it." 
+ ], + "allOf": [ + { "$ref": "#/definitions/AccessPage" } + ] + }, + + "API": { + "description": "an application programming interface to the resource", + "notes": [ + "This is typically a web-based interface", + "When converting an API component to a POD distribution, the output format should set to 'API'." + ], + "allOf": [ + { "$ref": "https://data.nist.gov/od/dm/nerdm-schema/v0.1#/definitions/Component" }, + { + "properties": { + "accessURL": { + "description": "the URL for accessing this indirect access to the resource", + "type": "string", + "format": "uri" + }, + + "describedBy": { + "title": "API Description", + "description": "URL to a formal or informal description of the API", + "notes": [ + "Use describedByType to help distinguish between formal and informal (i.e. human readable) descriptions." + ], + "anyOf": [ + { + "type": "string", + "format": "uri" + }, + { + "type": "null" + } + ], + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Data Dictionary", + "referenceProperty": "http://www.w3.org/2007/05/powder-s#describedby" + } + }, + + "describedByType": { + "title": "API Descriptions Type", + "description": "The machine-readable file format (IANA Media Type or MIME Type) of the file’s describedBy URL", + "anyOf": [ + { + "pattern": "^[-\\w]+/[-\\w]+(\\.[-\\w]+)*([+][-\\w]+)?$", + "type": "string" + }, + { + "type": "null" + } + ], + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Data Dictionary Type", + "referenceProperty": "pod:describedByType" + } + } + } + } + ] + }, + + "Format": { + "description": "a description of a file format that a file employs", + "type": "object", + "properties": { + "description": { + "description": "a human-readable description of the format", + "type": "string" + }, + + "scheme": { + "description": "a URI that identifies the format type registry or identification system that the value is defined in.", + "type": "string", + "format": "uri", + "asOnotology": { + 
"@context": "profile-schema-onto.json", + "prefLabel": "Schema", + "referenceProperty": "vold:vocabulary" + } + }, + + "@id": { + "description": "the unique identifier for the format", + "type": "string", + "format": "uri" + }, + + "tag": { + "description": "a short, display-able token or abbreviation for the format", + "notes": [ + "As a token, it is intended that applications can search for this value and find all files having the same format. Thus, regardless of whether the @id field is provided, all references to the same format should use the same tag value." + ], + "type": "string" + } + } + }, + + "DataPublication": { + "description": "Data presented by one or more authors as citable publication", + "allOf": [ + { "$ref": "#/definitions/PublicDataResource" }, + { + "type": "object", + "properties": { + "subtitle": { + "description": "a secondary or sub-title for the resource", + "type": "array", + "items": { "type": "string" } + }, + "aka": { + "description": "other (unofficial) titles that this resource is sometimes known as", + "type": "array", + "items": { "type": "string" } + }, + "authors": { + "description": "the ordered list of authors of this data publication", + "notes": [ + "Authors should generally be assumed to be considered creators of the data; where this is is not true or insufficient, the contributors property can be used ot add or clarify who contributed to data creation." 
+ ], + "type": "array", + "items": { "$ref": "#/definitions/Person" }, + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Authors", + "referenceProperty": "bibo:authorList" + } + }, + "recommendedCitation": { + "description": "a recommended formatting of a citation to this data publication", + "type": "string", + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Cite as", + "referenceProperty": "dc:bibliographicCitation" + } + } + } + } + ] + }, + + "Person": { + "description": "an identification of a Person contributing to the publication of a resource", + "notes": [ + "The information here is intended to reflect information about the person at the time of the contribution or publication." + ], + "type": "object", + "properties": { + "@type": { + "description": "the class indicating that this is a Person", + "type": "string", + "enum": [ + "foaf:Person" + ] + }, + + "fn": { + "description": "the author's full name in the preferred format", + "type": "string", + "minLength": 1, + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Contact Name", + "referenceProperty": "vcard:fn" + } + }, + + "givenName": { + "description": "the author's given name", + "notes": [ + "Often referred to in English-speaking conventions as the first name" + ], + "type": "string", + "minLength": 1, + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "First Name", + "referenceProperty": "foaf:givenName" + } + }, + + "familyName": { + "description": "the author's family name", + "notes": [ + "Often referred to in English-speaking conventions as the last name" + ], + "type": "string", + "minLength": 1, + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Last Name", + "referenceProperty": "foaf:familyName" + } + }, + + "middleName": { + "description": "the author's middle names or initials", + "notes": [ + "Often referred to in English-speaking conventions as the middle name" + 
 ], + "type": "string", + "minLength": 1, + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Middle Names or Initials", + "referenceProperty": "vcard:middleName" + } + }, + + "orcid": { + "description": "the author's ORCID", + "notes": [ + "The value should not include the resolving URI base (http://orcid.org)" + ], + "$ref": "#/definitions/ORCIDpath", + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "ORCID", + "referenceProperty": "vivo:orcidid" + } + }, + + "affiliation": { + "description": "The institution the person was affiliated with at the time of publication", + "type": "array", + "items": { + "$ref": "https://data.nist.gov/od/dm/nerdm-schema/v0.1#/definitions/ResourceReference" + }, + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Affiliation", + "referenceProperty": "schema:affiliation" + } + }, + + "proxyFor": { + "description": "a local identifier representing this person", + "notes": [ + "This identifier is expected to point to an up-to-date description of the person as known to the local system. The properties associated with that identifier may be different from those given in the current record." + ], + "type": "string", + "format": "uri", + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Current Person Information", + "referenceProperty": "ore:proxyFor" + } + } + }, + "required": [ "fn" ] + }, + + "ORCIDpath": { + "description": "the format of the path portion of an ORCID identifier (i.e. 
without the preceding resolver URL base)", + "type": "string", + "pattern": "^[0-9]{4}-[0-9]{4}-[0-9]{4}-[0-9]{3}[0-9X]$" + } + + } +} + diff --git a/jq/pod2nerdm.jq b/jq/pod2nerdm.jq index a834c4b..e40eab7 100644 --- a/jq/pod2nerdm.jq +++ b/jq/pod2nerdm.jq @@ -21,15 +21,15 @@ include "urldecode"; # the base NERDm JSON schema namespace # -def nerdm_schema: "https://data.nist.gov/od/dm/nerdm-schema/v0.6#"; +def nerdm_schema: "https://data.nist.gov/od/dm/nerdm-schema/v0.7#"; # the NERDm pub schema extension namespace # -def nerdm_pub_schema: "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#"; +def nerdm_pub_schema: "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#"; # the NERDm bib schema extension namespace # -def nerdm_bib_schema: "https://data.nist.gov/od/dm/nerdm-schema/bib/v0.6#"; +def nerdm_bib_schema: "https://data.nist.gov/od/dm/nerdm-schema/bib/v0.7#"; # the NERDm context location # diff --git a/jq/tests/test_pod2nerdm.jqt b/jq/tests/test_pod2nerdm.jqt index f49152a..3f442d8 100644 --- a/jq/tests/test_pod2nerdm.jqt +++ b/jq/tests/test_pod2nerdm.jqt @@ -10,14 +10,14 @@ # include "pod2nerdm"; nerdm_schema null -"https://data.nist.gov/od/dm/nerdm-schema/v0.6#" +"https://data.nist.gov/od/dm/nerdm-schema/v0.7#" #-------------- # testing nerdm_schema() # include "pod2nerdm"; nerdm_pub_schema null -"https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#" +"https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#" #-------------- # testing nerdm_context() @@ -31,7 +31,7 @@ null # include "pod2nerdm"; dciteRefType null -"https://data.nist.gov/od/dm/nerdm-schema/bib/v0.6#/definitions/DCiteReference" +"https://data.nist.gov/od/dm/nerdm-schema/bib/v0.7#/definitions/DCiteReference" #-------------- # testing resid() @@ -91,7 +91,7 @@ include "pod2nerdm"; pdrLandingPageURL # include "pod2nerdm"; map(cvtref) [ "http://goob.net/doc1.txt", "https://goob.gov/doc2.txt" ] -[{ "@type": ["deo:BibliographicReference"],"@id":"#ref:doc1.txt", "refType": "IsSupplementTo", 
"_extensionSchemas": [ "https://data.nist.gov/od/dm/nerdm-schema/bib/v0.6#/definitions/DCiteReference" ], "location": "http://goob.net/doc1.txt"}, { "@type": ["deo:BibliographicReference"],"@id":"#ref:doc2.txt", "refType": "IsSupplementTo", "_extensionSchemas": [ "https://data.nist.gov/od/dm/nerdm-schema/bib/v0.6#/definitions/DCiteReference" ], "location": "https://goob.gov/doc2.txt"}] +[{ "@type": ["deo:BibliographicReference"],"@id":"#ref:doc1.txt", "refType": "IsSupplementTo", "_extensionSchemas": [ "https://data.nist.gov/od/dm/nerdm-schema/bib/v0.7#/definitions/DCiteReference" ], "location": "http://goob.net/doc1.txt"}, { "@type": ["deo:BibliographicReference"],"@id":"#ref:doc2.txt", "refType": "IsSupplementTo", "_extensionSchemas": [ "https://data.nist.gov/od/dm/nerdm-schema/bib/v0.7#/definitions/DCiteReference" ], "location": "https://goob.gov/doc2.txt"}] #--------------- # testing filepath() @@ -220,14 +220,14 @@ include "pod2nerdm"; map(componentID("#")) # include "pod2nerdm"; dist2download {"describedBy": "http://data.nist.gov/srd/srd_data/srd13_janaf-data-defs.json", "downloadURL": "http://data.nist.gov/srd/srd_data/srd13_B-101.json", "mediaType": "application/json","title": "Titanium Boride" } -{"describedBy": "http://data.nist.gov/srd/srd_data/srd13_janaf-data-defs.json", "downloadURL": "http://data.nist.gov/srd/srd_data/srd13_B-101.json","mediaType": "application/json", "title": "Titanium Boride", "filepath":"srd13_B-101.json", "@type": ["nrdp:DataFile","nrdp:DownloadableFile","dcat:Distribution"],"@id":"cmps/srd13_B-101.json","_extensionSchemas": ["https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile"]} +{"describedBy": "http://data.nist.gov/srd/srd_data/srd13_janaf-data-defs.json", "downloadURL": "http://data.nist.gov/srd/srd_data/srd13_B-101.json","mediaType": "application/json", "title": "Titanium Boride", "filepath":"srd13_B-101.json", "@type": 
["nrdp:DataFile","nrdp:DownloadableFile","dcat:Distribution"],"@id":"cmps/srd13_B-101.json","_extensionSchemas": ["https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile"]} #-------------- # testing dist2checksum() # include "pod2nerdm"; dist2checksum {"downloadURL": "http://data.nist.gov/srd/srd_data/srd13_B-101.json.sha256", "title": "Checksum for srd13_B-101.json" } -{"downloadURL": "http://data.nist.gov/srd/srd_data/srd13_B-101.json.sha256","mediaType": "text/plain", "description": "SHA-256 checksum value for srd13_B-101.json", "title": "Checksum for srd13_B-101.json", "filepath":"srd13_B-101.json.sha256", "algorithm": {"@type": "Thing","tag": "sha256"},"@type": ["nrdp:ChecksumFile","nrdp:DownloadableFile","dcat:Distribution"],"@id":"cmps/srd13_B-101.json.sha256","_extensionSchemas": ["https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/ChecksumFile"]} +{"downloadURL": "http://data.nist.gov/srd/srd_data/srd13_B-101.json.sha256","mediaType": "text/plain", "description": "SHA-256 checksum value for srd13_B-101.json", "title": "Checksum for srd13_B-101.json", "filepath":"srd13_B-101.json.sha256", "algorithm": {"@type": "Thing","tag": "sha256"},"@type": ["nrdp:ChecksumFile","nrdp:DownloadableFile","dcat:Distribution"],"@id":"cmps/srd13_B-101.json.sha256","_extensionSchemas": ["https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/ChecksumFile"]} #-------------- # testing dist2hidden() @@ -248,7 +248,7 @@ include "pod2nerdm"; dist2inaccess # include "pod2nerdm"; dist2accesspage {"accessURL": "https://doi.org/10.18434/T42C7D","title": "A Library to Enable the Modeling of Optical Imaging of Finite Multi-Line Arrays"} -{"accessURL": "https://doi.org/10.18434/T42C7D","title": "A Library to Enable the Modeling of Optical Imaging of Finite Multi-Line Arrays","@type": [ "nrdp:AccessPage", "dcat:Distribution" ],"@id":"#10.18434/T42C7D","_extensionSchemas":["https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/AccessPage"]} 
+{"accessURL": "https://doi.org/10.18434/T42C7D","title": "A Library to Enable the Modeling of Optical Imaging of Finite Multi-Line Arrays","@type": [ "nrdp:AccessPage", "dcat:Distribution" ],"@id":"#10.18434/T42C7D","_extensionSchemas":["https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/AccessPage"]} #-------------- # testing dist2comp() @@ -267,7 +267,7 @@ include "pod2nerdm"; dist2comp("doi:10.18434/T42C7D") # include "pod2nerdm"; dist2comp("doi:10.18434/T42C7D") {"describedBy": "http://data.nist.gov/srd/srd_data/srd13_janaf-data-defs.json", "downloadURL": "http://data.nist.gov/srd/srd_data/srd13_B-101.json", "mediaType": "application/json","title": "Titanium Boride" } -{"describedBy": "http://data.nist.gov/srd/srd_data/srd13_janaf-data-defs.json", "downloadURL": "http://data.nist.gov/srd/srd_data/srd13_B-101.json","mediaType": "application/json", "title": "Titanium Boride", "filepath":"srd13_B-101.json", "@type": ["nrdp:DataFile","nrdp:DownloadableFile","dcat:Distribution"],"@id":"cmps/srd13_B-101.json","_extensionSchemas": ["https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile"]} +{"describedBy": "http://data.nist.gov/srd/srd_data/srd13_janaf-data-defs.json", "downloadURL": "http://data.nist.gov/srd/srd_data/srd13_B-101.json","mediaType": "application/json", "title": "Titanium Boride", "filepath":"srd13_B-101.json", "@type": ["nrdp:DataFile","nrdp:DownloadableFile","dcat:Distribution"],"@id":"cmps/srd13_B-101.json","_extensionSchemas": ["https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile"]} #-------------- # testing dist2comp @@ -277,7 +277,7 @@ include "pod2nerdm"; dist2comp("doi:10.18434/T42C7D") # include "pod2nerdm"; dist2comp(null) {"accessURL": "http://www.nsrl.nist.gov/Downloads.htm","conformsTo": "http://www.nsrl.nist.gov/Documents/Data-Formats-of-the-NSRL-Reference-Data-Set-16.pdf","downloadURL": "http://www.nsrl.nist.gov/RDS/rds_2.50/RDS_250.iso","format": "ISO 9660 disk image","mediaType": 
"application/zip" } -{"accessURL": "http://www.nsrl.nist.gov/Downloads.htm","conformsTo": "http://www.nsrl.nist.gov/Documents/Data-Formats-of-the-NSRL-Reference-Data-Set-16.pdf","downloadURL": "http://www.nsrl.nist.gov/RDS/rds_2.50/RDS_250.iso","format": { "description": "ISO 9660 disk image"},"mediaType": "application/zip", "filepath":"RDS_250.iso", "@type": ["nrdp:DataFile","nrdp:DownloadableFile","dcat:Distribution"],"@id":"cmps/RDS_250.iso","_extensionSchemas": ["https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile"] } +{"accessURL": "http://www.nsrl.nist.gov/Downloads.htm","conformsTo": "http://www.nsrl.nist.gov/Documents/Data-Formats-of-the-NSRL-Reference-Data-Set-16.pdf","downloadURL": "http://www.nsrl.nist.gov/RDS/rds_2.50/RDS_250.iso","format": { "description": "ISO 9660 disk image"},"mediaType": "application/zip", "filepath":"RDS_250.iso", "@type": ["nrdp:DataFile","nrdp:DownloadableFile","dcat:Distribution"],"@id":"cmps/RDS_250.iso","_extensionSchemas": ["https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile"] } #-------------- # testing dist2comp @@ -287,7 +287,7 @@ include "pod2nerdm"; dist2comp(null) # include "pod2nerdm"; dist2comp("doi:10.18434/T42C7D") {"accessURL": "http://webbook.nist.gov/chemistry/","description": "Landing page for the NIST Chemistry WebBook.","mediaType": "text/html"} -{ "accessURL": "http://webbook.nist.gov/chemistry/","description": "Landing page for the NIST Chemistry WebBook.","mediaType": "text/html","@type": ["nrdp:AccessPage","dcat:Distribution"],"@id":"#chemistry/","_extensionSchemas":["https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/AccessPage"]} +{ "accessURL": "http://webbook.nist.gov/chemistry/","description": "Landing page for the NIST Chemistry WebBook.","mediaType": "text/html","@type": ["nrdp:AccessPage","dcat:Distribution"],"@id":"#chemistry/","_extensionSchemas":["https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/AccessPage"]} # testing dist2comp 
# @@ -397,13 +397,13 @@ include "pod2nerdm"; select_comp_type("nrdp:Subcollection"; "foo/bar") # include "pod2nerdm"; create_subcoll_for "a/b/foo" -{"@id": "cmps/a/b/foo", "@type": ["nrdp:Subcollection"], "filepath": "a/b/foo", "_extensionSchemas": [ "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/Subcollection" ]} +{"@id": "cmps/a/b/foo", "@type": ["nrdp:Subcollection"], "filepath": "a/b/foo", "_extensionSchemas": [ "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/Subcollection" ]} # testing insert_subcoll_comps # include "pod2nerdm"; insert_subcoll_comps [{ "title": "Titanium Boride", "filepath": "foo/srd13_B-101.json","@type": ["nrdp:DataFile","nrdp:DownloadableFile","dcat:Distribution"]},{ "title": "foo", "filepath": "foo", "@type": [ "nrdp:Subcollection"]},{"title": "Titanium Boride","filepath": "foo/bar/srd13_B-101.json","@type": ["nrdp:DataFile","nrdp:DownloadableFile","dcat:Distribution"]},{ "title": "foo bar goo", "filepath": "foo/bar/goo", "@type": ["nrdp:Subcollection"]}] -[{"@id": "cmps/foo/bar", "@type": ["nrdp:Subcollection"], "filepath": "foo/bar", "_extensionSchemas": [ "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/Subcollection" ]},{ "title": "Titanium Boride", "filepath": "foo/srd13_B-101.json","@type": ["nrdp:DataFile","nrdp:DownloadableFile","dcat:Distribution"]},{ "title": "foo", "filepath": "foo", "@type": [ "nrdp:Subcollection"]},{"title": "Titanium Boride","filepath": "foo/bar/srd13_B-101.json","@type": ["nrdp:DataFile","nrdp:DownloadableFile","dcat:Distribution"]},{ "title": "foo bar goo", "filepath": "foo/bar/goo", "@type": ["nrdp:Subcollection"]}] +[{"@id": "cmps/foo/bar", "@type": ["nrdp:Subcollection"], "filepath": "foo/bar", "_extensionSchemas": [ "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/Subcollection" ]},{ "title": "Titanium Boride", "filepath": "foo/srd13_B-101.json","@type": ["nrdp:DataFile","nrdp:DownloadableFile","dcat:Distribution"]},{ "title": "foo", 
"filepath": "foo", "@type": [ "nrdp:Subcollection"]},{"title": "Titanium Boride","filepath": "foo/bar/srd13_B-101.json","@type": ["nrdp:DataFile","nrdp:DownloadableFile","dcat:Distribution"]},{ "title": "foo bar goo", "filepath": "foo/bar/goo", "@type": ["nrdp:Subcollection"]}] diff --git a/jq/tests/test_podds2resource.py b/jq/tests/test_podds2resource.py index 134d682..e308f39 100755 --- a/jq/tests/test_podds2resource.py +++ b/jq/tests/test_podds2resource.py @@ -1,11 +1,12 @@ -#!/usr/bin/python +#!/usr/bin/env python # import os, unittest, json, subprocess as subproc, types, pdb +from collections import OrderedDict import ejsonschema as ejs -nerdm = "https://data.nist.gov/od/dm/nerdm-schema/v0.6#" -nerdmpub = "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#" -nerdmbib = "https://data.nist.gov/od/dm/nerdm-schema/bib/v0.6#" +nerdm = "https://data.nist.gov/od/dm/nerdm-schema/v0.7#" +nerdmpub = "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#" +nerdmbib = "https://data.nist.gov/od/dm/nerdm-schema/bib/v0.7#" datadir = os.path.join(os.path.dirname(__file__), "data") janaffile = os.path.join(datadir, "janaf_pod.json") corrfile = os.path.join(datadir, "CORR-DATA.json") @@ -21,38 +22,38 @@ def setUp(self): self.out = send_file_thru_jq('nerdm::podds2resource', janaffile, {"id": "ark:ID"}) - def test_id(self): self.assertEquals(self.out['@id'], "ark:ID") - def test_al(self): self.assertEquals(self.out['accessLevel'], "public") + def test_id(self): self.assertEqual(self.out['@id'], "ark:ID") + def test_al(self): self.assertEqual(self.out['accessLevel'], "public") def test_rights(self): - self.assertEquals(self.out['rights'], "data is free to use") + self.assertEqual(self.out['rights'], "data is free to use") def test_context(self): - self.assertEquals(self.out['@context'], + self.assertEqual(self.out['@context'], [ "https://data.nist.gov/od/dm/nerdm-pub-context.jsonld", {"@base": "ark:ID"} ]) def test_schema(self): - self.assertEquals(self.out['_schema'], - 
"https://data.nist.gov/od/dm/nerdm-schema/v0.6#") + self.assertEqual(self.out['_schema'], + "https://data.nist.gov/od/dm/nerdm-schema/v0.7#") def test_extsch(self): exts = self.out['_extensionSchemas'] - self.assertEquals(len(exts), 1) + self.assertEqual(len(exts), 1) self.assertIn(nerdmpub+"/definitions/PublicDataResource", exts) def test_restypes(self): types = self.out['@type'] self.assertIsInstance(types, list) - self.assertEquals(len(types), 3) - self.assertEquals(types[0], "nrd:SRD") - self.assertEquals(types[1], "nrdp:PublicDataResource") - self.assertEquals(types[2], "dcat:Dataset") + self.assertEqual(len(types), 3) + self.assertEqual(types[0], "nrd:SRD") + self.assertEqual(types[1], "nrdp:PublicDataResource") + self.assertEqual(types[2], "dcat:Dataset") def test_arestr(self): props = "title modified ediid landingPage license".split() for prop in props: self.assertIn(prop, self.out, "Property not found: " + prop) - self.assertIsInstance(self.out[prop], types.StringTypes, + self.assertIsInstance(self.out[prop], (str,), "Property '{0}' not a string: {1}".format(prop, self.out[prop])) def test_arearrays(self): @@ -83,18 +84,18 @@ def test_components(self): props = "title describedBy downloadURL mediaType filepath".split() for prop in props: self.assertIn(prop, comps[0], "Property not found: " + prop) - self.assertIsInstance(comps[0][prop], types.StringTypes, + self.assertIsInstance(comps[0][prop], (str,), "Property '{0}' not a string: {1}".format(prop, comps[0][prop])) exts = comps[0]['_extensionSchemas'] - self.assertEquals(len(exts), 1) + self.assertEqual(len(exts), 1) self.assertIn(nerdmpub+"/definitions/DataFile", exts) typs = comps[0]['@type'] - self.assertEquals(len(typs), 3) - self.assertEquals(typs[0], "nrdp:DataFile") - self.assertEquals(typs[1], "nrdp:DownloadableFile") - self.assertEquals(typs[2], "dcat:Distribution") + self.assertEqual(len(typs), 3) + self.assertEqual(typs[0], "nrdp:DataFile") + self.assertEqual(typs[1], "nrdp:DownloadableFile") + 
self.assertEqual(typs[2], "dcat:Distribution") props = "describedBy downloadURL".split() for prop in props: @@ -103,17 +104,17 @@ def test_components(self): def test_references(self): refs =self.out['references'] - self.assertEquals(len(refs), 1) + self.assertEqual(len(refs), 1) self.assertIsInstance(refs[0]['@type'], list) - self.assertIsInstance(refs[0]['@type'][0], types.StringTypes) - self.assertEquals(refs[0]['@type'], ["deo:BibliographicReference"]) - self.assertEquals(refs[0]['refType'], "IsSupplementTo") - self.assertEquals(refs[0]['location'], + self.assertIsInstance(refs[0]['@type'][0], (str,)) + self.assertEqual(refs[0]['@type'], ["deo:BibliographicReference"]) + self.assertEqual(refs[0]['refType'], "IsSupplementTo") + self.assertEqual(refs[0]['location'], "http://www.nist.gov/data/PDFfiles/jpcrdS1V14.pdf") exts = refs[0]['_extensionSchemas'] - self.assertEquals(len(exts), 1) + self.assertEqual(len(exts), 1) self.assertIn(nerdmbib+"/definitions/DCiteReference", exts) def test_hierarchy(self): @@ -140,34 +141,34 @@ def setUp(self): self.out = send_file_thru_jq('nerdm::podds2resource', corrfile, {"id": "ark:ID"}) - def test_id(self): self.assertEquals(self.out['@id'], "ark:ID") - def test_al(self): self.assertEquals(self.out['accessLevel'], "public") + def test_id(self): self.assertEqual(self.out['@id'], "ark:ID") + def test_al(self): self.assertEqual(self.out['accessLevel'], "public") def test_context(self): - self.assertEquals(self.out['@context'], + self.assertEqual(self.out['@context'], [ "https://data.nist.gov/od/dm/nerdm-pub-context.jsonld", {"@base": "ark:ID"} ]) def test_schema(self): - self.assertEquals(self.out['_schema'], - "https://data.nist.gov/od/dm/nerdm-schema/v0.6#") + self.assertEqual(self.out['_schema'], + "https://data.nist.gov/od/dm/nerdm-schema/v0.7#") def test_extsch(self): exts = self.out['_extensionSchemas'] - self.assertEquals(len(exts), 1) + self.assertEqual(len(exts), 1) self.assertIn(nerdmpub+"/definitions/PublicDataResource", 
exts) def test_restypes(self): types = self.out['@type'] self.assertIsInstance(types, list) - self.assertEquals(len(types), 2) - self.assertEquals(types[0], "nrdp:PublicDataResource") - self.assertEquals(types[1], "dcat:Dataset") + self.assertEqual(len(types), 2) + self.assertEqual(types[0], "nrdp:PublicDataResource") + self.assertEqual(types[1], "dcat:Dataset") def test_arestr(self): props = "title modified ediid landingPage license".split() for prop in props: self.assertIn(prop, self.out, "Property not found: " + prop) - self.assertIsInstance(self.out[prop], types.StringTypes, + self.assertIsInstance(self.out[prop], (str,), "Property '{0}' not a string: {1}".format(prop, self.out[prop])) def test_arearrays(self): @@ -182,7 +183,7 @@ def test_description(self): self.assertEqual(len(self.out['description']), 3) def test_ediid(self): - self.assertEquals(self.out['ediid'], + self.assertEqual(self.out['ediid'], "54AE54FB37AC022DE0531A570681D4291851") def test_components(self): @@ -195,18 +196,18 @@ def test_components(self): props = "downloadURL mediaType filepath".split() for prop in props: self.assertIn(prop, comps[3], "Property not found: " + prop) - self.assertIsInstance(comps[3][prop], types.StringTypes, + self.assertIsInstance(comps[3][prop], (str,), "Property '{0}' not a string: {1}".format(prop, comps[3][prop])) exts = comps[3]['_extensionSchemas'] - self.assertEquals(len(exts), 1) + self.assertEqual(len(exts), 1) self.assertIn(nerdmpub+"/definitions/DataFile", exts) typs = comps[3]['@type'] - self.assertEquals(len(typs), 3) - self.assertEquals(typs[0], "nrdp:DataFile") - self.assertEquals(typs[1], "nrdp:DownloadableFile") - self.assertEquals(typs[2], "dcat:Distribution") + self.assertEqual(len(typs), 3) + self.assertEqual(typs[0], "nrdp:DataFile") + self.assertEqual(typs[1], "nrdp:DownloadableFile") + self.assertEqual(typs[2], "dcat:Distribution") props = "downloadURL".split() for prop in props: @@ -245,18 +246,18 @@ def setUp(self): self.out = 
send_file_thru_jq('nerdm::podds2resource', minfile, {"id": "ark:ID"}) - def test_id(self): self.assertEquals(self.out['@id'], "ark:ID") - def test_al(self): self.assertEquals(self.out['accessLevel'], "public") + def test_id(self): self.assertEqual(self.out['@id'], "ark:ID") + def test_al(self): self.assertEqual(self.out['accessLevel'], "public") def test_context(self): - self.assertEquals(self.out['@context'], - [ "https://data.nist.gov/od/dm/nerdm-pub-context.jsonld", - {"@base": "ark:ID"} ]) + self.assertEqual(self.out['@context'], + [ "https://data.nist.gov/od/dm/nerdm-pub-context.jsonld", + {"@base": "ark:ID"} ]) def test_arestr(self): props = "title modified ediid landingPage".split() for prop in props: self.assertIn(prop, self.out, "Property not found: " + prop) - self.assertIsInstance(self.out[prop], types.StringTypes, + self.assertIsInstance(self.out[prop], (str,), "Property '{0}' not a string: {1}".format(prop, self.out[prop])) def test_default_landingPage(self): @@ -318,7 +319,7 @@ def impnerdm(filter): cmd.append(impnerdm(jqfilter)) proc = subproc.Popen(cmd, stdout=subproc.PIPE, stderr=subproc.PIPE, - stdin=subproc.PIPE) + stdin=subproc.PIPE, universal_newlines=True) (out, err) = proc.communicate(datastr) if proc.returncode != 0: @@ -339,7 +340,7 @@ def send_file_thru_jq(jqfilter, filepath, args=None): with open(filepath): pass - if not isinstance(jqfilter, types.StringTypes): + if not isinstance(jqfilter, (str,)): raise ValueError("jqfilter parameter not a string: " + str(jqfilter)) cmd = "jq -L {0}".format(jqlib).split() + argopts @@ -349,7 +350,8 @@ def impnerdm(filter): cmd.extend([impnerdm(jqfilter), filepath]) - proc = subproc.Popen(cmd, stdout=subproc.PIPE, stderr=subproc.PIPE) + proc = subproc.Popen(cmd, stdout=subproc.PIPE, stderr=subproc.PIPE, + universal_newlines=True) (out, err) = proc.communicate() if proc.returncode != 0: @@ -372,7 +374,7 @@ def formatcmd(cmd): class TestSelf(unittest.TestCase): def test_format_argopts(self): - opts = 
format_argopts({"id": "ark:ID", "goober": [ 1, 2 ]}) + opts = format_argopts(OrderedDict([("id", "ark:ID"), ("goober", [ 1, 2 ])])) self.assertEqual(opts, ['--argjson', 'id', '"ark:ID"', '--argjson', 'goober', '[1, 2]']) @@ -391,17 +393,17 @@ def test_send_file_badfilter(self): def test_formatcmd(self): cmd = ['jq', '-L', 'jqlib', 'import "pod2nerdm" as nerdm; .accessLevel', 'janaf_pod.json'] - self.assertEquals(formatcmd(cmd), + self.assertEqual(formatcmd(cmd), "jq -L jqlib 'import \"pod2nerdm\" as nerdm; .accessLevel' janaf_pod.json") def test_send_file(self): out = send_file_thru_jq(".accessLevel", janaffile) - self.assertEquals(out, 'public') + self.assertEqual(out, 'public') def test_send_file_w_args(self): out = send_file_thru_jq(".accessLevel", janaffile, {"id": "ID", "goob": "gurn"}) - self.assertEquals(out, 'public') + self.assertEqual(out, 'public') if __name__ == '__main__': diff --git a/jq/tests/test_resource2midaspodds.py b/jq/tests/test_resource2midaspodds.py index 5f620e0..d6af1ec 100644 --- a/jq/tests/test_resource2midaspodds.py +++ b/jq/tests/test_resource2midaspodds.py @@ -106,7 +106,7 @@ def send_file_thru_jq(jqfilter, filepath, args=None): with open(filepath): pass - if not isinstance(jqfilter, types.StringTypes): + if not isinstance(jqfilter, (str,)): raise ValueError("jqfilter parameter not a string: " + str(jqfilter)) cmd = "jq -L {0}".format(jqlib).split() + argopts diff --git a/model/examples/hitsc-0.1.json b/model/examples/hitsc-0.1.json index 7b9a903..76a9251 100644 --- a/model/examples/hitsc-0.1.json +++ b/model/examples/hitsc-0.1.json @@ -3,7 +3,7 @@ "_schema": "https://data.nist.gov/od/dm/nerdm-schema/v0.1#", "_extensionSchemas": [ "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.1#/definitions/DataPublication" ], - "@type": [ "nrdp:Database", "nrdp:SRD", "nrdp:PublishedDataResource" ], + "@type": [ "nrdp:Database", "nrdp:SRD", "nrdp:PublicDataResource" ], "@id": "ark:/88434/sdp0fjspek353", "title": "NIST High Temperature 
Superconducting Materials Database - SRD 62", "abbrev": [ "SRD#62" ], diff --git a/model/examples/hitsc-0.2.json b/model/examples/hitsc-0.2.json index 46832e0..7d19833 100644 --- a/model/examples/hitsc-0.2.json +++ b/model/examples/hitsc-0.2.json @@ -3,7 +3,7 @@ "_schema": "https://data.nist.gov/od/dm/nerdm-schema/v0.2#", "_extensionSchemas": [ "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.2#/definitions/DataPublication" ], - "@type": [ "nrdp:Database", "nrdp:SRD", "nrdp:PublishedDataResource" ], + "@type": [ "nrdp:Database", "nrdp:SRD", "nrdp:PublicDataResource" ], "@id": "ark:/88434/sdp0fjspek353", "title": "NIST High Temperature Superconducting Materials Database - SRD 62", "version": "1.0", diff --git a/model/examples/hitsc.json b/model/examples/hitsc.json index dd16842..892c05a 100644 --- a/model/examples/hitsc.json +++ b/model/examples/hitsc.json @@ -1,9 +1,9 @@ { "@context": "https://data.nist.gov/od/dm/nerdm-pub-context.jsonld", - "_schema": "https://data.nist.gov/od/dm/nerdm-schema/v0.6#", - "_extensionSchemas": [ "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataPublication" ], + "_schema": "https://data.nist.gov/od/dm/nerdm-schema/v0.7#", + "_extensionSchemas": [ "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataPublication" ], - "@type": [ "nrdp:Database", "nrdp:SRD", "nrdp:PublishedDataResource" ], + "@type": [ "nrdp:Database", "nrdp:SRD", "nrdp:PublicDataResource" ], "@id": "ark:/88434/sdp0fjspek353", "title": "NIST High Temperature Superconducting Materials Database - SRD 62", "version": "1.0", @@ -26,6 +26,7 @@ { "@id": "ark:/88434/sdp0fjspek352", "title": "NIST Ceramics WebBook - SRD 151", + "proxyFor": "ark:/88434/sdp0fjspek352", "@type": [ "nrdp:Portal", "nrdp:SRD" ] } ], @@ -61,7 +62,7 @@ "label": "User Manual", "location": "https://srdata.nist.gov/CeramicDataPortal/Manual/HtsHelper", "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/bib/v0.6#/definitions/DCiteReference" + 
"https://data.nist.gov/od/dm/nerdm-schema/bib/v0.7#/definitions/DCiteReference" ] } ], diff --git a/model/examples/janaf.json b/model/examples/janaf.json index bb4fda8..ab1aaca 100644 --- a/model/examples/janaf.json +++ b/model/examples/janaf.json @@ -1,7 +1,7 @@ { "@context": "https://data.nist.gov/od/dm/nerdm-pub-context.jsonld", - "_schema": "https://data.nist.gov/od/dm/nerdm-schema/v0.6#", - "_extensionSchemas": [ "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataPublication" ], + "_schema": "https://data.nist.gov/od/dm/nerdm-schema/v0.7#", + "_extensionSchemas": [ "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataPublication" ], "@type": [ "nrd:SRD", "nrdp:DataPublication", "nrdp:PublicDataResource" ], "@id": "ark:/88434/sdp0fjspek351", @@ -117,7 +117,7 @@ "label": "JPCRD Monograph: NIST-JANAF Thermochemical Tables, Pt. 1 (AL-C", "location": "http://kinetics.nist.gov/janaf/pdf/JANAF-FourthEd-1998-1Vol1-Intro.pdf", "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/bib/v0.6#/definitions/DCiteReference" + "https://data.nist.gov/od/dm/nerdm-schema/bib/v0.7#/definitions/DCiteReference" ] }, { @@ -127,7 +127,7 @@ "label": "JPCRD Monograph: NIST-JANAF Thermochemical Tables, Pt. 
2 (Cr-Zr)", "location": "http://kinetics.nist.gov/janaf/pdf/JANAF-FourthEd-1998-1Vol2-Intro.pdf", "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/bib/v0.6#/definitions/DCiteReference" + "https://data.nist.gov/od/dm/nerdm-schema/bib/v0.7#/definitions/DCiteReference" ] } ], @@ -477,7 +477,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-101.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-101.json", @@ -488,7 +488,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-102.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-102.json", @@ -499,7 +499,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-103.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-103.json", @@ -510,7 +510,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-104.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-104.json", @@ -521,7 +521,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-105.json", "downloadURL": 
"http://www.nist.gov/srd/srd_data/srd13_B-105.json", @@ -532,7 +532,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-106.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-106.json", @@ -543,7 +543,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-107.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-107.json", @@ -554,7 +554,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-108.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-108.json", @@ -565,7 +565,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-109.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-109.json", @@ -576,7 +576,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-110.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-110.json", @@ -587,7 +587,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + 
"https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-111.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-111.json", @@ -598,7 +598,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-112.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-112.json", @@ -609,7 +609,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-113.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-113.json", @@ -620,7 +620,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-114.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-114.json", @@ -631,7 +631,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-115.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-115.json", @@ -642,7 +642,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-116.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-116.json", @@ -653,7 +653,7 @@ { "@type": [ "nrdp:DataFile", 
"dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-117.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-117.json", @@ -664,7 +664,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-118.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-118.json", @@ -675,7 +675,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-119.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-119.json", @@ -686,7 +686,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-120.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-120.json", @@ -697,7 +697,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-121.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-121.json", @@ -708,7 +708,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-122.json", 
"downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-122.json", @@ -719,7 +719,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-123.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-123.json", @@ -730,7 +730,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-124.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-124.json", @@ -741,11 +741,11 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-125.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-125.json", @@ -756,11 +756,11 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-126.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-126.json", @@ -771,7 +771,7 @@ { "@type": [ "nrdp:DataFile", 
"dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-127.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-127.json", @@ -782,7 +782,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-128.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-128.json", @@ -793,7 +793,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-129.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-129.json", @@ -804,7 +804,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-130.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-130.json", @@ -815,7 +815,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-131.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-131.json", @@ -826,7 +826,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-132.json", 
"downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-132.json", @@ -837,7 +837,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-133.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-133.json", @@ -848,7 +848,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-134.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-134.json", @@ -859,7 +859,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-135.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-135.json", @@ -870,7 +870,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-136.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-136.json", @@ -881,7 +881,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-137.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-137.json", @@ -892,7 +892,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + 
"https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-138.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-138.json", @@ -903,7 +903,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-139.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-139.json", @@ -914,7 +914,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_janaf-zipfile.zip", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_janaf-zipfile.zip", @@ -925,7 +925,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_janaf.species.json", @@ -937,7 +937,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-001.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-001.json", @@ -948,7 +948,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-002.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-002.json", @@ -959,7 +959,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - 
"https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-003.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-003.json", @@ -970,7 +970,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-004.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-004.json", @@ -981,7 +981,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-005.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-005.json", @@ -992,7 +992,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-006.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-006.json", @@ -1003,7 +1003,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-007.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-007.json", @@ -1014,7 +1014,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-008.json", "downloadURL": 
"http://www.nist.gov/srd/srd_data/srd13_Al-008.json", @@ -1025,7 +1025,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-009.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-009.json", @@ -1036,7 +1036,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-010.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-010.json", @@ -1047,7 +1047,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-011.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-011.json", @@ -1058,7 +1058,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-012.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-012.json", @@ -1069,7 +1069,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-013.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-013.json", @@ -1080,7 +1080,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - 
"https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-014.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-014.json", @@ -1091,7 +1091,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-015.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-015.json", @@ -1102,7 +1102,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-016.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-016.json", @@ -1113,7 +1113,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-017.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-017.json", @@ -1124,7 +1124,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-018.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-018.json", @@ -1135,7 +1135,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-019.json", "downloadURL": 
"http://www.nist.gov/srd/srd_data/srd13_Al-019.json", @@ -1146,7 +1146,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-020.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-020.json", @@ -1157,7 +1157,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-021.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-021.json", @@ -1168,7 +1168,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-022.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-022.json", @@ -1179,7 +1179,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-023.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-023.json", @@ -1190,7 +1190,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-024.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-024.json", @@ -1201,7 +1201,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - 
"https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-025.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-025.json", @@ -1212,7 +1212,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-026.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-026.json", @@ -1223,7 +1223,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-027.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-027.json", @@ -1234,7 +1234,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-028.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-028.json", @@ -1245,7 +1245,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-029.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-029.json", @@ -1256,7 +1256,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-030.json", "downloadURL": 
"http://www.nist.gov/srd/srd_data/srd13_Al-030.json", @@ -1267,7 +1267,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-031.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-031.json", @@ -1278,7 +1278,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-032.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-032.json", @@ -1289,7 +1289,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-033.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-033.json", @@ -1300,7 +1300,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-034.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-034.json", @@ -1311,7 +1311,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-035.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-035.json", @@ -1322,7 +1322,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - 
"https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-036.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-036.json", @@ -1333,7 +1333,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-037.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-037.json", @@ -1344,7 +1344,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-038.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-038.json", @@ -1355,7 +1355,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-039.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-039.json", @@ -1366,7 +1366,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-040.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-040.json", @@ -1377,7 +1377,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-041.json", "downloadURL": 
"http://www.nist.gov/srd/srd_data/srd13_Al-041.json", @@ -1388,7 +1388,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-042.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-042.json", @@ -1399,7 +1399,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-043.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-043.json", @@ -1410,7 +1410,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-044.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-044.json", @@ -1421,7 +1421,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-045.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-045.json", @@ -1432,7 +1432,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-046.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-046.json", @@ -1443,7 +1443,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - 
"https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-047.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-047.json", @@ -1454,7 +1454,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-048.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-048.json", @@ -1465,7 +1465,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-049.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-049.json", @@ -1476,7 +1476,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-050.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-050.json", @@ -1487,7 +1487,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-051.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-051.json", @@ -1498,7 +1498,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-052.json", "downloadURL": 
"http://www.nist.gov/srd/srd_data/srd13_Al-052.json", @@ -1509,7 +1509,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-053.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-053.json", @@ -1520,7 +1520,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-054.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-054.json", @@ -1531,7 +1531,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-055.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-055.json", @@ -1542,7 +1542,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-056.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-056.json", @@ -1553,7 +1553,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-057.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-057.json", @@ -1564,7 +1564,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - 
"https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-058.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-058.json", @@ -1575,7 +1575,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-059.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-059.json", @@ -1586,7 +1586,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-060.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-060.json", @@ -1597,7 +1597,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-061.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-061.json", @@ -1608,7 +1608,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-062.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-062.json", @@ -1619,7 +1619,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-063.json", "downloadURL": 
"http://www.nist.gov/srd/srd_data/srd13_Al-063.json", @@ -1630,7 +1630,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-064.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-064.json", @@ -1641,7 +1641,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-065.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-065.json", @@ -1652,7 +1652,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-066.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-066.json", @@ -1663,7 +1663,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-067.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-067.json", @@ -1674,7 +1674,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-068.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-068.json", @@ -1685,7 +1685,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - 
"https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-069.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-069.json", @@ -1696,7 +1696,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-070.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-070.json", @@ -1707,7 +1707,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-071.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-071.json", @@ -1718,7 +1718,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-072.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-072.json", @@ -1729,7 +1729,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-073.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-073.json", @@ -1740,7 +1740,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-074.json", "downloadURL": 
"http://www.nist.gov/srd/srd_data/srd13_Al-074.json", @@ -1751,7 +1751,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-075.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-075.json", @@ -1762,7 +1762,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-076.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-076.json", @@ -1773,7 +1773,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-077.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-077.json", @@ -1784,7 +1784,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-078.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-078.json", @@ -1795,7 +1795,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-079.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-079.json", @@ -1806,7 +1806,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - 
"https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-080.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-080.json", @@ -1817,7 +1817,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-081.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-081.json", @@ -1828,7 +1828,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-082.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-082.json", @@ -1839,7 +1839,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-083.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-083.json", @@ -1850,7 +1850,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-084.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-084.json", @@ -1861,7 +1861,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-085.json", "downloadURL": 
"http://www.nist.gov/srd/srd_data/srd13_Al-085.json", @@ -1872,7 +1872,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Rn-002.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Rn-002.json", @@ -1883,7 +1883,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-086.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-086.json", @@ -1894,7 +1894,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-087.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-087.json", @@ -1905,7 +1905,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-088.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-088.json", @@ -1916,7 +1916,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-089.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-089.json", @@ -1927,7 +1927,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - 
"https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-090.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-090.json", @@ -1938,7 +1938,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-091.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-091.json", @@ -1949,7 +1949,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-092.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-092.json", @@ -1960,7 +1960,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-093.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-093.json", @@ -1971,7 +1971,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-094.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-094.json", @@ -1982,7 +1982,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-095.json", "downloadURL": 
"http://www.nist.gov/srd/srd_data/srd13_Al-095.json", @@ -1993,7 +1993,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-096.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-096.json", @@ -2004,7 +2004,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-097.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-097.json", @@ -2015,7 +2015,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-098.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-098.json", @@ -2026,7 +2026,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-099.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-099.json", @@ -2037,7 +2037,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-100.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-100.json", @@ -2048,7 +2048,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - 
"https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-101.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-101.json", @@ -2059,7 +2059,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-102.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-102.json", @@ -2070,7 +2070,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-103.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-103.json", @@ -2081,7 +2081,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-104.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-104.json", @@ -2092,7 +2092,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-105.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-105.json", @@ -2103,7 +2103,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-106.json", "downloadURL": 
"http://www.nist.gov/srd/srd_data/srd13_Al-106.json", @@ -2114,7 +2114,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-107.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-107.json", @@ -2125,7 +2125,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-108.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-108.json", @@ -2136,7 +2136,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-109.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-109.json", @@ -2147,7 +2147,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-110.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-110.json", @@ -2158,7 +2158,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-111.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-111.json", @@ -2169,7 +2169,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - 
"https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Al-112.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Al-112.json", @@ -2180,7 +2180,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Ar-001.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Ar-001.json", @@ -2191,7 +2191,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Ar-002.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Ar-002.json", @@ -2202,7 +2202,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-001.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-001.json", @@ -2213,7 +2213,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-002.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-002.json", @@ -2224,7 +2224,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-003.json", "downloadURL": 
"http://www.nist.gov/srd/srd_data/srd13_B-003.json", @@ -2235,7 +2235,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-004.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-004.json", @@ -2246,7 +2246,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-005.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-005.json", @@ -2257,7 +2257,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-006.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-006.json", @@ -2268,7 +2268,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-007.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-007.json", @@ -2279,7 +2279,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-008.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-008.json", @@ -2290,7 +2290,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + 
"https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-009.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-009.json", @@ -2301,7 +2301,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-010.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-010.json", @@ -2312,7 +2312,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-011.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-011.json", @@ -2323,7 +2323,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-012.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-012.json", @@ -2334,7 +2334,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-013.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-013.json", @@ -2345,7 +2345,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-014.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-014.json", @@ -2356,7 +2356,7 @@ { "@type": [ "nrdp:DataFile", 
"dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-015.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-015.json", @@ -2367,7 +2367,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-016.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-016.json", @@ -2378,7 +2378,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-017.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-017.json", @@ -2389,7 +2389,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-018.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-018.json", @@ -2400,7 +2400,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-019.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-019.json", @@ -2411,7 +2411,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-020.json", 
"downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-020.json", @@ -2422,7 +2422,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-021.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-021.json", @@ -2433,7 +2433,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-022.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-022.json", @@ -2444,7 +2444,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-023.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-023.json", @@ -2455,7 +2455,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-024.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-024.json", @@ -2466,7 +2466,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-025.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-025.json", @@ -2477,7 +2477,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - 
"https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-026.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-026.json", @@ -2488,7 +2488,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-027.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-027.json", @@ -2499,7 +2499,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-028.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-028.json", @@ -2510,7 +2510,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-029.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-029.json", @@ -2521,7 +2521,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-030.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-030.json", @@ -2532,7 +2532,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-031.json", "downloadURL": 
"http://www.nist.gov/srd/srd_data/srd13_B-031.json", @@ -2543,7 +2543,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-032.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-032.json", @@ -2554,7 +2554,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-033.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-033.json", @@ -2565,7 +2565,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-034.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-034.json", @@ -2576,7 +2576,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-035.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-035.json", @@ -2587,7 +2587,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-036.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-036.json", @@ -2598,7 +2598,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + 
"https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-037.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-037.json", @@ -2609,7 +2609,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-038.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-038.json", @@ -2620,7 +2620,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-039.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-039.json", @@ -2631,7 +2631,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-040.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-040.json", @@ -2642,7 +2642,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-041.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-041.json", @@ -2653,7 +2653,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-042.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-042.json", @@ -2664,7 +2664,7 @@ { "@type": [ "nrdp:DataFile", 
"dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-043.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-043.json", @@ -2675,7 +2675,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-044.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-044.json", @@ -2686,7 +2686,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-045.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-045.json", @@ -2697,7 +2697,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-046.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-046.json", @@ -2708,7 +2708,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-047.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-047.json", @@ -2719,7 +2719,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-048.json", 
"downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-048.json", @@ -2730,7 +2730,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-049.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-049.json", @@ -2741,7 +2741,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-050.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-050.json", @@ -2752,7 +2752,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-051.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-051.json", @@ -2763,7 +2763,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-052.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-052.json", @@ -2774,7 +2774,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-053.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-053.json", @@ -2785,7 +2785,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - 
"https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-054.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-054.json", @@ -2796,7 +2796,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-055.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-055.json", @@ -2807,7 +2807,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-056.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-056.json", @@ -2818,7 +2818,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-057.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-057.json", @@ -2829,7 +2829,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-058.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-058.json", @@ -2840,7 +2840,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-059.json", "downloadURL": 
"http://www.nist.gov/srd/srd_data/srd13_B-059.json", @@ -2851,7 +2851,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-060.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-060.json", @@ -2862,7 +2862,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-061.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-061.json", @@ -2873,7 +2873,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-062.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-062.json", @@ -2884,7 +2884,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-063.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-063.json", @@ -2895,7 +2895,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-064.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-064.json", @@ -2906,7 +2906,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + 
"https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-065.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-065.json", @@ -2917,7 +2917,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-066.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-066.json", @@ -2928,7 +2928,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-067.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-067.json", @@ -2939,7 +2939,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-068.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-068.json", @@ -2950,7 +2950,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-069.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-069.json", @@ -2961,7 +2961,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-070.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-070.json", @@ -2972,7 +2972,7 @@ { "@type": [ "nrdp:DataFile", 
"dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-071.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-071.json", @@ -2983,7 +2983,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-072.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-072.json", @@ -2994,7 +2994,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-073.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-073.json", @@ -3005,7 +3005,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-074.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-074.json", @@ -3016,7 +3016,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-075.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-075.json", @@ -3027,7 +3027,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-076.json", 
"downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-076.json", @@ -3038,7 +3038,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-077.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-077.json", @@ -3049,7 +3049,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-078.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-078.json", @@ -3060,7 +3060,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-079.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-079.json", @@ -3071,7 +3071,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-080.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-080.json", @@ -3082,7 +3082,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-081.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-081.json", @@ -3093,7 +3093,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - 
"https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-082.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-082.json", @@ -3104,7 +3104,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-083.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-083.json", @@ -3115,7 +3115,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-084.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-084.json", @@ -3126,7 +3126,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-085.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-085.json", @@ -3137,7 +3137,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-086.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-086.json", @@ -3148,7 +3148,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-087.json", "downloadURL": 
"http://www.nist.gov/srd/srd_data/srd13_B-087.json", @@ -3159,7 +3159,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-088.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-088.json", @@ -3170,7 +3170,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-089.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-089.json", @@ -3181,7 +3181,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-090.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-090.json", @@ -3192,7 +3192,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-091.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-091.json", @@ -3203,7 +3203,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-092.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-092.json", @@ -3214,7 +3214,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + 
"https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-093.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-093.json", @@ -3225,7 +3225,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-094.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-094.json", @@ -3236,7 +3236,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-095.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-095.json", @@ -3247,7 +3247,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-096.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-096.json", @@ -3258,7 +3258,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-097.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-097.json", @@ -3269,7 +3269,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-098.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-098.json", @@ -3280,7 +3280,7 @@ { "@type": [ "nrdp:DataFile", 
"dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-099.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-099.json", @@ -3291,7 +3291,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-100.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-100.json", @@ -3302,7 +3302,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_B-140.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_B-140.json", @@ -3313,7 +3313,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Ba-001.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Ba-001.json", @@ -3324,7 +3324,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Ba-002.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Ba-002.json", @@ -3335,7 +3335,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": 
"srd13_Ba-003.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Ba-003.json", @@ -3346,7 +3346,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Br-004.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Br-004.json", @@ -3357,7 +3357,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Br-005.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Br-005.json", @@ -3368,7 +3368,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Br-006.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Br-006.json", @@ -3379,7 +3379,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Br-007.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Br-007.json", @@ -3390,7 +3390,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Br-008.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Br-008.json", @@ -3401,7 +3401,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - 
"https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Br-009.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Br-009.json", @@ -3412,7 +3412,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Br-010.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Br-010.json", @@ -3423,7 +3423,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Br-011.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Br-011.json", @@ -3434,7 +3434,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Br-012.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Br-012.json", @@ -3445,7 +3445,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Br-013.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Br-013.json", @@ -3456,7 +3456,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Br-014.json", "downloadURL": 
"http://www.nist.gov/srd/srd_data/srd13_Br-014.json", @@ -3467,7 +3467,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Br-015.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Br-015.json", @@ -3478,7 +3478,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Br-016.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Br-016.json", @@ -3489,7 +3489,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Br-017.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Br-017.json", @@ -3500,7 +3500,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Br-018.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Br-018.json", @@ -3511,7 +3511,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Br-019.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Br-019.json", @@ -3522,7 +3522,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - 
"https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Br-020.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Br-020.json", @@ -3533,7 +3533,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Br-021.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Br-021.json", @@ -3544,7 +3544,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Br-022.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Br-022.json", @@ -3555,7 +3555,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Br-023.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Br-023.json", @@ -3566,7 +3566,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Br-024.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Br-024.json", @@ -3577,7 +3577,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Br-025.json", "downloadURL": 
"http://www.nist.gov/srd/srd_data/srd13_Br-025.json", @@ -3588,7 +3588,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Br-026.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Br-026.json", @@ -3599,7 +3599,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Br-027.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Br-027.json", @@ -3610,7 +3610,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Br-028.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Br-028.json", @@ -3621,7 +3621,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Br-029.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Br-029.json", @@ -3632,7 +3632,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Br-030.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Br-030.json", @@ -3643,7 +3643,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - 
"https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Br-031.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Br-031.json", @@ -3654,7 +3654,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Br-032.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Br-032.json", @@ -3665,7 +3665,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Br-033.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Br-033.json", @@ -3676,7 +3676,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Br-034.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Br-034.json", @@ -3687,7 +3687,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Br-035.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Br-035.json", @@ -3698,7 +3698,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Br-036.json", "downloadURL": 
"http://www.nist.gov/srd/srd_data/srd13_Br-036.json", @@ -3709,7 +3709,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Br-037.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Br-037.json", @@ -3720,7 +3720,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Br-038.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Br-038.json", @@ -3731,7 +3731,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Br-039.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Br-039.json", @@ -3742,7 +3742,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Br-040.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Br-040.json", @@ -3753,7 +3753,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Br-041.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Br-041.json", @@ -3764,7 +3764,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - 
"https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Br-042.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Br-042.json", @@ -3775,7 +3775,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Br-043.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Br-043.json", @@ -3786,7 +3786,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Br-044.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Br-044.json", @@ -3797,7 +3797,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Br-045.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Br-045.json", @@ -3808,7 +3808,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Br-046.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Br-046.json", @@ -3819,7 +3819,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Br-047.json", "downloadURL": 
"http://www.nist.gov/srd/srd_data/srd13_Br-047.json", @@ -3830,7 +3830,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Br-048.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Br-048.json", @@ -3841,7 +3841,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Br-049.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Br-049.json", @@ -3852,7 +3852,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Br-050.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Br-050.json", @@ -3863,7 +3863,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Br-051.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Br-051.json", @@ -3874,7 +3874,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Br-052.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Br-052.json", @@ -3885,7 +3885,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - 
"https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Br-053.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Br-053.json", @@ -3896,7 +3896,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Br-054.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Br-054.json", @@ -3907,7 +3907,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Br-055.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Br-055.json", @@ -3918,7 +3918,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Br-056.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Br-056.json", @@ -3929,7 +3929,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Br-057.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Br-057.json", @@ -3940,7 +3940,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Br-058.json", "downloadURL": 
"http://www.nist.gov/srd/srd_data/srd13_Br-058.json", @@ -3951,7 +3951,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Br-059.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Br-059.json", @@ -3962,7 +3962,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Br-060.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Br-060.json", @@ -3973,7 +3973,7 @@ { "@type": [ "nrdp:DataFile", "dcat:Distribution" ], "_extensionSchemas": [ - "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/DataFile" + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" ], "filepath": "srd13_Br-061.json", "downloadURL": "http://www.nist.gov/srd/srd_data/srd13_Br-061.json", diff --git a/model/examples/mds2-2106.json b/model/examples/mds2-2106.json new file mode 100644 index 0000000..2d46944 --- /dev/null +++ b/model/examples/mds2-2106.json @@ -0,0 +1,265 @@ +{ + "_schema": "https://data.nist.gov/od/dm/nerdm-schema/v0.7#", + "@context": [ + "https://data.nist.gov/od/dm/nerdm-pub-context.jsonld", + { + "@base": "ark:/88434/mds2-2106" + } + ], + "@type": [ + "nrdp:PublicDataResource", + "dcat:Dataset" + ], + "_extensionSchemas": [ + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/PublicDataResource" + ], + "@id": "ark:/88434/mds2-2106", + "title": "Data from: Collaborative Guarded-Hot-Plate Tests between the National Institute of Standards and Technology and the National Physical Laboratory", + "contactPoint": { + "fn": "John L. 
Pagliaro", + "hasEmail": "mailto:john.pagliaro@nist.gov" + }, + "modified": "2019-08-12", + "ediid": "ark:/88434/mds2-2106", + "landingPage": "https://data.nist.gov/od/id/mds2-2106", + "description": [ + "A bilateral study to compare guarded-hot-plate measurements at extended temperatures between laboratories at the National Institute of Standards and Technology (NIST) and the National Physical Laboratory (NPL) is presented. Measurements were conducted in accordance with standardized test methods (ISO 8302 or ASTM C177) over a temperature range from 20 \u00b0C to 160 \u00b0C (293 K to 433 K). Following a blind round-robin format, specimens of non-woven fibrous glass mat, approximately 22 mm thick and having a nominal bulk density of 200 kg/m3, were prepared and studied. Results of the study show that the thermal conductivity measurements agree over the temperature range of interest to within \u00b11.0 %, or less. See also related \"Data from: Collaborative Guarded-Hot-Plate Tests between the Laboratoire national de m\u00e9trologie et d'essais and the National Institute of Standards and Technology,\" accessible at https://doi.org/10.18434/T4XK5G" + ], + "keyword": [ + "bilateral", + "comparison", + "fibrous glass mat", + "guarded hot plate", + "industrial insulation", + "interlaboratory", + "thermal conductivity" + ], + "theme": [ + "Buildings and Construction: Building materials", + "Materials: Materials characterization" + ], + "topic": [ + { + "@type": "Concept", + "scheme": "https://data.nist.gov/od/dm/nist-themes/v1.1", + "tag": "Buildings and Construction: Building materials" + }, + { + "@type": "Concept", + "scheme": "https://data.nist.gov/od/dm/nist-themes/v1.1", + "tag": "Materials: Materials characterization" + } + ], + "accessLevel": "public", + "license": "https://www.nist.gov/open/license", + "publisher": { + "name": "National Institute of Standards and Technology", + "@type": "org:Organization" + }, + "language": [ + "en" + ], + "bureauCode": [ + 
"006:55" + ], + "programCode": [ + "006:052" + ], + "doi": "doi:10.18434/M32106", + "components": [ + { + "accessURL": "https://doi.org/10.18434/M32106", + "@type": [ + "nrd:Hidden", + "dcat:Distribution" + ], + "@id": "#doi:10.18434/M32106" + }, + { + "@id": "cmps/NIST_NPL_InterlabData2019.csv.sha256", + "@type": [ + "nrdp:ChecksumFile", + "nrdp:DownloadableFile", + "dcat:Distribution" + ], + "filepath": "NIST_NPL_InterlabData2019.csv.sha256", + "downloadURL": "https://data.nist.gov/od/ds/mds2-2106/NIST_NPL_InterlabData2019.csv.sha256", + "algorithm": { + "tag": "sha256", + "@type": "Thing" + }, + "describes": "cmps/NIST_NPL_InterlabData2019.csv", + "description": "SHA-256 checksum value for NIST_NPL_InterlabData2019.csv", + "_extensionSchemas": [ + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/ChecksumFile" + ], + "mediaType": "text/plain", + "title": "SHA256 File for NIST/NPL Interlaboratory Guarded-Hot-Plate Data", + "size": 64, + "checksum": { + "hash": "7819de5f2716864b6b7cfe756fdac176439712805f713c3751d360176c303554", + "algorithm": { + "tag": "sha256", + "@type": "Thing" + } + }, + "valid": true + }, + { + "@id": "cmps/Readme.txt", + "@type": [ + "nrdp:DataFile", + "nrdp:DownloadableFile", + "dcat:Distribution" + ], + "_extensionSchemas": [ + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" + ], + "filepath": "Readme.txt", + "downloadURL": "https://data.nist.gov/od/ds/mds2-2106/Readme.txt", + "mediaType": "text/plain", + "description": "Data dictionary for intercomparison data from guarded-hot-plate laboratories at the National Institute of Standards and Technology and at the National Physical Laboratory", + "title": "Data Dictionary for NIST/NPL Interlaboratory Comparison", + "size": 1666, + "checksum": { + "hash": "bcdf748936c17d3a62ca81d3fb3b31acce0c32b4a34e529dc3326ed047ae392c", + "algorithm": { + "tag": "sha256", + "@type": "Thing" + } + } + }, + { + "@id": "cmps/Readme.txt.sha256", + "@type": [ + 
"nrdp:ChecksumFile", + "nrdp:DownloadableFile", + "dcat:Distribution" + ], + "filepath": "Readme.txt.sha256", + "downloadURL": "https://data.nist.gov/od/ds/mds2-2106/Readme.txt.sha256", + "algorithm": { + "tag": "sha256", + "@type": "Thing" + }, + "describes": "cmps/Readme.txt", + "description": "SHA-256 checksum value for Readme.txt", + "_extensionSchemas": [ + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/ChecksumFile" + ], + "mediaType": "text/plain", + "title": "SHA256 File for Data Dictionary for NIST/NPL Interlaboratory Comparison", + "size": 64, + "checksum": { + "hash": "568c759c5d4fa8e61c0e475fda937d7122fe4474daf4a6b03066849485f56a22", + "algorithm": { + "tag": "sha256", + "@type": "Thing" + } + }, + "valid": true + }, + { + "@id": "cmps/NIST_NPL_InterlabData2019.csv", + "@type": [ + "nrdp:DataFile", + "nrdp:DownloadableFile", + "dcat:Distribution" + ], + "_extensionSchemas": [ + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/DataFile" + ], + "filepath": "NIST_NPL_InterlabData2019.csv", + "downloadURL": "https://data.nist.gov/od/ds/mds2-2106/NIST_NPL_InterlabData2019.csv", + "mediaType": "application/vnd.ms-excel", + "format": { + "description": "comma-separated values file" + }, + "description": "A description of the guarded-hot-plate data is in the meta-data file Readme.txt.", + "title": "NIST/NPL Interlaboratory Guarded-Hot-Plate Data", + "size": 2222, + "checksum": { + "hash": "38e4b8f90b81c3e894f666f50471a03788d1a2e61c74dbedef3f3c8fddd50308", + "algorithm": { + "tag": "sha256", + "@type": "Thing" + } + } + } + ], + "references": [ + { + "@type": [ + "npg:Article" + ], + "@id": "#ref:10.6028/NIST.TN.2059", + "refType": "IsCitedBy", + "location": "https://doi.org/10.6028/NIST.TN.2059", + "_extensionSchemas": [ + "https://data.nist.gov/od/dm/nerdm-schema/bib/v0.7#/definitions/DCiteReference" + ], + "title": "NIST-NPL bilateral comparison of guarded-hot-plate laboratories from 20 \u00b0C to 160 \u00b0C", + "issued": 
"2020-01", + "citation": "Zarr, R. R., Wu, J., & Liu, H.-K. (2020). NIST-NPL bilateral comparison of guarded-hot-plate laboratories from 20 \u00c2\u00b0C to 160 \u00c2\u00b0C. doi:10.6028/nist.tn.2059\n" + } + ], + "version": "1.6.0", + "issued": "2019-12-31", + "releaseHistory": { + "@id": "ark:/88434/mds2-2106.rel", + "@type": [ + "nrdr:ReleaseHistory" + ], + "hasRelease": [ + { + "version": "1.0.0", + "issued": "2019-08-12 00:00:00", + "@id": "ark:/88434/mds2-2106", + "location": "https://data.nist.gov/od/id/ark:/88434/mds2-2106", + "description": "initial release" + }, + { + "version": "1.1.0", + "issued": "2019-08-12 00:00:00", + "@id": "ark:/88434/mds2-2106", + "location": "https://data.nist.gov/od/id/ark:/88434/mds2-2106", + "description": "data update" + }, + { + "version": "1.2.0", + "issued": "2019-08-12 00:00:00", + "@id": "ark:/88434/mds2-2106", + "location": "https://data.nist.gov/od/id/ark:/88434/mds2-2106", + "description": "data update" + }, + { + "version": "1.3.0", + "issued": "2019-08-12 00:00:00", + "@id": "ark:/88434/mds2-2106", + "location": "https://data.nist.gov/od/id/ark:/88434/mds2-2106", + "description": "data update" + }, + { + "version": "1.4.0", + "issued": "2019-08-12 00:00:00", + "@id": "ark:/88434/mds2-2106", + "location": "https://data.nist.gov/od/id/ark:/88434/mds2-2106", + "description": "data update" + }, + { + "version": "1.5.0", + "issued": "2019-08-12 00:00:00", + "@id": "ark:/88434/mds2-2106", + "location": "https://data.nist.gov/od/id/ark:/88434/mds2-2106", + "description": "data update" + }, + { + "version": "1.6.0", + "issued": "2019-08-12", + "@id": "ark:/88434/mds2-2106", + "location": "https://data.nist.gov/od/id/ark:/88434/mds2-2106", + "description": "data update" + } + ] + } +} diff --git a/model/examples/ncnrexp0-annotated.txt b/model/examples/ncnrexp0-annotated.txt new file mode 100644 index 0000000..1a770f7 --- /dev/null +++ b/model/examples/ncnrexp0-annotated.txt @@ -0,0 +1,113 @@ +# This is a annotated version 
of the example record, ncnrexp0.json. It represents an example of record +# that could be submitted to the Programmatic Data Publishing service (pdp1). It describes an experiement +# from the NCNR facility. +{ + # These two properties enable validation (using nerdmvalidate). + # _schema identifies the core NERDm schema + # _extensionSchemas identifies the extension specific to submissions to the publishing service + # as well as the extension supporting metadata describing experiments + "_schema": "https://data.nist.gov/od/dm/nerdm-schema/v0.7#", + "_extensionSchemas": [ "https://data.nist.gov/od/dm/nerdm-schema/sip/v0.1#/definitions/PDRSubmission", + "https://data.nist.gov/od/dm/nerdm-schema/exp/v0.1#/definitions/ExperimentalContext" ], + + # @type identifies this as a submission to publishing service (required) + "@type": [ "nrdp:PDRSubmission" ], + + # @id is the requested ID to assign; if this ID cannot be assigned (because it is already being used + # or it has an incorrect form, then the submission will be rejected. If not provided, one will be + # automatically defined. (Optional) + "@id": "cnr0:22872", + + "title": "Neutron Reflectometry of Electrochemically Grown Hydrous Iridium Oxide Films", # required + "contactPoint": { + "hasEmail": "mailto:joseph.dura@nist.gov", # required + "fn": "Joe Dura", # required + "proxyFor": "https://orchid.org/0000-0001-6877-959X" # optional + }, + "authors": [ # author list is optional + { + "fn": "Eric Rus", # required; recommended rendering of full name + "familyName": "Rus", # optional + "firstName": "Eric", # optional + "orcid": "0000-0001-5031-6888" # optional + }, + { + "fn": "Joseph A. Dura", + "familyName": "Dura", + "firstName": "Joseph", + "middleName": "A.", + "orcid": "0000-0001-6877-959X" + } + ], + + # each string element should be thought of as a separate paragraph in the description, with the + # first one being most important. 
(required) + "description": [ + "" + ], + "keywords": [ "electrochemistry", "oxide films" ], # optional + "accessLevel": "public", # required (for the moment) + + "modified": "2018-04-05T17:16:54", # optional but recommended; set to the latest activity endtime + + "isPartOfProjects": [ + { + "localID": "22872" # the NCNR project ID + } + ], + + # instruments used + "instrumentsUsed": [ + { + "localID": "bt7", # use either the number or abbreviation + "title": "Double-focusing triple-axis spectrometer", # optional + "abbrev": ["BT-7"], # used for display purposes; more than one can be given + "location": "https://www.nist.gov/ncnr/bt-7-double-focusing-triple-axis-spectrometer" # optional + } + ], + + # we propose that each activity be listed as a component of this publication (these can be + # submitted separately or as part of this main resource metadata. + "components": [ + { + "_extensionSchemas": [ + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/AccessPage", + "https://data.nist.gov/od/dm/nerdm-schema/exp/v0.1#/definitions/ExperimentalContext" + ], + "@id": "components/201706", # required: follow this form, including the activity ID + "@type": [ "nrde:AcquisitionActivity", "nrdp:AccessPage" ], # required + + # required; this is a link to the page where the activity can be downloaded from + "accessURL": "https://pub/ncnrdata/cgd/201706/22872/data", + + # optional but recommended: this text would appear as the link text on the landing page + "title": "Download Access for Activity 201706", + + # optional but recommended; the description would appear as a tool tip text indicating what the + # user will see if they click on the accessURL link + "format": { + "description": "NCNR Repository web page" + }, + + # The start and stop times in Epoch seconds. 
+ # Note: the PDR will covert these to human-friendly strings downstream + "acquisitionStartTime": 1498848003, + "acquisitionEndTime": 1499352540 + }, + { + "_extensionSchemas": [ + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/AccessPage", + "https://data.nist.gov/od/dm/nerdm-schema/exp/v0.1#/definitions/ExperimentalContext" + ], + "@id": "components/201803", + "@type": [ "nrde:AcquisitionActivity", "nrdp:AccessPage" ], + "accessURL": "https://pub/ncnrdata/cgd/201803/22872/data", + "title": "Download Access for Activity 201803", + "format": { + "description": "NCNR Repository web page" + }, + "acquisitionStartTime": 1522513412, + "acquisitionEndTime": 1522948614 + } + ] +} diff --git a/model/examples/ncnrexp0.json b/model/examples/ncnrexp0.json new file mode 100644 index 0000000..bc161a8 --- /dev/null +++ b/model/examples/ncnrexp0.json @@ -0,0 +1,79 @@ +{ + "_schema": "https://data.nist.gov/od/dm/nerdm-schema/v0.7#", + "_extensionSchemas": [ "https://data.nist.gov/od/dm/nerdm-schema/sip/v0.1#/definitions/PDRSubmission", + "https://data.nist.gov/od/dm/nerdm-schema/exp/v0.1#/definitions/ExperimentalContext" ], + "@type": [ "nrdp:PDRSubmission" ], + "@id": "ncnr0:22872", + "title": "Neutron Reflectometry of Electrochemically Grown Hydrous Iridium Oxide Films", + "contactPoint": { + "hasEmail": "mailto:joseph.dura@nist.gov", + "fn": "Joe Dura", + "proxyFor": "https://orchid.org/0000-0001-6877-959X" + }, + "authors": [ + { + "fn": "Eric Rus", + "familyName": "Rus", + "firstName": "Eric", + "orcid": "0000-0001-5031-6888" + }, + { + "fn": "Joseph A. 
Dura", + "familyName": "Dura", + "firstName": "Joseph", + "middleName": "A.", + "orcid": "0000-0001-6877-959X" + } + ], + "description": [ + "" + ], + "keywords": [ "electrochemistry", "oxide films" ], + "accessLevel": "public", + "modified": "2018-04-05T17:16:54", + "isPartOfProjects": [ + { + "localID": "22872" + } + ], + "instrumentsUsed": [ + { + "localID": "bt7", + "title": "Double-focusing triple-axis spectrometer", + "abbrev": [ "BT-7" ], + "location": "https://www.nist.gov/ncnr/bt-7-double-focusing-triple-axis-spectrometer" + } + ], + "components": [ + { + "_extensionSchemas": [ + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/AccessPage", + "https://data.nist.gov/od/dm/nerdm-schema/exp/v0.1#/definitions/ExperimentalContext" + ], + "@id": "components/201706", + "@type": [ "nrde:AcquisitionActivity", "nrdp:AccessPage" ], + "accessURL": "https://ncnr.nist.gov/pub/ncnrdata/cgd/201706/22872/data", + "title": "Download Access for Activity 201706", + "format": { + "description": "NCNR Repository web page" + }, + "acquisitionStartTime": 1498848003, + "acquisitionEndTime": 1499352540 + }, + { + "_extensionSchemas": [ + "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/AccessPage", + "https://data.nist.gov/od/dm/nerdm-schema/exp/v0.1#/definitions/ExperimentalContext" + ], + "@id": "components/201803", + "@type": [ "nrdp:AccessPage" ], + "accessURL": "https://ncnr.nist.gov/pub/ncnrdata/cgd/201803/22872/data", + "title": "Download Access for Activity 201803", + "format": { + "description": "NCNR Repository web page" + }, + "acquisitionStartTime": 1522513412, + "acquisitionEndTime": 1522948614 + } + ] +} diff --git a/model/field-help-schema.json b/model/field-help-schema.json index d75ce30..36b05c7 100644 --- a/model/field-help-schema.json +++ b/model/field-help-schema.json @@ -1,7 +1,7 @@ { "$schema": "http://json-schema.org/draft-04/schema#", - "$extensionSchemas": ["https://www.nist.gov/od/dm/enhanced-json-schema/v0.1#"], - "id": 
"https://www.nist.gov/od/dm/field-help/v0.1#", + "$extensionSchemas": ["https://data.nist.gov/od/dm/enhanced-json-schema/v0.1#"], + "id": "https://data.nist.gov/od/dm/field-help/v0.1#", "rev": "wd1", "tile": "Metadata Field Help Information", "description": "types for encoding helpful information about metadata fields", diff --git a/model/nerdm-agg-schema-0.1.json b/model/nerdm-agg-schema-0.1.json new file mode 100644 index 0000000..4043276 --- /dev/null +++ b/model/nerdm-agg-schema-0.1.json @@ -0,0 +1,136 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "$extensionSchemas": [ + "https://www.nist.gov/od/dm/enhanced-json-schema/v0.1#" + ], + "id": "https://data.nist.gov/od/dm/nerdm-schema/agg/v0.1#", + "rev": "wd1", + "title": "The JSON Schema for specific science domains", + "description": "A JSON Schema which helps add different science initiatives and related projects together", + "definitions": { + "Aggregation": { + "description": "A formal collection of individually identified resources that share a common intent or theme, as defined by the creator of the collection", + "notes": [ + "This is the most general form of collection, and it purpose and membership is determined by the creator", + "It is recommended that the resource description describe what unifies the collection", + "Though not required, member datasets may declare their membership by pointing to an Aggregation via the isPartOf property", + "An Aggregation can list its member directly or indirectly within its components property. A direct reference can be done with an IncludedResource; an indirect pointer can be done with either a SearchPage (which returns a browsable page that dynamically lists its members), a DynamicResourceSet (which returns a dynamically-created, machine-readable listing of the members), or a combination of the two." 
+ ], + "allOf": [ + { + "$ref": "https://data.nist.gov/od/dm/nerdm-schema/v0.6#/definitions/Resource" + }, + { + "type": "object", + "properties": { + "creators": { + "title": "creators", + "description": "The individuals that created this aggregation and initially defined its memebership", + "type": "array", + "items": { "$ref": "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/Person" }, + "uniqueItems": true + } + } + } + ] + }, + + "ScienceTheme": { + "description": "A collection of resources that all relate to a common science topic or theme", + "notes": [ + "This type implies a more specific intent for a collection compared to an Aggregation, along with more active support, as supported through the facilitators property", + "Though not required, member datasets may declare their membership by pointing to an Aggregation via the isPartOf property" + ], + "allOf": [ + { + "$ref": "#/definitions/Aggregation" + }, + { + "type": "object", + "properties": { + "facilitators": { + "description": "A set of people play specific roles in the maintenance of this collection and the creation and support of its member resources", + "type": "array", + "items": { "$ref": "#/definitions/Facilitator" }, + "uniqueItems": true + } + } + } + ] + }, + + "Facilitator": { + "description": "a person with specified role", + "notes": [ + + ], + "allOf": [ + { + "$ref": "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/Person" + }, + { + "type": "object", + "properties": { + "jobTitle": { + "description": "The name for the job or role that the person fulfills in the maintenance of this collection and its members", + "type": "string", + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Job Title", + "referenceClass": "proton:JobTitle" + } + } + } + } + ] + }, + + "DynamicResourceSet": { + "description": "a Component that represents set of machine-readable resource that is dynamically generated from a URL that invokes a search on a remote 
database", + "notes": [ + ], + "allOf": [ + { + "$ref": "https://data.nist.gov/od/dm/nerdm-schema/v0.6#/definitions/Component" + }, + { + "properties": { + "searchURL": { + "description": "the URL for querying service to get list of resources", + "type": "string", + "format": "uri" + }, + + "resultSetMediaType": { + "title": "Media Type", + "description": "The machine-readable file format (IANA Media Type or MIME Type) of the result set returned by searchURL", + "anyOf": [ + { + "pattern": "^[-\\w]+/[-\\w]+(\\.[-\\w]+)*([+][-\\w]+)?$", + "type": "string" + }, + { + "type": "null" + } + ], + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Media Type", + "referenceProperty": "dcat:mediaType" + } + }, + + "schema": { + "description": "the URI identifying the schema that results are expected to be compliant with", + "type": "string", + "format": "uri" + } + }, + "required": [ + "searchURL" + ] + } + ] + } + } +} diff --git a/model/nerdm-agg-schema.json b/model/nerdm-agg-schema.json index 4043276..aca09d7 100644 --- a/model/nerdm-agg-schema.json +++ b/model/nerdm-agg-schema.json @@ -3,7 +3,7 @@ "$extensionSchemas": [ "https://www.nist.gov/od/dm/enhanced-json-schema/v0.1#" ], - "id": "https://data.nist.gov/od/dm/nerdm-schema/agg/v0.1#", + "id": "https://data.nist.gov/od/dm/nerdm-schema/agg/v0.2#", "rev": "wd1", "title": "The JSON Schema for specific science domains", "description": "A JSON Schema which helps add different science initiatives and related projects together", @@ -18,7 +18,7 @@ ], "allOf": [ { - "$ref": "https://data.nist.gov/od/dm/nerdm-schema/v0.6#/definitions/Resource" + "$ref": "https://data.nist.gov/od/dm/nerdm-schema/v0.7#/definitions/Resource" }, { "type": "object", @@ -27,7 +27,7 @@ "title": "creators", "description": "The individuals that created this aggregation and initially defined its memebership", "type": "array", - "items": { "$ref": "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/Person" }, + "items": 
{ "$ref": "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/Person" }, "uniqueItems": true } } @@ -66,7 +66,7 @@ ], "allOf": [ { - "$ref": "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#/definitions/Person" + "$ref": "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/Person" }, { "type": "object", @@ -91,7 +91,7 @@ ], "allOf": [ { - "$ref": "https://data.nist.gov/od/dm/nerdm-schema/v0.6#/definitions/Component" + "$ref": "https://data.nist.gov/od/dm/nerdm-schema/v0.7#/definitions/Component" }, { "properties": { diff --git a/model/nerdm-bib-schema-0.4.json b/model/nerdm-bib-schema-0.4.json index 58852b2..3ea4c65 100644 --- a/model/nerdm-bib-schema-0.4.json +++ b/model/nerdm-bib-schema-0.4.json @@ -1,6 +1,6 @@ { "$schema": "http://json-schema.org/draft-04/schema#", - "$extensionSchemas": ["https://www.nist.gov/od/dm/enhanced-json-schema/v0.1#"], + "$extensionSchemas": ["https://data.nist.gov/od/dm/enhanced-json-schema/v0.1#"], "id": "https://data.nist.gov/od/dm/nerdm-schema/bib/v0.4#", "rev": "wd1", "title": "The NERDm extension metadata for bibliographic information", diff --git a/model/nerdm-bib-schema-0.5.json b/model/nerdm-bib-schema-0.5.json index 5c4ad8d..0e395d9 100644 --- a/model/nerdm-bib-schema-0.5.json +++ b/model/nerdm-bib-schema-0.5.json @@ -1,6 +1,6 @@ { "$schema": "http://json-schema.org/draft-04/schema#", - "$extensionSchemas": ["https://www.nist.gov/od/dm/enhanced-json-schema/v0.1#"], + "$extensionSchemas": ["https://data.nist.gov/od/dm/enhanced-json-schema/v0.1#"], "id": "https://data.nist.gov/od/dm/nerdm-schema/bib/v0.5#", "rev": "wd1", "title": "The NERDm extension metadata for bibliographic information", diff --git a/model/nerdm-bib-schema-0.6.json b/model/nerdm-bib-schema-0.6.json new file mode 100644 index 0000000..ced722c --- /dev/null +++ b/model/nerdm-bib-schema-0.6.json @@ -0,0 +1,134 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "$extensionSchemas": 
["https://data.nist.gov/od/dm/enhanced-json-schema/v0.1#"], + "id": "https://data.nist.gov/od/dm/nerdm-schema/bib/v0.6#", + "rev": "wd1", + "title": "The NERDm extension metadata for bibliographic information", + "description": "This schema extends NERDm's BibliographicReference to provide more specific ways to describe scholory references.", + "definitions": { + + "DCiteRefType": { + "description": "a bibliographical reference type with a defined set of values that are drawn from the DataCite Metadata Schema's relationType", + "notes": [ + "The values are defined to be identical to (or more specific than) the corresponding terms in the DataCite Metadata Schema", + "The subset of defined here are those that are considered most applicable to data resources provided by the NIST Public Data Repository (PDR) and for which the PDR may recognize and provide special handling for." + ], + "type": "string", + "enum": [ "IsDocumentedBy", "IsSupplementTo", "IsSupplementedBy", + "IsCitedBy", "Cites", "IsReviewedBy", + "IsReferencedBy", "References", + "IsSourceOf", "IsDerivedFrom", + "IsNewVersionOf", "IsPreviousVersionOf" ], + "valueDocumentation": { + "IsDocumentedBy": { + "description": "The referenced document provides documentation of this resource.", + "notes": [ + "This type should be applied to the reference that provides the best, most complete, or otherwise most preferred description of how the data in this resource was created.", + "This resource is expected to be or include a human-readable document." 
+ ] + }, + "IsSupplementTo": { + "description": "The referenced document is a supplement to this resource.", + "notes": [ + "A supplement typically refers to data that is closely associated with a journal article that serves as the primary description of how the data was taken and what results were extracted from them.", + "This resource is expected to be or include a human-readable document.", + "The resource is usually considered the primary reference for this data resource" + ] + }, + "IsSupplementedBy": { + "description": "The referenced document (or dataset) is a supplement to this resource.", + "notes": [ + "This can be applied to refer to other datasets that may have resulted from the same experimental/observational activity but which might be considered less central of useful to other, e.g. calibration data." + ] + }, + "IsCitedBy": { + "description": "The referenced document cites the resource in some way.", + "notes": [ + "This relationship indicates is lighter than IsReferenceBy: the referenced document may discuss this resource without drawing on and using data or information from this resource." + ] + }, + "Cites": { + "description": "This resource cites the referenced document.", + "notes": [ + "Human readable descriptions can refer to this type of resource via its label, e.g. '...previous research [Smith98; Jones10]...'", + "Like IsCitedBy, the relationship indicated is lighter than References: this resource makes reference to the referenced resource in discussion without necessarily drawing on and using data or information from that resource." 
+ ] + }, + "IsReviewedBy": { + "description": "The referenced document reviews this resource.", + "notes": [ + "This is a lighter relationship than the resource property, describedBy; the latter refers to a document that is the primary, detailed description and/or analysis of this resource" + ] + }, + "IsReferencedBy": { + "description": "The resource is used as a source of information by the referenced document.", + "notes": [ + ] + }, + "References": { + "description": "The referenced document is used as a source of information by the resource.", + "notes": [ + "This type is provided as a general purpose reference to documents or data that are indirectly relate to this one", + "This type is recommend for references provided for citations in the textual description (e.g. the description property) of this resource." + ] + }, + "IsSourceOf": { + "description": "The resource is the source of upon which the referenced resource is based.", + "notes": [ + "In other words, the referenced document is derived from the resource.", + "This is a stronger relationship than 'References'" + ] + }, + "IsDerivedFrom": { + "description": "The referenced document is the source upon which the resource is based.", + "notes": [ + "In other words, the resource is derived from the referenced document.", + "This is a stronger relationship than 'IsReferencedBy'" + ] + }, + "IsNewVersionOf": { + "description": "The referenced resource is a previous version of this resource.", + "notes": [ + "This usually means that the referenced resource is deprecated by this one." + ] + }, + "IsPreviousVersionOf": { + "description": "The referenced resource is a newer version of this resource.", + "notes": [ + "This usually means that the referenced resource deprecates this one." 
+ ] + }, + "IsVariantOf": { + "description": "The referenced resource contains the content of this resource in a different form.", + "notes": [ + "As an example, the referenced resource may be based on the same raw data but calibrated differently." + ] + } + } + }, + + "DCiteReference": { + "description": "a bibliographical reference with a controlled vocabulary for its reference type (refType)", + "notes": [ + "Note that some refType values are specifically for references of type npg:Document: 'isDocumentedBy', 'isReviewedBy'; 'isSupplementTo' typically labels a reference of type npg:Document", + "Use 'isSupplementTo' or 'isDocumentedBy' to indicate documents that provide the most comprehensive explanation of the contents of the resource. 'isSupplementTo' is preferred when the document presents results drawn from the data. List these documents in order of importance (as the first one will be exported as the 'describedBy' document when converted to the POD schema).", + "It is recommended that such multiple classifications of the same reference should be avoided." + ], + "allOf": [ + { "$ref": "https://data.nist.gov/od/dm/nerdm-schema/v0.6#/definitions/BibliographicReference" }, + { + "properties": { + "refType": { + "description": "a term indicating the nature of the relationship between this resource and the one being referenced", + "notes": [ + "Note that with this term, the subject of relationship is the resource described by this NERDm record and the object is the referenced resource given by the @id property in this node. Although this violates the JSON-LD semantics that properties in this node should describe what's given with the local @id--the referenced resource, in this case--it is more consistant with their use in the DataCite schema." 
+ ], + "$ref": "#/definitions/DCiteRefType" + } + }, + "required": [ "refType" ] + } + ] + } + } +} diff --git a/model/nerdm-bib-schema.json b/model/nerdm-bib-schema.json index b27a747..d514ad3 100644 --- a/model/nerdm-bib-schema.json +++ b/model/nerdm-bib-schema.json @@ -1,7 +1,7 @@ { "$schema": "http://json-schema.org/draft-04/schema#", - "$extensionSchemas": ["https://www.nist.gov/od/dm/enhanced-json-schema/v0.1#"], - "id": "https://data.nist.gov/od/dm/nerdm-schema/bib/v0.6#", + "$extensionSchemas": ["https://data.nist.gov/od/dm/enhanced-json-schema/v0.1#"], + "id": "https://data.nist.gov/od/dm/nerdm-schema/bib/v0.7#", "rev": "wd1", "title": "The NERDm extension metadata for bibliographic information", "description": "This schema extends NERDm's BibliographicReference to provide more specific ways to describe scholory references.", @@ -115,7 +115,7 @@ "It is recommended that such multiple classifications of the same reference should be avoided." ], "allOf": [ - { "$ref": "https://data.nist.gov/od/dm/nerdm-schema/v0.6#/definitions/BibliographicReference" }, + { "$ref": "https://data.nist.gov/od/dm/nerdm-schema/v0.7#/definitions/BibliographicReference" }, { "properties": { "refType": { diff --git a/model/nerdm-exp-schema.json b/model/nerdm-exp-schema.json new file mode 100644 index 0000000..476b3cd --- /dev/null +++ b/model/nerdm-exp-schema.json @@ -0,0 +1,112 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "$extensionSchemas": ["https://data.nist.gov/od/dm/enhanced-json-schema/v0.1#"], + "id": "https://data.nist.gov/od/dm/nerdm-schema/exp/v0.1#", + "rev": "wd1", + "title": "The NERDm extension metadata for describing data and other resources resulting from research experiments", + "description": "This extension schema defines extension metadata for describing data, collections, tools and other resources related to an experiment process. It can capture the context and details of that process. 
It supports a model where an experiment is made up of one or more data-producing activities. The output of an experiment can be published as a Resource.", + "definitions": { + + "ExperimentalContext": { + "description": "properties of an experimental process or result", + "notes": [ + "This type can be used to augment an arbitrary object with metadata about an experiment" + ], + "properties": { + "instrumentsUsed": { + "type": "array", + "items": { "$ref": "https://data.nist.gov/od/dm/nerdm-schema/v0.7#/definitions/RelatedResource" }, + "description": "references to identified instruments, instrument components, or other apparatuses used in the experiment", + "notes": [ + "As references, these are not intended to be complete descriptions of instruments, but would rather contain just summary information displayable in lieu of full descriptions" + ], + "asOntology": { + "preflabel": "Project", + "referenceProperty": "modsci:ScientificInstrument" + } + }, + "isPartOfProjects": { + "items": { "$ref": "https://data.nist.gov/od/dm/nerdm-schema/v0.7#/definitions/RelatedResource" }, + "description": "references to an identified projects under which this particular experiment was conducted", + "notes": [ + "In most cases, it is expected that there is only one project associated with an experiment; however, this allows for multiple projects to be referenced." + ], + "asOntology": { + "preflabel": "Project", + "referenceProperty": "dc:isPartOf" + } + }, + "acquisitionStartTime": { + "description": "an (epoch) timestamp for the start of data acquisition", + "notes": [ + "As a float, this value is by default taken as an epoch time in decimal seconds (i.e. seconds since midnight, Jan 1, 1970); however, a local time standard may used; in this case, use hasAcquisitionStart to provide an interoperable ISO-8601 format value", + "This property assumes a model of an experiment that is made up of multiple acquisition activities. 
When this property is used to describe an activity, it represents the start of that particular activity; when used to describe an experiment, it represents the start of the experiment as a whole or, more specifically, the start of the earliest acquisition activity that is part of the experiment." + ], + "type": "number", + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Start time", + "range": "xsd:decimal", + "referenceProperty": "w3t:hasBeginning" + } + }, + "hasAcquisitionStart": { + "description": "a nominal start time of data acquisition for this experiment or acquisition activity in ISO-8601 format", + "notes": [ + "This value can be an approximate time; if acquisitionStartTime is provided, it should be a conversion of that value to strict ISO-8601 format." + ], + "$ref": "https://data.nist.gov/od/dm/nerdm-schema/v0.7#/definitions/FlexibleDate", + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Start time", + "range": "xsd:date", + "referenceProperty": "w3t:hasBeginning" + } + }, + "acquisitionEndTime": { + "description": "an (epoch) timestamp for the end of data acquisition", + "notes": [ + "As a float, this value is by default taken as an epoch time in decimal seconds (i.e. seconds since midnight, Jan 1, 1970); however, a local time standard may be used; in this case, use hasAcquisitionEnd to provide an interoperable ISO-8601 format value", + "This property assumes a model of an experiment that is made up of multiple acquisition activities. When this property is used to describe an activity, it represents the end of that particular activity; when used to describe an experiment, it represents the end of the experiment as a whole or, more specifically, the end of the latest acquisition activity that is part of the experiment."
+ ], + "type": "number", + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "End time", + "range": "xsd:decimal", + "referenceProperty": "w3t:hasEnd" + } + }, + "hasAcquisitionEnd": { + "description": "a nominal end time of data acquisition for this experiment or acquisition activity in ISO-8601 format", + "notes": [ + "This value can be an approximate time; if acquisitionEndTime is provided, it should be a conversion of that value to strict ISO-8601 format." + ], + "$ref": "https://data.nist.gov/od/dm/nerdm-schema/v0.7#/definitions/FlexibleDate", + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "End time", + "range": "xsd:date", + "referenceProperty": "w3t:hasEnd" + } + } + } + }, + + "AcquisitionActivity": { + "description": "a resource component describing a data acquisition session that is part of an experiment", + "allOf": [ + { "$ref": "https://data.nist.gov/od/dm/nerdm-schema/v0.7#/definitions/Component" }, + { "$ref": "#/definitions/ExperimentalContext" } + ] + }, + + "ExperimentalData": { + "description": "a data resource describing a data acquisition session that is part of an experiment", + "allOf": [ + { "$ref": "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#/definitions/PublicDataResource" }, + { "$ref": "#/definitions/ExperimentalContext" } + ] + } + } +} diff --git a/model/nerdm-pub-schema-0.1.json b/model/nerdm-pub-schema-0.1.json index 1303e5d..816b376 100644 --- a/model/nerdm-pub-schema-0.1.json +++ b/model/nerdm-pub-schema-0.1.json @@ -213,7 +213,7 @@ "mediaType": { "title": "Media Type", - "description": "The machine-readable file format (IANA Media Type or MIME Type) of the file’s downloadURL", + "description": "The machine-readable file format (IANA Media Type or MIME Type) of the file's downloadURL", "anyOf": [ { "pattern": "^[-\\w]+/[-\\w]+(\\.[-\\w]+)*([+][-\\w]+)?$", @@ -302,7 +302,7 @@ "describedByType": { "title": "Data Dictionary Type", - "description": "The machine-readable file
format (IANA Media Type or MIME Type) of the file’s describedBy URL", + "description": "The machine-readable file format (IANA Media Type or MIME Type) of the file's describedBy URL", "anyOf": [ { "pattern": "^[-\\w]+/[-\\w]+(\\.[-\\w]+)*([+][-\\w]+)?$", @@ -508,7 +508,7 @@ "describedByType": { "title": "API Descriptions Type", - "description": "The machine-readable file format (IANA Media Type or MIME Type) of the file’s describedBy URL", + "description": "The machine-readable file format (IANA Media Type or MIME Type) of the file's describedBy URL", "anyOf": [ { "pattern": "^[-\\w]+/[-\\w]+(\\.[-\\w]+)*([+][-\\w]+)?$", diff --git a/model/nerdm-pub-schema-0.2.json b/model/nerdm-pub-schema-0.2.json index 9ef2223..3dc5a07 100644 --- a/model/nerdm-pub-schema-0.2.json +++ b/model/nerdm-pub-schema-0.2.json @@ -213,7 +213,7 @@ "mediaType": { "title": "Media Type", - "description": "The machine-readable file format (IANA Media Type or MIME Type) of the file’s downloadURL", + "description": "The machine-readable file format (IANA Media Type or MIME Type) of the file's downloadURL", "anyOf": [ { "pattern": "^[-\\w]+/[-\\w]+(\\.[-\\w]+)*([+][-\\w]+)?$", @@ -302,7 +302,7 @@ "describedByType": { "title": "Data Dictionary Type", - "description": "The machine-readable file format (IANA Media Type or MIME Type) of the file’s describedBy URL", + "description": "The machine-readable file format (IANA Media Type or MIME Type) of the file's describedBy URL", "anyOf": [ { "pattern": "^[-\\w]+/[-\\w]+(\\.[-\\w]+)*([+][-\\w]+)?$", @@ -508,7 +508,7 @@ "describedByType": { "title": "API Descriptions Type", - "description": "The machine-readable file format (IANA Media Type or MIME Type) of the file’s describedBy URL", + "description": "The machine-readable file format (IANA Media Type or MIME Type) of the file's describedBy URL", "anyOf": [ { "pattern": "^[-\\w]+/[-\\w]+(\\.[-\\w]+)*([+][-\\w]+)?$", diff --git a/model/nerdm-pub-schema-0.3.json b/model/nerdm-pub-schema-0.3.json index 
3943b96..694100a 100644 --- a/model/nerdm-pub-schema-0.3.json +++ b/model/nerdm-pub-schema-0.3.json @@ -213,7 +213,7 @@ "mediaType": { "title": "Media Type", - "description": "The machine-readable file format (IANA Media Type or MIME Type) of the file’s downloadURL", + "description": "The machine-readable file format (IANA Media Type or MIME Type) of the file's downloadURL", "anyOf": [ { "pattern": "^[-\\w]+/[-\\w]+(\\.[-\\w]+)*([+][-\\w]+)?$", @@ -302,7 +302,7 @@ "describedByType": { "title": "Data Dictionary Type", - "description": "The machine-readable file format (IANA Media Type or MIME Type) of the file’s describedBy URL", + "description": "The machine-readable file format (IANA Media Type or MIME Type) of the file's describedBy URL", "anyOf": [ { "pattern": "^[-\\w]+/[-\\w]+(\\.[-\\w]+)*([+][-\\w]+)?$", @@ -508,7 +508,7 @@ "describedByType": { "title": "API Descriptions Type", - "description": "The machine-readable file format (IANA Media Type or MIME Type) of the file’s describedBy URL", + "description": "The machine-readable file format (IANA Media Type or MIME Type) of the file's describedBy URL", "anyOf": [ { "pattern": "^[-\\w]+/[-\\w]+(\\.[-\\w]+)*([+][-\\w]+)?$", diff --git a/model/nerdm-pub-schema-0.4.json b/model/nerdm-pub-schema-0.4.json index 11a1872..4cb8384 100644 --- a/model/nerdm-pub-schema-0.4.json +++ b/model/nerdm-pub-schema-0.4.json @@ -213,7 +213,7 @@ "mediaType": { "title": "Media Type", - "description": "The machine-readable file format (IANA Media Type or MIME Type) of the file’s downloadURL", + "description": "The machine-readable file format (IANA Media Type or MIME Type) of the file's downloadURL", "anyOf": [ { "pattern": "^[-\\w]+/[-\\w]+(\\.[-\\w]+)*([+][-\\w]+)?$", @@ -302,7 +302,7 @@ "describedByType": { "title": "Data Dictionary Type", - "description": "The machine-readable file format (IANA Media Type or MIME Type) of the file’s describedBy URL", + "description": "The machine-readable file format (IANA Media Type or MIME Type) 
of the file's describedBy URL", "anyOf": [ { "pattern": "^[-\\w]+/[-\\w]+(\\.[-\\w]+)*([+][-\\w]+)?$", @@ -508,7 +508,7 @@ "describedByType": { "title": "API Descriptions Type", - "description": "The machine-readable file format (IANA Media Type or MIME Type) of the file’s describedBy URL", + "description": "The machine-readable file format (IANA Media Type or MIME Type) of the file's describedBy URL", "anyOf": [ { "pattern": "^[-\\w]+/[-\\w]+(\\.[-\\w]+)*([+][-\\w]+)?$", diff --git a/model/nerdm-pub-schema-0.5.json b/model/nerdm-pub-schema-0.5.json index 60cebd0..6fd35f7 100644 --- a/model/nerdm-pub-schema-0.5.json +++ b/model/nerdm-pub-schema-0.5.json @@ -214,7 +214,7 @@ "mediaType": { "title": "Media Type", - "description": "The machine-readable file format (IANA Media Type or MIME Type) of the file’s downloadURL", + "description": "The machine-readable file format (IANA Media Type or MIME Type) of the file's downloadURL", "anyOf": [ { "pattern": "^[-\\w]+/[-\\w]+(\\.[-\\w]+)*([+][-\\w]+)?$", @@ -303,7 +303,7 @@ "describedByType": { "title": "Data Dictionary Type", - "description": "The machine-readable file format (IANA Media Type or MIME Type) of the file’s describedBy URL", + "description": "The machine-readable file format (IANA Media Type or MIME Type) of the file's describedBy URL", "anyOf": [ { "pattern": "^[-\\w]+/[-\\w]+(\\.[-\\w]+)*([+][-\\w]+)?$", @@ -509,7 +509,7 @@ "describedByType": { "title": "API Descriptions Type", - "description": "The machine-readable file format (IANA Media Type or MIME Type) of the file’s describedBy URL", + "description": "The machine-readable file format (IANA Media Type or MIME Type) of the file's describedBy URL", "anyOf": [ { "pattern": "^[-\\w]+/[-\\w]+(\\.[-\\w]+)*([+][-\\w]+)?$", diff --git a/model/nerdm-pub-schema-0.6.json b/model/nerdm-pub-schema-0.6.json new file mode 100644 index 0000000..4649f40 --- /dev/null +++ b/model/nerdm-pub-schema-0.6.json @@ -0,0 +1,758 @@ +{ + "$schema": 
"http://json-schema.org/draft-04/schema#", + "$extensionSchemas": ["https://data.nist.gov/od/dm/enhanced-json-schema/v0.1#"], + "id": "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#", + "rev": "wd1", + "title": "The NERDm extension metadata for Public Data", + "description": "These classes extend the based NERDm schema to different types of published data", + "definitions": { + + "PublicDataResource": { + "description": "a resource that can/should have a record in NIST's public data listing (PDL)", + "notes": [ + "This must be convertable to a compliant and complete POD record; thus, this class adds all remaining POD properties missing from the core", + "In addition to the core properties, this also inherits release-related information, including version, releaseHistory, and isVersionOf", + "The ancestor types require the following properties: @id, publisher, accessLevel, title, and contactPoint." + ], + "allOf": [ + { "$ref": "https://data.nist.gov/od/dm/nerdm-schema/rls/v0.2#/definitions/ReleasedResource"}, + { + "type": "object", + "properties": { + + "accrualPeriodicity": { + "title": "Frequency", + "description": "Frequency with which dataset is published.", + "anyOf": [ + { + "enum": [ + "irregular" + ], + "valueDocumentation": { + "irregular": { + "description": "the data is updated or republished on an irregular schedule" + } + } + }, + { + "type": "string", + "pattern": "^R\\/P(?:\\d+(?:\\.\\d+)?Y)?(?:\\d+(?:\\.\\d+)?M)?(?:\\d+(?:\\.\\d+)?W)?(?:\\d+(?:\\.\\d+)?D)?(?:T(?:\\d+(?:\\.\\d+)?H)?(?:\\d+(?:\\.\\d+)?M)?(?:\\d+(?:\\.\\d+)?S)?)?$" + }, + { + "type": "null" + } + ], + "asOntology": { + "@conxtext": "profile-schema-onto.json", + "prefLabel": "Frequency", + "referenceProperty": "dc:accrualPeriodicity" + } + }, + + "bureauCode": { + "title": "Bureau Code", + "description": "an identifier provided by the OMB Circular A-11, Appendix C that identifies the originating federal agency", + "notes": [ + "OMB Circular A-11, Appendix C is available via 
https://obamawhitehouse.archives.gov/sites/default/files/omb/assets/a11_current_year/app_c.pdf", + "A machine-readable listing of the defined codes is available via https://project-open-data.cio.gov/data/omb_bureau_codes.csv", + "Codes have the format of 015:01" + ], + "type": "array", + "items": { + "type": "string", + "pattern": "^[0-9]{3}:[0-9]{2}$" + }, + "uniqueItems": true, + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Bureau Code", + "referenceProperty": "pod:bureauCode" + } + }, + + "programCode": { + "title": "Program Code", + "description": "an identifier provided by the Federal Program Inventory that identifies the primary program related to this data asset", + "notes": [ + "A machine-readable listing of the defined codes is available via https://obamaadministration.archives.performance.gov/s3fs-public/files/FederalProgramInventory_FY13_MachineReadable_091613.xls", + "Codes have the format of 015:001" + ], + "type": "array", + "items": { + "type": "string", + "pattern": "^[0-9]{3}:[0-9]{3}$" + }, + "uniqueItems": true, + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Program Code", + "referenceProperty": "pod:programCode" + } + }, + + "dataQuality": { + "title": "Data Quality", + "description": "Whether the dataset meets the agency's Information Quality Guidelines", + "anyOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ], + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Data Quality", + "referenceProperty": "pod:dataQuality" + } + }, + + "primaryITInvestmentUII": { + "title": "Primary IT Investment UII", + "description": "The IT Unique Investment Identifier (UII) that is associated with this dataset", + "anyOf": [ + { + "type": "string", + "pattern": "[0-9]{3}-[0-9]{9}" + }, + { + "type": "null" + } + ], + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Primary IT Investment UII", + "referenceProperty": "pod:primaryITInvestmentUII" 
+ } + }, + + "systemOfRecords": { + "title": "System of Records", + "description": "The URL to the System of Records Notice related to this dataset if is so designated under the Privacy Act of 1974", + "anyOf": [ + { + "type": "string", + "minLength": 1 + }, + { + "type": "null" + } + ] + }, + + "dataHierarchy": { + "description": "a hierarchical organization of the Subcollection and DataFile components", + "notes": [ + "This optional property can be provided to aid display of the data components" + ], + "type": "array", + "items": { "$ref": "#/definitions/DataHierarchyNode"} + } + + }, + "required": [ "description", "landingPage", "bureauCode", "programCode", "ediid" ] + } + ] + }, + + "DataHierarchyNode": { + "description": "a description of a node (either a DataFile or a Subcollection) in a data hierarchy", + "notes": [ + "Other metadata related to the node may appear as additional properties of this object" + ], + "type": "object", + "properties": { + "filepath": { + "description": "the name of the node reflecting its position in the hierarchy", + "notes": [ + "This value must uniquely match a component listed under a resource's components" + ], + "type": "string" + }, + + "children": { + "description": "the nodes that are direct children of this node", + "notes": [ + "This should only appear if the node is of type Subcollection." 
+ ], + "type": "array", + "items": { "$ref": "#/definitions/DataHierarchyNode" } + } + }, + "required": [ "filepath" ] + }, + + "DownloadableFile": { + "description": "a description of a downloadable, finite stream of data", + "allOf": [ + { "$ref": "https://data.nist.gov/od/dm/nerdm-schema/v0.6#/definitions/Component" }, + { + "properties": { + + "filepath": { + "description": "a name for the data file reflecting its hierarchical location in the data source collection", + "notes": [ + "Forward slashes delimit the hierarchical names in the path", + "If there are no slashes in this value, this file is assumed to be at the top of the file hierarchy", + "The base name of this value (i.e. the last field in the path) can be used as the default filename to give to the file if downloaded.", + "The component title may have the same value." + ], + "type": "string" + }, + + "downloadURL": { + "title": "Download URL", + "description": "URL providing direct access to a downloadable file of a dataset", + "type": "string", + "format": "uri", + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Download URL", + "referenceProperty": "dcat:downloadURL" + } + }, + + "mediaType": { + "title": "Media Type", + "description": "The machine-readable file format (IANA Media Type or MIME Type) of the file's downloadURL", + "anyOf": [ + { + "pattern": "^[-\\w]+/[-\\w]+(\\.[-\\w]+)*([+][-\\w]+)?$", + "type": "string" + }, + { + "type": "null" + } + ], + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Media Type", + "referenceProperty": "dcat:mediaType" + } + }, + + "format": { + "title": "Format", + "description": "A human-readable description of the file format of a distribution", + "$ref": "#/definitions/Format", + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Format", + "referenceProperty": "dc:format" + } + }, + + "checksum": { + "title": "Checksum", + "description": "a checksum for the file", + "$ref": 
"#/definitions/Checksum" + }, + + "size": { + "description": "the size of the file in bytes", + "type": "integer", + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "file size", + "referenceProperty": "schema:fileSize" + } + } + + }, + "required": [ "filepath" ], + + "dependencies": { + "downloadURL": { + "properties": { + "mediaType": { + "type": "string", + "pattern": "^[-\\w]+/[-\\w]+(\\.[-\\w]+)*([+][-\\w]+)?$" + } + }, + "required": [ "mediaType" ] + } + } + } + ] + }, + + "DataFile": { + "description": "a description of a downloadable file that was provided by the authors (as opposed to a system or checksum file produced by the publication system).", + "allOf": [ + { "$ref": "#/definitions/DownloadableFile" }, + { + "properties": { + "describedBy": { + "title": "Data Dictionary", + "description": "URL to the data dictionary for the distribution found at the downloadURL", + "anyOf": [ + { + "type": "string", + "format": "uri" + }, + { + "type": "null" + } + ], + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Data Dictionary", + "referenceProperty": "http://www.w3.org/2007/05/powder-s#describedby" + } + }, + + "describedByType": { + "title": "Data Dictionary Type", + "description": "The machine-readable file format (IANA Media Type or MIME Type) of the file's describedBy URL", + "anyOf": [ + { + "pattern": "^[-\\w]+/[-\\w]+(\\.[-\\w]+)*([+][-\\w]+)?$", + "type": "string" + }, + { + "type": "null" + } + ], + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Data Dictionary Type", + "referenceProperty": "pod:describedByType" + } + } + } + } + ] + }, + + "ChecksumFile": { + "description": "a downloadable file that contains the checksum value for a DataFile.", + "allOf": [ + { "$ref": "#/definitions/DownloadableFile" }, + { + "properties": { + "algorithm": { + "description": "the algorithm used to produce the checksum hash", + "$ref": 
"https://data.nist.gov/od/dm/nerdm-schema/v0.6#/definitions/Topic" + }, + + "valid": { + "type": "boolean", + "description": "A flag, if True, indicating the the hash value contained in this ChecksumFile is confirmed to be correct for its associated data file." + }, + + "describes": { + "type": "string", + "format": "uri-reference", + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Checksum for", + "referenceProperty": "ov:describes" + } + } + } + } + ] + }, + + "Checksum": { + "description": "a checksum with its algorithm noted", + "type": "object", + "properties": { + "algorithm": { + "description": "the algorithm used to produce the checksum hash", + "$ref": "https://data.nist.gov/od/dm/nerdm-schema/v0.6#/definitions/Topic" + }, + "hash": { + "description": "the checksum value", + "type": "string", + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "checksum", + "referenceProperty": "dataid:checksum" + } + } + }, + "required": [ "hash" ] + }, + + "Subcollection": { + "description": "A grouping of components within a named subcollection of the resource", + "notes": [ + "This Component subtype implements hierarchical resources; a subcollection is equivalent to a directory that can contain other components, including other subcollections." + ], + "allOf": [ + { "$ref": "https://data.nist.gov/od/dm/nerdm-schema/v0.6#/definitions/Component" }, + { + "properties": { + + "filepath": { + "description": "a name for the data file reflecting its hierarchical location in the data source collection", + "notes": [ + "Forward slashes delimit the hierarchical names in the path", + "If there are no slashes in this value, this file is assumed to be at the top of the file hierarchy", + "The base name of this value (i.e. the last field in the path) can be used as the default filename to give to the file if downloaded.", + "The component title may have the same value." 
+ ], + "type": "string" + }, + + "contains": { + "description": "a listing of resource components that are directly part of this subcollection", + "notes": [ + "Each item is a URI identifier (possibly abbreviated)" + ], + "type": "array", + "items": { + "type": "string" + }, + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "contains", + "referenceProperty": "ldp:contains" + } + }, + + "hasParent": { + "description": "The identifier for the parent collection that contains this subcollection", + "type": "string" + } + + }, + "required": [ "filepath" ] + } + ], + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Subcollection", + "referenceProperty": "fedora:Container" + } + }, + + "AccessPage": { + "description": "a web page that provides indirect access to the resource", + "notes": [ + "This type should not be used to capture a resource's home page as this would be redundant with the landingPage resource property." + ], + "allOf": [ + { "$ref": "https://data.nist.gov/od/dm/nerdm-schema/v0.6#/definitions/Component" }, + { + "properties": { + "accessURL": { + "description": "the URL for accessing this indirect access to the resource", + "type": "string", + "format": "uri" + }, + + "format": { + "title": "Format", + "description": "A human-readable description of the file format of a distribution", + "$ref": "#/definitions/Format", + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Format", + "referenceProperty": "dc:format" + } + } + }, + "required": [ "accessURL" ] + } + ] + }, + + "SearchPage": { + "description": "a web page that can be used to search the contents of the resource", + "notes": [ + "Provide this component even if the accessURL is the same as the landing page; this indicates that the landing page provides a search tool in it." 
+ ], + "allOf": [ + { "$ref": "#/definitions/AccessPage" } + ] + }, + + "API": { + "description": "an application programming interface to the resource", + "notes": [ + "This is typically a web-based interface", + "When converting an API component to a POD distribution, the output format should set to 'API'." + ], + "allOf": [ + { "$ref": "https://data.nist.gov/od/dm/nerdm-schema/v0.6#/definitions/Component" }, + { + "properties": { + "accessURL": { + "description": "the URL for accessing this indirect access to the resource", + "type": "string", + "format": "uri" + }, + + "describedBy": { + "title": "API Description", + "description": "URL to a formal or informal description of the API", + "notes": [ + "Use describedByType to help distinguish between formal and informal (i.e. human readable) descriptions." + ], + "anyOf": [ + { + "type": "string", + "format": "uri" + }, + { + "type": "null" + } + ], + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Data Dictionary", + "referenceProperty": "http://www.w3.org/2007/05/powder-s#describedby" + } + }, + + "describedByType": { + "title": "API Descriptions Type", + "description": "The machine-readable file format (IANA Media Type or MIME Type) of the file's describedBy URL", + "anyOf": [ + { + "pattern": "^[-\\w]+/[-\\w]+(\\.[-\\w]+)*([+][-\\w]+)?$", + "type": "string" + }, + { + "type": "null" + } + ], + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Data Dictionary Type", + "referenceProperty": "pod:describedByType" + } + } + } + } + ] + }, + + "Format": { + "description": "a description of a file format that a file employs", + "type": "object", + "properties": { + "description": { + "description": "a human-readable description of the format", + "type": "string" + }, + + "scheme": { + "description": "a URI that identifies the format type registry or identification system that the value is defined in.", + "type": "string", + "format": "uri", + "asOnotology": { + 
"@context": "profile-schema-onto.json", + "prefLabel": "Schema", + "referenceProperty": "vold:vocabulary" + } + }, + + "@id": { + "description": "the unique identifier for the format", + "type": "string", + "format": "uri" + }, + + "tag": { + "description": "a short, display-able token or abbreviation for the format", + "notes": [ + "As a token, it is intended that applications can search for this value and find all files having the same format. Thus, regardless of whether the @id field is provided, all references to the same format should use the same tag value." + ], + "type": "string" + } + } + }, + + "DataPublication": { + "description": "Data presented by one or more authors as citable publication", + "allOf": [ + { "$ref": "#/definitions/PublicDataResource" }, + { + "type": "object", + "properties": { + "subtitle": { + "description": "a secondary or sub-title for the resource", + "type": "array", + "items": { "type": "string" } + }, + "aka": { + "description": "other (unofficial) titles that this resource is sometimes known as", + "type": "array", + "items": { "type": "string" } + }, + "authors": { + "description": "the ordered list of authors of this data publication", + "notes": [ + "Authors should generally be assumed to be considered creators of the data; where this is is not true or insufficient, the contributors property can be used ot add or clarify who contributed to data creation." 
+ ], + "type": "array", + "items": { "$ref": "#/definitions/Person" }, + "asOntology": { + "@conxtext": "profile-schema-onto.json", + "prefLabel": "Authors", + "referenceProperty": "bibo:authorList" + } + }, + "recommendedCitation": { + "description": "a recommended formatting of a citation to this data publication", + "type": "string", + "asOntology": { + "@conxtext": "profile-schema-onto.json", + "prefLabel": "Cite as", + "referenceProperty": "dc:bibliographicCitation" + } + } + } + } + ] + }, + + "Person": { + "description": "an identification a Person contributing to the publication of a resource", + "notes": [ + "The information here is intended to reflect information about the person at teh time of the contribution or publication." + ], + "type": "object", + "properties": { + "@type": { + "description": "the class indicating that this is a Person", + "type": "string", + "enum": [ + "foaf:Person" + ] + }, + + "fn": { + "description": "the author's full name in the preferred format", + "type": "string", + "minLength": 1, + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Contact Name", + "referenceProperty": "vcard:fn" + } + }, + + "givenName": { + "description": "the author's given name", + "notes": [ + "Often referred to in English-speaking conventions as the first name" + ], + "type": "string", + "minLength": 1, + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "First Name", + "referenceProperty": "foaf:givenName" + } + }, + + "familyName": { + "description": "the author's family name", + "notes": [ + "Often referred to in English-speaking conventions as the last name" + ], + "type": "string", + "minLength": 1, + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Last Name", + "referenceProperty": "foaf:familyName" + } + }, + + "middleName": { + "description": "the author's middle names or initials", + "notes": [ + "Often referred to in English-speaking conventions as the first name" + 
], + "type": "string", + "minLength": 1, + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Middle Names or Initials", + "referenceProperty": "vcard:middleName" + } + }, + + "orcid": { + "description": "the author's ORCID", + "notes:": [ + "The value should not include the resolving URI base (http://orcid.org)" + ], + "$ref": "#/definitions/ORCIDpath", + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Last Name", + "referenceProperty": "vivo:orcidid" + } + }, + + "affiliation": { + "description": "The institution the person was affiliated with at the time of publication", + "type": "array", + "items": { + "$ref": "#/definitions/Affiliation" + }, + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Affiliation", + "referenceProperty": "schema:affiliation" + } + }, + + "proxyFor": { + "description": "a local identifier representing this person", + "notes": [ + "This identifier is expected to point to an up-to-date description of the person as known to the local system. The properties associated with that identifier may be different those given in the current record." + ], + "type": "string", + "format": "uri", + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Current Person Information", + "referenceProperty": "ore:proxyFor" + } + } + }, + "required": [ "fn" ] + }, + + "ORCIDpath": { + "description": "the format of the path portion of an ORCID identifier (i.e. 
without the preceding resolver URL base)", + "type": "string", + "pattern": "^[0-9]{4}-[0-9]{4}-[0-9]{4}-[0-9]{3}[0-9X]$" + }, + + "Affiliation": { + "description": "a description of an organization that a person is a member of", + "allOf": [ + { "$ref": "https://data.nist.gov/od/dm/nerdm-schema/v0.6#/definitions/ResourceReference" }, + { + "properties": { + "subunits": { + "description": "sub-units of the main organization the that the person is a member of", + "notes": [ + "The order of the array elements should be treated as significant. Typically (though not required), each element will reflect a more specific unit contained in unit nameed in the previous element." + ], + "type": "array", + "items": { "type": "string" }, + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Sub-unit", + "referenceProperty": "org:OrganizationalUnit" + } + } + }, + "required": [ "@type" ] + } + ] + } + + } +} + diff --git a/model/nerdm-pub-schema.json b/model/nerdm-pub-schema.json index 5452020..9a77d5d 100644 --- a/model/nerdm-pub-schema.json +++ b/model/nerdm-pub-schema.json @@ -1,7 +1,7 @@ { "$schema": "http://json-schema.org/draft-04/schema#", "$extensionSchemas": ["https://data.nist.gov/od/dm/enhanced-json-schema/v0.1#"], - "id": "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.6#", + "id": "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.7#", "rev": "wd1", "title": "The NERDm extension metadata for Public Data", "description": "These classes extend the based NERDm schema to different types of published data", @@ -11,10 +11,11 @@ "description": "a resource that can/should have a record in NIST's public data listing (PDL)", "notes": [ "This must be convertable to a compliant and complete POD record; thus, this class adds all remaining POD properties missing from the core", - "In addition to the core properties, this also inherits release-related information, including version, releaseHistory, and isVersionOf" + "In addition to the core properties, this 
also inherits release-related information, including version, releaseHistory, and isVersionOf", + "The ancestor types require the following properties: @id, publisher, accessLevel, title, and contactPoint." ], "allOf": [ - { "$ref": "https://data.nist.gov/od/dm/nerdm-schema/rls/v0.2#/definitions/ReleasedResource"}, + { "$ref": "https://data.nist.gov/od/dm/nerdm-schema/rls/v0.3#/definitions/ReleasedResource"}, { "type": "object", "properties": { @@ -52,7 +53,7 @@ "title": "Bureau Code", "description": "an identifier provided by the OMB Circular A-11, Appendix C that identifies the originating federal agency", "notes": [ - "OMB Circular A-11, Appendix C is available via http://www.whitehouse.gov/sites/default/files/omb/assets/a11_current_year/app_c.pdf", + "OMB Circular A-11, Appendix C is available via https://obamawhitehouse.archives.gov/sites/default/files/omb/assets/a11_current_year/app_c.pdf", "A machine-readable listing of the defined codes is available via https://project-open-data.cio.gov/data/omb_bureau_codes.csv", "Codes have the format of 015:01" ], @@ -73,7 +74,7 @@ "title": "Program Code", "description": "an identifier provided by the Federal Program Inventory that identifies the primary program related to this data asset", "notes": [ - "A machine-readable listing of the defined codes is available via https://www.performance.gov/sites/default/files/files/FederalProgramInventory_FY13_MachineReadable_091613.xls", + "A machine-readable listing of the defined codes is available via https://obamaadministration.archives.performance.gov/s3fs-public/files/FederalProgramInventory_FY13_MachineReadable_091613.xls", "Codes have the format of 015:001" ], "type": "array", @@ -150,7 +151,7 @@ } }, - "required": [ "bureauCode", "programCode", "ediid" ] + "required": [ "description", "landingPage", "bureauCode", "programCode", "ediid" ] } ] }, @@ -185,7 +186,7 @@ "DownloadableFile": { "description": "a description of a downloadable, finite stream of data", "allOf": [ - { 
"$ref": "https://data.nist.gov/od/dm/nerdm-schema/v0.6#/definitions/Component" }, + { "$ref": "https://data.nist.gov/od/dm/nerdm-schema/v0.7#/definitions/Component" }, { "properties": { @@ -214,7 +215,7 @@ "mediaType": { "title": "Media Type", - "description": "The machine-readable file format (IANA Media Type or MIME Type) of the file’s downloadURL", + "description": "The machine-readable file format (IANA Media Type or MIME Type) of the file's downloadURL", "anyOf": [ { "pattern": "^[-\\w]+/[-\\w]+(\\.[-\\w]+)*([+][-\\w]+)?$", @@ -303,7 +304,7 @@ "describedByType": { "title": "Data Dictionary Type", - "description": "The machine-readable file format (IANA Media Type or MIME Type) of the file’s describedBy URL", + "description": "The machine-readable file format (IANA Media Type or MIME Type) of the file's describedBy URL", "anyOf": [ { "pattern": "^[-\\w]+/[-\\w]+(\\.[-\\w]+)*([+][-\\w]+)?$", @@ -332,7 +333,7 @@ "properties": { "algorithm": { "description": "the algorithm used to produce the checksum hash", - "$ref": "https://data.nist.gov/od/dm/nerdm-schema/v0.6#/definitions/Topic" + "$ref": "https://data.nist.gov/od/dm/nerdm-schema/v0.7#/definitions/Topic" }, "valid": { @@ -360,7 +361,7 @@ "properties": { "algorithm": { "description": "the algorithm used to produce the checksum hash", - "$ref": "https://data.nist.gov/od/dm/nerdm-schema/v0.6#/definitions/Topic" + "$ref": "https://data.nist.gov/od/dm/nerdm-schema/v0.7#/definitions/Topic" }, "hash": { "description": "the checksum value", @@ -381,7 +382,7 @@ "This Component subtype implements hierarchical resources; a subcollection is equivalent to a directory that can contain other components, including other subcollections." 
], "allOf": [ - { "$ref": "https://data.nist.gov/od/dm/nerdm-schema/v0.6#/definitions/Component" }, + { "$ref": "https://data.nist.gov/od/dm/nerdm-schema/v0.7#/definitions/Component" }, { "properties": { @@ -434,7 +435,7 @@ "This type should not be used to capture a resource's home page as this would be redundant with the landingPage resource property." ], "allOf": [ - { "$ref": "https://data.nist.gov/od/dm/nerdm-schema/v0.6#/definitions/Component" }, + { "$ref": "https://data.nist.gov/od/dm/nerdm-schema/v0.7#/definitions/Component" }, { "properties": { "accessURL": { @@ -476,7 +477,7 @@ "When converting an API component to a POD distribution, the output format should set to 'API'." ], "allOf": [ - { "$ref": "https://data.nist.gov/od/dm/nerdm-schema/v0.6#/definitions/Component" }, + { "$ref": "https://data.nist.gov/od/dm/nerdm-schema/v0.7#/definitions/Component" }, { "properties": { "accessURL": { @@ -509,7 +510,7 @@ "describedByType": { "title": "API Descriptions Type", - "description": "The machine-readable file format (IANA Media Type or MIME Type) of the file’s describedBy URL", + "description": "The machine-readable file format (IANA Media Type or MIME Type) of the file's describedBy URL", "anyOf": [ { "pattern": "^[-\\w]+/[-\\w]+(\\.[-\\w]+)*([+][-\\w]+)?$", @@ -730,7 +731,7 @@ "Affiliation": { "description": "a description of an organization that a person is a member of", "allOf": [ - { "$ref": "https://data.nist.gov/od/dm/nerdm-schema/v0.6#/definitions/ResourceReference" }, + { "$ref": "https://data.nist.gov/od/dm/nerdm-schema/v0.7#/definitions/ResourceReference" }, { "properties": { "subunits": { diff --git a/model/nerdm-rls-schema-0.2.json b/model/nerdm-rls-schema-0.2.json new file mode 100644 index 0000000..9bb36f6 --- /dev/null +++ b/model/nerdm-rls-schema-0.2.json @@ -0,0 +1,206 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "$extensionSchemas": ["https://data.nist.gov/od/dm/enhanced-json-schema/v0.1#"], + "id": 
"https://data.nist.gov/od/dm/nerdm-schema/rls/v0.2#", + "rev": "wd1", + "title": "The NERDm extension metadata for describing versions and releases of resources", + "description": "These classes extend the base NERDm schema to capture version history. It defines a special Resource, a ReleaseCollection, that collects all the versions of an evolving resource, as well as an informative resource reference, a ReleaseHistory, to reference that resource. The ReleasedResource can serve as a base Resource type for resources that are versioned.", + "definitions": { + + "ReleasedResource": { + "description": "a Resource that can be revised and released under a sequence of versions", + "notes": [ + "The base Resource type requires properties, title and contactPoint" + ], + "allOf": [ + { "$ref": "https://data.nist.gov/od/dm/nerdm-schema/v0.6#/definitions/Resource" }, + { + "properties": { + "isVersionOf": { + "description": "the ID for the general resource that this resource is a version of", + "notes": [ + "This value refers to a notion of the resource that is evolving over time as each version is released and typically should resolve to the last version", + "Set this property if @id is set to a value refering to a specific version" + ], + "type": "string", + "format": "uri" + }, + + "version": { + "title": "Version", + "description": "a string indicating the version of the release of this resource", + "type": "string", + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Version", + "referenceProperty": "schema:version" + } + }, + + "releaseHistory": { + "title": "Release History", + "description": "a reference to the sequence of releases that this resource is a part of", + "notes": [ + "This property may include a listing of the releases that make up the release history" + ], + "$ref": "#/definitions/ReleaseHistory", + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Release History", + "referenceProperty": "dc:isPartOf" + } + 
}, + + "replaces": { + "title": "Replaces", + "description": "a listing of other existing resources that are deprecated by this resource", + "notes": [ + "This replicates information provided in the releaseHistory property. While the latter is preferred for this information, replaces can be added for ease of processing by certain applications." + ], + "type": "array", + "items": { + "allOf": [ + { "$ref": "https://data.nist.gov/od/dm/nerdm-schema/v0.6#/definitions/RelatedResource" }, + { "required": [ "@id", "issued" ] } + ] + }, + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Replaces", + "referenceProperty": "dc:replaces" + } + }, + + "isReplacedBy": { + "title": "Replaced by", + "description": "another existing resources that should be considered a replacement for this resource", + "notes": [ + "This typically refers to a newer version of this resource", + "This value is inferrable from the releaseHistory property. While the latter is preferred for this information, isReplacedBy can be added for ease of processing by certain applications.", + "This property may be set explicitly when this resource is intended to be replaced by another resource that is not simple a newer version of this resource." 
+ ], + "allOf": [ + { "$ref": "https://data.nist.gov/od/dm/nerdm-schema/v0.6#/definitions/RelatedResource" }, + { "required": [ "@id", "issued" ] } + ], + + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Is replaced by", + "referenceProperty": "dc:isReplacedBy" + } + } + }, + "required": [ "@id", "publisher", "accessLevel" ] + } + ] + }, + + "ReleaseHistory": { + "title": "Release History", + "description": "a reference to a release history collection that contains all the releases of the resources", + "allOf": [ + { "$ref": "https://data.nist.gov/od/dm/nerdm-schema/v0.6#/definitions/RelatedResource" }, + { + "properties": { + "hasRelease": { + "title": "Releases", + "description": "the list of known releases that are part of this release history collection", + "type": "array", + "items": { "$ref": "#/definitions/Release" } + } + }, + "required": [ "@id" ] + } + ] + }, + + "Release": { + "title": "Release", + "description": "a reference to a release of a RevisableResource", + "notes": [ + "A release entry provides an abbreviated description of one of the versions in series of releases." + ], + "allOf": [ + { "$ref": "https://data.nist.gov/od/dm/nerdm-schema/v0.6#/definitions/RelatedResource" }, + { + "properties": { + "version": { + "title": "Version", + "description": "a string indicating a version of the release of this resource", + "notes": [ + "label could contain the same value" + ], + "type": "string", + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Version", + "referenceProperty": "schema:version" + } + }, + "status": { + "title": "Status", + "description": "a label indicating the availability status of the release", + "notes": [ + "As some releases may disappear or be embargoed, this label is used indicate whether this release is currently available and, if not, why.", + "See ReleaseStatus value documentation for allowed values. 
If this property is not provided, the assumed value should 'available'" + ], + "$ref": "#/definitions/ReleaseStatus" + } + }, + "required": [ "@id", "version", "issued" ] + } + ] + }, + + "ReleaseCollection": { + "description": "a special collection that enumerates the different versions available for a particular resouce", + "allOf": [ + { "$ref": "https://data.nist.gov/od/dm/nerdm-schema/v0.6#/definitions/Resource" }, + { + "properties": { + "hasRelease": { + "title": "Releases", + "description": "the list of known releases that are part of this release history collection", + "type": "array", + "items": { "$ref": "#/definitions/Release" } + } + }, + "required": [ "hasRelease" ] + } + ] + }, + + "ReleaseStatus": { + "description": "A label indicating a resource's release and availability status", + "type": "string", + "enum": [ "available", "deprecated", "embargoed", "unavailable", "missing", "removed" ], + "valueDocumentation": { + "available": { + "description": "the resource is currently available via its landing page and the links therein as well as via its component links" + }, + "deprecated": { + "description": "the resource is still available; however, the publisher wishes to indicate that the use of the data is no longer recommended" + }, + "embargoed": { + "description": "the resource is currently unavailable but is expected to become available sometime in the future." + }, + "unavailable": { + "description": "the resource is temporarily unavailable, usuallly for known reasons (other than being in an embargoed state)", + "notes": [ + "This might be set if the server that provides the resource is going to be down for an extended period of time" + ] + }, + "missing": { + "description": "the resource is unavailable for unknown reasons", + "notes": [ + "The publisher may apply the landing page and/or component links are unresponsive for an extended period of time." 
+ ] + }, + "removed": { + "description": "the resource was unpublished--that is, intentionally and permanently made unavailable, usually to prevent its use" + } + } + } + } +} diff --git a/model/nerdm-rls-schema.json b/model/nerdm-rls-schema.json index b2a3b3c..68a11e5 100644 --- a/model/nerdm-rls-schema.json +++ b/model/nerdm-rls-schema.json @@ -1,16 +1,19 @@ { "$schema": "http://json-schema.org/draft-04/schema#", "$extensionSchemas": ["https://data.nist.gov/od/dm/enhanced-json-schema/v0.1#"], - "id": "https://data.nist.gov/od/dm/nerdm-schema/rls/v0.2#", + "id": "https://data.nist.gov/od/dm/nerdm-schema/rls/v0.3#", "rev": "wd1", "title": "The NERDm extension metadata for describing versions and releases of resources", - "description": "These classes extend the based NERDm schema to capture version history. It defines a special Resource, a ReleaseCollection, that collects all the versions of an evolving resource, as well as an informative resource reference, a ReleaseHistory, to reference that resource. The ReleasedResource can serve as a base Resource type for resources that are versioned.", + "description": "These classes extend the base NERDm schema to capture version history. It defines a special Resource, a ReleaseCollection, that collects all the versions of an evolving resource, as well as an informative resource reference, a ReleaseHistory, to reference that resource. 
The ReleasedResource can serve as a base Resource type for resources that are versioned.", "definitions": { "ReleasedResource": { "description": "a Resource that can be revised and released under a sequence of versions", + "notes": [ + "The base Resource type requires properties, title and contactPoint" + ], "allOf": [ - { "$ref": "https://data.nist.gov/od/dm/nerdm-schema/v0.6#/definitions/Resource" }, + { "$ref": "https://data.nist.gov/od/dm/nerdm-schema/v0.7#/definitions/Resource" }, { "properties": { "isVersionOf": { @@ -57,7 +60,7 @@ "type": "array", "items": { "allOf": [ - { "$ref": "https://data.nist.gov/od/dm/nerdm-schema/v0.6#/definitions/RelatedResource" }, + { "$ref": "https://data.nist.gov/od/dm/nerdm-schema/v0.7#/definitions/RelatedResource" }, { "required": [ "@id", "issued" ] } ] }, @@ -77,7 +80,7 @@ "This property may be set explicitly when this resource is intended to be replaced by another resource that is not simple a newer version of this resource." ], "allOf": [ - { "$ref": "https://data.nist.gov/od/dm/nerdm-schema/v0.6#/definitions/RelatedResource" }, + { "$ref": "https://data.nist.gov/od/dm/nerdm-schema/v0.7#/definitions/RelatedResource" }, { "required": [ "@id", "issued" ] } ], @@ -87,7 +90,8 @@ "referenceProperty": "dc:isReplacedBy" } } - } + }, + "required": [ "@id", "publisher", "accessLevel" ] } ] }, @@ -96,7 +100,7 @@ "title": "Release History", "description": "a reference to a release history collection that contains all the releases of the resources", "allOf": [ - { "$ref": "https://data.nist.gov/od/dm/nerdm-schema/v0.6#/definitions/RelatedResource" }, + { "$ref": "https://data.nist.gov/od/dm/nerdm-schema/v0.7#/definitions/RelatedResource" }, { "properties": { "hasRelease": { @@ -118,7 +122,7 @@ "A release entry provides an abbreviated description of one of the versions in series of releases." 
], "allOf": [ - { "$ref": "https://data.nist.gov/od/dm/nerdm-schema/v0.6#/definitions/RelatedResource" }, + { "$ref": "https://data.nist.gov/od/dm/nerdm-schema/v0.7#/definitions/RelatedResource" }, { "properties": { "version": { @@ -152,7 +156,7 @@ "ReleaseCollection": { "description": "a special collection that enumerates the different versions available for a particular resouce", "allOf": [ - { "$ref": "https://data.nist.gov/od/dm/nerdm-schema/v0.6#/definitions/Resource" }, + { "$ref": "https://data.nist.gov/od/dm/nerdm-schema/v0.7#/definitions/Resource" }, { "properties": { "hasRelease": { diff --git a/model/nerdm-schema-0.1.json b/model/nerdm-schema-0.1.json index 29b4598..b96864f 100644 --- a/model/nerdm-schema-0.1.json +++ b/model/nerdm-schema-0.1.json @@ -1,6 +1,6 @@ { "$schema": "http://json-schema.org/draft-04/schema#", - "$extensionSchemas": ["https://www.nist.gov/od/dm/enhanced-json-schema/v0.1#"], + "$extensionSchemas": ["https://data.nist.gov/od/dm/enhanced-json-schema/v0.1#"], "id": "https://data.nist.gov/od/dm/nerdm-schema/v0.1#", "rev": "wd1", "title": "The JSON Schema for the NIST Extended Resource Data model (NERDm)", @@ -103,7 +103,7 @@ "publisher": { "description": "The publishing entity and optionally their parent organization(s).", "notes": [ - "This is a container for a publisher object which groups together the fields: name and subOrganization. The subOrganization field can also contain a publisher object which allows one to describe an organization’s hierarchy. Where greater specificity is desired, include as many levels of publisher as is useful, in ascending order, using the below format." + "This is a container for a publisher object which groups together the fields: name and subOrganization. The subOrganization field can also contain a publisher object which allows one to describe an organization's hierarchy. 
Where greater specificity is desired, include as many levels of publisher as is useful, in ascending order, using the below format." ], "$ref": "#/definitions/Organization", "asOntology": { diff --git a/model/nerdm-schema-0.2.json b/model/nerdm-schema-0.2.json index e41da89..7860002 100644 --- a/model/nerdm-schema-0.2.json +++ b/model/nerdm-schema-0.2.json @@ -1,6 +1,6 @@ { "$schema": "http://json-schema.org/draft-04/schema#", - "$extensionSchemas": ["https://www.nist.gov/od/dm/enhanced-json-schema/v0.1#"], + "$extensionSchemas": ["https://data.nist.gov/od/dm/enhanced-json-schema/v0.1#"], "id": "https://data.nist.gov/od/dm/nerdm-schema/v0.2#", "rev": "wd2", "title": "The JSON Schema for the NIST Extended Resource Data model (NERDm)", @@ -149,7 +149,7 @@ "publisher": { "description": "The publishing entity and optionally their parent organization(s).", "notes": [ - "This is a container for a publisher object which groups together the fields: name and subOrganization. The subOrganization field can also contain a publisher object which allows one to describe an organization’s hierarchy. Where greater specificity is desired, include as many levels of publisher as is useful, in ascending order, using the below format." + "This is a container for a publisher object which groups together the fields: name and subOrganization. The subOrganization field can also contain a publisher object which allows one to describe an organization's hierarchy. Where greater specificity is desired, include as many levels of publisher as is useful, in ascending order, using the below format." 
], "$ref": "#/definitions/Organization", "asOntology": { diff --git a/model/nerdm-schema-0.3.json b/model/nerdm-schema-0.3.json index be41180..7cb67c2 100644 --- a/model/nerdm-schema-0.3.json +++ b/model/nerdm-schema-0.3.json @@ -1,6 +1,6 @@ { "$schema": "http://json-schema.org/draft-04/schema#", - "$extensionSchemas": ["https://www.nist.gov/od/dm/enhanced-json-schema/v0.1#"], + "$extensionSchemas": ["https://data.nist.gov/od/dm/enhanced-json-schema/v0.1#"], "id": "https://data.nist.gov/od/dm/nerdm-schema/v0.3#", "rev": "wd1", "title": "The JSON Schema for the NIST Extended Resource Data model (NERDm)", @@ -149,7 +149,7 @@ "publisher": { "description": "The publishing entity and optionally their parent organization(s).", "notes": [ - "This is a container for a publisher object which groups together the fields: name and subOrganization. The subOrganization field can also contain a publisher object which allows one to describe an organization’s hierarchy. Where greater specificity is desired, include as many levels of publisher as is useful, in ascending order, using the below format." + "This is a container for a publisher object which groups together the fields: name and subOrganization. The subOrganization field can also contain a publisher object which allows one to describe an organization's hierarchy. Where greater specificity is desired, include as many levels of publisher as is useful, in ascending order, using the below format." 
], "$ref": "#/definitions/Organization", "asOntology": { diff --git a/model/nerdm-schema-0.4.json b/model/nerdm-schema-0.4.json index 92d7296..0866bd7 100644 --- a/model/nerdm-schema-0.4.json +++ b/model/nerdm-schema-0.4.json @@ -1,6 +1,6 @@ { "$schema": "http://json-schema.org/draft-04/schema#", - "$extensionSchemas": ["https://www.nist.gov/od/dm/enhanced-json-schema/v0.1#"], + "$extensionSchemas": ["https://data.nist.gov/od/dm/enhanced-json-schema/v0.1#"], "id": "https://data.nist.gov/od/dm/nerdm-schema/v0.4#", "rev": "wd1", "title": "The JSON Schema for the NIST Extended Resource Data model (NERDm)", @@ -149,7 +149,7 @@ "publisher": { "description": "The publishing entity and optionally their parent organization(s).", "notes": [ - "This is a container for a publisher object which groups together the fields: name and subOrganization. The subOrganization field can also contain a publisher object which allows one to describe an organization’s hierarchy. Where greater specificity is desired, include as many levels of publisher as is useful, in ascending order, using the below format." + "This is a container for a publisher object which groups together the fields: name and subOrganization. The subOrganization field can also contain a publisher object which allows one to describe an organization's hierarchy. Where greater specificity is desired, include as many levels of publisher as is useful, in ascending order, using the below format." 
], "$ref": "#/definitions/Organization", "asOntology": { diff --git a/model/nerdm-schema-0.5.json b/model/nerdm-schema-0.5.json index 03fe51f..058d5a3 100644 --- a/model/nerdm-schema-0.5.json +++ b/model/nerdm-schema-0.5.json @@ -1,6 +1,6 @@ { "$schema": "http://json-schema.org/draft-04/schema#", - "$extensionSchemas": ["https://www.nist.gov/od/dm/enhanced-json-schema/v0.1#"], + "$extensionSchemas": ["https://data.nist.gov/od/dm/enhanced-json-schema/v0.1#"], "id": "https://data.nist.gov/od/dm/nerdm-schema/v0.5#", "rev": "wd1", "title": "The JSON Schema for the NIST Extended Resource Data model (NERDm)", @@ -167,7 +167,7 @@ "publisher": { "description": "The publishing entity and optionally their parent organization(s).", "notes": [ - "This is a container for a publisher object which groups together the fields: name and subOrganization. The subOrganization field can also contain a publisher object which allows one to describe an organization’s hierarchy. Where greater specificity is desired, include as many levels of publisher as is useful, in ascending order, using the below format." + "This is a container for a publisher object which groups together the fields: name and subOrganization. The subOrganization field can also contain a publisher object which allows one to describe an organization's hierarchy. Where greater specificity is desired, include as many levels of publisher as is useful, in ascending order, using the below format." 
], "$ref": "#/definitions/Organization", "asOntology": { diff --git a/model/nerdm-schema-0.6.json b/model/nerdm-schema-0.6.json new file mode 100644 index 0000000..e83c403 --- /dev/null +++ b/model/nerdm-schema-0.6.json @@ -0,0 +1,1081 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "$extensionSchemas": ["https://data.nist.gov/od/dm/enhanced-json-schema/v0.1#"], + "id": "https://data.nist.gov/od/dm/nerdm-schema/v0.6#", + "rev": "wd1", + "title": "The JSON Schema for the NIST Extended Resource Data model (NERDm)", + "description": "A JSON Schema specfying the core NERDm classes", + "definitions": { + + "Resource": { + "description": "a resource (e.g. data collection, service, website or tool) that can participate in a data-driven application", + "properties": { + + "title": { + "title": "Title", + "description": "Human-readable, descriptive name of the resource", + "notes": [ + "Acronyms should be avoided" + ], + "type": "string", + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Title", + "referenceProperty": "dc:title" + } + }, + + "description": { + "title": "Description", + "description": "Human-readable description (e.g., an abstract) of the resource", + "notes": [ + "Each element in the array should be considered a separate paragraph" + ], + "type": "array", + "items": { "type": "string" }, + "minItems": 1, + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Description", + "referenceProperty": "dc:description" + } + }, + + "keyword": { + "title": "Tags", + "description": "Tags (or keywords) help users discover your dataset; please include terms that would be used by technical and non-technical users.", + "notes": [ + "Surround each keyword with quotes. Separate keywords with commas. Avoid duplicate keywords in the same record." 
+ ], + "type": "array", + "items": { "type": "string", "minLength": 1 }, + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Tags", + "referenceProperty": "dcat:keyword" + } + }, + + "topic": { + "description": "Identified tags referring to things or concepts that this resource addresses or speaks to", + "type": "array", + "items": { "$ref": "#/definitions/Topic" }, + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Topic", + "referenceProperty": "foaf:topic" + } + }, + + "modified": { + "description": "Most recent date on which the main content of this resource was modified.", + "notes": [ + "Dates should be ISO 8601 of highest resolution. In other words, as much of YYYY-MM-DDThh:mm:ss.sTZD as is relevant to this dataset. If there is a need to reflect that the dataset is continually updated, ISO 8601 formatting can account for this with repeating intervals. For instance, R/P1D for daily, R/P2W for every two weeks, and R/PT5M for every five minutes." 
+ ], + "$ref": "#/definitions/ISO8601DateRange", + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Last Update", + "referenceProperty": "dc:modified" + } + }, + + "revised": { + "title": "Release Date", + "description": "The date of the last formal issuance of the resource due to any change in the resource, including the addition of new content to this resource", + "$ref": "#/definitions/FlexibleDate", + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Release Date", + "referenceProperty": "dc:issued", + "range": "xsd:date" + } + }, + + "issued": { + "title": "Release Date", + "description": "Date of formal issuance of the resource", + "notes": [ + "At NIST, this is the date when this resource was issued under its EDI ID and/or DOI", + "At NIST, this value does not change when new data is added to the resource" + ], + "$ref": "#/definitions/FlexibleDate", + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Release Date", + "referenceProperty": "dc:issued", + "range": "xsd:date" + } + }, + + "firstIssued": { + "title": "Initial Release Date", + "description": "Date of the first formal issuance of the resource", + "$ref": "#/definitions/FlexibleDate", + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Release Date", + "referenceProperty": "dc:issued", + "range": "xsd:date" + } + }, + + "annotated": { + "title": "Last Annotation Date", + "description": "Date of the last minor update to metadata or other ancillary information that (by itself) did not affect the main content of the resource", + "notes": [ + "This will always get updated for both minor and major changes; that is, when the revision date changes, this date will change as well." 
+ ], + "$ref": "#/definitions/FlexibleDate", + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Release Date", + "range": "xsd:date" + } + }, + + "status": { + "description": "A label indicating this resource's release and availability status", + "type": "string", + "notes": [ + "This value is meant to flag a resource as perhaps unavailable--temporarily or permanently--with some indication as to why; further explanation could be amended to the resource description.", + "This value is intended to be orthogonal to the accessLevel property.", + "See ReleaseStatus value documentation (from the nerdm-schema/rls schema) for the full set of values recognized by the NIST PDR. If this property is not provided, the assumed value should 'available'" + ], + "valueDocumentation": { + "available": { + "description": "the resource is currently available via its landing page and the links therein as well as via its component links" + }, + "unavailable": { + "description": "the resource is temporarily unavailable, usuallly for known reasons (other than being in an embargoed state)", + "notes": [ + "This might be set if the server that provides the resource is going to be down for an extended period of time" + ] + }, + "missing": { + "description": "the resource is unavailable for unknown reasons", + "notes": [ + "The publisher may apply the landing page and/or component links are unresponsive for an extended period of time." + ] + }, + "removed": { + "description": "the resource was unpublished--that is, intentionally and permanently made unavailable, usually to prevent its use" + } + } + }, + + "publisher": { + "description": "The publishing entity and optionally their parent organization(s).", + "notes": [ + "This is a container for a publisher object which groups together the fields: name and subOrganization. The subOrganization field can also contain a publisher object which allows one to describe an organization's hierarchy. 
Where greater specificity is desired, include as many levels of publisher as is useful, in ascending order, using the below format." + ], + "$ref": "#/definitions/Organization", + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Publisher", + "referenceProperty": "dc:publisher" + } + }, + + "contactPoint": { + "description": "Contact information for getting more information about this resource", + "notes": [ + "This should include at least a name and an email address", + "The information can reflect either a person or a group (such as a help desk)" + ], + "$ref": "#/definitions/ContactInfo", + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Contact Information", + "referenceProperty": "dcat:contactPoint" + } + }, + + "accessLevel": { + "title": "Public Access Level", + "description": "The degree to which this dataset could be made publicly-available, regardless of whether it has been made available", + "notes": [ + ], + "type": "string", + "enum": [ "public", "restricted public", "non-public" ], + "valueDocumentation": { + "public": { + "description": "Data asset is or could be made publicly available to all without restrictions" + }, + "restricted public": { + "description": "Data asset is available under certain use restrictions" + }, + "non-public": { + "description": "Data asset is not available to members of the public" + } + }, + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Public Access Level", + "referenceProperty": "pod:accessLevel" + } + }, + + "license": { + "title": "License", + "description": "A pointer to the primary license or non-license (i.e. 
Public Domain) statement with which the dataset or API has been published", + "notes": [ + "Software and data developed primarily by federal employees must be considered in the public domain; software primarily developed by contract can be assigned a license, including an open source license.", + "By default, NIST-produced data and software should point to http://www.nist.gov/data/license.cfm", + "See Open Licenses (https://project-open-data.cio.gov/open-licenses/) for more information." + ], + "anyOf": [ + { + "type": "string", + "format": "uri" + }, + { + "type": "null" + } + ], + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "License", + "referenceProperty": "dc:license" + } + }, + + "rights": { + "title": "Rights", + "description": "information regarding access or restrictions based on privacy, security, or other policies", + "notes": [ + "This should also provide an explanation for the selected \"accessLevel\" including instructions for how to access a restricted file, if applicable, or explanation for why a \"non-public\" or \"restricted public\" data assetis not \"public,\" if applicable.", + "Text must be 255 or fewer characters." + ], + "anyOf": [ + { + "type": "string", + "minLength": 1, + "maxLength": 255 + }, + { + "type": "null" + } + ], + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Rights", + "referenceProperty": "dc:rights" + } + }, + + "inventory": { + "description": "a summary of the components that are associated with this resource", + "notes": [ + "This property is an aid for processing resources with a large number of components. This summary allows the actual components content to be missing or incomplete in a trasmitted version of the record." 
+ ], + "$ref": "#/definitions/Inventory" + }, + + "components": { + "description": "a listing of the various component resources, tools, and distributions of this resource", + "notes": [ + "Records for the resources referenced in this list should specify this resource in its isPartOf field.", + "The @type property will indicate which type of component it is. The first value in the @type list should be considered the primary or most important classification.", + "If an item's @type includes dcat:Distribution, the item should be convertable to a distribution in the POD schema.", + "The order should be considered meaningful (at least for components of the same type). The meaning or intention behind the order can depend on the type; however, generally, display of the components of a common type should preserve the order. For clarity then, it is recommended that items of the same primary type should be grouped together in the list." + ], + "type": "array", + "items": { "$ref": "#/definitions/Component" }, + "minLength": 1, + "uniqueItems": true + }, + + "conformsTo": { + "title": "Standard", + "description": "URI used to identify a standardized specification the resource conforms to", + "anyOf": [ + { + "type": "string", + "format": "uri" + }, + { + "type": "null" + } + ], + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Data Standard", + "referenceProperty": "dc:conformsTo", + "owl:range": "xsd:anyURI" + } + }, + + "isPartOf": { + "title": "Collections", + "description": "The collections that this dataset formally belongs to", + "notes": [ + "This property indicates relation of different resources" + ], + "type": "array", + "items": { "$ref": "#/definitions/ResourceReference" }, + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Collection", + "referenceProperty": "dc:isPartOf" + } + }, + + "language": { + "title": "Language", + "description": "The primary language used in the dataset", + "anyOf": [ + { + "type": 
"array", + "items": { + "type": "string", + "pattern": "^(((([A-Za-z]{2,3}(-([A-Za-z]{3}(-[A-Za-z]{3}){0,2}))?)|[A-Za-z]{4}|[A-Za-z]{5,8})(-([A-Za-z]{4}))?(-([A-Za-z]{2}|[0-9]{3}))?(-([A-Za-z0-9]{5,8}|[0-9][A-Za-z0-9]{3}))*(-([0-9A-WY-Za-wy-z](-[A-Za-z0-9]{2,8})+))*(-(x(-[A-Za-z0-9]{1,8})+))?)|(x(-[A-Za-z0-9]{1,8})+)|((en-GB-oed|i-ami|i-bnn|i-default|i-enochian|i-hak|i-klingon|i-lux|i-mingo|i-navajo|i-pwn|i-tao|i-tay|i-tsu|sgn-BE-FR|sgn-BE-NL|sgn-CH-DE)|(art-lojban|cel-gaulish|no-bok|no-nyn|zh-guoyu|zh-hakka|zh-min|zh-min-nan|zh-xiang)))$" + } + }, + { + "type": "null" + } + ], + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Language", + "referenceProperty": "dc:language" + } + }, + + "landingPage": { + "title": "Homepage URL", + "description": "a URL to a human-friendly web page that represents the home or gateway to the resources that are part of this dataset.", + "anyOf": [ + { + "type": "string", + "format": "uri" + }, + { + "type": "null" + } + ], + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Homepage URL", + "referenceProperty": "dcat:landingPage" + } + }, + + "references": { + "title": "Related Resources", + "description": "Related documents such as technical information about a dataset, developer documentation, etc.", + "type": "array", + "items": { "$ref": "#/definitions/BibliographicReference" }, + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "References", + "referenceProperty": "dc:references" + } + }, + + "theme": { + "title": "Category", + "description": "Main thematic category of the dataset.", + "notes": [ + "Could include ISO Topic Categories (http://www.isotopicmaps.org/)" + ], + "anyOf": [ + { + "type": "array", + "items": { + "type": "string", + "minLength": 1 + }, + "minItems": 1, + "uniqueItems": true + }, + { + "type": "null" + } + ] + }, + + "@id": { + "description": "A (primary) unique identifier for the resource", + "notes": [ + "It is expected that this 
field will contain a type of identifier that is of a uniform type across all resource descriptions in the system", + "This identifier should, by default, resolve to the metadata record (or some rendering of it) rather than to the resource itself (e.g. via its landing page)." + ], + "type": "string", + "minLength": 5, + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Unique Identifier", + "referenceProperty": "dc:identifier" + } + }, + + "doi": { + "description": "A Digital Object Identifier (DOI) in non-resolving form.", + "notes": [ + "The DOI value should not have the resolver URI base, but should have the form 'doi:NNNNN/XXXXXX'", + "It is expected that this will primarily be Datacite DOIs" + ], + "type": "string", + "pattern": "^doi:[0-9]+\\.[0-9]+/.*$", + "minLength": 5, + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Unique Identifier", + "referenceProperty": "dc:identifier" + } + }, + + "ediid": { + "description": "the NIST EDI identifier assigned to the resource", + "type": "string", + "minLength": 5, + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "EDI Identifier", + "referenceProperty": "dc:identifier" + } + }, + + "abbrev": { + "description": "an abbreviated form of the resource's title", + "notes": [ + "this can be used as a label for a compact display or text for a link to this resource", + "this may be helpful for discovery when an abbreviation is included in a free-text search" + ], + "type": "array", + "items": { + "type": "string", + "maxLength": 24 + } + }, + + "@type": { + "description": "the linked-data class types for this resource", + "notes": [ + "The value must always be given as an array, even when only one type is provided.", + "Multiple values indicate that the Resource can be considered of multiple types", + "If this resource is not to be considered a subtype of the nrd:Resource, its value should be 'nrd:Resource'." 
+ + ], + "type": "array", + "items": { "type": "string" } + } + + }, + + "required": [ + "title", "description", "landingPage", "publisher", + "contactPoint", "accessLevel" + ] + }, + + "FlexibleDate": { + "description": "a flexible format for a resource-related date", + "anyOf": [ + { + "type": "string", + "pattern": "^([\\+-]?\\d{4}(?!\\d{2}\\b))((-?)((0[1-9]|1[0-2])(\\3([12]\\d|0[1-9]|3[01]))?|W([0-4]\\d|5[0-2])(-?[1-7])?|(00[1-9]|0[1-9]\\d|[12]\\d{2}|3([0-5]\\d|6[1-6])))([T\\s]((([01]\\d|2[0-3])((:?)[0-5]\\d)?|24\\:?00)([\\.,]\\d+(?!:))?)?(\\17[0-5]\\d([\\.,]\\d+)?)?([zZ]|([\\+-])([01]\\d|2[0-3]):?([0-5]\\d)?)?)?)?$" + }, + { + "type": "null" + } + ] + }, + + "RelatedResource": { + "description": "a resource that is related in some way to this resource", + "notes": [ + "This serves as a base type for various kinds of references." + ], + "type": "object", + + "properties": { + "@id": { + "description": "an identifier for the reference", + "notes": [ + "The most well-known PID used to identify the resource (see also notes for proxyFor", + "This identifier can be a relative URL which can be used in special cases (See the notes for BibliographicReference:@type for when this ID should not refer to the referenced work).", + "Though not required by this schema, providing an identifier for a component is critical for merging information from different sources.", + "A relative identifier requires that @base be set to the Resource identifier in the @context." + ], + "type": "string" + }, + "@type": { + "anyOf": [ + { + "type": "string", + "enum": [ + "deo:BibliographicReference", + "org:Organization" + ] + }, + { + "type": "array", + "items": { "type": "string" } + } + ], + "notes": [ + "The single string value option is provided for backward compatibility (and may be removed in future versions; see BibliographicReference). 
When this is used, the node represents a reference and not the thing being referenced; the @id contains a relative URI, representing this reference" + ] + }, + + + "title": { + "description": "the name of the resource being referenced", + "notes": [ + "This value is intended for display.", + "This can be, but is not required to be, the same title given in the metadata record describing the resource" + ], + "type": "string", + "minLength": 1 + }, + "proxyFor": { + "description": "a local identifier representing this resource", + "notes": [ + "This identifier is expected to point to an up-to-date description of the resource as known to the local system. The properties associated with that identifier may be different those given in the current record." + ], + "type": "string", + "format": "uri", + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Current Person Information", + "referenceProperty": "ore:proxyFor" + } + }, + "location": { + "description": "the URL for accessing the resource", + "type": "string", + "format": "uri" + }, + + "label": { + "description": "a recommended label or title to display as the text for a link to the document", + "notes": [ + "This is intended to be briefer than a citation. It should be short enough to use as tag to an entry in reference list as inserted into a document's text" + ], + "type": "string" + }, + + "issued": { + "description": "The date the referenced resource was issued", + "notes": [ + "This is intended for determining relative newness--i.e. being newer or older than the referring resource." 
+ ], + "$ref": "#/definitions/FlexibleDate" + }, + + "description": { + "description": "a brief, human-readable description of what this reference refers to and/or why it is being referenced.", + "type": "string" + } + + }, + + "dependencies": { + "proxyFor": { + "required": [ "@type" ] + } + } + }, + + "ResourceReference": { + "description": "a reference to another resource that may have an associated ID", + "notes": [ + "While providing a resType property is recommended, it is required if the proxyFor ID is given." + ], + "allOf": [ + { "$ref": "#/definitions/RelatedResource" }, + { + "required": [ "title" ] + } + ] + }, + + "VersionRelease": { + "description": "notes about a versioned release of the resource", + "allOf": [ + { "$ref": "#/definitions/RelatedResource" }, + { + "properties": { + "version": { + "title": "Version", + "description": "a string indicating a version of the release of this resource", + "notes": [ + "label could contain the same value" + ], + "type": "string", + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Version", + "referenceProperty": "schema:version" + } + } + }, + "required": [ "version", "issued" ] + } + ] + }, + + "BibliographicReference": { + "description": "a reference to a creative work that provides information or data that is important to this resource.", + "notes": [ + "Recognized @type list values include:", + "npg:Document -- a work that is primarily meant to be human readable and could appropriately be identified with a CrossRef DOI", + "npg:Dataset -- a reference that could appropriately be identified with a DataCite DOI", + "npg:Article -- a work that is published in a book, journal, or other periodical", + "schema:Book -- a book, typically physically-bound.", + "deo:BibliographicReference -- a generic citable reference. 
This is considered a superclass of all reference types and the default type when the specific type cannot be determined.", + "npg:Document and npg:Dataset should be considered mutually exclusive and should not appear in the same list. npg:Article and schema:Book should be considered subclasses of npg:document", + "In the context of a NERDm Resource record, the @id would identify the reference to the document (i.e. as part of a reference list); use proxyFor to provide an ID for the document itself (e.g. a DOI; highly recommended)" + ], + "allOf": [ + { "$ref": "#/definitions/RelatedResource" }, + { + "properties": { + "citation": { + "description": "a full formated citation string for the reference, appropriate for inclusion in a bibliography", + "type": "string", + "asOntology": { + "@conxtext": "profile-schema-onto.json", + "prefLabel": "Cite as", + "referenceProperty": "dc:bibliographicCitation" + } + }, + "refType": { + "description": "the type of relationship that this document has with the resource", + "notes": [ + "This is equivalent to the Datacite relationType in that the term is a predicate that connects the resource as the subject to the referenced document as the object (e.g. resource IsDocumentedBy referenced-doc)", + "The DCiteReference type from Bibliographic extension sets DataCite terms as controlled vocabulary" + ], + "type": "string" + } + }, + "required": [ "@id", "@type" ] + } + ] + }, + + "Inventory": { + "description": "an inventory summary of the components that are part of a Resource", + "type": "array", + "minItems": 1, + "items": { "$ref": "#/definitions/CollectionInventory" } + }, + + "TypeInventory": { + "description": "a count of the number of components of a given type", + "notes": [ + "This type is intended for use in the context of a (named) collection of components wihtin a hierarchy of components. In the property documentation, the 'current collection' refers to that context." 
+ ], + "type": "object", + "properties": { + "forType": { + "description": "the type of component", + "notes": [ + "This should be an abbreviated URI as used for @type values" + ], + "type": "string" + }, + "childCount": { + "description": "the number of components of this type that are a direct child of the current collection", + "type": "integer" + }, + "descCount": { + "description": "the number of components of this type that are anywhere below the current collection", + "type": "integer" + }, + "label": { + "description": "a recommended label for the type and count", + "type": "string" + } + }, + "required": [ "forType", "descCount" ] + }, + + "CollectionInventory": { + "description": "an inventory that breaks down numbers by the hierarchical collection, where this type describes a node in the hierarchy", + "notes": [ + "The idea here is to provide numbers for each subdirectory" + ], + "type": "object", + "properties": { + "forCollection": { + "description": "the name of the collection that this inventory applies to", + "notes": [ + "This is intended to be a /-delimited name that reflects the position of the subcollection in the hierarchy, as given in the Subcollection filepath field of the collection component.", + "The root collection should be refered to with an empty string" + ], + "type": "string" + }, + "childCount": { + "description": "the number of relevent components that are a direct child of the collection", + "type": "integer" + }, + "descCount": { + "description": "the number of relevent components that are anywhere below the collection", + "type": "integer" + }, + "byType": { + "description": "a breakdown of the inventory of components in this collection by type", + "type": "array", + "items": { "$ref": "#/definitions/TypeInventory" } + }, + "childCollections": { + "description": "an array listing the names of the subcollections that are direct children of the collection", + "notes": [ + "Each name should be the full filepath value that refleces 
the position of the subcollection in the hierarchy, relative to the root, as given in the Subcollection filepath field of the collection component. This makes it possible to select the subcollection's inventory (via forCollection) or its Subcollection component (via filepath)", + "The order of the listing should be considered significant" + ], + "type": "array", + "items": { "type": "string" } + } + }, + "required": [ "forCollection" ] + }, + + "Component": { + "description": "a description of a component of a resource", + "type": "object", + "notes": [ + "Include a $extensionSchema property to validate this description against a Component sub-type" + ], + "properties": { + "@id": { + "description": "a (relative) identifier for the distribution", + "notes": [ + "Though not required by this schema, providing an identifier for a component is critical for merging information from different sources.", + "A relative identifier requires that @base be set to the Resource identifier in the @context." + ], + "type": "string" + }, + "@type": { + "description": "the types of components that this component can be classified as", + "notes": [ + "the first value should be considered its primary type. This is usually a subtype of Component." 
+ ], + "type": "array", + "items": { "type": "string" } + }, + "title": { + "description": "a descriptive title for the component", + "type": "string" + }, + "description": { + "description": "a description of the nature and contents of the component, including the role it plays as part of the resource", + "type": "string" + }, + "topic": { + "description": "Identified tags referring to things or concepts that this component addresses or speaks to", + "type": "array", + "items": { "$ref": "#/definitions/Topic" }, + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Topic", + "referenceProperty": "foaf:topic" + } + }, + "conformsTo": { + "title": "Standard", + "description": "URI used to identify a standardized specification the component conforms to", + "anyOf": [ + { + "type": "string", + "format": "uri" + }, + { + "type": "null" + } + ], + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Data Standard", + "referenceProperty": "dc:conformsTo" + } + } + + } + }, + + "IncludedResource": { + "description": "A reference to another resource (which has its own record) that is a part of this resource", + "notes": [ + "Include 'nrd:IncludedResource' as a value to @type", + "The title should be (but is not required to be) the title provided in the included resource's metadata record." + ], + "allOf": [ + { "$ref": "#/definitions/Component" }, + { "$ref": "#/definitions/ResourceReference" }, + { + "properties": { + "@id": { + "description": "a relative identifier for this component", + "notes": [ + "@id should be the ID for the included resource as a component of this resource; use the required proxyFor property to give the native ID of the included resource." 
+ ] + }, + "resourceType": { + "description": "the linked-data class types for the included resource", + "notes": [ + "The @type should contain values that are subclasses of Component; resourceType is provided to list the Resource types for the included resource.", + "Just like a Resource:@type, the value must always be given as an array, even when only one type is provided.", + "Multiple values indicate that the Resource can be considered of multiple types", + "If this resource is not to be considered a subtype of the nrd:Resource, its value should be 'nrd:Resource'." + ], + "type": "array", + "items": { "type": "string" } + } + }, + "required": [ "proxyFor", "resourceType" ] + } + ] + }, + + "ISO8601DateRange": { + "title": "Last Update", + "description": "a single date-time or a date-time range", + "anyOf": [ + { + "type": "string", + "pattern": "^([\\+-]?\\d{4}(?!\\d{2}\\b))((-?)((0[1-9]|1[0-2])(\\3([12]\\d|0[1-9]|3[01]))?|W([0-4]\\d|5[0-2])(-?[1-7])?|(00[1-9]|0[1-9]\\d|[12]\\d{2}|3([0-5]\\d|6[1-6])))([T\\s]((([01]\\d|2[0-3])((:?)[0-5]\\d)?|24\\:?00)([\\.,]\\d+(?!:))?)?(\\17[0-5]\\d([\\.,]\\d+)?)?([zZ]|([\\+-])([01]\\d|2[0-3]):?([0-5]\\d)?)?)?)?$" + }, + { + "type": "string", + "pattern": "^(R\\d*\\/)?P(?:\\d+(?:\\.\\d+)?Y)?(?:\\d+(?:\\.\\d+)?M)?(?:\\d+(?:\\.\\d+)?W)?(?:\\d+(?:\\.\\d+)?D)?(?:T(?:\\d+(?:\\.\\d+)?H)?(?:\\d+(?:\\.\\d+)?M)?(?:\\d+(?:\\.\\d+)?S)?)?$" + }, + { + "type": "string", + "pattern": "^(R\\d*\\/)?([\\+-]?\\d{4}(?!\\d{2}\\b))((-?)((0[1-9]|1[0-2])(\\4([12]\\d|0[1-9]|3[01]))?|W([0-4]\\d|5[0-2])(-?[1-7])?|(00[1-9]|0[1-9]\\d|[12]\\d{2}|3([0-5]\\d|6[1-6])))([T\\s]((([01]\\d|2[0-3])((:?)[0-5]\\d)?|24\\:?00)([\\.,]\\d+(?!:))?)?(\\18[0-5]\\d([\\.,]\\d+)?)?([zZ]|([\\+-])([01]\\d|2[0-3]):?([0-5]\\d)?)?)?)?(\\/)P(?:\\d+(?:\\.\\d+)?Y)?(?:\\d+(?:\\.\\d+)?M)?(?:\\d+(?:\\.\\d+)?W)?(?:\\d+(?:\\.\\d+)?D)?(?:T(?:\\d+(?:\\.\\d+)?H)?(?:\\d+(?:\\.\\d+)?M)?(?:\\d+(?:\\.\\d+)?S)?)?$" + } + ] + }, + + "Topic": { + "description": "a container for an identified 
concept term or proper thing used to describe a resource's subject matter", + "notes": [ + "A concept term refers to a subject or keyword term, like 'magnetism' while a proper thing is a particular instance of a concept that has a name, like the planet 'Saturn' or the person called 'Abraham Lincoln'", + "The meaning of concept is that given by the OWL ontology (owl:Concept); the meaning of thing is that given by the SKOS ontology (skos:Thing). See also the FOAF ontology." + ], + "type": "object", + "properties": { + "@type": { + "description": "a label indicating whether the value refers to a concept or a thing", + "type": "string", + "enum": [ "Concept", "Thing" ], + "valueDocumentation": { + "Concept": { + "description": "label indicating that the value refers to a concept (as in owl:Concept)" + }, + "Thing": { + "description": "label indicating that the value refers to a named person, place, or thing (as in skos:Thing)" + } + } + }, + + "scheme": { + "description": "a URI that identifies the controlled vocabulary, registry, or identifier system that the value is defined in.", + "type": "string", + "format": "uri", + "asOnotology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Schema", + "referenceProperty": "vold:vocabulary" + } + }, + + "@id": { + "description": "the unique identifier identifying the concept or thing", + "type": "string", + "format": "uri" + }, + + "tag": { + "description": "a short, display-able token that locally represents the concept or thing", + "notes": [ + "As a token, it is intended that applications can search for this value and find all resources that are talking about the same thing. Thus, regardless of whether the @id field is provided, all references to the same concept or thing should use the same tag value." 
+ ], + "type": "string" + } + }, + "required": [ "@type", "tag" ] + }, + + "Organization": { + "description": "a named organization that may be part of a larger organization", + "type": "object", + "properties": { + "@type": { + "title": "Metadata Context", + "description": "IRI for the JSON-LD data type. This should be org:Organization for each publisher", + "type": "string", + "enum": [ "org:Organization" ] + }, + "name": { + "title": "Publisher Name", + "description": "The plain-text name of the organization", + "type": "string", + "minLength": 1, + "asOntology": { + "@context": "profile-schema-onto.json", + "referenceProperty": "skos:prefLabel" + } + }, + "subOrganizationOf": { + "description": "A parent organizational entity", + "$ref": "#/definitions/Organization", + "asOntology": { + "@context": "profile-schema-onto.json", + "referenceProperty": "org:subOrganizationOf" + } + } + }, + "required": [ "name" ] + }, + + "ContactInfo": { + "description": "Information describing various ways to contact an entity", + "notes": [ + ], + "properties": { + "@type": { + "type": "string", + "enum": [ "vcard:Contact" ] + }, + "fn": { + "title": "Contact Name", + "description": "full name of the contact person, role, or organization", + "type": "string", + "minLength": 1, + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Contact Name", + "referenceProperty": "vcard:fn" + } + }, + + "hasEmail": { + "title": "Email", + "description": "The email address of the resource contact", + "type": "string", + "pattern": "^[\\w\\_\\~\\!\\$\\&\\'\\(\\)\\*\\+\\,\\;\\=\\:.-]+@[\\w.-]+\\.[\\w.-]+?$", + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Contact Email", + "referenceProperty": "vcard:hasEmail" + } + }, + + "postalAddress": { + "description": "the contact mailing address", + "notes": [ + ], + "$ref": "#/definitions/PostalAddress", + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Contact Address", + 
"referenceProperty": "vcard:hasAddress" + } + }, + + "phoneNumber": { + "description": "the contact telephone number", + "notes": [ "Complete international dialing codes should be given, e.g. '+1-410-338-1234'" ], + "type" : "string", + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Contact Phone Number", + "referenceProperty": "vcard:hasTelephone" + } + }, + + "timezone": { + "description": "the time zone where the contact typically operates", + "type" : "string", + "pattern": "^[-+][0-9]{4}$", + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Contact Address", + "referenceProperty": "transit:timezone" + } + }, + + "proxyFor": { + "description": "a local identifier representing this person", + "notes": [ + "This identifier is expected to point to an up-to-date description of the person as known to the local system. The properties associated with that identifier may be different those given in the current record." + ], + "type": "string", + "format": "uri", + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Current Person Information", + "referenceProperty": "ore:proxyFor" + } + } + + }, + "asOntology": { + "@context": "profile-schema-onto.json", + "@id": "pod:ContactPerson", + "@type": "owl:Class", + "prefLabel": "Contact Information", + "referenceClass": "vcard:Contact" + } + + }, + + "PostalAddress": { + "description": "a line-delimited listing of a postal address", + "type": "array", + "items": { "type": "string", "minLength": 1 }, + "asOntology": { + "@context": "profile-schema-onto.json", + "referenceProperty": "vcard:hasAddress" + } + }, + + "Identifier": { + "description": "a complete description of an identifier, including the scheme that it adheres to", + "title": "Identifier", + "properties": { + "scheme": { + "description": "a label indicating the system that the identifier adheres to", + "notes": [ + "this label may imply a particular resolver to use" + ], + "type": "string", 
+ "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Identifier System", + "referenceProperty": "mads:idScheme" + } + }, + + "value": { + "description": "the value of the identifier", + "notes": [ + "if no scheme is provided, a URI form of the identifier should be given" + ], + "type": "string", + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Identifier System", + "referenceProperty": "mads:idValue" + } + } + + }, + "required": [ "value" ], + "asOntology": { + "@context": "profile-schema-onto.json", + "@id": "mads:Identifier", + "@type": "owl:Class", + "prefLabel": "Identifier information", + "referenceClass": "mads:Identifier" + } + } + }, + + "$ref": "#/definitions/Resource" +} diff --git a/model/nerdm-schema.json b/model/nerdm-schema.json index e83c403..e4ba5cf 100644 --- a/model/nerdm-schema.json +++ b/model/nerdm-schema.json @@ -1,7 +1,7 @@ { "$schema": "http://json-schema.org/draft-04/schema#", - "$extensionSchemas": ["https://www.nist.gov/od/dm/enhanced-json-schema/v0.1#"], - "id": "https://data.nist.gov/od/dm/nerdm-schema/v0.6#", + "$extensionSchemas": ["https://data.nist.gov/od/dm/enhanced-json-schema/v0.1#"], + "id": "https://data.nist.gov/od/dm/nerdm-schema/v0.7#", "rev": "wd1", "title": "The JSON Schema for the NIST Extended Resource Data model (NERDm)", "description": "A JSON Schema specfying the core NERDm classes", @@ -164,10 +164,54 @@ } }, + "annotated": { + "title": "Last Annotation Date", + "description": "Date of the last minor update to metadata or other ancillary information that (by itself) did not affect the main content of the resource", + "notes": [ + "This will always get updated for both minor and major changes; that is, when the revision date changes, this date will change as well." 
+ ], + "$ref": "#/definitions/FlexibleDate", + "asOntology": { + "@context": "profile-schema-onto.json", + "prefLabel": "Release Date", + "range": "xsd:date" + } + }, + + "status": { + "description": "A label indicating this resource's release and availability status", + "type": "string", + "notes": [ + "This value is meant to flag a resource as perhaps unavailable--temporarily or permanently--with some indication as to why; further explanation could be amended to the resource description.", + "This value is intended to be orthogonal to the accessLevel property.", + "See ReleaseStatus value documentation (from the nerdm-schema/rls schema) for the full set of values recognized by the NIST PDR. If this property is not provided, the assumed value should 'available'" + ], + "valueDocumentation": { + "available": { + "description": "the resource is currently available via its landing page and the links therein as well as via its component links" + }, + "unavailable": { + "description": "the resource is temporarily unavailable, usuallly for known reasons (other than being in an embargoed state)", + "notes": [ + "This might be set if the server that provides the resource is going to be down for an extended period of time" + ] + }, + "missing": { + "description": "the resource is unavailable for unknown reasons", + "notes": [ + "The publisher may apply the landing page and/or component links are unresponsive for an extended period of time." + ] + }, + "removed": { + "description": "the resource was unpublished--that is, intentionally and permanently made unavailable, usually to prevent its use" + } + } + }, + "publisher": { "description": "The publishing entity and optionally their parent organization(s).", "notes": [ - "This is a container for a publisher object which groups together the fields: name and subOrganization. The subOrganization field can also contain a publisher object which allows one to describe an organization’s hierarchy. 
Where greater specificity is desired, include as many levels of publisher as is useful, in ascending order, using the below format." + "This is a container for a publisher object which groups together the fields, name and subOrganization. The subOrganization field can also contain a publisher object which allows one to describe an organization's hierarchy. Where greater specificity is desired, include as many levels of publisher as is useful, in ascending order, using the format below." ], "$ref": "#/definitions/Organization", "asOntology": { @@ -310,7 +354,7 @@ "title": "Collections", "description": "The collections that this dataset formally belongs to", "notes": [ - "This property indicates relation of different resources" + "This property indicates a type of relationship between this and other resources" ], "type": "array", "items": { "$ref": "#/definitions/ResourceReference" }, @@ -465,10 +509,7 @@ }, - "required": [ - "title", "description", "landingPage", "publisher", - "contactPoint", "accessLevel" - ] + "required": [ "title", "contactPoint" ] }, "FlexibleDate": { @@ -485,9 +526,11 @@ }, "RelatedResource": { - "description": "a resource that is related in some way to this resource", + "description": "a resource that is related in some way to a subject resource", "notes": [ - "This serves as a base type for various kinds of references." + "This serves as a base type for various kinds of references to other resources. When used as a property value, the property defines a relationship between a subject resource and the resource described by this type", + "The object resource is identified by @id (and possibly by proxyFor); the other properties can be provided as a way to provide a view of the related resource without resolving the identifier. The values of the other properties are typically copies from the refered resource's metadata; however, it is not guaranteed. 
In general, the view of this resource provided by these properties represents how the resource should be perceived as part of its relationship with the subject resource.
], "type": "string", "format": "uri", "asOntology": { "@context": "profile-schema-onto.json", - "prefLabel": "Current Person Information", - "referenceProperty": "ore:proxyFor" + "prefLabel": "Identified locally as", + "referenceProperty": "ctlog:localIdentifier" } }, "location": { @@ -593,31 +649,6 @@ ] }, - "VersionRelease": { - "description": "notes about a versioned release of the resource", - "allOf": [ - { "$ref": "#/definitions/RelatedResource" }, - { - "properties": { - "version": { - "title": "Version", - "description": "a string indicating a version of the release of this resource", - "notes": [ - "label could contain the same value" - ], - "type": "string", - "asOntology": { - "@context": "profile-schema-onto.json", - "prefLabel": "Version", - "referenceProperty": "schema:version" - } - } - }, - "required": [ "version", "issued" ] - } - ] - }, - "BibliographicReference": { "description": "a reference to a creative work that provides information or data that is important to this resource.", "notes": [ diff --git a/model/nerdm-sip-schema.json b/model/nerdm-sip-schema.json new file mode 100644 index 0000000..fc8731f --- /dev/null +++ b/model/nerdm-sip-schema.json @@ -0,0 +1,35 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "$extensionSchemas": ["https://data.nist.gov/od/dm/enhanced-json-schema/v0.1#"], + "id": "https://data.nist.gov/od/dm/nerdm-schema/sip/v0.1#", + "rev": "wd1", + "title": "The NERDm extension metadata for describing resource publication submissions", + "description": "This extension schema defines resources that are being submitted for publication (i.e. as part of a Submission Information Package (SIP). 
In this case, the metadata requirements reflect this pre-publication status which generally will be lighter than those for a published resource.", + "definitions": { + "ResourceSubmission": { + "description": "a Resource being submitted for inclusion in a repository or registry", + "notes": [ + "The base Resource type requires properties, title and contactPoint" + ], + "allOf": [ + { "$ref": "https://data.nist.gov/od/dm/nerdm-schema/v0.7#/definitions/Resource" }, + { + "required": [ "description" ] + } + ] + }, + + "PDRSubmission": { + "description": "a Resource being submitted for inclusion in a repository or registry", + "notes": [ + "The base types require the following properties: title, description, and contactPoint" + ], + "allOf": [ + { "$ref": "#/definitions/ResourceSubmission" }, + { + "required": [ "accessLevel" ] + } + ] + } + } +} diff --git a/model/pod-relaxed-schema.json b/model/pod-relaxed-schema.json index 07dfc60..bf3b6b6 100644 --- a/model/pod-relaxed-schema.json +++ b/model/pod-relaxed-schema.json @@ -1,6 +1,6 @@ { "$schema": "http://json-schema.org/draft-04/schema#", - "$extensionSchemas": ["https://www.nist.gov/od/dm/enhanced-json-schema/v0.1#"], + "$extensionSchemas": ["https://data.nist.gov/od/dm/enhanced-json-schema/v0.1#"], "id": "https://data.nist.gov/od/dm/pod-schema/relaxed-1.1#", "title": "US Project Open Data Schema as Extended JSON Schema", "description": "This JSON Schema expresses the POD schema (v1.1) so that instances can be validated via a JSON Schema validater", @@ -116,7 +116,7 @@ "publisher": { "description": "The publishing entity and optionally their parent organization(s).", "notes": [ - "This is a container for a publisher object which groups together the fields: name and subOrganization. The subOrganization field can also contain a publisher object which allows one to describe an organization’s hierarchy. 
Where greater specificity is desired, include as many levels of publisher as is useful, in ascending order, using the below format." + "This is a container for a publisher object which groups together the fields: name and subOrganization. The subOrganization field can also contain a publisher object which allows one to describe an organization's hierarchy. Where greater specificity is desired, include as many levels of publisher as is useful, in ascending order, using the below format." ], "$ref": "#/definitions/Organization", "asOntology": { @@ -127,9 +127,9 @@ }, "contactPoint": { - "description": "Contact person’s name and email for the asset", + "description": "Contact person's name and email for the asset", "notes": [ - "This is a container for two fields that together make up the contact information for the dataset. contactPoint should always contain both the person’s appropriately formatted full name (fn) and email (hasEmail)." + "This is a container for two fields that together make up the contact information for the dataset. contactPoint should always contain both the person's appropriately formatted full name (fn) and email (hasEmail)." ], "$ref": "#/definitions/ContactPoint", "asOntology": { @@ -142,7 +142,7 @@ "identifier": { "description": "A unique identifier for the dataset or API as maintained within an Agency catalog or database", "notes": [ - "This field allows third parties to maintain a consistent record for datasets even if title or URLs are updated. Agencies may integrate an existing system for maintaining unique identifiers. Each identifier must be unique across the agency’s catalog and remain fixed. It is highly recommended that a URI (preferably an HTTP URL) be used to provide a globally unique identifier. Identifier URLs should be designed and maintained to persist indefinitely regardless of whether the URL of the resource itself changes." 
+ "This field allows third parties to maintain a consistent record for datasets even if title or URLs are updated. Agencies may integrate an existing system for maintaining unique identifiers. Each identifier must be unique across the agency's catalog and remain fixed. It is highly recommended that a URI (preferably an HTTP URL) be used to provide a globally unique identifier. Identifier URLs should be designed and maintained to persist indefinitely regardless of whether the URL of the resource itself changes." ], "type": "string", "minLength": 1, @@ -626,7 +626,7 @@ }, "ContactPoint": { - "description": "Contact person’s name and email for the asset", + "description": "Contact person's name and email for the asset", "type": "object", "properties": { "@type": { diff --git a/model/pod-schema.json b/model/pod-schema.json index e8bc71d..9413034 100644 --- a/model/pod-schema.json +++ b/model/pod-schema.json @@ -1,6 +1,6 @@ { "$schema": "http://json-schema.org/draft-04/schema#", - "$extensionSchemas": ["https://www.nist.gov/od/dm/enhanced-json-schema/v0.1#"], + "$extensionSchemas": ["https://data.nist.gov/od/dm/enhanced-json-schema/v0.1#"], "id": "https://data.nist.gov/od/dm/pod-schema/v1.1#", "title": "US Project Open Data Schema as Extended JSON Schema", "description": "This JSON Schema expresses the POD schema (v1.1) so that instances can be validated via a JSON Schema validater", @@ -107,7 +107,7 @@ "publisher": { "description": "The publishing entity and optionally their parent organization(s).", "notes": [ - "This is a container for a publisher object which groups together the fields: name and subOrganization. The subOrganization field can also contain a publisher object which allows one to describe an organization’s hierarchy. Where greater specificity is desired, include as many levels of publisher as is useful, in ascending order, using the below format." + "This is a container for a publisher object which groups together the fields: name and subOrganization. 
The subOrganization field can also contain a publisher object which allows one to describe an organization's hierarchy. Where greater specificity is desired, include as many levels of publisher as is useful, in ascending order, using the below format." ], "$ref": "#/definitions/Organization", "asOntology": { @@ -118,9 +118,9 @@ }, "contactPoint": { - "description": "Contact person’s name and email for the asset", + "description": "Contact person's name and email for the asset", "notes": [ - "This is a container for two fields that together make up the contact information for the dataset. contactPoint should always contain both the person’s appropriately formatted full name (fn) and email (hasEmail)." + "This is a container for two fields that together make up the contact information for the dataset. contactPoint should always contain both the person's appropriately formatted full name (fn) and email (hasEmail)." ], "$ref": "#/definitions/ContactPoint", "asOntology": { @@ -133,7 +133,7 @@ "identifier": { "description": "A unique identifier for the dataset or API as maintained within an Agency catalog or database", "notes": [ - "This field allows third parties to maintain a consistent record for datasets even if title or URLs are updated. Agencies may integrate an existing system for maintaining unique identifiers. Each identifier must be unique across the agency’s catalog and remain fixed. It is highly recommended that a URI (preferably an HTTP URL) be used to provide a globally unique identifier. Identifier URLs should be designed and maintained to persist indefinitely regardless of whether the URL of the resource itself changes." + "This field allows third parties to maintain a consistent record for datasets even if title or URLs are updated. Agencies may integrate an existing system for maintaining unique identifiers. Each identifier must be unique across the agency's catalog and remain fixed. 
It is highly recommended that a URI (preferably an HTTP URL) be used to provide a globally unique identifier. Identifier URLs should be designed and maintained to persist indefinitely regardless of whether the URL of the resource itself changes." ], "type": "string", "minLength": 1, @@ -628,7 +628,7 @@ }, "ContactPoint": { - "description": "Contact person’s name and email for the asset", + "description": "Contact person's name and email for the asset", "type": "object", "properties": { "@type": { diff --git a/model/scrap-schema.json b/model/scrap-schema.json index 8334d31..76ea3eb 100644 --- a/model/scrap-schema.json +++ b/model/scrap-schema.json @@ -84,7 +84,7 @@ "mediaType": { "title": "Media Type", - "description": "The machine-readable file format (IANA Media Type or MIME Type) of the distribution’s downloadURL", + "description": "The machine-readable file format (IANA Media Type or MIME Type) of the distribution's downloadURL", "anyOf": [ { "pattern": "^[-\\w]+/[-\\w]+(\\.[-\\w]+)*([+][-\\w]+)?$", @@ -220,7 +220,7 @@ "describedByType": { "title": "Data Dictionary Type", - "description": "The machine-readable file format (IANA Media Type or MIME Type) of the distribution’s describedBy URL", + "description": "The machine-readable file format (IANA Media Type or MIME Type) of the distribution's describedBy URL", "anyOf": [ { "pattern": "^[-\\w]+/[-\\w]+(\\.[-\\w]+)*([+][-\\w]+)?$", diff --git a/model/simple-taxonomy-schema.json b/model/simple-taxonomy-schema.json index 260f67b..56e0192 100644 --- a/model/simple-taxonomy-schema.json +++ b/model/simple-taxonomy-schema.json @@ -1,7 +1,7 @@ { "$schema": "http://json-schema.org/draft-04/schema#", - "$extensionSchemas": ["https://www.nist.gov/od/dm/enhanced-json-schema/v0.1#"], - "id": "https://www.nist.gov/od/dm/simple-taxonomy/v1.0", + "$extensionSchemas": ["https://data.nist.gov/od/dm/enhanced-json-schema/v0.1#"], + "id": "https://data.nist.gov/od/dm/simple-taxonomy/v1.0", "rev": "1", "tile": "Simple Taxonomy", 
"description": "A simple encoding of terms from a hierarchical taxonomy", diff --git a/python/nistoar/base/__init__.py b/python/nistoar/base/__init__.py new file mode 100644 index 0000000..018c7c5 --- /dev/null +++ b/python/nistoar/base/__init__.py @@ -0,0 +1,134 @@ +""" +Some common classes and functions useful across the nistoar namespace +""" +import logging + +class SystemInfoMixin(object): + """ + an interface for accessing and using information about a particular OAR system a component + is a part of. This class can be used either as a mixin parent class or as the class of + an internal class variable. + """ + + # the currently set global system; see get_global_system() below + __globalsys = None + + def __init__(self, sysname: str, sysabbrev: str, subsname: str, subsabbrev: str, version: str): + self._sysn = sysname + self._abbrev = sysabbrev + self._subsys = subsname + self._subabbrev = subsabbrev + self._ver = version + + @property + def system_name(self): + return self._sysn + + @property + def system_abbrev(self): + return self._abbrev + + @property + def subsystem_name(self): + return self._subsys + + @property + def subsystem_abbrev(self): + return self._subabbrev + + @property + def system_version(self): + return self._ver + + def getSysLogger(self): + """ + return the default logger for this subsystem + """ + out = logging.getLogger(self.system_abbrev) + if self.subsystem_abbrev: + out = out.getChild(self.subsystem_abbrev) + return out + + @classmethod + def get_global_system(cls): + """ + return the system instance currently set as the global system. This instance is intended to + be set at __main__/script level when the script is intended to operate as part of a single + system. Some class constructors may consult this to determine which system the instance should + be considered part of. If None, no system has been so set. + """ + return cls.__globalsys + + def make_global(self): + """ + register this system instance as the global system. 
This method is intended to be called at + the __main__/script level to indicate that the script is operating as part of this system. + There can only be one global system registered, so if this method has been called on any other + system object, the previous one is replaced. + """ + self.__class__.__globalsys = self + +def get_global_system() -> SystemInfoMixin: + """ + return the SystemInfoMixin instance currently set as the global system. This is a convenience + function for ``SystemInfoMixin.get_global_system()``. + """ + return SystemInfoMixin.get_global_system() + + +class OARException(Exception): + """ + a common base class for all OAR-related exceptions + """ + + def __init__(self, message=None, cause=None, sys=None): + """ + instantiate the exception + :param str message: the description of and reason for the exception + :param Exception cause: the intercepted exception that is the underlying cause for this + exception. None (default) indicates there is no other underlying + cause beyond what is provided by the message. + """ + if not message: + if cause: + message = str(cause) + else: + message = "Unknown OAR exception occurred" + if not sys: + sys = SystemInfoMixin.get_global_system() + self.system = sys + super(OARException, self).__init__(message) + self.__cause__ = cause + + @property + def cause(self): + """ + return the exception instance that was set as the underlying cause for this exception. This + returns __cause__ and is provide for backward compatibility with legacy PDR code. + """ + return self.__cause__ + +class OARWarning(Warning): + """ + a common base class for all OAR-related warnings + """ + + def __init__(self, message=None, cause=None, sys=None): + """ + instantiate the exception + :param str message: the description of and reason for the exception + :param Exception cause: the intercepted exception that is the underlying cause for this + exception. 
None (default) indicates there is no other underlying + cause beyond what is provided by the message. + """ + if not message: + if cause: + message = str(cause) + else: + message = "Unknown OAR exception occurred" + self.cause = cause + if not sys: + sys = SystemInfoMixin.get_global_system() + self.system = sys + super(OARWarning, self).__init__(message) + diff --git a/python/nistoar/rmm/config.py b/python/nistoar/base/config.py similarity index 51% rename from python/nistoar/rmm/config.py rename to python/nistoar/base/config.py index 4d2e43e..3221e98 100644 --- a/python/nistoar/rmm/config.py +++ b/python/nistoar/base/config.py @@ -1,12 +1,13 @@ """ -Utilities for obtaining a configuration for RMM services +Utilities for obtaining configuration data for services """ -from __future__ import print_function -import os, sys, logging, json, yaml, re, collections, time -from urlparse import urlparse, urlunparse + +import os, sys, logging, json, yaml, time, re import requests +from collections.abc import Mapping +from urllib.parse import urlparse -from .exceptions import ConfigurationException +from . import OARException oar_home = None try: @@ -16,71 +17,63 @@ pass if not oar_home: - oar_home = os.environ.get('OAR_HOME', '/app/oar') + oar_home = os.environ.get('OAR_HOME', '/app/pdr') -urlre = re.compile(r'\w+:') +class ConfigurationException(OARException): + """ + a class indicating an error in the configuration of the PDR system + """ + def __init__(self, msg=None, cause=None, sys=None): + """ + create the exception + :param str msg: the description of and reason for the exception + :param Exception cause: the intercepted exception that is the underlying cause for this + exception. None (default) indicates there is no other underlying + cause beyond what is provided by the message.
+ """ + if not msg and not cause: + msg = "Unknown Configuration Error" + super(ConfigurationException, self).__init__(msg, cause, sys) -def resolve_configuration(location, baseurl=None): +def resolve_configuration(location: str) -> Mapping: """ - return a dictionary for configuring a system component. The absolute - location of the configuration will depend on the combination of the - two input parameters, location and baseurl: - :*: if location is in the form of a full URL (including starting with - file:), location is considered solely as the location of the - configuration data; baseurl is ignored. - :*: if location is in the form of a path (absolute or relative) and - baseurl is not given, location is interpreted as a file path on - the local filesystem. - :*: if both location and baseurl are given, they are combined according - to the rules for combining base URLs and relative URLs: if location - is an absolute path, any path component provided with baseurl will - be dropped before they are combined. - :*: baseurl can be set to "file:" to force the interpretation of location - as a filesystem path. baseurl cannot contain a server name. - - :param location str: the location of the configuration data, either as a - relative path, an absolute path, or a URL. - :param baseurl str: a base URL to combine with location to form the - absolute location of the configuration data. + return a dictionary for configuring the metadata service. + + :param str location: a filename, file path, or URL where the configuration + can be found. 
""" - if not isinstance(location, (str, unicode)): - raise TypeError("resolve_configuration(): location is not a string") if not location: - raise ValueError("resolve_configuration(): location not provided") - - locurl = urlparse(location) - if not locurl.scheme: - if baseurl: - if not isinstance(baseurl, (str, unicode)): - raise TypeError("resolve_configuration(): baseurl is not a string") - baseurl = list(urlparse(baseurl)) + raise ValueError("resolve_configration(): location arg not provided") + + if location.startswith('file:') or ':' not in location: + # From a file in the filesystem + if location.startswith('file://'): + location = location[len('file://'):] + elif location.startswith('file:'): + location = location[len('file:'):] + + if not location.startswith('/') and os.path.isabs(location): + cfgfile = os.path.join(oar_home, 'etc', 'config', location) + if not os.path.exists(cfgfile): + raise ConfigurationException("Config file not found: " + + cfgfile) else: - # default location of config files: "$OAR_HOME/etc/config/" - baseurl=list(urlparse("file:"+os.path.join(oar_home,"etc","config"))) + cfgfile = location + return load_from_file(cfgfile) - # combine baseurl and location - if locurl.path.startswith('/'): - baseurl[2] = locurl.path - else: - baseurl[2] = '/'.join([ baseurl[2].rstrip('/'), locurl.path ]) - baseurl[3] = locurl.query - baseurl[4] = locurl.fragment - locurl = urlparse(urlunparse(baseurl)) - - if locurl.scheme == 'file': - if locurl.netloc: - raise ValueError("resolve_configuration(): server name not allowed "+ - "in file URL: " + locurl.netloc) - return load_from_file(locurl.path) - - elif locurl.scheme in "http https": - return load_from_url(urlunparse(locurl)) + if location.startswith('configserver:'): + # retrieve from a configuration service + return load_from_service(location[len('configserver:'):]) + + if ':' in location: + # simple URL; do not feed a configuration service URL through this + # as the response will be not parsed 
correctly + raise NotImplementedError() - else: - raise ValueError("resolve_configuration(): unsupported URL scheme:" + - locurl.scheme) + raise ConfigurationException("Config file location could not be "+ + "interpreted: " + location) -def load_from_file(configfile): +def load_from_file(configfile: str) -> Mapping: """ read the configuration from the given file and return it as a dictionary. The file name extension is used to determine its format (with YAML as the @@ -91,116 +84,128 @@ def load_from_file(configfile): return json.load(fd) else: # YAML format - return yaml.load(fd) - -def load_from_url(configurl): - """ - read the configuration from the configuration server - - :param configurl str: the URL for retrieving the configuration - """ - try: - resp = requests.get(configurl) - if resp.status_code >= 400: - raise ConfigurationException( - "Server returned erroneous response: {0} {1}" - .format(resp.status_code, resp.reason)) - - ct = resp.headers.get('content-type','') - if '/yaml' in ct: - # it's in YAML format - fmt = 'YAML' - data = yaml.loads(resp.text) - elif ct or '/json' in ct: - # response is in JSON format by default - fmt = 'JSON' - data = resp.json() - - out = data - if 'propertySources' in data: - # this data is from the configuration server - out = ConfigService.extract(data, flat=True) - - return out - - except ValueError, ex: - raise ConfigurationException("Failed to parse %s data from URL". 
- format(fmt), cause=ex) - except requests.RequestException, ex: - raise ConfigurationException("Failed to pull configuration from URL: " + - str(ex), cause=ex) - + return yaml.safe_load(fd) LOG_FORMAT = "%(asctime)s %(name)s %(levelname)s: %(message)s" _log_handler = None - -def configure_log(logfile=None, level=None, format=None, config=None, +global_logdir = None # this is set when configure_log() is run +global_logfile = None # this is set when configure_log() is run +_log_levels_byname = { + "NOTSET": logging.NOTSET, + "DEBUG": logging.DEBUG, + "NORM": 15, + "NORMAL": 15, + "INFO": logging.INFO, + "WARN": logging.WARNING, + "WARNING": logging.WARNING, + "ERROR": logging.ERROR, + "CRITICAL": logging.CRITICAL +} +NORMAL = _log_levels_byname["NORMAL"] + +def configure_log(logfile: str=None, level: int=None, format: str=None, config: Mapping=None, addstderr=False): """ configure the log file, setting the output file, threshold, and format as necessary. These can be provided explicitly or provided via the configuration; the former takes precedence. - :param logfile str: the path to the output logfile. If given as a relative + If this is called a second time, it will first close the previously opened logfile, + reconfigure the logging to given inputs. + + :param str logfile: the path to the output logfile. If given as a relative path, it will be assumed that it is relative to a configured log directory. - :param level int: the logging threshold to set for sending messages to + :param int level: the logging threshold to set for sending messages to the logfile. - :param format str: the formatting string to configure the logfile with - :param config dict: a configuration dictionary to draw logging configuration + :param str format: the formatting string to configure the logfile with + :param dict config: a configuration dictionary to draw logging configuration values from. 
- :param addstderr bool: If True, send ERROR and more severe messages to - the standard error stream (default: False). + :param addstderr: If True, send ERROR and more severe messages + to the standard error stream (default: False). If + provided as a str, it is the formatting string for + messages sent to standard error. + :type addstderr: bool or str """ + global global_logdir + global global_logfile if not config: config = {} if not logfile: - logfile = config.get('logfile', 'rmm.log') + logfile = config.get('logfile', 'pdr.log') if not os.path.isabs(logfile): # The log directory can be set either from the configuration or via # the OAR_LOG_DIR environment variable; the former takes precedence deflogdir = os.path.join(oar_home,'var','logs') - logdir = config.get('logdir', os.environ.get('OAR_LOG_DIR', deflogdir)) - if not os.path.exists(logdir): - logdir = "/tmp" + logdir = config.get('logdir', determine_default_logdir()) + global_logdir = logdir logfile = os.path.join(logdir, logfile) + if not os.path.exists(os.path.dirname(logfile)): + os.makedirs(os.path.dirname(logfile)) + global_logfile = logfile if level is None: - level = logging.DEBUG + level = config.get('loglevel', logging.DEBUG) + if not isinstance(level, int): + level = _log_levels_byname.get(str(level), level) + if not isinstance(level, int): + raise ConfigurationException("Unrecognized loglevel value: "+str(level)) + if not format: - format = LOG_FORMAT + format = config.get('logformat', LOG_FORMAT) frmtr = logging.Formatter(format) global _log_handler + rootlogger = logging.getLogger() + if _log_handler: + rootlogger.removeHandler(_log_handler) + if hasattr(_log_handler, 'close'): + _log_handler.close() + _log_handler = None _log_handler = logging.FileHandler(logfile) _log_handler.setLevel(level) _log_handler.setFormatter(frmtr) - rootlogger = logging.getLogger() rootlogger.addHandler(_log_handler) - rootlogger.setLevel(logging.DEBUG) + rootlogger.setLevel(logging.DEBUG-1) + + # jsonmerge is way 
too chatty at the DEBUG level + if level >= logging.DEBUG: + jmlevel = max(level, logging.INFO) + logging.getLogger("jsonmerge").setLevel(jmlevel) + + # filelock is one level too chatty + if level >= logging.DEBUG: + logging.getLogger("filelock").setLevel(level+10) if addstderr: + if not isinstance(addstderr, str): + addstderr = format handler = logging.StreamHandler(sys.stderr) handler.setLevel(logging.ERROR) - handler.setFormatter(logging.Formatter(format)) + handler.setFormatter(logging.Formatter(addstderr)) rootlogger.addHandler(handler) rootlogger.error("FYI: Writing log messages to %s",logfile) - +def determine_default_logdir(): + out = os.environ.get('OAR_LOG_DIR', os.path.join(oar_home, 'var', 'logs')) + if not os.path.exists(out): + out = "/tmp" + return out + class ConfigService(object): """ an interface to the configuration service """ - def __init__(self, urlbase, envprof=None): + def __init__(self, urlbase: str, envprof: str=None): """ initialize the service. - :param urlbase str: the base URL for the service which must include + :param str urlbase: the base URL for the service which must include the scheme (either http: or https:), the server, and the base path. It can also include a port number. - :param envprof str: the label indicating the default environment + :param str envprof: the label indicating the default environment profile (usually, one of 'local', 'dev', 'test', or 'prod'). """ @@ -216,16 +221,16 @@ def __init__(self, urlbase, envprof=None): if not u.netloc: raise ConfigurationException(msg.format("missing server name")) - def url_for(self, component, envprof=None): + def url_for(self, component: str, envprof: str=None) -> str: """ return the proper URL for access the configuration for a given component. - :param component the name for the service or component that - configuration data is desired for - :param envprof the desired version of the configuration given - its environment/profile name. 
If not provided, - the profile set at construction time will - be assumed. + :param str component: the name for the service or component that + configuration data is desired for + :param str envprof: the desired version of the configuration given + its environment/profile name. If not provided, + the profile set at construction time will + be assumed. """ if not envprof: envprof = self._prof @@ -234,7 +239,7 @@ def url_for(self, component, envprof=None): return self._base + component - def is_up(self): + def is_up(self) -> bool: """ return true if the service appears to be up. """ @@ -244,15 +249,15 @@ def is_up(self): except requests.exceptions.RequestException: return False - def wait_until_up(self, timeout=10, rais=True, verboseout=None): + def wait_until_up(self, timeout: int=10, rais: bool=True, verboseout=None) -> bool: """ poll the service until responds. - :param timeout int: the maximum number of seconds to wait before + :param int timeout: the maximum number of seconds to wait before timing out. - :param rais bool: if True, raise a ConfifigurationException if + :param bool rais: if True, raise a ConfifigurationException if the timeout period is reached without a response from the service. - :param verboseout file: a file stream to send message about waiting; + :param file verboseout: a file stream to send message about waiting; if None, no messages are printed. :return bool: True if the service is detected as up; False, if the timeout period is exceeded (unless rais=True). 
@@ -262,46 +267,46 @@ def wait_until_up(self, timeout=10, rais=True, verboseout=None): start = time.time() if self.is_up(): if verboseout: - print("RMM: configuration service is ready", file=verboseout) + print("PDR: configuration service is ready", file=verboseout) return True if verboseout: - print("RMM: Waiting for configuration service...", file=verboseout) + print("PDR: Waiting for configuration service...", file=verboseout) updated = start while time.time()-start < timeout: if verboseout and time.time()-updated > 10: - print("RMM: ...waiting...") + print("PDR: ...waiting...") updated = time.time() time.sleep(2) if self.is_up(): if verboseout: - print("RMM: ...ready", file=verboseout) + print("PDR: ...ready", file=verboseout) return True if verboseout: - print("RMM: ...timed out!") + print("PDR: ...timed out!") if rais: raise ConfigurationException("Waiting for configuration service "+ "timed out") return False - def get(self, component, envprof=None, flat=False): + def get(self, component: str, envprof: str=None, flat: bool=False): """ retrieve the configuration for the service or component with the given name. Internally, this will transform the raw output from the service into a configuration ready to give to the PDR component (including combining the profile specializations with default values). - :param component str: the name for the service or component that + :param str component: the name for the service or component that configuration data is desired for - :param envprof str: the desired version of the configuration given + :param str envprof: the desired version of the configuration given its environment/profile name. If not provided, the profile set at construction time will be assumed. - :param flat bool: if true, keep the flat structure provided directly + :param bool flat: if true, keep the flat structure provided directly by the config server. 
:return dict: the parsed configuration data """ @@ -320,8 +325,8 @@ def _extract(self, rawdata, comp="unknown", flat=False): @classmethod def _deep_update(cls, defdict, upddict): - for k, v in upddict.iteritems(): - if isinstance(v, collections.Mapping): + for k, v in upddict.items(): + if isinstance(v, Mapping): defdict[k] = cls._deep_update(defdict.get(k, v.__class__()), v) else: defdict[k] = v @@ -340,7 +345,7 @@ def _inflate(cls, flat): if len(levs) == 0: pv[lev] = flat[key] else: - if not isinstance(pv.get(lev), collections.Mapping): + if not isinstance(pv.get(lev), Mapping): pv[lev] = flat.__class__() pv = pv[lev] @@ -348,14 +353,14 @@ def _inflate(cls, flat): @classmethod def _cvtarrays(cls, md): - if not isinstance(md, collections.Mapping): + if not isinstance(md, Mapping): return md - keys = md.keys() + keys = list(md.keys()) m = [cls._idxre.match(k) for k in keys] if all(m): ary = [( int(m[i].group(1)), md[keys[i]] ) for i in range(len(m))] - ary.sort(lambda x,y: cmp(x[0], y[0])) + ary.sort(key=lambda x: x[0]) return [ cls._cvtarrays(el[1]) for el in ary ] else: for k in keys: @@ -363,7 +368,7 @@ def _cvtarrays(cls, md): return md @classmethod - def extract(cls, rawdata, comp="unknown", flat=False): + def extract(cls, rawdata: Mapping, comp: str="unknown", flat: bool=False) -> Mapping: """ extract component configuration from the config service response. 
This includes combining the environment/profile-specific data @@ -372,7 +377,7 @@ def extract(cls, rawdata, comp="unknown", flat=False): try: name = rawdata.get('name') or comp vers = rawdata['propertySources'] - except KeyError, ex: + except KeyError as ex: raise ConfigurationException("Missing config param for label="+name+ ": "+str(ex)) if not isinstance(vers, list): @@ -420,4 +425,54 @@ def from_env(cls): service = ConfigService.from_env() except: pass - + +def merge_config(primary: Mapping, defconf: Mapping) -> Mapping: + """ + do a deep merge of a default configuration dictionary into a primary one + provided to an application. A value in the primary dictionary will override + those given in the default. This function may update one of the input + dictionaries. + + :param Mapping primary: the configuration dictionary provided to the + application at run-time. + :param Mapping defconf: the default configuration dictionary + :return Mapping: the merged dictionary. This may be one of the input + instances updated as needed (i.e. not a copy) + """ + for key in primary: + if isinstance(primary[key], Mapping) and \ + isinstance(defconf.get(key), Mapping): + defconf[key] = merge_config(primary[key], defconf[key]) + else: + defconf[key] = primary[key] + + return defconf + +def load_from_service(handle: str) -> Mapping: + """ + retrieve the metadata server's configuration from the configuration server. + The handle identifies what configuration to pull from the service. If not + fully specified, defaults are determined by lookup_config_server(), called + internally. 
The handle has the form: + + [[http:|https:]//server[:port]/]component/env + + where, + [http:|https:] is the web URL scheme to use, either unencrypted or + encrypted (optional) + server is the host name of the configuration server + part is the port to access the server by + component the name of component registered with the service to + retrieve the configuration for + env the environemnt (local|dev|test|prod) to retrieve the + configuration for. + """ + raise NotImplementedError() + +def lookup_config_server(serverport): + """ + consult the discovery service to get the location of the configuration + service. + """ + raise NotImplementedError() + diff --git a/python/nistoar/doi/datacite.py b/python/nistoar/doi/datacite.py index 70a25d4..358cb51 100644 --- a/python/nistoar/doi/datacite.py +++ b/python/nistoar/doi/datacite.py @@ -9,7 +9,7 @@ import re from collections import OrderedDict, Mapping from copy import deepcopy -from StringIO import StringIO +from io import StringIO import requests from .utils import strip_DOI, is_DOI @@ -367,7 +367,7 @@ def _format_error(self, error): out.write(error.get('source','(data)')) if 'detail' in error and error['detail']: out.write(": ") - out.write(error['detail']) + out.write(str(error['detail'])) return out.getvalue() def explain(self): diff --git a/python/nistoar/doi/resolving/crossref.py b/python/nistoar/doi/resolving/crossref.py index 5af1503..4aae223 100644 --- a/python/nistoar/doi/resolving/crossref.py +++ b/python/nistoar/doi/resolving/crossref.py @@ -1,4 +1,3 @@ -from __future__ import absolute_import from .common import DOIInfo, default_doi_resolver from . import common as _comm diff --git a/python/nistoar/id/__init__.py b/python/nistoar/id/__init__.py index 327c65e..52845f2 100644 --- a/python/nistoar/id/__init__.py +++ b/python/nistoar/id/__init__.py @@ -4,7 +4,6 @@ The PDR assigns ARK identifiers to all the resources it manages. 
""" -from __future__ import absolute_import from .version import __version__ from .minter import NIST_ARK_NAAN from .persist import PDRMinter diff --git a/python/nistoar/id/minter.py b/python/nistoar/id/minter.py index 2d874e6..71eae37 100644 --- a/python/nistoar/id/minter.py +++ b/python/nistoar/id/minter.py @@ -7,11 +7,10 @@ import os, abc import pynoid as noid -class IDMinter(object): +class IDMinter(object, metaclass=abc.ABCMeta): """ An abstract class for creating of identifier strings. """ - __metaclass__ = abc.ABCMeta @abc.abstractmethod def mint(self, data=None): diff --git a/python/nistoar/id/persist.py b/python/nistoar/id/persist.py index b0c42ed..5544797 100644 --- a/python/nistoar/id/persist.py +++ b/python/nistoar/id/persist.py @@ -214,7 +214,7 @@ def _hash(self, hexid): d = int(hexid, 16) while d > 0: s ^= d % self._div - d /= self._div + d //= self._div return s def mint(self, data=None): diff --git a/python/nistoar/jq.py b/python/nistoar/jq/__init__.py similarity index 97% rename from python/nistoar/jq.py rename to python/nistoar/jq/__init__.py index 2e174e2..2c127bc 100644 --- a/python/nistoar/jq.py +++ b/python/nistoar/jq/__init__.py @@ -37,7 +37,7 @@ def version(self): verre = re.compile(r'^jq[^-]*-') cmd = [ self.jqexe, "--version" ] try: - vers = subproc.check_output(cmd) + vers = subproc.check_output(cmd, universal_newlines=True) if verre.match(vers): vers = verre.sub('', vers).strip() return vers @@ -69,7 +69,7 @@ def process_data(self, jqfilter, datastr, args=None): cmd = self.form_cmd(jqfilter, args) proc = subproc.Popen(cmd, stdout=subproc.PIPE, stderr=subproc.PIPE, - stdin=subproc.PIPE) + stdin=subproc.PIPE, universal_newlines=True) (out, err) = proc.communicate(datastr) if proc.returncode != 0: @@ -90,7 +90,8 @@ def process_file(self, jqfilter, filepath, args=None): argopts = self.form_argopts(args) cmd = self.form_cmd(jqfilter, args, filepath) - proc = subproc.Popen(cmd, stdout=subproc.PIPE, stderr=subproc.PIPE) + proc = 
subproc.Popen(cmd, stdout=subproc.PIPE, stderr=subproc.PIPE, + universal_newlines=True) (out, err) = proc.communicate() if proc.returncode != 0: diff --git a/python/nistoar/nerdm/__init__.py b/python/nistoar/nerdm/__init__.py index e27b8a1..c91c8cf 100644 --- a/python/nistoar/nerdm/__init__.py +++ b/python/nistoar/nerdm/__init__.py @@ -1,9 +1,11 @@ """ library for supporting NERDm metadata """ -from __future__ import absolute_import + from .version import __version__ from .validate import validate as validate_nerdm, ValidationError from .constants import CORE_SCHEMA_URI, PUB_SCHEMA_URI +from .utils import * + diff --git a/python/nistoar/nerdm/constants.py b/python/nistoar/nerdm/constants.py index ef0c3b2..c876173 100644 --- a/python/nistoar/nerdm/constants.py +++ b/python/nistoar/nerdm/constants.py @@ -4,10 +4,14 @@ """ core_schema_base = "https://data.nist.gov/od/dm/nerdm-schema/" -schema_versions = ["v0.6", "v0.5", "v0.4", "v0.3", "v0.2", "v0.1"] +schema_versions = ["v0.7", "v0.6", "v0.5", "v0.4", "v0.3", "v0.2", "v0.1"] core_ver = schema_versions[0] pub_ver = schema_versions[0] bib_ver = schema_versions[0] +rls_ver = schema_versions[-3] +exp_ver = schema_versions[-1] +sip_ver = schema_versions[-1] +agg_ver = schema_versions[-2] def core_schema_uri_for(version): """ @@ -46,6 +50,10 @@ def schema_uri_for(schema_base, version): CORE_SCHEMA_URI = core_schema_uri_for(core_ver) PUB_SCHEMA_URI = pub_schema_uri_for(pub_ver) BIB_SCHEMA_URI = bib_schema_uri_for(bib_ver) +RLS_SCHEMA_URI = schema_uri_for(core_schema_base+"rls/", rls_ver) +EXP_SCHEMA_URI = schema_uri_for(core_schema_base+"exp/", exp_ver) +SIP_SCHEMA_URI = schema_uri_for(core_schema_base+"sip/", sip_ver) +AGG_SCHEMA_URI = schema_uri_for(core_schema_base+"agg/", agg_ver) TAXONOMY_VOCAB_BASE_URI = "https://data.nist.gov/od/dm/nist-themes/" TAXONOMY_VOCAB_INIT_URI = "https://www.nist.gov/od/dm/nist-themes/v1.0" diff --git a/python/nistoar/nerdm/convert/__init__.py b/python/nistoar/nerdm/convert/__init__.py index 
4b5b19a..694a484 100644 --- a/python/nistoar/nerdm/convert/__init__.py +++ b/python/nistoar/nerdm/convert/__init__.py @@ -3,3 +3,5 @@ """ from .pod import * # from .datacite import * +from .latest import * + diff --git a/python/nistoar/nerdm/convert/latest.py b/python/nistoar/nerdm/convert/latest.py new file mode 100644 index 0000000..1e7be4e --- /dev/null +++ b/python/nistoar/nerdm/convert/latest.py @@ -0,0 +1,349 @@ +""" +Module for converting a NERDm record to the latest schema versions. +""" +import re +from collections import OrderedDict, Mapping +from copy import deepcopy +from urllib.parse import urlparse + +from .. import constants as NERDM_CONST +from .. import utils + +_nrdpat = re.compile(r"^("+NERDM_CONST.core_schema_base+"\S+/)v\d[\w\.]*((#.*)?)$") +_oldnrdpat = re.compile(r"^https?://www.nist.gov/od/dm/nerdm-schema/") +def _schuripatfor(uribase): + return re.compile(r"^("+uribase+")v\d[\w\.]*((#.*)?)$") + +__all__ = [ 'RELHIST_EXTENSION', 'VERSION_EXTENSION_RE', 'to_version_ext', 'NERDm2Latest', + 'update_to_latest_schema' ] + +RELHIST_EXTENSION = "/pdr:v" +VERSION_EXTENSION_RE = re.compile(RELHIST_EXTENSION+r"/\d+(\.\d+)*$") + +def to_version_ext(version): + return RELHIST_EXTENSION + '/' + version + +class NERDm2Latest(object): + """ + a transformation engine for converting NERDm records to conform to the latest schema versions. + + The engine can be configured with additional named record "massagers" which will make additional + changes to a record according to a named convention. 
Multiple conventions can be applied; these + convetions massagers are applied after migrating a record to the latest schemas + """ + _verextre = VERSION_EXTENSION_RE + + def __init__(self, logger=None, resolver=None, massagers=None, defconv=[], defver=None, byext={}): + """ + initialize the updater with a set of available massagers provided as a dictionary: the keys + are names of conventions for how information should be captured in a NERDm record, and the + values are functions that take a NERDm record as input + :param Logger logger: the log to write messages to; if None, no messages will be written + :param str resolver: the base URL for the ID resolver service. This is used to set + location of versioned releases. + :param dict massagers: a dictionary mapping convention names to functions to be applied to + a NERDm record. + :param str|list defconv: the default convention or conventions to apply. These will be + applied in the order that they are listed. + :param str defver: the default version to update to. This value typically + starts with the character, "v". All schemas not + referenced by the byext parameter will be set to this + version. If not provided, the version will be the + latest supported version as specified by + nistoar.nerdm.constants. + :param dict byext: a dictionary in which provides versions on a per- + extension schema basis. The keys represent extension + schemas given either by the extension field in the + standard NERDm schema URI or the entire base URL + for the extension schema up to the verison field. + Each value gives the version of the extension schema + that that schema should be updated to. An empty string + for the key represents the core schema, and an empty + string for the value means that the version for that + extension should not be changed. 
+ """ + self.log = logger + if not defver: + defver = NERDM_CONST.core_ver + self.defver = defver + self.byext = byext + self._massagers = massagers + self.resolver_baseurl = resolver + if self.resolver_baseurl: + self.resolver_baseurl = self.resolver_baseurl.rstrip('/') + '/' + if defconv is None: + defconv = [] + elif isinstance(defconv, str): + defconv = [defconv] + unkn = [c for c in defconv if c not in self._massagers] + if unkn: + raise ValueError("Undefined convention massagers: "+str(unkn)) + self._defconv = defconv + + def convert(self, nerdmd, conv=[], version=None, byext=None, inplace=False): + """ + convert the nerdm record to conform to the latest schemas + :param dict nerdmd: the NERDm record to upgrade + :param str|list conv: the names of conventions to apply to the input record, where the names + correspond to the names set as massagers. If None, the default set are + applied; if an empty list (default), none are applied + :param str version: the version to update to; this overrides the value set at construction time. + :param dict byext: a dictionary of extension versions by label; this overrides the set + set at construction time. 
+ """ + if conv is None: + conv = self._defconv + + if not inplace: + nerdmd = deepcopy(nerdmd) + + # update the schema references + self.update_nerdm_schema(nerdmd, version, byext, inplace=True) + + # handle version-specific changes + schver = utils.get_nerdm_schema_version(nerdmd) + if utils.cmp_versions(schver, "0.5") >= 0 and \ + 'versionHistory' in nerdmd and 'releaseHistory' not in nerdmd: + # change from versionHistory to releaseHistory + nerdmd['releaseHistory'] = self.create_release_history(nerdmd) + del nerdmd['versionHistory'] + + if utils.cmp_versions(schver, "0.1") >= 0: + if 'references' in nerdmd: + for i, ref in enumerate(nerdmd['references']): + if 'refid' in ref: + if '@id' not in ref: + ref['@id'] = ref['refid'] + del ref['refid'] + if not ref.get('@id'): + ref['@id'] = "#ref-%d" % i + + if utils.cmp_versions(schver, "0.6") >= 0: + if 'isPartOf' in nerdmd and not isinstance(nerdmd['isPartOf'], list): + nerdmd['isPartOf'] = [ nerdmd['isPartOf'] ] + + # now apply any massagers + for convention in conv: + nerdmd = messagers[convention](nerdmd) + + return nerdmd + + def create_release_history(self, nerdmd, idext=RELHIST_EXTENSION): + """ + return a NERDm ReleaseHistory object from version history information in the given NERDm + Resource object + """ + out = OrderedDict([("@id", nerdmd['@id'] + idext), ("@type", ["nrdr:ReleaseHistory"])]) + out['hasRelease'] = nerdmd.get("versionHistory", []) + + if len(out['hasRelease']) == 0: + out['hasRelease'].append(self.create_release_ref(nerdmd)) + + return out + + def create_release_ref_for(self, version, baseid=None): + """ + create a bare-bones release reference object for a given base ID and version. This + implementation reflects PDR-specific conventions for IDs and ID resolution. + :param str version: the version to create the reference object for + :param str baseid: the base PDR identifier (i.e. 
not version-specific) + """ + out = OrderedDict([ ('version', version) ]) + if baseid: + verid = baseid.rstrip('/') + if not self._verextre.search(verid): + verid += to_version_ext(version) + out['@id'] = verid + + if self.resolver_baseurl: + out['location'] = self.resolver_baseurl + verid + + v = version.split('.') + for i in range(len(v), 3): + v.append('0') + if len(v) == 3: + if v[2] != '0': + out['description'] = "metadata update" + elif v[1] != '0': + out['description'] = "data update" + elif v[0] == '1': + out['description'] = "initial release" + + return out + + def create_release_ref(self, nerdm, defver="1.0.0"): + """ + create a NERDm Release object (a reference to a versioned release of a reousrce) for the + given NERDm Resource. + :param dict nerdm: the NERDm Resource record to create a Release for + :param str devver: the default version to assume if the given Resource does not have a + version property set + :rtype: OrderedDict + :return: the Release object refering to the nerdm input + """ + out = self.create_release_ref_for(nerdm.get('version', defver), nerdm.get('@id')) + + issued = None + for prop in "annotated revised issued modified".split(): + issued = nerdm.get(prop) + if issued: + break + + if issued: + out['issued'] = issued + if not out.get('location') and nerdm.get('landingPage'): + out['location'] = nerdm.get('landingPage') + + return out + + def update_nerdm_schema(self, nerdmd, version=None, byext=None, inplace=False): + """ + return a converted version of the input record that is updated to the latest (or specified) + versions of the NERDm schema. The "_schema" property of the output record will reflect + the requested schema, and all "_extensionSchemas" properties will found and updated. + :param dict nerdmd: the input NERDm record to convert + :param str version: the default version to update to; this overrides the value set at + construction time. 
+ :param dict byext: a dictionary of extension versions by label; this overrides the set + set at construction time. + :param inplace bool: if True, the input record will be edited directly; otherwise, the + input record will not be changed. + :return: the converted record + """ + # detect the metatag character and do an initial sanity check on the input + # metadata record + mtc = utils.meta_prop_ch(nerdmd) + + defver = version + if not version: + defver = self.defver + + # prep the byext map + if byext is None: + byext = self.byext + byext = dict(byext) + if "pub" not in byext: + byext["pub"] = version or NERDM_CONST.pub_ver + if "bib" not in byext: + byext["bib"] = version or NERDM_CONST.bib_ver + if "rls" not in byext: + byext["rls"] = version or NERDM_CONST.rls_ver + if "exp" not in byext: + byext["exp"] = version or NERDM_CONST.exp_ver + if "sip" not in byext: + byext["sip"] = version or NERDM_CONST.sip_ver + if "agg" not in byext: + byext["agg"] = version or NERDM_CONST.agg_ver + if "" not in byext: + byext[""] = defver + + matchrs = {} + for ext in byext: + uribase = ext + parsed = urlparse(ext) + if not parsed.scheme: + uribase = NERDM_CONST.core_schema_base+ext + if ext: + uribase += "/" + matchrs[ _schuripatfor(uribase) ] = byext[ext] + + if not inplace: + nerdmd = deepcopy(nerdmd) + + # update the core schema + updated = self._upd_schema_ver(nerdmd[mtc+"schema"], matchrs, defver) + if updated: + nerdmd[mtc+"schema"] = updated + self._upd_schema_ver_on_node(nerdmd, mtc+"extensionSchemas", matchrs, defver) + + # correct to start using bib extension if needed + if any(mtc+"extensionSchemas" in r for r in nerdmd.get('references',[])): + for ref in nerdmd['references']: + for i, ext in enumerate(ref.get(mtc+"extensionSchemas", [])): + if ext.startswith(NERDM_CONST.core_schema_base+"v") and '#/definitions/DCite' in ext: + ref[mtc+"extensionSchemas"][i] = NERDM_CONST.core_schema_base+"bib/" + byext['bib'] + \ + ext[ext.index('#'):] + + return nerdmd + + def 
_upd_schema_ver_on_node(self, node, schprop, byext, defver): + # node - a JSON node to examine + # schprop - the property, e.g. "_extensionSchemas" or "_schema" to examime + # byext - uri-re to new version map + # defurire - defurire to check in lieu of a match in byext + # defver - default version to update URIs matching defurire + if schprop in node: + if isinstance(node[schprop], (list, tuple)): + for i in range(len(node[schprop])): + updated = self._upd_schema_ver(node[schprop][i], byext, defver) + if updated: + node[schprop][i] = updated + else: + updated = self._upd_schema_ver(node[schprop], byext, defver) + if updated: + node[schprop] = updated + + for prop in node: + if isinstance(node[prop], Mapping): + self._upd_schema_ver_on_node(node[prop], schprop, byext, defver) + elif isinstance(node[prop], (list, tuple)): + self._upd_schema_ver_on_array(node[prop], schprop, byext, defver) + + def _upd_schema_ver_on_array(self, array, schprop, byext, defver): + for el in array: + if isinstance(el, Mapping): + self._upd_schema_ver_on_node(el, schprop, byext, defver) + elif isinstance(el, (list, tuple)): + self._upd_schema_ver_on_array(el, schprop, byext, defver) + + def _upd_schema_ver(self, schuri, byext, defver): + schuri = _oldnrdpat.sub(NERDM_CONST.core_schema_base, schuri) + for r in byext: + match = r.search(schuri) + if match: + if byext[r]: + return match.group(1)+byext[r]+match.group(2) + else: + return None + match = _nrdpat.match(schuri) + if match and defver: + return match.group(1)+defver+match.group(2) + return None + + +def update_nerdm_schema(nerdmd, version=None, byext={}): + """ + update the given NERDm record to the latest (or specified) version + of the NERDm schemas. This will update the "_schema" property of the + given JSON record to reflect the requested schema. In addition, all + "_extensionSchemas" properties will found and references to any version + of a NERDm schema will be updated to requested version. 
Note that the + input record will be changed in-place. + + :param dict nerdmd: the NERDm record + :param str version: the default version to update to. This value typically + starts with the character, "v". All schemas not + referenced by the byext parameter will be set to this + version. If not provided, the version will be the + latest supported version as specified by + nistoar.nerdm.const.core_ver. + :param dict byext: a dictionary in which provides versions on a per- + extension schema basis. The keys represent extension + schemas given either by the extension field in the + standard NERDm schema URI or the entire base URL + for the extension schema up to the verison field. + Each value gives the version of the extension schema + that that schema should be updated to. An empty string + for the key represents the core schema, and an empty + string for the value means that the version for that + extension should not be changed. + """ + return NERDm2Latest().update_nerdm_schema(nerdmd, version=version, byext=byext, inplace=True) + +def update_to_latest_schema(nerdmd, inplace=True): + """ + update the given NERDm record to the latest versions of the NERDm schemas, transforming the + data for compliance. 
+ """ + return NERDm2Latest().convert(nerdmd, inplace=inplace) + diff --git a/python/nistoar/nerdm/convert/pod.py b/python/nistoar/nerdm/convert/pod.py index 187c6c2..4bb779b 100644 --- a/python/nistoar/nerdm/convert/pod.py +++ b/python/nistoar/nerdm/convert/pod.py @@ -558,7 +558,7 @@ def _doiinfo2reference(info, resolver): if tp == 'dataset': out['@type'] = ['schema:Dataset'] out['refType'] = "References" - elif tp.startswith('article'): + elif tp == 'journal-article' or tp.startswith('article'): out['@type'] = ['schema:Article'] out['refType'] = "IsCitedBy" elif tp == 'book': @@ -617,7 +617,7 @@ def citeproc_author2nerdm_author(author): out['affiliation'] = [] for affil in author['affiliation']: outa = OrderedDict() - if isinstance(affil, (str, unicode)): + if isinstance(affil, str): outa['title'] = affil elif 'name' in affil: outa['title'] = affil['name'] @@ -698,7 +698,7 @@ def datacite_creator2nerdm_author(creator): # affiliation if creator.get('affiliation'): out['affiliation'] = [] - if isinstance(creator.get('affiliation'), (str, unicode)): + if isinstance(creator.get('affiliation'), str): out['affiliation'].append( OrderedDict( [("@type", "schema:affiliation"), ('title', creator.get('affiliation'))] ) diff --git a/python/nistoar/nerdm/convert/rmm.py b/python/nistoar/nerdm/convert/rmm.py new file mode 100644 index 0000000..78e6dd1 --- /dev/null +++ b/python/nistoar/nerdm/convert/rmm.py @@ -0,0 +1,216 @@ +""" +Classes and functions for converting NERDm records to and from NERDm objects as stored in the RMM. + +The RMM includes three relevant collections: + * ``records`` -- contains the latest versions of NERDm records. This used by default for searches + via the Science Data Portal (SDP). The ``releaseHistory`` property included in + each record is expected to be up to date. The value of the ``@id`` property will + be of the general ARK form without any qualifying extensions. 
+ * ``releasesets`` -- contains ``ReleaseCollection`` resource records corresponding to each resource in + ``records``. The ``releaseHistory`` property included in each record must include + all known (released) versions. The ``@id`` property with include the "/pdr:v" + extension, qualifying it as a ReleaseCollection ID. + * ``versions`` -- contains all of the different versions of the NERDm records in ``records``. The + ``releaseHistory`` property in each record is not expected to be up to date but + rather reflect the history at the time the version was ingested. The ``@id`` + property with include the version extension of the form /pdr:v/M.N.P, qualifying it + as a version-specific ID. +""" +import re +from collections import OrderedDict, Mapping +from urllib.parse import urljoin +from copy import deepcopy + +from .. import validate +from .. import utils +from ..constants import RLS_SCHEMA_URI +from .latest import NERDm2Latest, VERSION_EXTENSION_RE, RELHIST_EXTENSION, to_version_ext + + +class NERDmForRMM(object): + """ + a transformation engine for turning a "latest" NERDm record into records to be loaded into the RMM. + """ + _pfxre = re.compile("^[^:]+:") + _verextre = VERSION_EXTENSION_RE + + def __init__(self, logger=None, schemadir=None, pubeps={}): + """ + create the converter. + + The pubeps parameter sets public PDR endpoint URLs to assume: those used by this converter + can be provided via these key names: + * portalBase -- the common base URL for all PDR/SDP endpoints. This must be an absolute + URL and will be combined with any other relative URLs in this dictionary. + (Default: https://data.nist.gov/). + * landingPageService -- the base URL that, when combined with an ID, resolves to a landing + page. If relative, it will be combined with the value for "portalBase" (Default: "/od/id/") + * distributionService -- the base URL that, when combined with an ID and filepath, downloads a + dataset. 
If relative, it will be combined with the value for "portalBase" (Default: "/od/ds/") + :param dict config: a dictionary with conversion configuration data + in it (see class documentation) + :param Logger logger: a logger object that can be used to write warning + messages + :param str schemadir: path to the directory containing NERDm schemas; provide this to + enable automatic validation + :param dict pubeps: a dictionary of public PDR endpoints that should be assumed when filling + out URL values into the converted record. + """ + if pubeps is None: + pubeps = {} + self.cfg = pubeps + self._log = logger + + self._valid8r = None + if schemadir: + self._valid8r = validate.create_validator(schemadir, "_") + + self._2latest = NERDm2Latest() + + self._lpsbase = urljoin(self.cfg.get("portalBase", "https://data.nist.gov/"), + self.cfg.get("landingPageService", "od/id/")) + self._distbase = urljoin(self.cfg.get("portalBase", "https://data.nist.gov/"), + self.cfg.get("distributionService", "od/ds/")) + + + def to_rmm(self, nerdm, defver="1.0.0"): + """ + convert the NERDm record to an RMM-ready record. The input NERDm record is taken to be the + "latest" record (thus the ``@id`` identifier should not have a trailing /_v extension). + The output record will be a dictionary with three properties: + * ``record`` -- the input record massaged to serve as the "latest" record in the RMM database + * ``releaseSet`` -- a ``ReleaseCollection`` record derived from the ``releaseHistory`` of + the input record + * ``version`` -- the input record massaged to serve as the versioned copied of the record; in + particular, the ``@id`` property will be appended with a /_v/M.N.P extension for + version indicated by the ``version`` property. If the ``version`` property + is not present, the value of the defval parameter will be used. 
+ + :param dict nerdm: the NERDm resource record to convert + :param str defver: the default version to assume if the record does not include a ``version`` + property + :param bool validate: if True, validate the output before returning. Not that the input is + not validated. + :raises ValueError: if the input NERDm record is not sufficiently NERDm-like or appears to be + the wrong type of input. + """ + if '@id' not in nerdm or '@type' not in nerdm: + raise ValueError("Input apparently not a NERDm record (must have @id and @type)") + + if utils.is_type(nerdm, "ReleaseCollection") or nerdm.get('version', '').endswith(RELHIST_EXTENSION): + raise ValueError("Input NERDm must not be a ReleaseCollection resource") + + rec = self._2latest.convert(nerdm) + if 'version' not in rec: + rec['version'] = defver + if 'releaseHistory' in rec: + for vref in rec['releaseHistory'].get('hasRelease',[]): + vext = to_version_ext(vref['version']) if vref.get('version') else None + if not vref.get('@id') or not self._verextre.search(vref['@id']): + if vext: + vref['@id'] = nerdm['@id'] + to_version_ext(vref['version']) + elif 'refid' in vref: + vref['@id'] = vref['refid'] + else: + vref['@id'] = rec.get('@id') + if 'refid' in vref: + del vref['refid'] + + out = { + 'record': rec, + 'version': deepcopy(rec) # deep copy + } + + # massage the identifiers to match the PDR convention for "latest" and "versioned" + rec['@id'] = self._verextre.sub('', rec['@id']) + if not self._verextre.search(out['version']['@id']): + out['version']['@id'] += to_version_ext(rec['version']) + + # massage URLs to point to versioned copies + # tweak the PDR landing page + if out['version'].get('landingPage'): + m = re.match(r'^https?://[^/]+/od/id/(ark:/\d+/)?([^/]+)', + out['version']['landingPage']) + if m and not out['version']['landingPage'][m.end():].startswith(RELHIST_EXTENSION): + out['version']['landingPage'] += to_version_ext(rec['version']) + + # tweak PDR download URLs + dsre = 
re.compile(r'^https?://[^/]+/od/ds/(ark:/\d+/)?([^/]+)') + for cmp in [c for c in out['version'].get('components', []) if c.get('downloadURL')]: + m = dsre.match(cmp['downloadURL']) + if m and not cmp['downloadURL'][m.end():].startswith("_v/"): + cmp['downloadURL'] = m.group() + "/_v/" + rec['version'] + cmp['downloadURL'][m.end():] + + def fromkeys(fromdict, todict, keys): + for key in keys: + if key in fromdict: + todict[key] = fromdict[key] + + # construct the ReleaseCollection from the base record + vc = OrderedDict([ + ('_schema', rec['_schema']), + ('_extensionSchemas', [ RLS_SCHEMA_URI ]), + ('@type', ['nrdr:ReleaseCollection', 'dcat:Catalog']) + ]) + vc['@id'] = rec['@id'] + RELHIST_EXTENSION + fromkeys(rec, vc, "ediid title description keyword firstIssued publisher contactPoint theme".split()) + fromkeys(rec, vc, "abbrev version".split()) + + if 'releaseHistory' in rec: + vc['hasRelease'] = rec['releaseHistory'].get('hasRelease', []) + else: + vc['hasRelease'] = [] + + if len(vc['hasRelease']) == 0: + vc['hasRelease'] = [ self._2latest.create_release_ref(out['version']) ] + + # make sure the location property in release history points to the version specific value. + # do this for all three renditions. + for rel in vc['hasRelease'] + out['version'].get('releaseHistory',{}).get('hasRelease',[]): + rel['location'] = self._lpsbase + rel['@id'] + + out['releaseSet'] = vc + + return out + + def validate_rmm(self, rmmmd): + """ + validate each of the objects under the "record", "version", and "releaseSet" are valid. In + particular, this ensures that a NERDm record of the proper type appears under each property. + :param dict rmmmd: the RMM-format record, as is returned by to_rmm(). + :raise ValidationError: if there any of the objects under the three properties are invalid. 
+ """ + if not self._valid8r: + raise RuntimeError("NERDmForRMM: not configured for validation") + if 'record' in rmmmd: + if not isinstance(rmmmd['version'], Mapping): + raise validate.ValidationError("'record' property does not contain a NERDm record (type: "+ + str(type(rmmmd['record']))) + if not utils.is_any_type(rmmmd['record'], ["Resource", "PublicDataResource", "DataPublication"]): + raise validate.ValidationError("Unexpected @type for 'record': "+str(rmmmd['record']['@type'])) + self._valid8r.validate(rmmmd['record']) + if 'version' in rmmmd: + if not isinstance(rmmmd['version'], Mapping): + raise validate.ValidationError("'version' property does not contain a NERDm record (type: "+ + str(type(rmmmd['version']))) + if not utils.is_any_type(rmmmd['version'], ["Resource", "PublicDataResource", "DataPublication"]): + raise validate.ValidationError("Unexpected @type for 'version': " + +str(rmmmd['version']['@type'])) + self._valid8r.validate(rmmmd['version']) + if 'releaseSet' in rmmmd: + if not isinstance(rmmmd['releaseSet'], Mapping): + raise validate.ValidationError("'releaseSet' property does not contain a NERDm record (type: "+ + str(type(rmmmd['releaseSet']))) + if not utils.is_type(rmmmd['releaseSet'], "ReleaseCollection"): + raise validate.ValidationError("'releaseSet' not a 'ReleaseCollection': " + +str(rmmmd['releaseSet']['@type'])) + self._valid8r.validate(rmmmd['releaseSet']) + + + def convert(self, nerdm, validate=False, defver="1.0.0"): + out = self.to_rmm(nerdm, defver) + if validate: + self.validate_rmm(out) # may raise an exception + return out + + diff --git a/python/nistoar/nerdm/exceptions.py b/python/nistoar/nerdm/exceptions.py index 4bcd1db..167ceab 100644 --- a/python/nistoar/nerdm/exceptions.py +++ b/python/nistoar/nerdm/exceptions.py @@ -1,43 +1,70 @@ """ Exceptions issued by the NERDm utilities """ +from ..base import OARException -class MetadataError(Exception): +class MetadataError(OARException): """ an exception indicating an error 
handling metadata + :param str msg: the message describing the error that occurred + :param Exception cause: the exception that originally caught the error; if None, the + error was discovered by catching an exception + :param src: the file containing the errant metadata; if None, no such + file source was involved or is known + :type src: str or Path + :param SysInfoMixin sys: the OAR system that generated the error; if None, the system + will be discerned """ - def __init__(self, msg=None, cause=None): - if not msg: - if cause: - msg = str(cause) - else: - msg = "Unknown Metadata System Error" - super(MetadataError, self).__init__(msg) - self.cause = cause + def __init__(self, msg=None, cause=None, src=None, sys=None): + if not msg and not cause: + msg = "Unknown Metadata Error" + super(MetadataError, self).__init__(msg, cause, sys) + self.source = src + +class PODError(MetadataError): + """ + an exception indicating an error handling POD metadata + """ + def __init__(self, msg=None, cause=None, src=None, sys=None): + if not msg and not cause: + msg = "Unknown POD metadata error" + super(PODError, self).__init__(msg, cause, src, sys) + pass class NERDError(MetadataError): """ an exception indicating an error handling NERDm metadata """ + """ + an exception indicating an error handling POD metadata + """ + def __init__(self, msg=None, cause=None, src=None, sys=None): + if not msg and not cause: + msg = "Unknown NERD metadata error" + super(NERDError, self).__init__(msg, cause, src, sys) pass -class NERDTypeError(MetadataError): +class NERDTypeError(NERDError): """ an exception indicating that a NERm metadata value is of an invalid or unexpected type. 
""" - def __init__(self, need=None, got=None, property=None, msg=None, cause=None): + def __init__(self, need=None, got=None, property=None, msg=None, cause=None, src=None, sys=None): """ create the exception - :param need str or list/tuple of str: the value type or types - that were expected - :param got str or type: the type actualy found or given - :param property str: the name of the property with the incorrect type - :param msg str: a message that overrides the default based on - need and got. - :param cause Exception: an exception that was a result of this - problem. + :param need: the value type or types that were expected + :type need: str or list/tuple of str + :param got: the type actualy found or given + :type got: str or type + :param str property: the name of the property with the incorrect type + :param str msg: a message that overrides the default based on + need and got. + :param Exception cause: an exception that was a result of this + problem. + :param src: the file containing the errant metadata; if None, no such + file source was involved or is known + :type src: str or Path """ self.need = need self.got = got @@ -55,7 +82,7 @@ def __init__(self, need=None, got=None, property=None, msg=None, cause=None): if got: msg += (need and ",") or ":" + " got " + str(got) - super(NERDTypeError, self).__init__(msg, cause) + super(NERDTypeError, self).__init__(msg, cause, sys) class MergeError(MetadataError): diff --git a/python/nistoar/nerdm/merge.py b/python/nistoar/nerdm/merge.py index 73583a6..b98d6dd 100644 --- a/python/nistoar/nerdm/merge.py +++ b/python/nistoar/nerdm/merge.py @@ -273,7 +273,8 @@ def keys_match(self, key1, key2): altkey2 = self._altkey(key2) # if everything is None, respond with False - if all([v is None for v in altkey1.values() + altkey2.values()]): + if all([v is None for v in + list(altkey1.values()) + list(altkey2.values())]): return False if altkey1 == altkey2: @@ -405,13 +406,12 @@ def get_schema(self, walk, schema, meta, 
**kwargs): "baseArrayAsDefault": BaseArrayAsDefault() } -class MergerFactoryBase(object): +class MergerFactoryBase(object, metaclass=ABCMeta): """ a class for creating Merger objects. The factory is responsible for locating and loading the necessary schema as well as configuring the strategies available. """ - __metaclass__ = ABCMeta def __init__(self, logger=None): """ @@ -458,7 +458,7 @@ def __init__(self, rootdir, strategies=(), logger=None): super(DirBasedMergerFactory, self).__init__(logger) if not rootdir: raise ValueError("DirBasedMergerFactory: rootdir not provided") - if not isinstance(rootdir, (str, unicode)): + if not isinstance(rootdir, str): raise TypeError("DirBasedMergerFactory: rootdir not a str: {0}". format(str(rootdir))) if not os.path.exists(rootdir): @@ -496,7 +496,7 @@ def make_merger(self, stratname, typename): schema = json.load(fd) out = Merger(schema, self.strategies, 'OrderedDict') - for schema in cache.schemas().values(): + for schema in list(cache.schemas().values()): out.cache_schema(schema) return out diff --git a/python/nistoar/nerdm/utils.py b/python/nistoar/nerdm/utils.py index 820aed0..c387615 100644 --- a/python/nistoar/nerdm/utils.py +++ b/python/nistoar/nerdm/utils.py @@ -63,6 +63,34 @@ def which_type(nerdm, typenames): return name return None +def _insert_before_val(vals, inval, *beforevals): + p = -1 + for insertpt in beforevals: + try: + p = vals.index(insertpt) + vals.insert(p, inval) + break + except ValueError: + continue + if p < 0: + vals.append(inval) + return vals + +def insert_type(nerdm, newtype, *beforetypes): + """ + ensure that a given type is included among the values of the `@type` property. 
+ :param Mapping nerdm: the NERDm object that is expected to have an @type property + :param str newtype: the `@type` value to look for and insert if not found + :param *List[str] beforetypes: a list of `@type` values to look for if `newtype` is + not found in the current `@type` property; the new value should + be inserted before the first of these values found in the + currently set list. If none of these values are found, the `newtype` + will be appended to the list. + """ + types = nerdm.setdefault('@type', []) + _insert_before_val(types, newtype, *beforetypes) + return nerdm + def get_schema(nerdm, prefixchs=META_PREFIXES): schemaprop = meta_prop_ch(nerdm, prefixchs=prefixchs) + "schema" return nerdm.get(schemaprop) diff --git a/python/nistoar/nerdm/validate.py b/python/nistoar/nerdm/validate.py index 82b9402..aa029a9 100644 --- a/python/nistoar/nerdm/validate.py +++ b/python/nistoar/nerdm/validate.py @@ -55,7 +55,7 @@ def create_validator(schemadir, forprefix="_"): """ if isinstance(forprefix, Mapping): forprefix = get_mdval_flavor(forprefix) or "_" - if not isinstance(forprefix, (str, unicode)): + if not isinstance(forprefix, str): raise TypeError("create_validator: forprefix: not a str or dict") return ejs.ExtValidator.with_schema_dir(schemadir, forprefix) diff --git a/python/nistoar/rmm/__init__.py b/python/nistoar/rmm/__init__.py index f3fe05c..84135b6 100644 --- a/python/nistoar/rmm/__init__.py +++ b/python/nistoar/rmm/__init__.py @@ -5,38 +5,13 @@ from abc import ABCMeta, abstractmethod, abstractproperty from .version import __version__ +from ..base import SystemInfoMixin +from ..base import config -class SystemInfoMixin(object): - """ - a mixin for getting information about the current system that a class is - a part of. 
- """ - __metaclass__ = ABCMeta - - @property - def system_name(self): - return "" - - @property - def system_abbrev(self): - return "" - - @property - def subsystem_name(self): - return "" - - @property - def subsystem_abbrev(self): - return "" +__all__ = [ 'RMMSystem' ] - @abstractproperty - def system_version(self): - return __version__ - -_RMMSYSNAME = "Public Data Repository" +_RMMSYSNAME = "Resource Metadata Manager" _RMMSYSABBREV = "RMM" -_RMMSUBSYSNAME = _RMMSYSNAME -_RMMSUBSYSABBREV = _RMMSYSABBREV class RMMSystem(SystemInfoMixin): """ @@ -46,113 +21,7 @@ class RMMSystem(SystemInfoMixin): instance--namely, an Exception--is part of a particular system by calling `isinstance(inst, RMMSystem)`. """ + def __init__(self): + super(RMMSystem, self).__init__(_RMMSYSNAME, _RMMSYSABBREV, "", "", __version__) - @property - def system_version(self): - return __version__ - - @property - def system_name(self): return _RMMSYSNAME - @property - def system_abbrev(self): return _RMMSYSABBREV - @property - def subsystem_name(self): return _RMMSUBSYSNAME - @property - def subsystem_abbrev(self): return _RMMSUBSYSABBREV - -def find_jq_lib(config=None): - """ - return the directory containing the jq libraries - """ - def assert_exists(dir, ctxt=""): - if not os.path.exists(dir): - "{0}directory does not exist: {1}".format(ctxt, dir) - raise ConfigurationException(msg, sys=self) - - # check local configuration - if config and 'jq_lib' in config: - assert_exists(config['jq_lib'], "config param 'jq_lib' ") - return config['jq_lib'] - - # check environment variable - if 'OAR_JQ_LIB' in os.environ: - assert_exists(os.environ['OAR_JQ_LIB'], "env var OAR_JQ_LIB ") - return os.environ['OAR_JQ_LIB'] - - # look relative to a base directory - if 'OAR_HOME' in os.environ: - # this is normally an installation directory (where lib/jq is our - # directory) but we also allow it to be the source directory - assert_exists(os.environ['OAR_HOME'], "env var OAR_HOME ") - basedir = 
os.environ['OAR_HOME'] - candidates = [os.path.join(basedir, 'lib', 'jq'), - os.path.join(basedir, 'jq')] - else: - # guess some locations based on the location of the executing code. - # The code might be coming from an installation, build, or source - # directory. - import nistoar - basedir = os.path.dirname(os.path.dirname(os.path.dirname( - os.path.abspath(nistoar.__file__)))) - candidates = [os.path.join(basedir, 'jq')] - basedir = os.path.dirname(os.path.dirname(basedir)) - candidates.append(os.path.join(basedir, 'jq')) - candidates.append(os.path.join(basedir, 'oar-metadata', 'jq')) - - for dir in candidates: - if os.path.exists(dir): - return dir - - return None - -def_jq_libdir = find_jq_lib() - -def find_merge_etc(config=None): - """ - return the directory containing the merge rules - """ - def assert_exists(dir, ctxt=""): - if not os.path.exists(dir): - "{0}directory does not exist: {1}".format(ctxt, dir) - raise ConfigurationException(msg, sys=self) - - # check local configuration - if config and 'merge_rules_lib' in config: - assert_exists(config['merge_rules_lib'], - "config param 'merge_rules_lib' ") - return config['merge_rules_lib'] - - # check environment variable - if 'OAR_MERGE_ETC' in os.environ: - assert_exists(os.environ['OAR_MERGE_ETC'], "env var OAR_MERGE_ETC ") - return os.environ['OAR_MERGE_ETC'] - - # look relative to a base directory - if 'OAR_HOME' in os.environ: - # this is normally an installation directory (where lib/jq is our - # directory) but we also allow it to be the source directory - assert_exists(os.environ['OAR_HOME'], "env var OAR_HOME ") - basedir = os.environ['OAR_HOME'] - candidates = [os.path.join(basedir, 'etc', 'merge')] - - else: - # guess some locations based on the location of the executing code. - # The code might be coming from an installation, build, or source - # directory. 
- import nistoar - basedir = os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname( - os.path.abspath(nistoar.__file__))))) - candidates = [os.path.join(basedir, 'etc', 'merge')] - candidates.append(os.path.join(basedir, 'oar-metadata', 'etc', 'merge')) - basedir = os.path.dirname(basedir) - candidates.append(os.path.join(basedir, 'oar-metadata', 'etc', 'merge')) - candidates.append(os.path.join(basedir, 'etc', 'merge')) - - for dir in candidates: - if os.path.exists(dir): - return dir - - return None - -def_merge_etcdir = find_merge_etc() - +system = RMMSystem() diff --git a/python/nistoar/rmm/exceptions.py b/python/nistoar/rmm/exceptions.py index e48ffd9..6966c7f 100644 --- a/python/nistoar/rmm/exceptions.py +++ b/python/nistoar/rmm/exceptions.py @@ -1,37 +1,24 @@ """ Exceptions and warnings for the RMM system """ -from . import RMMSystem, SystemInfoMixin +from . import system as _sys +from ..base import OARException -class SourceSystemMixin(SystemInfoMixin): +class RMMException(OARException): """ - a mixin that stores information about the source system of the error + an exception occurring while interacting with the RMM system """ - def __init__(self, systeminfo): - self._sysname = systeminfo.system_name or "" - self._sysabbrev = systeminfo.system_abbrev or "" - self._subsysname = systeminfo.subsystem_name or systeminfo.system_name - self._subsysabbrev = \ - systeminfo.subsystem_abbrev or systeminfo.system_abbrev - self._sysver = systeminfo.system_version or "x" - @property - def system_version(self): return self._sysver - @property - def system_name(self): return self._sysname - @property - def system_abbrev(self): return self._sysabbrev - @property - def subsystem_name(self): return self._subsysname - @property - def subsystem_abbrev(self): return self._subsysabbrev + def __init__(self, message=None, cause=None): + if not message and not cause: + message = "Unknown RMM exception occurred" + super(RMMException, self).__init__(message, cause, _sys) - 
-class RMMWarning(Warning, SourceSystemMixin): +class RMMWarning(Warning): """ a base class for warnings generated by the preservation system """ - def __init__(self, msg=None, cause=None, sys=None): + def __init__(self, msg=None, cause=None): """ create the base warning. @@ -42,52 +29,17 @@ def __init__(self, msg=None, cause=None, sys=None): information as to the cause of the """ self.cause = cause - if not sys or not isinstance(sys, SystemInfoMixin): - sys = RMMSystem() - SourceSystemMixin.__init__(self, sys) + self.sys = _sys Warning.__init__(self, msg) -class RMMException(Exception, SourceSystemMixin): - """ - a base class for exceptions occuring in the RMM system - """ - def __init__(self, msg=None, cause=None, sys=None): - """ - create the exception. - - :param msg str: a specific warning message - :param cause Exception: a caught but handled Exception that is the - cause of the warning - :param sys SystemInfo: a SystemInfo instance that can provide - information as to the cause of the - """ - self.cause = cause - if not sys or not isinstance(sys, SystemInfoMixin): - sys = RMMSystem() - SourceSystemMixin.__init__(self, sys) - - if not msg: - if cause: - msg = str(cause) - else: - msg = "Unknown {0} System Error".format(self.subsystem_abbrev) - Exception.__init__(self, msg) - self.cause = cause - -class ConfigurationException(RMMException): - """ - a class indicating an error in the configuration of the RMM system - """ - pass - -class StateException(RMMException): +class RMMStateException(RMMException): """ a class indicating that the RMM system or environment is in an uncorrectable state preventing proper processing """ pass -class DatabaseStateError(StateException): +class DatabaseStateError(RMMStateException): """ a class indicating that the RMM database is in an uncorrectable state preventing proper processing. 
An example diff --git a/python/nistoar/rmm/ingest/wsgi.py b/python/nistoar/rmm/ingest/wsgi.py index 8cd4333..2def329 100644 --- a/python/nistoar/rmm/ingest/wsgi.py +++ b/python/nistoar/rmm/ingest/wsgi.py @@ -6,14 +6,14 @@ a framework-based implementation if any further capabilities are needed. """ -import os, sys, logging, json, cgi, re, subprocess -from urlparse import urlsplit, urlunsplit -from collections import Mapping +import os, sys, logging, json, re, subprocess +from urllib.parse import urlsplit, urlunsplit, parse_qs +from collections.abc import Mapping from wsgiref.headers import Headers from ..mongo.nerdm import (NERDmLoader, LoadLog, RecordIngestError, JSONEncodingError) -from ..exceptions import ConfigurationException +from nistoar.base.config import ConfigurationException log = logging.getLogger("RMM").getChild("ingest") @@ -162,7 +162,7 @@ def set_response(self, code, message): def end_headers(self): status = "{0} {1}".format(str(self._code), self._msg) - self._start(status, self._hdr.items()) + self._start(status, list(self._hdr.items())) def handle(self): meth_handler = 'do_'+self._meth @@ -184,7 +184,7 @@ def authorize(self): return self.authorize_via_queryparam() def authorize_via_queryparam(self): - params = cgi.parse_qs(self._env.get('QUERY_STRING', '')) + params = parse_qs(self._env.get('QUERY_STRING', '')) auths = params.get('auth',[]) if self._auth[1]: # match the last value provided @@ -216,6 +216,7 @@ def do_GET(self, path): if not path: try: out = json.dumps(list(self._loaders.keys())) + '\n' + out = out.encode() except Exception as ex: log.exception("Internal error: "+str(ex)) return self.send_error(500, "Internal error") @@ -229,7 +230,7 @@ def do_GET(self, path): self.set_response(200, "Service is ready") self.add_header('Content-Type', 'application/json') self.end_headers() - return ["Service ready\n"] + return [b"Service ready\n"] else: return self.send_error(404, "resource does not exist") @@ -253,13 +254,14 @@ def 
nerdm_archive_cache(self, rec): records that have been accepted but not ingested. """ try: - arkid = rec['@id'] - outfile = os.path.join(self._archdir, '_cache', - os.path.basename(arkid)+".json") + arkid = re.sub(r'/.*$', '', re.sub(r'ark:/\d+/', '', rec['@id'])) + ver = rec.get('version', '1.0.0').replace('.', '_') + recid = "%s-v%s" % (os.path.basename(arkid), ver) + outfile = os.path.join(self._archdir, '_cache', recid+".json") with open(outfile, 'w') as fd: json.dump(rec, fd, indent=2) - return arkid + return recid except KeyError as ex: # this shouldn't happen if the record was already validated @@ -273,23 +275,22 @@ def nerdm_archive_cache(self, rec): raise RuntimeError("Failed to cache record ({0}): {1}" .format(arkid, str(ex))) - def nerdm_archive_commit(self, arkid): + def nerdm_archive_commit(self, recid): """ commit a previously cached record to the local disk archive. This method is called after the record has been successfully ingested to the RMM's database. """ - outfile = os.path.join(self._archdir, '_cache', - os.path.basename(arkid)+".json") + outfile = os.path.join(self._archdir, '_cache', recid+".json") if not os.path.exists(outfile): raise RuntimeError("record to commit ({0}) not found in cache: {1}" - .format(arkid, outfile)) + .format(recid, outfile)) try: os.rename(outfile, os.path.join(self._archdir, os.path.basename(outfile))) except OSError as ex: raise RuntimeError("Failed to archvie record ({0}): {1}" - .format(arkid, str(ex))) + .format(recid, str(ex))) def ingest_nerdm_record(self): @@ -332,7 +333,8 @@ def ingest_nerdm_record(self): self.set_response(400, "Input record is not valid") self.add_header('Content-Type', 'application/json') self.end_headers() - return [ json.dumps([str(e) for e in res.errs]) + '\n' ] + out = json.dumps([str(e) for e in res.errs]) + '\n' + return [ out.encode() ] except RecordIngestError as ex: log.exception("Failed to load posted record: "+str(ex)) @@ -369,7 +371,7 @@ def nerdm_post_commit(self, recid): run 
an external executable for further processing after the record is commited to the database (e.g. update an external index) """ - cmd = _mkpostcomm(self._postexec, recid, self._archdir) + cmd = _mkpostcomm(self._postexec, recid) try: log.debug("Executing post-commit script:\n %s", " ".join(cmd)) diff --git a/python/nistoar/rmm/mongo/fields.py b/python/nistoar/rmm/mongo/fields.py index 37b1dc2..80425e5 100644 --- a/python/nistoar/rmm/mongo/fields.py +++ b/python/nistoar/rmm/mongo/fields.py @@ -13,7 +13,7 @@ UpdateWarning, LoadLog) from .loader import ValidationError, SchemaError, RefResolutionError -DEF_BASE_SCHEMA = "https://www.nist.gov/od/dm/field-help/v0.1#" +DEF_BASE_SCHEMA = "https://data.nist.gov/od/dm/field-help/v0.1#" DEF_SCHEMA = DEF_BASE_SCHEMA + "/definitions/FieldInfo" COLLECTION_NAME="fields" @@ -59,7 +59,7 @@ def load(self, fielddata, validate=True, results=None, id=None): if not provided, the extracted key will be used as applicable. """ - if hasattr(fielddata, 'iteritems'): + if hasattr(fielddata, 'items'): # JSON object return self.load_obj(fielddata, validate, results, id) elif hasattr(fielddata, '__getitem__'): @@ -88,7 +88,7 @@ def load_obj(self, fielddata, validate=True, results=None, id=None): try: key = { "name": fielddata['name'] } - except KeyError, ex: + except KeyError as ex: if id is None: id = str({'name': '?'}) return results.add(id, @@ -108,7 +108,7 @@ def load_obj(self, fielddata, validate=True, results=None, id=None): try: self.load_data(fielddata, key, self.onupdate) - except Exception, ex: + except Exception as ex: errs = [ex] return results.add(id, errs) @@ -130,7 +130,7 @@ def load_from_file(self, filepath, validate=True, results=None): with open(filepath) as fd: try: data = json.load(fd) - except ValueError, ex: + except ValueError as ex: if not results: results = self._mkloadlog() return results.add(filepath, [ JSONEncodingError(ex) ]) diff --git a/python/nistoar/rmm/mongo/loader.py b/python/nistoar/rmm/mongo/loader.py index 
68ca013..c131ed8 100644 --- a/python/nistoar/rmm/mongo/loader.py +++ b/python/nistoar/rmm/mongo/loader.py @@ -8,15 +8,14 @@ from ejsonschema import ExtValidator from ejsonschema import ValidationError, SchemaError, RefResolutionError -from ..exceptions import DatabaseStateError +from ..exceptions import RMMException, DatabaseStateError _dburl_re = re.compile(r"^mongodb://(\w+(:\S+)?@)?\w+(\.\w+)*(:\d+)?/\w+$") -class Loader(object): +class Loader(object, metaclass=ABCMeta): """ an abstract base class for loading data """ - __metaclass__ = ABCMeta def __init__(self, dburl, collname=None, schemadir=None, log=None): """ @@ -121,12 +120,12 @@ def load_data(self, data, key=None, onupdate='quiet'): coll = self._db[self.coll] if key: - curs = coll.find(key) - if curs.count() > 1: + count = coll.count_documents(key); + if count > 1: # key should have returned no more than 1 record raise DatabaseStateError("unique key query returns " "multiple records") - if curs.count() > 0: + if count > 0: # a previous record with matching key exists if onupdate == 'fail': raise RecordIngestError("Existing record with key " @@ -134,11 +133,16 @@ def load_data(self, data, key=None, onupdate='quiet'): doload = True if hasattr(onupdate, '__call__'): - doload = onupdate(data, key) + c = coll.find(key) + try: + doload = onupdate(c[0], key) + finally: + c.close() if doload: - if onupdate != 'quiet': - msg = "Updating previously loaded record" + if isinstance(onupdate, str) and onupdate != 'quiet': + msg = "Updating previously loaded record into %s: %s" % \ + (self.coll, str(key)) if self.log: self.log.warn(msg) else: @@ -156,9 +160,9 @@ def load_data(self, data, key=None, onupdate='quiet'): result = coll.insert_one(data) return 1 - except RecordIngestError, ex: + except RecordIngestError as ex: raise - except Exception, ex: + except Exception as ex: if self.log: self.log.exception("Unexpected loading error: "+str(ex)) raise RuntimeError("Unexpected loading error: "+str(ex)) @@ -288,19 +292,15 
@@ def merge(self, otherlog): self._results.append(res) return self -class RecordIngestError(Exception): +class RecordIngestError(RMMException): """ - an exception indicating a failure to load a record + an exception indicating a failure to load a record into the RMM """ def __init__(self, msg=None, cause=None): - if not msg: - if cause: - msg = str(cause) - else: - msg = "Unknown Ingest Error" - super(RecordIngestError, self).__init__(msg) - self.cause = cause + if not msg and not cause: + msg = "Unknown RMM Ingest Error" + super(RecordIngestError, self).__init__(msg, cause) class JSONEncodingError(RecordIngestError): """ diff --git a/python/nistoar/rmm/mongo/nerdm.py b/python/nistoar/rmm/mongo/nerdm.py index 004305c..a1a97cf 100644 --- a/python/nistoar/rmm/mongo/nerdm.py +++ b/python/nistoar/rmm/mongo/nerdm.py @@ -2,23 +2,109 @@ load NERDm records into the RMM's MongoDB database """ import json, os, sys +from collections import Mapping from .loader import (Loader, RecordIngestError, JSONEncodingError, UpdateWarning, LoadLog) from .loader import ValidationError, SchemaError, RefResolutionError +from nistoar.nerdm import utils +from nistoar.nerdm.convert.rmm import NERDmForRMM DEF_BASE_SCHEMA = "https://data.nist.gov/od/dm/nerdm-schema/v0.5#" DEF_SCHEMA = DEF_BASE_SCHEMA + "/definitions/Resource" -COLLECTION_NAME="record" +LATEST_COLLECTION_NAME="record" +VERSIONS_COLLECTION_NAME="versions" +RELEASES_COLLECTION_NAME="releasesets" -class NERDmLoader(Loader): +class _NERDmRenditionLoader(Loader): + """ + a base class for loading a rendition of a NERDm record into one of the data collections holding + NERDm metadata (record, versions, releaseSets) + """ + def __init__(self, collection_name, dburl, schemadir, log=None, defschema=DEF_SCHEMA): + """ + create the loader. 
+ + :param str collection_name + :param str dburl: the URL of MongoDB database in the form, + 'mongodb://HOST:PORT/DBNAME' + :param str schemadir: the path to a directory containing the JSON + schemas needed to validate the input JSON data. + :param logging.Logger log: a logging instance that messages can be + sent to. If not provided, warnings might be + issued via the warnings module. + :param str defschema: the URI for the schema to validated new records + against by default. + """ + super(_NERDmRenditionLoader, self).__init__(dburl, collection_name, schemadir, log) + self._schema = defschema + + def _get_upd_key(self, nerdm): + return { "@id": nerdm['@id'] } + + def _get_onupdate(self, nerdm): + newver = nerdm.get('version', "1.0.0") + # replace previous record if the version of new rec is newer or same as previous + return lambda data, key: utils.cmp_versions(newver, data.get('version', '1.0.0')) >= 0 + + def load(self, rec, validate=True, results=None, id=None): + """ + load a NERDm resource record into the database + :param rec dict: the NERDm JSON record to load + :param validate bool: False if validation should be skipped before + loading; otherwise, loading will fail if the input + data is not valid. 
+ """ + if not results: + results = self._mkloadlog() + + try: + key = self._get_upd_key(rec) + except KeyError as ex: + if id is None: + id = str({'@id': '?'}) + return results.add(id, RecordIngestError("Data is missing input key value, @id")) + if id is None: + id = key + + errs = None + if validate: + schemauri = rec.get("_schema") + if not schemauri: + schemauri = self._schema + + errs = self.validate(rec, schemauri) + if errs: + return results.add(id, errs) + + try: + if self.load_data(rec, key, self._get_onupdate(rec)): + results.add(key, None) + except Exception as ex: + results.add(key, [ex]) + return results + + def _mkloadlog(self): + return LoadLog("NERDm resources") + + + +class NERDmLoader(_NERDmRenditionLoader): """ a class for validating and loading NERDm records into the Mongo database. """ - def __init__(self, dburl, schemadir, onupdate='quiet', log=None, - defschema=DEF_SCHEMA): + class LatestLoader(_NERDmRenditionLoader): + def __init__(self, dburl, schemadir, log=None): + super(NERDmLoader.LatestLoader, self).__init__(LATEST_COLLECTION_NAME, dburl, schemadir, log) + + class ReleaseSetLoader(_NERDmRenditionLoader): + def __init__(self, dburl, schemadir, log=None): + super(NERDmLoader.ReleaseSetLoader, self).__init__(RELEASES_COLLECTION_NAME, dburl, schemadir, log) + + + def __init__(self, dburl, schemadir, onupdate='quiet', log=None, defschema=DEF_SCHEMA): """ create the loader. @@ -35,49 +121,112 @@ def __init__(self, dburl, schemadir, onupdate='quiet', log=None, :param defschema str: the URI for the schema to validated new records against by default. 
""" - super(NERDmLoader, self).__init__(dburl, COLLECTION_NAME, schemadir, log) - self._schema = defschema + super(NERDmLoader, self).__init__(VERSIONS_COLLECTION_NAME, dburl, schemadir, log, defschema) self.onupdate = onupdate - def load(self, resrec, validate=True, results=None, id=None): + self.lateloadr = self.LatestLoader(dburl, schemadir, log) + self.relloadr = self.ReleaseSetLoader(dburl, schemadir, log) + self.tormm = NERDmForRMM(log, schemadir) + + def connect(self): + """ + establish a connection to the database. + """ + super(NERDmLoader, self).connect() + self.lateloadr._client = self._client + self.lateloadr._db = self._db + self.relloadr._client = self._client + self.relloadr._db = self._db + + def disconnect(self): + """ + close the connection to the database. + """ + try: + super(NERDmLoader, self).disconnect() + finally: + self.lateloadr._client = None + self.relloadr._db = None + + def _get_upd_key(self, nerdm): + return { "@id": nerdm['@id'], "version": nerdm.get('version', '1.0.0') } + + def _get_onupdate(self, nerdm): + return self.onupdate + + def load(self, rec, validate=True, results=None, id=None): """ load a NERDm resource record into the database - :param resrec dict: the resource JSON record to load - :param validate bool: False if validation should be skipped before + :param dict rec: the NERDm JSON record to load + :param bool validate: False if validation should be skipped before loading; otherwise, loading will fail if the input data is not valid. + :param LoadLog results: the results object to add loading result error messages to. Provide + this when chaining loaders together + :param str|dict id: an identifier for the record being loaded that messages should be associated + with. 
""" if not results: results = self._mkloadlog() + errs = [] + # the input is a versioned Resource record; convert it into its three parts for the three + # collections (record, versions, releaseSets) try: - key = { "@id": resrec['@id'] } - except KeyError, ex: + parts = self.tormm.convert(rec, validate=False) + except (ValueError, ValidationError) as ex: + return results.add(id or json.dumps({'@id': rec.get('@id','?')}), ex) + + for prop in "record version releaseSet".split(): + if prop not in parts or not isinstance(parts[prop], Mapping): + errs.append( + ValidationError("Failed to extract %s record from input NERDm Resource" % prop) + ) + if errs: if id is None: - id = str({'@id': '?'}) - return results.add(id, - RecordIngestError("Data is missing input key value, @id")) + id = json.dumps({'@id': rec.get('@id','?')}) + return results.add(id, errs) + + # now load the versioned record first; if that's successful, we'll load the others + + # determine the versions udpate key + try: + key = self._get_upd_key(parts['version']) + id = json.dumps(key) + except KeyError as ex: + if id is None: + id = json.dumps({'@id': '?'}) + return results.add(id, RecordIngestError("Data is missing input key value, @id")) + if id is None: - id = key + id = key + # validate the versions record (if requested) errs = None if validate: - schemauri = resrec.get("_schema") + schemauri = parts['version'].get("_schema") if not schemauri: schemauri = self._schema - errs = self.validate(resrec, schemauri) + errs = self.validate(parts['version'], schemauri) if errs: return results.add(id, errs) + # load the version record try: - self.load_data(resrec, key, self.onupdate) - except Exception, ex: + if self.load_data(parts['version'], key, self._get_onupdate(parts['version'])): + results.add(id, None) + except Exception as ex: errs = [ex] - return results.add(key, errs) + return results.add(id, errs) + - def _mkloadlog(self): - return LoadLog("NERDm resources") + # now (conditionally) load the other 
parts. (These will not get loaded if the version is not + # new enough) + self.lateloadr.load(parts['record'], validate, results, key) + self.relloadr.load(parts['releaseSet'], validate, results, key) + return results + def load_from_file(self, filepath, validate=True, results=None): """ @@ -86,8 +235,13 @@ def load_from_file(self, filepath, validate=True, results=None): with open(filepath) as fd: try: data = json.load(fd) - except ValueError, ex: - raise JSONEncodingError(ex) + except ValueError as ex: + ex = JSONEncodingError(ex) + if not results: + raise ex + results.add(filepath, ex) + return results + return self.load(data, validate=validate, results=results, id=filepath) def load_from_dir(self, dirpath, validate=True, results=None): diff --git a/python/nistoar/rmm/mongo/taxon.py b/python/nistoar/rmm/mongo/taxon.py index 7242f39..d7633e3 100644 --- a/python/nistoar/rmm/mongo/taxon.py +++ b/python/nistoar/rmm/mongo/taxon.py @@ -7,7 +7,7 @@ UpdateWarning, LoadLog) from .loader import ValidationError, SchemaError, RefResolutionError -DEF_BASE_SCHEMA = "https://www.nist.gov/od/dm/simple-taxonomy/v1.0#" +DEF_BASE_SCHEMA = "https://data.nist.gov/od/dm/simple-taxonomy/v1.0#" DEF_SCHEMA = DEF_BASE_SCHEMA + "/definitions/Term" COLLECTION_NAME="taxonomy" @@ -72,7 +72,7 @@ def load_obj(self, termdata, validate=True, results=None, id=None): termdata['parent'] = "" try: key = { "term": termdata['term'], "parent": termdata['parent'] } - except KeyError, ex: + except KeyError as ex: if id is None: id = str({'term': '?'}) return results.add(id, @@ -95,7 +95,7 @@ def load_obj(self, termdata, validate=True, results=None, id=None): try: self.load_data(termdata, key, self.onupdate) - except Exception, ex: + except Exception as ex: errs = [ex] return results.add(id, errs) @@ -113,7 +113,7 @@ def load(self, termdata, validate=True, results=None, id=None): if not provided, the extracted key will be used as applicable. 
""" - if hasattr(termdata, 'iteritems'): + if hasattr(termdata, 'items'): # JSON object return self.load_obj(termdata, validate, results, id) elif hasattr(termdata, '__getitem__'): @@ -130,7 +130,7 @@ def load_from_file(self, filepath, validate=True, results=None): with open(filepath) as fd: try: data = json.load(fd) - except ValueError, ex: + except ValueError as ex: if not results: results = self._mkloadlog() return results.add(filepath, [ JSONEncodingError(ex) ]) diff --git a/python/nistoar/rmm/mongo/tests/test_nerdm.py b/python/nistoar/rmm/mongo/tests/test_nerdm.py deleted file mode 100644 index b6b0b0e..0000000 --- a/python/nistoar/rmm/mongo/tests/test_nerdm.py +++ /dev/null @@ -1,83 +0,0 @@ -import pdb, os, json, urlparse, warnings, logging -import unittest as test -from pymongo import MongoClient -from ejsonschema import ExtValidator, SchemaValidator - -from nistoar.rmm.mongo import nerdm -from nistoar.rmm.mongo import loader - -pydir = os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))) -basedir = os.path.dirname(pydir) -schemadir = os.path.join(basedir, "model") -exdir = os.path.join(schemadir, "examples") -janaffile = os.path.join(exdir, "janaf.json") - -dburl = None -if os.environ.get('MONGO_TESTDB_URL'): - dburl = os.environ.get('MONGO_TESTDB_URL') - -assert os.path.exists(schemadir), schemadir - -# logger = logging.getLogger("test") - -@test.skipIf(not os.environ.get('MONGO_TESTDB_URL'), - "test mongodb not available") -class TestNERDmLoader(test.TestCase): - - def setUp(self): - self.ldr = nerdm.NERDmLoader(dburl, schemadir) - - def tearDown(self): - client = MongoClient(dburl) - if not hasattr(client, 'get_database'): - client.get_database = client.get_default_database - db = client.get_database() - if "record" in db.collection_names(): - db.drop_collection("record") - - def test_ctor(self): - self.assertEquals(self.ldr.coll, "record") - - def test_validate(self): - with open(janaffile) as 
fd: - data = json.load(fd) - res = self.ldr.validate(data, schemauri=nerdm.DEF_SCHEMA) - self.assertEqual(res, []) - - del data['landingPage'] - res = self.ldr.validate(data, schemauri=nerdm.DEF_SCHEMA) - self.assertEqual(len(res), 2) - - def test_load_data(self): - with open(janaffile) as fd: - data = json.load(fd) - key = { '@id': "ark:/88434/sdp0fjspek351" } - self.assertEqual(self.ldr.load_data(data, key, 'fail'), 1) - c = self.ldr._client.get_database().record.find() - self.assertEqual(c.count(), 1) - self.assertEqual(c[0]['@id'], 'ark:/88434/sdp0fjspek351') - - def test_load(self): - with open(janaffile) as fd: - data = json.load(fd) - res = self.ldr.load(data) - self.assertEqual(res.attempt_count, 1) - self.assertEqual(res.success_count, 1) - self.assertEqual(res.failure_count, 0) - c = self.ldr._client.get_database().record.find() - self.assertEqual(c.count(), 1) - self.assertEqual(c[0]['@id'], 'ark:/88434/sdp0fjspek351') - - def test_load_from_file(self): - res = self.ldr.load_from_file(janaffile) - self.assertEqual(res.attempt_count, 1) - self.assertEqual(res.success_count, 1) - self.assertEqual(res.failure_count, 0) - c = self.ldr._client.get_database().record.find() - self.assertEqual(c.count(), 1) - self.assertEqual(c[0]['@id'], 'ark:/88434/sdp0fjspek351') - - - -if __name__ == '__main__': - test.main() diff --git a/python/nistoar/rmm/mongo/tests/warnings.py b/python/nistoar/rmm/mongo/tests/warnings.py deleted file mode 100644 index bc5d030..0000000 --- a/python/nistoar/rmm/mongo/tests/warnings.py +++ /dev/null @@ -1,401 +0,0 @@ -"""Python part of the warnings subsystem.""" - -# Note: function level imports should *not* be used -# in this module as it may cause import lock deadlock. -# See bug 683658. 
-import linecache -import sys -import types - -__all__ = ["warn", "warn_explicit", "showwarning", - "formatwarning", "filterwarnings", "simplefilter", - "resetwarnings", "catch_warnings"] - - -def warnpy3k(message, category=None, stacklevel=1): - """Issue a deprecation warning for Python 3.x related changes. - - Warnings are omitted unless Python is started with the -3 option. - """ - if sys.py3kwarning: - if category is None: - category = DeprecationWarning - warn(message, category, stacklevel+1) - -def _show_warning(message, category, filename, lineno, file=None, line=None): - """Hook to write a warning to a file; replace if you like.""" - if file is None: - file = sys.stderr - try: - file.write(formatwarning(message, category, filename, lineno, line)) - except IOError: - pass # the file (probably stderr) is invalid - this warning gets lost. -# Keep a working version around in case the deprecation of the old API is -# triggered. -showwarning = _show_warning - -def formatwarning(message, category, filename, lineno, line=None): - """Function to format a warning the standard way.""" - s = "%s:%s: %s: %s\n" % (filename, lineno, category.__name__, message) - line = linecache.getline(filename, lineno) if line is None else line - if line: - line = line.strip() - s += " %s\n" % line - return s - -def filterwarnings(action, message="", category=Warning, module="", lineno=0, - append=0): - """Insert an entry into the list of warnings filters (at the front). 
- - 'action' -- one of "error", "ignore", "always", "default", "module", - or "once" - 'message' -- a regex that the warning message must match - 'category' -- a class that the warning must be a subclass of - 'module' -- a regex that the module name must match - 'lineno' -- an integer line number, 0 matches all warnings - 'append' -- if true, append to the list of filters - """ - import re - assert action in ("error", "ignore", "always", "default", "module", - "once"), "invalid action: %r" % (action,) - assert isinstance(message, basestring), "message must be a string" - assert isinstance(category, (type, types.ClassType)), \ - "category must be a class" - assert issubclass(category, Warning), "category must be a Warning subclass" - assert isinstance(module, basestring), "module must be a string" - assert isinstance(lineno, int) and lineno >= 0, \ - "lineno must be an int >= 0" - item = (action, re.compile(message, re.I), category, - re.compile(module), lineno) - if append: - filters.append(item) - else: - filters.insert(0, item) - -def simplefilter(action, category=Warning, lineno=0, append=0): - """Insert a simple entry into the list of warnings filters (at the front). - - A simple filter matches all modules and messages. 
- 'action' -- one of "error", "ignore", "always", "default", "module", - or "once" - 'category' -- a class that the warning must be a subclass of - 'lineno' -- an integer line number, 0 matches all warnings - 'append' -- if true, append to the list of filters - """ - assert action in ("error", "ignore", "always", "default", "module", - "once"), "invalid action: %r" % (action,) - assert isinstance(lineno, int) and lineno >= 0, \ - "lineno must be an int >= 0" - item = (action, None, category, None, lineno) - if append: - filters.append(item) - else: - filters.insert(0, item) - -def resetwarnings(): - """Clear the list of warning filters, so that no filters are active.""" - filters[:] = [] - -class _OptionError(Exception): - """Exception used by option processing helpers.""" - pass - -# Helper to process -W options passed via sys.warnoptions -def _processoptions(args): - for arg in args: - try: - _setoption(arg) - except _OptionError, msg: - print >>sys.stderr, "Invalid -W option ignored:", msg - -# Helper for _processoptions() -def _setoption(arg): - import re - parts = arg.split(':') - if len(parts) > 5: - raise _OptionError("too many fields (max 5): %r" % (arg,)) - while len(parts) < 5: - parts.append('') - action, message, category, module, lineno = [s.strip() - for s in parts] - action = _getaction(action) - message = re.escape(message) - category = _getcategory(category) - module = re.escape(module) - if module: - module = module + '$' - if lineno: - try: - lineno = int(lineno) - if lineno < 0: - raise ValueError - except (ValueError, OverflowError): - raise _OptionError("invalid lineno %r" % (lineno,)) - else: - lineno = 0 - filterwarnings(action, message, category, module, lineno) - -# Helper for _setoption() -def _getaction(action): - if not action: - return "default" - if action == "all": return "always" # Alias - for a in ('default', 'always', 'ignore', 'module', 'once', 'error'): - if a.startswith(action): - return a - raise _OptionError("invalid action: 
%r" % (action,)) - -# Helper for _setoption() -def _getcategory(category): - import re - if not category: - return Warning - if re.match("^[a-zA-Z0-9_]+$", category): - try: - cat = eval(category) - except NameError: - raise _OptionError("unknown warning category: %r" % (category,)) - else: - i = category.rfind(".") - module = category[:i] - klass = category[i+1:] - try: - m = __import__(module, None, None, [klass]) - except ImportError: - raise _OptionError("invalid module name: %r" % (module,)) - try: - cat = getattr(m, klass) - except AttributeError: - raise _OptionError("unknown warning category: %r" % (category,)) - if not issubclass(cat, Warning): - raise _OptionError("invalid warning category: %r" % (category,)) - return cat - - -def warn(message, category=None, stacklevel=1): - """Issue a warning, or maybe ignore it or raise an exception.""" - # Check if message is already a Warning object - if isinstance(message, Warning): - category = message.__class__ - # Check category argument - if category is None: - category = UserWarning - assert issubclass(category, Warning) - # Get context information - try: - caller = sys._getframe(stacklevel) - except ValueError: - globals = sys.__dict__ - lineno = 1 - else: - globals = caller.f_globals - lineno = caller.f_lineno - if '__name__' in globals: - module = globals['__name__'] - else: - module = "" - filename = globals.get('__file__') - if filename: - fnl = filename.lower() - if fnl.endswith((".pyc", ".pyo")): - filename = filename[:-1] - else: - if module == "__main__": - try: - filename = sys.argv[0] - except AttributeError: - # embedded interpreters don't have sys.argv, see bug #839151 - filename = '__main__' - if not filename: - filename = module - registry = globals.setdefault("__warningregistry__", {}) - - warn_explicit(message, category, filename, lineno, module, registry, - globals) - -def warn_explicit(message, category, filename, lineno, - module=None, registry=None, module_globals=None): - lineno = 
int(lineno) - if module is None: - module = filename or "" - if module[-3:].lower() == ".py": - module = module[:-3] # XXX What about leading pathname? - if registry is None: - registry = {} - if isinstance(message, Warning): - text = str(message) - category = message.__class__ - else: - text = message - message = category(message) - key = (text, category, lineno) - # Quick test for common case - if registry.get(key) and not registryreset: -# print "warnings no show: key="+str(key) - return - # Search the filters - for item in filters: - action, msg, cat, mod, ln = item - if ((msg is None or msg.match(text)) and - issubclass(category, cat) and - (mod is None or mod.match(module)) and - (ln == 0 or lineno == ln)): - break - else: - action = defaultaction - # Early exit actions - if action == "ignore": - registry[key] = 1 - print "warnings ignore: key="+str(key) - return - - # Prime the linecache for formatting, in case the - # "file" is actually in a zipfile or something. - linecache.getlines(filename, module_globals) - - if action == "error": - raise message - # Other actions - if action == "once": - registry[key] = 1 - oncekey = (text, category) - if onceregistry.get(oncekey): - return - onceregistry[oncekey] = 1 - elif action == "always": - pass - elif action == "module": - registry[key] = 1 - altkey = (text, category, 0) - if registry.get(altkey): - return - registry[altkey] = 1 - elif action == "default": - registry[key] = 1 - else: - # Unrecognized actions are errors - raise RuntimeError( - "Unrecognized action (%r) in warnings.filters:\n %s" % - (action, item)) - # Print message and context - showwarning(message, category, filename, lineno) - - -class WarningMessage(object): - - """Holds the result of a single showwarning() call.""" - - _WARNING_DETAILS = ("message", "category", "filename", "lineno", "file", - "line") - - def __init__(self, message, category, filename, lineno, file=None, - line=None): - local_values = locals() - for attr in 
self._WARNING_DETAILS: - setattr(self, attr, local_values[attr]) - self._category_name = category.__name__ if category else None - - def __str__(self): - return ("{message : %r, category : %r, filename : %r, lineno : %s, " - "line : %r}" % (self.message, self._category_name, - self.filename, self.lineno, self.line)) - - -class catch_warnings(object): - - """A context manager that copies and restores the warnings filter upon - exiting the context. - - The 'record' argument specifies whether warnings should be captured by a - custom implementation of warnings.showwarning() and be appended to a list - returned by the context manager. Otherwise None is returned by the context - manager. The objects appended to the list are arguments whose attributes - mirror the arguments to showwarning(). - - The 'module' argument is to specify an alternative module to the module - named 'warnings' and imported under that name. This argument is only useful - when testing the warnings module itself. - - """ - - def __init__(self, record=False, module=None, reset=False): - """Specify whether to record warnings and if an alternative module - should be used other than sys.modules['warnings']. - - For compatibility with Python 3.0, please consider all arguments to be - keyword-only. 
- - """ - self._record = record - self._module = sys.modules['warnings'] if module is None else module - if reset: - self._module.onceregistry = {} - self._module.registryreset = True - self._entered = False - - def __repr__(self): - args = [] - if self._record: - args.append("record=True") - if self._module is not sys.modules['warnings']: - args.append("module=%r" % self._module) - name = type(self).__name__ - return "%s(%s)" % (name, ", ".join(args)) - - def __enter__(self): - if self._entered: - raise RuntimeError("Cannot enter %r twice" % self) - self._entered = True - self._filters = self._module.filters - self._module.filters = self._filters[:] - self._showwarning = self._module.showwarning - if self._record: - log = [] - def showwarning(*args, **kwargs): - log.append(WarningMessage(*args, **kwargs)) - self._module.showwarning = showwarning - return log - else: - return None - - def __exit__(self, *exc_info): - if not self._entered: - raise RuntimeError("Cannot exit %r without entering first" % self) - self._module.filters = self._filters - self._module.showwarning = self._showwarning - self._module.registryreset = False - - - -# filters contains a sequence of filter 5-tuples -# The components of the 5-tuple are: -# - an action: error, ignore, always, default, module, or once -# - a compiled regex that must match the warning message -# - a class representing the warning category -# - a compiled regex that must match the module that is being warned -# - a line number for the line being warning, or 0 to mean any line -# If either if the compiled regexs are None, match anything. -_warnings_defaults = False -filters = [] -defaultaction = "default" -onceregistry = {} -registryreset = False - -# Module initialization -_processoptions(sys.warnoptions) -if not _warnings_defaults: - silence = [ImportWarning, PendingDeprecationWarning] - # Don't silence DeprecationWarning if -3 or -Q was used. 
- if not sys.py3kwarning and not sys.flags.division_warning: - silence.append(DeprecationWarning) - for cls in silence: - simplefilter("ignore", category=cls) - bytes_warning = sys.flags.bytes_warning - if bytes_warning > 1: - bytes_action = "error" - elif bytes_warning: - bytes_action = "default" - else: - bytes_action = "ignore" - simplefilter(bytes_action, category=BytesWarning, append=1) -del _warnings_defaults diff --git a/python/nistoar/rmm/tests/test_config.py b/python/nistoar/rmm/tests/test_config.py deleted file mode 100644 index 70e314e..0000000 --- a/python/nistoar/rmm/tests/test_config.py +++ /dev/null @@ -1,123 +0,0 @@ -import os, sys, pdb, shutil, logging, json -import unittest as test -from nistoar.tests import * - -import nistoar.rmm.config as config - -datadir = os.path.join(os.path.dirname(os.path.abspath(__file__)), "data") -tmpd = None - -csurl = None -if os.environ.get('CONFIG_SERVER_URL'): - csurl = os.environ.get('CONFIG_SERVER_URL') - -def setUpModule(): - global tmpd - ensure_tmpdir() - tmpd = tmpdir() - -def tearDownModule(): - rmtmpdir() - -class TestConfig(test.TestCase): - - def test_load_from_file(self): - cfgfile = os.path.join(datadir, "config.json") - cfg = config.load_from_file(cfgfile) - - self.assertIsInstance(cfg, dict) - self.assertEqual(cfg['working_dir'], "/pdr/work") - - cfgfile = os.path.join(datadir, "config.yaml") - cfg = config.load_from_file(cfgfile) - - self.assertIsInstance(cfg, dict) - self.assertEqual(cfg['working_dir'], "/pdr/work") - - def test_resolve_configuration(self): - cfgfile = os.path.join(datadir, "config.json") - cfg = config.resolve_configuration(cfgfile) - self.assertEqual(cfg['working_dir'], "/pdr/work") - - cfg = config.resolve_configuration("config.json", "file://"+datadir) - self.assertEqual(cfg['working_dir'], "/pdr/work") - - cfgfile = "file://" + cfgfile - cfg = config.resolve_configuration(cfgfile) - self.assertEqual(cfg['working_dir'], "/pdr/work") - - def test_extract_from_cs(self): - cfgfile 
= os.path.join(datadir, "csconfig.json") - with open(cfgfile) as fd: - csdata = json.load(fd) - - cfg = config.ConfigService.extract(csdata, flat=True) - names = "oar.mongodb.port oar.mongodb.host oar.mongodb.database.name stuff.filter stuff.mode name".split() - for name in names: - self.assertIn(name, cfg) - self.assertEqual(len(cfg.keys()), len(names)) - self.assertEqual(cfg['oar.mongodb.database.name'], "TestDB") - self.assertEqual(cfg['oar.mongodb.port'], "3333") - self.assertEqual(cfg['name'], "Hank") - self.assertEqual(cfg['stuff.filter'], "off") - - @test.skipIf(not os.environ.get('CONFIG_SERVER_URL'), - "test config server not available") - def test_resolve_configuration_fromcs(self): - cfgfile = "http://goober.net/gurn.log" - with self.assertRaises(NotImplementedError): - cfg = config.resolve_configuration(cfgfile) - - -class TestLogConfig(test.TestCase): - - def resetLogfile(self): - if config._log_handler: - self.rootlog.removeHandler(config._log_handler) - if self.logfile and os.path.exists(self.logfile): - os.remove(self.logfile) - self.logfile = None - - def setUp(self): - if not hasattr(self, 'logfile'): - self.logfile = None - if not hasattr(self, 'rootlog'): - self.rootlog = logging.getLogger() - self.resetLogfile() - - def tearDown(self): - self.resetLogfile() - - def test_from_config(self): - logfile = "cfgd.log" - cfg = { - 'logdir': tmpd, - 'logfile': logfile - } - - self.logfile = os.path.join(tmpd, logfile) - self.assertFalse(os.path.exists(self.logfile)) - - config.configure_log(config=cfg) - self.rootlog.warn('Oops') - self.assertTrue(os.path.exists(self.logfile)) - with open(self.logfile) as fd: - words = fd.read() - self.assertIn("Oops", words) - - def test_abs(self): - self.logfile = os.path.join(tmpd, "cfgfile.log") - cfg = { - 'logfile': "goob.log" - } - - self.assertFalse(os.path.exists(self.logfile)) - config.configure_log(logfile=self.logfile, config=cfg) - self.rootlog.warn('Oops') - self.assertTrue(os.path.exists(self.logfile)) - 
- - - -if __name__ == '__main__': - test.main() diff --git a/python/nistoar/tests/__init__.py b/python/nistoar/testing/__init__.py similarity index 89% rename from python/nistoar/tests/__init__.py rename to python/nistoar/testing/__init__.py index 565ef19..d92c9b8 100644 --- a/python/nistoar/tests/__init__.py +++ b/python/nistoar/testing/__init__.py @@ -6,6 +6,10 @@ import os, shutil +__all__ = [ + 'ensure_tmpdir', 'tmpdir', 'rmtmpdir', 'Tempfiles', 'artifactdir' +] + tmpname = "_test" def ensure_tmpdir(basedir=None, dirname=None): @@ -69,6 +73,7 @@ class Tempfiles(object): It is instantiated with a base directory where temporary directories and files can be created. Full paths to a temporary file or directory can be gotten, then, by calling the instance as a function: + .. code-block:: python ts = Tempfiles(basedir) tmpfile = ts("testoutput.txt") @@ -79,6 +84,7 @@ class Tempfiles(object): tmpfile = ts.track("testoutput.txt") Temporary directories that should be cleaned up can be created with mkdir(): + .. code-block:: python tmpdir = ts.mkdir("mytempdir") @@ -129,7 +135,7 @@ def clean(self): """ remove all files and directories being tracked by this instance. 
""" - for i in xrange(len(self._files)): + for i in range(len(self._files)): filen = self._files.pop() path = os.path.join(self._root, filen) if os.path.exists(path): @@ -145,3 +151,20 @@ def clean(self): def __del__(self): if self._autoclean: self.clean() + +def artifactdir(mod=None): + out = os.environ.get('OAR_TEST_ARTIFACT_DIR') + if not out or not os.path.isdir(out): + return tmpdir() + + if not isinstance(mod, str) and hasattr(mod, '__name__'): + mod = mod.__name__ + if not isinstance(mod, str): + return out + out = os.path.join(out, mod) + if not os.path.exists(out): + os.mkdir(out) + return out + + + diff --git a/python/nistoar/testing/uwsgi.py b/python/nistoar/testing/uwsgi.py new file mode 100644 index 0000000..f958f47 --- /dev/null +++ b/python/nistoar/testing/uwsgi.py @@ -0,0 +1,59 @@ +""" +a simulated uwsgi module +""" +opt = {} + +import imp as _imp +import sys, os +from argparse import ArgumentParser + +def load_as_root_module(): + """ + This loads this module under the symbol uwsgi so that later attempts to import + uwsgi will get this simulated version. 
+ """ + srcf = __file__ + if srcf.endswith('.pyc'): + srcf = srcf[:-1] + with open(srcf) as fd: + return _imp.load_module("uwsgi", fd, srcf, (".py", 'r', _imp.PY_SOURCE)) + +def load(): + """ + load a simulated uwsgi environment based on command line arguments + """ + out = load_as_root_module() + parser = create_parser(os.path.basename(sys.argv[0])) + opts = parser.parse_args(sys.argv[1:]) + load_env(opts, out.opt) + + return out + +def create_parser(progname): + usage = None + desc = "Run the %s uWSGI service script in a simulated mode" % progname + parser = ArgumentParser(progname, usage, desc) + + parser.add_argument("-c", "--config-file", type=str, dest="oar_config_file", metavar="FILE", + help="read service configuration file from FILE") + parser.add_argument("-w", "--working-dir", type=str, dest="oar_working_dir", metavar="DIR", + help="set DIR as the working directory of the service") + parser.add_argument("--set-ph", type=str, dest="ph", action="append", default=[], metavar="VAR=VAL", + help="set an arbitrary place-holder variable") + + return parser + +def load_env(opts, wsgiopt): + for arg in opts.ph: + parts = arg.split('=', 1) + if len(parts) < 2: + raise RuntimeError("Bad placeholder argument: "+arg) + wsgiopt[parts[0]] = parts[1].encode() + + if opts.oar_config_file: + wsgiopt['oar_config_file'] = opts.oar_config_file.encode() + if opts.oar_working_dir: + wsgiopt['oar_working_dir'] = opts.oar_working_dir.encode() + + + diff --git a/python/setup.py b/python/setup.py index faca7ef..d832cb8 100644 --- a/python/setup.py +++ b/python/setup.py @@ -1,6 +1,6 @@ import glob, os, shutil -from distutils.core import setup -from distutils.command.build import build as _build +from setuptools import setup, find_namespace_packages +from setuptools.command.build_py import build_py as _build def set_version(): try: @@ -56,18 +56,20 @@ def run(self): _build.run(self) setup(name='nistoar', - version='0.1', + version=get_version(), description="the NERDm metadata 
support for nistoar", author="Ray Plante", author_email="raymond.plante@nist.gov", url='https://github.com/usnistgov/oar-metadata', - packages=['nistoar', 'nistoar.nerdm', 'nistoar.nerdm.convert', - 'nistoar.id', 'nistoar.doi', 'nistoar.doi.resolving', - 'nistoar.rmm', 'nistoar.rmm.mongo', 'nistoar.rmm.ingest'], + packages=find_namespace_packages(exclude=['*.tests', '*.tests.data'], include=['nistoar.*']), scripts=[os.path.join("..","scripts",s) for s in ["pdl2resources.py", "ingest-nerdm-res.py", "ingest-field-info.py", "ingest-taxonomy.py", "ingest-uwsgi.py" ]], - cmdclass={'build': build} + cmdclass={'build_py': build}, + classifiers=[ + 'Programming Language :: Python :: 3 :: Only' + ], + zip_safe=False ) diff --git a/python/nistoar/__init__.py b/python/tests/__init__.py similarity index 100% rename from python/nistoar/__init__.py rename to python/tests/__init__.py diff --git a/python/nistoar/id/tests/__init__.py b/python/tests/nistoar/__init__.py similarity index 100% rename from python/nistoar/id/tests/__init__.py rename to python/tests/nistoar/__init__.py diff --git a/python/nistoar/nerdm/convert/tests/__init__.py b/python/tests/nistoar/base/__init__.py similarity index 100% rename from python/nistoar/nerdm/convert/tests/__init__.py rename to python/tests/nistoar/base/__init__.py diff --git a/python/nistoar/rmm/tests/data/config.json b/python/tests/nistoar/base/data/config.json similarity index 100% rename from python/nistoar/rmm/tests/data/config.json rename to python/tests/nistoar/base/data/config.json diff --git a/python/nistoar/rmm/tests/data/config.yaml b/python/tests/nistoar/base/data/config.yaml similarity index 100% rename from python/nistoar/rmm/tests/data/config.yaml rename to python/tests/nistoar/base/data/config.yaml diff --git a/python/tests/nistoar/base/test_config.py b/python/tests/nistoar/base/test_config.py new file mode 100644 index 0000000..4fd4d39 --- /dev/null +++ b/python/tests/nistoar/base/test_config.py @@ -0,0 +1,440 @@ +import os, 
sys, pdb, shutil, logging, json, re, importlib +import unittest as test +from nistoar.testing import * + +import nistoar.base.config as config + +datadir = os.path.join(os.path.dirname(__file__), "data") +tmpd = None + +def setUpModule(): + global tmpd + ensure_tmpdir() + tmpd = tmpdir() + +def tearDownModule(): + rmtmpdir() + +class TestConfig(test.TestCase): + + def test_load_from_service(self): + with self.assertRaises(NotImplementedError): + config.load_from_service("goob/dev") + + def test_lookup_config_server(self): + with self.assertRaises(NotImplementedError): + config.lookup_config_server(8888) + + def test_load_from_file(self): + cfgfile = os.path.join(datadir, "config.json") + cfg = config.load_from_file(cfgfile) + + self.assertIsInstance(cfg, dict) + self.assertEqual(cfg['working_dir'], "/pdr/work") + + cfgfile = os.path.join(datadir, "config.yaml") + cfg = config.load_from_file(cfgfile) + + self.assertIsInstance(cfg, dict) + self.assertEqual(cfg['working_dir'], "/pdr/work") + + def test_resolve_configuration(self): + cfgfile = os.path.join(datadir, "config.json") + cfg = config.resolve_configuration(cfgfile) + self.assertEqual(cfg['working_dir'], "/pdr/work") + + cfgfile = "file://" + cfgfile + cfg = config.resolve_configuration(cfgfile) + self.assertEqual(cfg['working_dir'], "/pdr/work") + + cfgfile = "http://goober.net/gurn.log" + with self.assertRaises(NotImplementedError): + cfg = config.resolve_configuration(cfgfile) + + def test_merge_config(self): + app = { + "foo": "bar", + "goob": { "gurn": "cranston", "hank": "aaron" }, + "zub": "dub", + "tell": { "a": 1 } + } + defc = { + "black": "blue", + "goob": { "gurn": "gomer", "patty": "duke" }, + "tell": 1, + "zub": { "dub": 2} + } + out = config.merge_config(app, defc) + self.assertEqual(out['foo'], 'bar') + self.assertEqual(out['goob'], { 'gurn': 'cranston', "hank": "aaron", + 'patty': "duke" }) + self.assertEqual(out['zub'], 'dub') + self.assertEqual(out['tell'], {"a": 1}) + +class 
TestLogConfig(test.TestCase): + + def resetLogfile(self): + if config._log_handler: + self.rootlog.removeHandler(config._log_handler) + if self.logfile and os.path.exists(self.logfile): + os.remove(self.logfile) + self.logfile = None + + def setUp(self): + if not hasattr(self, 'logfile'): + self.logfile = None + if not hasattr(self, 'rootlog'): + self.rootlog = logging.getLogger() + self.resetLogfile() + + def tearDown(self): + self.resetLogfile() + + def test_from_config(self): + logfile = "cfgd.log" + cfg = { + 'logdir': tmpd, + 'logfile': logfile, + 'loglevel': 'DEBUG' + } + + self.logfile = os.path.join(tmpd, logfile) + self.assertFalse(os.path.exists(self.logfile)) + + config.configure_log(config=cfg) + self.assertEqual(config.global_logdir, tmpd) + self.assertEqual(config.global_logfile, self.logfile) + + self.rootlog.warning('Oops') + self.assertTrue(os.path.exists(self.logfile)) + with open(self.logfile) as fd: + words = fd.read() + self.assertIn("Oops", words) + + def test_abs(self): + self.logfile = os.path.join(tmpd, "cfgfile.log") + cfg = { + 'logfile': "goob.log" + } + + self.assertFalse(os.path.exists(self.logfile)) + config.configure_log(logfile=self.logfile, config=cfg) + self.rootlog.warning('Oops') + self.assertTrue(os.path.exists(self.logfile)) + +class TestConfigService(test.TestCase): + + def test_ctor(self): + srvc = config.ConfigService("https://config.org/oar/", "dev") + self.assertEqual(srvc._base, "https://config.org/oar/") + self.assertEqual(srvc._prof, "dev") + + srvc = config.ConfigService("https://config.org/oar") + self.assertEqual(srvc._base, "https://config.org/oar/") + self.assertIsNone(srvc._prof) + + srvc = config.ConfigService("https://config.org") + self.assertEqual(srvc._base, "https://config.org/") + self.assertIsNone(srvc._prof) + + def test_bad_url(self): + with self.assertRaises(config.ConfigurationException): + srvc = config.ConfigService("config.org") + + with self.assertRaises(config.ConfigurationException): + srvc = 
config.ConfigService("https://") + + def test_url_for(self): + srvc = config.ConfigService("https://config.org/oar/", "dev") + self.assertEqual(srvc.url_for("goob"), "https://config.org/oar/goob/dev") + self.assertEqual(srvc.url_for("goob", "dumb"), + "https://config.org/oar/goob/dumb") + + def test_from_env(self): + try: + if 'OAR_CONFIG_SERVICE' in os.environ: + del os.environ['OAR_CONFIG_SERVICE'] + self.assertIsNone(config.ConfigService.from_env()) + + os.environ['OAR_CONFIG_SERVICE'] = "https://config.org/oar/" + srvc = config.ConfigService.from_env() + self.assertEqual(srvc._base, "https://config.org/oar/") + self.assertIsNone(srvc._prof) + + os.environ['OAR_CONFIG_ENV'] = "test" + srvc = config.ConfigService.from_env() + self.assertEqual(srvc._base, "https://config.org/oar/") + self.assertEqual(srvc._prof, "test") + finally: + if 'OAR_CONFIG_SERVICE' in os.environ: + del os.environ['OAR_CONFIG_SERVICE'] + if 'OAR_CONFIG_ENV' in os.environ: + del os.environ['OAR_CONFIG_ENV'] + + def test_cvtarrays(self): + d = { + "a": { + "[1]": "ia", + "[5]": { + "ib": { + "[0]": "ibb", + "[3]": "ibe" + }, + "[0]": "0a", + }, + "[0]": "ic", + "[3]": "id" + } + } + out = { + "a": [ "ic", "ia", "id", { + "ib": [ "ibb", "ibe" ], + "[0]": "0a" + }] + } + self.assertEqual(config.ConfigService._cvtarrays(d), out) + + def test_inflate(self): + d = { + "working_dir": "/data/pdr", + "store_dir": "/data/store", + 'notifier.alerts[1].type': "preserve.success", + 'notifier.alerts[1].targets[0]': "dev", + 'notifier.alerts[0].type': "preserve.failure", + 'notifier.alerts[0].targets[0]': "oarop", + 'sip_type.midas.common.review_dir': "/data/review", + 'sip_type.midas.common.upload_dir': "/data/upload", + } + out = { + "working_dir": "/data/pdr", + "store_dir": "/data/store", + "notifier": { + "alerts": [{ + "type": "preserve.failure", + "targets": [ "oarop" ] + }, { + "type": "preserve.success", + "targets": [ "dev" ] + }] + }, + "sip_type": { + "midas": { + "common": { + "review_dir": 
"/data/review", + "upload_dir": "/data/upload" + } + } + } + } + self.assertEqual(config.ConfigService._inflate(d), out) + + def test_deep_update(self): + d = { + "a": { + "a.b": 1, + "a.c": 2, + "a.d": { + "ad.a": 4, + "ad.b": 5 + } + } + } + u = { + "a": { + "a.c": 20, + "a.d": { + "ad.b": 50, + "ad.c": 60 + } + } + } + out = { + "a": { + "a.b": 1, + "a.c": 20, + "a.d": { + "ad.a": 4, + "ad.b": 50, + "ad.c": 60 + } + } + } + n = config.ConfigService._deep_update(d, u) + self.assertEqual(n, out) + self.assertIs(n, d) + + def test_extract1(self): + data = \ +{ + "propertySources": [ + { + "source": { + "RMMAPI": "https://goob/rmm/", + "LANDING": "https://localhost/rmm/", + "SDPAPI": "https://localhost/sdp/", + }, + "name": "classpath:config/oar-uri/oar-uri.yml" + }, + { + "source": { + "RMMAPI": "https://localhost/rmm/", + "SDPAPI": "https://localhost/sdp/", + }, + "hail": "fire" + } + ], + "version": None, + "name": "oaruri", + "profiles": [ + "local" + ], + "label": None +} + out = { + "RMMAPI": "https://goob/rmm/", + "SDPAPI": "https://localhost/sdp/", + "LANDING": "https://localhost/rmm/", + } + + self.assertEqual(config.ConfigService.extract(data), out) + + def test_extract2(self): + data = \ +{ + "propertySources": [ + { + "source": { + "store_dir": "/var/data/store", + 'sip_type.midas.common.review_dir': "/var/data/review", + 'notifier.alerts[1].type': "preserve.win", + 'notifier.alerts[1].targets[3]': "oarop", + }, + "name": "classpath:config/oar-uri/oar-uri.yml" + }, + { + "source": { + "working_dir": "/data/pdr", + "store_dir": "/data/store", + 'notifier.alerts[1].type': "preserve.success", + 'notifier.alerts[1].targets[0]': "dev", + 'notifier.alerts[0].type': "preserve.failure", + 'notifier.alerts[0].targets[0]': "oarop", + 'sip_type.midas.common.review_dir': "/data/review", + 'sip_type.midas.common.upload_dir': "/data/upload", + }, + "name": "classpath:config/oar-uri/oar-uri-dev.yml" + } + ], + "version": None, + "name": "oaruri", + "profiles": [ + 
"local" + ], + "label": None +} + + out = { + "working_dir": "/data/pdr", + "store_dir": "/var/data/store", + "notifier": { + "alerts": [{ + "type": "preserve.failure", + "targets": [ "oarop" ] + }, { + "type": "preserve.win", + "targets": [ "dev", "oarop" ] + }] + }, + "sip_type": { + "midas": { + "common": { + "review_dir": "/var/data/review", + "upload_dir": "/data/upload" + } + } + } + } + self.assertEqual(config.ConfigService.extract(data), out) + + def test_extract3(self): + data = \ +{ + "propertySources": [ + { + "source": { + "store_dir": "/var/data/store", + 'sip_type.midas.common.review_dir': "/var/data/review", + 'notifier.alerts[1].type': "preserve.win", + 'notifier.alerts[1].targets[3]': "oarop", + }, + "name": "classpath:config/oar-uri/oar-uri.yml" + }, + { + "source": { + "working_dir": "/data/pdr", + "store_dir": "/data/store", + 'notifier.alerts[1].type': "preserve.success", + 'notifier.alerts[1].targets[0]': "dev", + 'notifier.alerts[0].type': "preserve.failure", + 'notifier.alerts[0].targets[0]': "oarop", + 'sip_type.midas.common.review_dir': "/data/review", + 'sip_type.midas.common.upload_dir': "/data/upload", + }, + "name": "classpath:config/oar-uri/oar-uri-dev.yml" + } + ], + "version": None, + "name": "oaruri", + "profiles": [ + "local" + ], + "label": None +} + + out = { "working_dir": "/data/pdr", + "store_dir": "/var/data/store", + 'notifier.alerts[1].type': "preserve.win", + 'notifier.alerts[1].targets[0]': "dev", + 'notifier.alerts[1].targets[3]': "oarop", + 'notifier.alerts[0].type': "preserve.failure", + 'notifier.alerts[0].targets[0]': "oarop", + 'sip_type.midas.common.review_dir': "/var/data/review", + 'sip_type.midas.common.upload_dir': "/data/upload" } + + self.assertEqual(config.ConfigService.extract(data, flat=True), out) + + @test.skipIf("noreload" in os.environ.get("OAR_TEST_INCLUDE", ""), + "Avoid reloading modules when part of larger TestSuite") + def test_defservice_reload(self): + config.service = None + 
self.assertNotIn('OAR_CONFIG_SERVICE', os.environ) + self.assertIsNone(config.service) + try: + os.environ['OAR_CONFIG_SERVICE'] = "https://config.org/oar/" + importlib.reload(config) + self.assertIsNotNone(config.service) + self.assertEqual(config.service._base, "https://config.org/oar/") + self.assertIsNone(config.service._prof) + finally: + if 'OAR_CONFIG_SERVICE' in os.environ: + del os.environ['OAR_CONFIG_SERVICE'] + config.service = None + + def test_defservice(self): + self.assertNotIn('OAR_CONFIG_SERVICE', os.environ) + self.assertIsNone(config.service) + try: + os.environ['OAR_CONFIG_SERVICE'] = "https://config.org/oar/" + config.service = config.ConfigService.from_env() + self.assertIsNotNone(config.service) + self.assertEqual(config.service._base, "https://config.org/oar/") + self.assertIsNone(config.service._prof) + finally: + if 'OAR_CONFIG_SERVICE' in os.environ: + del os.environ['OAR_CONFIG_SERVICE'] + config.service = None + + + +if __name__ == '__main__': + test.main() diff --git a/python/nistoar/nerdm/tests/__init__.py b/python/tests/nistoar/doi/__init__.py similarity index 100% rename from python/nistoar/nerdm/tests/__init__.py rename to python/tests/nistoar/doi/__init__.py diff --git a/python/nistoar/rmm/mongo/tests/__init__.py b/python/tests/nistoar/doi/resolving/__init__.py similarity index 100% rename from python/nistoar/rmm/mongo/tests/__init__.py rename to python/tests/nistoar/doi/resolving/__init__.py diff --git a/python/nistoar/doi/resolving/tests/test_common.py b/python/tests/nistoar/doi/resolving/test_common.py similarity index 96% rename from python/nistoar/doi/resolving/tests/test_common.py rename to python/tests/nistoar/doi/resolving/test_common.py index b66c829..6217b10 100644 --- a/python/nistoar/doi/resolving/tests/test_common.py +++ b/python/tests/nistoar/doi/resolving/test_common.py @@ -1,6 +1,6 @@ import os, sys, pdb, shutil, logging, json import unittest as test -from collections import Mapping +from collections.abc import 
Mapping # from nistoar.tests import * import nistoar.doi.resolving.common as res @@ -15,6 +15,9 @@ def setUpModule(): class TestFuncs(test.TestCase): + def setUp(self): + res._client_info = None + def tearDown(self): res._client_info = None @@ -52,6 +55,12 @@ def test_get_default_user_agent(self): class TestDOIInfo(test.TestCase): + def setUp(self): + res._client_info = None + + def tearDown(self): + res._client_info = None + def test_ctor(self): self.assertEqual(res.default_doi_resolver, "https://doi.org/") diff --git a/python/nistoar/doi/resolving/tests/test_crossref.py b/python/tests/nistoar/doi/resolving/test_crossref.py similarity index 100% rename from python/nistoar/doi/resolving/tests/test_crossref.py rename to python/tests/nistoar/doi/resolving/test_crossref.py diff --git a/python/nistoar/doi/resolving/tests/test_datacite.py b/python/tests/nistoar/doi/resolving/test_datacite.py similarity index 100% rename from python/nistoar/doi/resolving/tests/test_datacite.py rename to python/tests/nistoar/doi/resolving/test_datacite.py diff --git a/python/nistoar/doi/resolving/tests/test_resolving.py b/python/tests/nistoar/doi/resolving/test_resolving.py similarity index 83% rename from python/nistoar/doi/resolving/tests/test_resolving.py rename to python/tests/nistoar/doi/resolving/test_resolving.py index dfc2994..846e082 100644 --- a/python/nistoar/doi/resolving/tests/test_resolving.py +++ b/python/tests/nistoar/doi/resolving/test_resolving.py @@ -1,6 +1,6 @@ import os, sys, pdb, shutil, logging, json import unittest as test -from collections import Mapping +from collections.abc import Mapping # from nistoar.tests import * import nistoar.doi.resolving as res @@ -14,6 +14,7 @@ cli = ("NIST Open Access for Research", "testing", "http://github.com/usnistgov/oar-metadata/", "datasupport@nist.gov") +set_client_info(*cli) def setUpModule(): set_client_info(*cli) def tearDownModule(): @@ -28,9 +29,9 @@ class TestResolving(test.TestCase): def test_resolve_dc(self): info = 
res.resolve(dcdoi, logger=logger) self.assertIn(info.source, ["Datacite", "Crosscite"]) - self.assertTrue(isinstance(info, res.DataciteDOIInfo)) + self.assertTrue(isinstance(info, res.CrossciteDOIInfo)) self.assertIsNotNone(info._data) - self.assertEqual(info.data['DOI'], dcdoi) + self.assertTrue(info.data['DOI'] == dcdoi or info.data['DOI'] == dcdoi.upper()) @test.skipIf("doi" not in os.environ.get("OAR_TEST_INCLUDE",""), "kindly skipping doi service checks") @@ -64,21 +65,24 @@ def test_resolve_badresolver(self): class TestResolver(test.TestCase): + def setUp(self): + set_client_info(*cli) + def test_ctor(self): rslvr = res.Resolver(resolver="ftp:/goober.com/") - self.assertEquals(rslvr._resolver, "ftp:/goober.com/") - self.assertEquals(rslvr._client_info[1], "testing") + self.assertEqual(rslvr._resolver, "ftp:/goober.com/") + self.assertEqual(rslvr._client_info[1], "testing") ci = (comm._client_info[0], "testing2", comm._client_info[2], comm._client_info[3]) rslvr = res.Resolver(ci) - self.assertEquals(rslvr._resolver, "https://doi.org/") - self.assertEquals(rslvr._client_info[1], "testing2") + self.assertEqual(rslvr._resolver, "https://doi.org/") + self.assertEqual(rslvr._client_info[1], "testing2") rslvr = res.Resolver() - self.assertEquals(rslvr._resolver, "https://doi.org/") - self.assertEquals(rslvr._client_info[1], "testing") + self.assertEqual(rslvr._resolver, "https://doi.org/") + self.assertEqual(rslvr._client_info[1], "testing") @test.skipIf("doi" not in os.environ.get("OAR_TEST_INCLUDE",""), diff --git a/python/nistoar/doi/tests/sim_datacite_srv.py b/python/tests/nistoar/doi/sim_datacite_srv.py similarity index 91% rename from python/nistoar/doi/tests/sim_datacite_srv.py rename to python/tests/nistoar/doi/sim_datacite_srv.py index df3190f..b03a2d7 100644 --- a/python/nistoar/doi/tests/sim_datacite_srv.py +++ b/python/tests/nistoar/doi/sim_datacite_srv.py @@ -1,9 +1,9 @@ -from __future__ import absolute_import, print_function -import json, os, cgi, 
sys, re, hashlib, json, logging, random +import json, os, sys, re, hashlib, json, logging, random from datetime import datetime from wsgiref.headers import Headers from collections import OrderedDict, Mapping from copy import deepcopy +from urllib.parse import parse_qs JSONAPI_MT = "application/vnd.api+json" @@ -108,7 +108,7 @@ def __init__(self, repo, basepath, prefixes, wsgienv, start_resp): self._code = 0 self._msg = "unknown state" - def send_error(self, code, message, errtitle=None, errdesc={}): + def send_error(self, code, message, errtitle=None, errdesc={}, tellexc=False): edata = None if errdesc and not errtitle: errtitle=message @@ -122,10 +122,15 @@ def send_error(self, code, message, errtitle=None, errdesc={}): self.add_header("Content-type", JSONAPI_MT) self.add_header("Content-length", len(edata)) status = "{0} {1}".format(str(code), message) - self._start(status, self._hdr.items(), sys.exc_info()) + excinfo = None + if tellexc: + excinfo = sys.exc_info() + if excinfo == (None, None, None): + excinfo = None + self._start(status, self._hdr.items(), excinfo) if edata: - return [ edata ] + return [ edata.encode() ] return [] def add_header(self, name, value): @@ -143,7 +148,7 @@ def handle(self): meth_handler = 'do_'+self._meth path = self._env.get('PATH_INFO', '/') - params = cgi.parse_qs(self._env.get('QUERY_STRING', '')) + params = parse_qs(self._env.get('QUERY_STRING', '')) if path.startswith(self.basepath): path = path[len(self.basepath):] @@ -186,10 +191,10 @@ def do_GET(self, path, params=None): self.add_header("Content-type", JSONAPI_MT) self.add_header("Content-length", len(out)) self.end_headers() - return [out] + return [out.encode()] except (ValueError, TypeError) as ex: return self.send_error(500, "JSON encoding error", - errdesc={"detail": str(ex)}) + errdesc={"detail": str(ex)}, tellexc=True) def do_HEAD(self, path, params=None): if path: @@ -218,8 +223,8 @@ def do_POST(self, path, params=None): {"detail": self._env['CONTENT_TYPE']}) try: - 
bodyin = self._env['wsgi.input'] - doc = json.load(bodyin, object_pairs_hook=OrderedDict) + bodyin = self._env['wsgi.input'].read().decode('utf-8') + doc = json.loads(bodyin, object_pairs_hook=OrderedDict) except (ValueError, TypeError) as ex: return self.send_error(400, "Not JSON", "Failed to parse input as JSON", {"detail": str(ex)}) @@ -296,9 +301,9 @@ def do_POST(self, path, params=None): self.add_header("Content-type", JSONAPI_MT) self.add_header("Content-length", str(len(out))) self.end_headers() - return [out] + return [out.encode()] except (ValueError, TypeError) as ex: - return self.send_error(500,"JSON encoding error", errdesc={"detail":str(ex)}) + return self.send_error(500,"JSON encoding error", errdesc={"detail":str(ex)}, tellexc=True) def do_PUT(self, path, params=None): if path: @@ -326,8 +331,8 @@ def do_PUT(self, path, params=None): return self.send_error(404, "ID Not Found", errdesc={"detail": path}) try: - bodyin = self._env['wsgi.input'] - doc = json.load(bodyin, object_pairs_hook=OrderedDict) + bodyin = self._env['wsgi.input'].read().decode('utf-8') + doc = json.loads(bodyin, object_pairs_hook=OrderedDict) except (ValueError, TypeError) as ex: return self.send_error(400, "Not JSON", "Failed to parse input as JSON", {"detail": str(ex)}) @@ -371,9 +376,9 @@ def do_PUT(self, path, params=None): self.add_header("Content-type", JSONAPI_MT) self.add_header("Content-length", len(out)) self.end_headers() - return [out] + return [out.encode()] except (ValueError, TypeError) as ex: - return self.send_error(500, "JSON encoding error") + return self.send_error(500, "JSON encoding error", tellexc=True) def do_DELETE(self, path, params=None): if path: @@ -391,6 +396,13 @@ def do_DELETE(self, path, params=None): return self.send_error(204, "Deleted") +def get_uwsgi_opt(key, default=None): + out = uwsgi.opt.get(key) + if out is None: + return default + elif isinstance(out, bytes): + return out.decode('utf-8') + return out -prefixes = re.split(r'\s*,\s*', 
uwsgi.opt.get("prefixes", "")) +prefixes = re.split(r'\s*,\s*', get_uwsgi_opt("prefixes", "")) application = SimIDService("/dois", prefixes) diff --git a/python/nistoar/doi/tests/test_datacite.py b/python/tests/nistoar/doi/test_datacite.py similarity index 98% rename from python/nistoar/doi/tests/test_datacite.py rename to python/tests/nistoar/doi/test_datacite.py index ee5ef69..ed72771 100644 --- a/python/nistoar/doi/tests/test_datacite.py +++ b/python/tests/nistoar/doi/test_datacite.py @@ -1,4 +1,3 @@ -from __future__ import absolute_import import os, pdb, sys, json, requests, logging, time, re, hashlib, shutil import unittest as test @@ -28,11 +27,12 @@ def startService(): srvport = port pidfile = os.path.join(tdir,"simsrv"+str(srvport)+".pid") - wpy = "python/nistoar/doi/tests/sim_datacite_srv.py" - cmd = "uwsgi --daemonize {0} --plugin python --http-socket :{1} " \ + wpy = "python/tests/nistoar/doi/sim_datacite_srv.py" + cmd = "uwsgi --daemonize {0} --plugin python3 --http-socket :{1} " \ "--wsgi-file {2} --pidfile {3} --set-ph prefixes={4}" cmd = cmd.format(os.path.join(tdir,"simsrv.log"), srvport, os.path.join(basedir, wpy), pidfile, ",".join(prefixes)) +# print(cmd) os.system(cmd) time.sleep(0.2) @@ -63,6 +63,7 @@ def tearDownModule(): if loghdlr: if rootlog: rootlog.removeHandler(loghdlr) + loghdlr.close() loghdlr = None stopService() shutil.rmtree(tmpdir()) diff --git a/python/nistoar/doi/tests/test_datacite_testsvc.py b/python/tests/nistoar/doi/test_datacite_testsvc.py similarity index 100% rename from python/nistoar/doi/tests/test_datacite_testsvc.py rename to python/tests/nistoar/doi/test_datacite_testsvc.py diff --git a/python/nistoar/doi/tests/test_sim_datacite_srv.py b/python/tests/nistoar/doi/test_sim_datacite_srv.py similarity index 87% rename from python/nistoar/doi/tests/test_sim_datacite_srv.py rename to python/tests/nistoar/doi/test_sim_datacite_srv.py index c73c9bd..10c2a77 100644 --- a/python/nistoar/doi/tests/test_sim_datacite_srv.py +++ 
b/python/tests/nistoar/doi/test_sim_datacite_srv.py @@ -1,17 +1,17 @@ -from __future__ import absolute_import -import os, pdb, requests, logging, time, json +import os, pdb, requests, logging, time, json, sys import unittest as test from copy import deepcopy -from StringIO import StringIO +from io import BytesIO testdir = os.path.dirname(os.path.abspath(__file__)) datadir = os.path.join(testdir, 'data') -import imp +import importlib.util simsrvrsrc = os.path.join(testdir, "sim_datacite_srv.py") -with open(simsrvrsrc, 'r') as fd: - svc = imp.load_module("sim_datacite_svc", fd, simsrvrsrc, - (".py", 'r', imp.PY_SOURCE)) +spec = importlib.util.spec_from_file_location("sim_datacite_svc", simsrvrsrc) +svc = importlib.util.module_from_spec(spec) +sys.modules["sim_datacite_svc"] = svc +spec.loader.exec_module(svc) basedir = os.path.dirname(os.path.dirname(os.path.dirname( os.path.dirname(os.path.dirname(testdir))))) @@ -153,7 +153,7 @@ def test_badGET(self): body = self.svc(req, self.start) self.assertGreater(len(self.resp), 0) self.assertIn("404", self.resp[0]) - doc = json.loads("\n".join(body)) + doc = json.loads("\n".join([b.decode('utf-8') for b in body])) self.assertIn('errors', doc) self.resp = [] @@ -162,7 +162,7 @@ def test_badGET(self): body = self.svc(req, self.start) self.assertGreater(len(self.resp), 0) self.assertIn("406", self.resp[0]) - doc = json.loads("\n".join(body)) + doc = json.loads("\n".join([b.decode('utf-8') for b in body])) def test_badHEAD(self): @@ -210,7 +210,7 @@ def test_badPOST(self): body = self.svc(req, self.start) self.assertGreater(len(self.resp), 0) self.assertIn("405", self.resp[0]) - doc = json.loads("\n".join(body)) + doc = json.loads("\n".join([b.decode('utf-8') for b in body])) self.assertIn('errors', doc) req.update({ @@ -221,7 +221,7 @@ def test_badPOST(self): body = self.svc(req, self.start) self.assertGreater(len(self.resp), 0) self.assertIn("415", self.resp[0]) - doc = json.loads("\n".join(body)) + doc = 
json.loads("\n".join([b.decode('utf-8') for b in body])) self.assertIn('errors', doc) req.update({ @@ -232,7 +232,7 @@ def test_badPOST(self): body = self.svc(req, self.start) self.assertGreater(len(self.resp), 0) self.assertIn("406", self.resp[0]) - doc = json.loads("\n".join(body)) + doc = json.loads("\n".join([b.decode('utf-8') for b in body])) self.assertIn('errors', doc) def test_badPUT(self): @@ -264,7 +264,7 @@ def test_badPUT(self): body = self.svc(req, self.start) self.assertGreater(len(self.resp), 0) self.assertIn("405", self.resp[0]) - doc = json.loads("\n".join(body)) + doc = json.loads("\n".join([b.decode('utf-8') for b in body])) self.assertIn('errors', doc) req.update({ @@ -274,7 +274,7 @@ def test_badPUT(self): body = self.svc(req, self.start) self.assertGreater(len(self.resp), 0) self.assertIn("401", self.resp[0]) - doc = json.loads("\n".join(body)) + doc = json.loads("\n".join([b.decode('utf-8') for b in body])) self.assertIn('errors', doc) req.update({ @@ -285,7 +285,7 @@ def test_badPUT(self): body = self.svc(req, self.start) self.assertGreater(len(self.resp), 0) self.assertIn("415", self.resp[0]) - doc = json.loads("\n".join(body)) + doc = json.loads("\n".join([b.decode('utf-8') for b in body])) self.assertIn('errors', doc) req.update({ @@ -296,7 +296,7 @@ def test_badPUT(self): body = self.svc(req, self.start) self.assertGreater(len(self.resp), 0) self.assertIn("406", self.resp[0]) - doc = json.loads("\n".join(body)) + doc = json.loads("\n".join([b.decode('utf-8') for b in body])) self.assertIn('errors', doc) req.update({ @@ -307,7 +307,7 @@ def test_badPUT(self): body = self.svc(req, self.start) self.assertGreater(len(self.resp), 0) self.assertIn("404", self.resp[0]) - doc = json.loads("\n".join(body)) + doc = json.loads("\n".join([b.decode('utf-8') for b in body])) self.assertIn('errors', doc) def test_badPOST_input(self): @@ -317,13 +317,13 @@ def test_badPOST_input(self): 'HTTP_ACCEPT': svc.JSONAPI_MT, 'CONTENT_TYPE': svc.JSONAPI_MT, 
'PATH_INFO': "/dois", - 'wsgi.input': StringIO(inp) + 'wsgi.input': BytesIO(inp.encode()) } body = self.svc(req, self.start) self.assertGreater(len(self.resp), 0) self.assertIn("400", self.resp[0]) - doc = json.loads("\n".join(body)) + doc = json.loads("\n".join([b.decode('utf-8') for b in body])) self.assertIn('errors', doc) inp = 'Tinker\nTailor\n' @@ -331,7 +331,7 @@ def test_badPOST_input(self): body = self.svc(req, self.start) self.assertGreater(len(self.resp), 0) self.assertIn("400", self.resp[0]) - doc = json.loads("\n".join(body)) + doc = json.loads("\n".join([b.decode('utf-8') for b in body])) self.assertIn('errors', doc) inp = '{"goob":"gurn"}' @@ -339,7 +339,7 @@ def test_badPOST_input(self): body = self.svc(req, self.start) self.assertGreater(len(self.resp), 0) self.assertIn("400", self.resp[0]) - doc = json.loads("\n".join(body)) + doc = json.loads("\n".join([b.decode('utf-8') for b in body])) self.assertIn('errors', doc) inp = '{"data":{}}' @@ -347,7 +347,7 @@ def test_badPOST_input(self): body = self.svc(req, self.start) self.assertGreater(len(self.resp), 0) self.assertIn("400", self.resp[0]) - doc = json.loads("\n".join(body)) + doc = json.loads("\n".join([b.decode('utf-8') for b in body])) self.assertIn('errors', doc) inp = '{"data":{"attributes":{}}}' @@ -355,7 +355,7 @@ def test_badPOST_input(self): body = self.svc(req, self.start) self.assertGreater(len(self.resp), 0) self.assertIn("400", self.resp[0]) - doc = json.loads("\n".join(body)) + doc = json.loads("\n".join([b.decode('utf-8') for b in body])) self.assertIn('errors', doc) inp = '{"data":{"type":"dois","attributes":{}}}' @@ -363,7 +363,7 @@ def test_badPOST_input(self): body = self.svc(req, self.start) self.assertGreater(len(self.resp), 0) self.assertIn("400", self.resp[0]) - doc = json.loads("\n".join(body)) + doc = json.loads("\n".join([b.decode('utf-8') for b in body])) self.assertIn('errors', doc) def test_create_update(self): @@ -381,14 +381,14 @@ def test_create_update(self): 
'HTTP_ACCEPT': svc.JSONAPI_MT, 'CONTENT_TYPE': svc.JSONAPI_MT, 'PATH_INFO': "/dois", - 'wsgi.input': StringIO(inp) + 'wsgi.input': BytesIO(inp.encode()) } body = self.svc(req, self.start) self.assertGreater(len(self.resp), 0) self.assertIn("201", self.resp[0]) - doc = json.loads("\n".join(body)) + doc = json.loads("\n".join([b.decode('utf-8') for b in body])) saved = doc['data']['attributes'] self.assertEqual(saved['doi'], id) self.assertEqual(saved['state'], "draft") @@ -404,7 +404,7 @@ def test_create_update(self): self.assertGreater(len(self.resp), 0) self.assertIn("200", self.resp[0]) - doc = json.loads("\n".join(body)) + doc = json.loads("\n".join([b.decode('utf-8') for b in body])) self.assertEqual(doc['data']['attributes'], saved) req = { @@ -443,7 +443,7 @@ def test_create_update(self): 'HTTP_ACCEPT': svc.JSONAPI_MT, 'CONTENT_TYPE': svc.JSONAPI_MT, 'PATH_INFO': "/dois/"+id, - 'wsgi.input': StringIO(inp) + 'wsgi.input': BytesIO(inp.encode()) } self.resp = [] @@ -451,7 +451,7 @@ def test_create_update(self): self.assertGreater(len(self.resp), 0) self.assertIn("201", self.resp[0]) - doc = json.loads("\n".join(body)) + doc = json.loads("\n".join([b.decode('utf-8') for b in body])) saved = doc['data']['attributes'] self.assertEqual(saved['doi'], id) self.assertEqual(saved['state'], "draft") @@ -468,14 +468,14 @@ def test_create_update(self): } } }) - req['wsgi.input'] = StringIO(inp) + req['wsgi.input'] = BytesIO(inp.encode()) self.resp = [] body = self.svc(req, self.start) self.assertGreater(len(self.resp), 0) self.assertIn("422", self.resp[0]) - doc = json.loads("\n".join(body)) + doc = json.loads("\n".join([b.decode('utf-8') for b in body])) saved = doc['data']['attributes'] self.assertEqual(saved['doi'], id) self.assertEqual(saved['state'], "draft") @@ -497,13 +497,13 @@ def test_create_update(self): } } }) - req['wsgi.input'] = StringIO(inp) + req['wsgi.input'] = BytesIO(inp.encode()) self.resp = [] body = self.svc(req, self.start) 
self.assertGreater(len(self.resp), 0) self.assertIn("201", self.resp[0]) - doc = json.loads("\n".join(body)) + doc = json.loads("\n".join([b.decode('utf-8') for b in body])) saved = doc['data']['attributes'] self.assertEqual(saved['doi'], id) self.assertEqual(saved['state'], "findable") @@ -520,13 +520,13 @@ def test_create_update(self): } } }) - req['wsgi.input'] = StringIO(inp) + req['wsgi.input'] = BytesIO(inp.encode()) body = self.svc(req, self.start) self.assertGreater(len(self.resp), 0) self.assertIn("201", self.resp[0]) - doc = json.loads("\n".join(body)) + doc = json.loads("\n".join([b.decode('utf-8') for b in body])) saved = doc['data']['attributes'] self.assertEqual(saved['doi'], id) self.assertEqual(saved['state'], "findable") @@ -546,7 +546,7 @@ def test_DELETE(self): body = self.svc(req, self.start) self.assertGreater(len(self.resp), 0) self.assertIn("404", self.resp[0]) - doc = json.loads("\n".join(body)) + doc = json.loads("\n".join([b.decode('utf-8') for b in body])) self.assertIn('errors', doc) # create @@ -563,7 +563,7 @@ def test_DELETE(self): 'HTTP_ACCEPT': svc.JSONAPI_MT, 'CONTENT_TYPE': svc.JSONAPI_MT, 'PATH_INFO': "/dois", - 'wsgi.input': StringIO(json.dumps(inp)) + 'wsgi.input': BytesIO(json.dumps(inp).encode()) } self.resp = [] @@ -571,7 +571,7 @@ def test_DELETE(self): self.assertGreater(len(self.resp), 0) self.assertIn("201", self.resp[0]) - doc = json.loads("\n".join(body)) + doc = json.loads("\n".join([b.decode('utf-8') for b in body])) saved = doc['data']['attributes'] self.assertEqual(saved['doi'], id) self.assertEqual(saved['state'], "draft") @@ -602,7 +602,7 @@ def test_DELETE(self): 'HTTP_ACCEPT': svc.JSONAPI_MT, 'CONTENT_TYPE': svc.JSONAPI_MT, 'PATH_INFO': "/dois", - 'wsgi.input': StringIO(json.dumps(inp)) + 'wsgi.input': BytesIO(json.dumps(inp).encode()) } self.resp = [] @@ -610,7 +610,7 @@ def test_DELETE(self): self.assertGreater(len(self.resp), 0) self.assertIn("422", self.resp[0]) - doc = json.loads("\n".join(body)) + doc = 
json.loads("\n".join([b.decode('utf-8') for b in body])) saved = doc['data']['attributes'] self.assertEqual(saved['doi'], id) self.assertEqual(saved['state'], "draft") @@ -624,7 +624,7 @@ def test_DELETE(self): "creators": [{"fn": "me"}], "types": { "resourceType": "Dataset", "schemaOrg": "Dataset"} }) - req['wsgi.input'] = StringIO(json.dumps(inp)) + req['wsgi.input'] = BytesIO(json.dumps(inp).encode()) self.resp = [] @@ -632,7 +632,7 @@ def test_DELETE(self): self.assertGreater(len(self.resp), 0) self.assertIn("200", self.resp[0]) - doc = json.loads("\n".join(body)) + doc = json.loads("\n".join([b.decode('utf-8') for b in body])) saved = doc['data']['attributes'] self.assertEqual(saved['doi'], id) self.assertEqual(saved['state'], "findable") @@ -647,7 +647,7 @@ def test_DELETE(self): body = self.svc(req, self.start) self.assertGreater(len(self.resp), 0) self.assertIn("403", self.resp[0]) - doc = json.loads("\n".join(body)) + doc = json.loads("\n".join([b.decode('utf-8') for b in body])) self.assertIn('errors', doc) diff --git a/python/nistoar/doi/tests/test_utils.py b/python/tests/nistoar/doi/test_utils.py similarity index 94% rename from python/nistoar/doi/tests/test_utils.py rename to python/tests/nistoar/doi/test_utils.py index 871dbbc..d3b1274 100644 --- a/python/nistoar/doi/tests/test_utils.py +++ b/python/tests/nistoar/doi/test_utils.py @@ -1,7 +1,7 @@ import os, sys, pdb, shutil, logging, json import unittest as test -from collections import Mapping -from nistoar.tests import * +from collections.abc import Mapping +from nistoar.testing import * import nistoar.doi.utils as utils diff --git a/python/tests/nistoar/id/__init__.py b/python/tests/nistoar/id/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/python/nistoar/id/tests/test_jq.py b/python/tests/nistoar/id/test_jq.py similarity index 68% rename from python/nistoar/id/tests/test_jq.py rename to python/tests/nistoar/id/test_jq.py index 6f60d6d..818fc3e 100644 --- 
a/python/nistoar/id/tests/test_jq.py +++ b/python/tests/nistoar/id/test_jq.py @@ -1,4 +1,5 @@ import unittest, pdb, os, json +from collections import OrderedDict import nistoar.jq as jq @@ -20,10 +21,11 @@ def test_version(self): def test_library(self): self.assertIsNone(self.jqc.library) self.jqc.library = jqlibdir - self.assertEquals(self.jqc.library, jqlibdir) + self.assertEqual(self.jqc.library, jqlibdir) def test_form_argopts(self): - opts = self.jqc.form_argopts({"id": "ark:ID", "goober": [ 1, 2 ]}) + opts = self.jqc.form_argopts( + OrderedDict([("id", "ark:ID"), ("goober", [ 1, 2 ])])) self.assertEqual(opts, ['--argjson', 'id', '"ark:ID"', '--argjson', 'goober', '[1, 2]']) @@ -32,16 +34,16 @@ def test_bad_form_argopts(self): opts = self.jqc.form_argopts(["id", "ark:ID", "goober", [ 1, 2 ]]) def test_form_cmd(self): - self.assertEquals(self.jqc.form_cmd(".goober | [.]"), + self.assertEqual(self.jqc.form_cmd(".goober | [.]"), ['jq', '.goober | [.]']) self.jqc.library = jqlibdir - self.assertEquals(self.jqc.form_cmd(".goober | [.]", - {"id": "ark:ID", "goober": [ 1, 2 ]}), + self.assertEqual(self.jqc.form_cmd(".goober | [.]", + OrderedDict([("id", "ark:ID"), ("goober", [ 1, 2 ])])), ['jq', '-L'+jqlibdir, '--argjson', 'id', '"ark:ID"', '--argjson', 'goober', '[1, 2]', '.goober | [.]']) - self.assertEquals(self.jqc.form_cmd(".goober | [.]", - {"id": "ark:ID", "goober": [ 1, 2 ]}, + self.assertEqual(self.jqc.form_cmd(".goober | [.]", + OrderedDict([("id", "ark:ID"), ("goober", [ 1, 2 ])]), 'data.json'), ['jq', '-L'+jqlibdir, '--argjson', 'id', '"ark:ID"', '--argjson', 'goober', @@ -50,40 +52,40 @@ def test_form_cmd(self): def test_format_cmd(self): cmd = ['jq', '-L', 'jqlib', 'import "pod2nerdm" as nerdm; .accessLevel', 'janaf_pod.json'] - self.assertEquals(self.jqc._format_cmd(cmd), + self.assertEqual(self.jqc._format_cmd(cmd), "jq -L jqlib 'import \"pod2nerdm\" as nerdm; .accessLevel' janaf_pod.json") def test_process_file(self): out = 
self.jqc.process_file(".accessLevel", janaffile) - self.assertEquals(out, 'public') + self.assertEqual(out, 'public') def test_process_file_w_args(self): out = self.jqc.process_file(".accessLevel", janaffile, {"id": "ID", "goob": "gurn"}) - self.assertEquals(out, 'public') + self.assertEqual(out, 'public') def test_process_data(self): data = {"id": "ID", "goob": "gurn"} out = self.jqc.process_data("[.goob]", json.dumps(data)) - self.assertEquals(out, ["gurn"]) + self.assertEqual(out, ["gurn"]) class TestJq(unittest.TestCase): def test_ctr(self): jqt = jq.Jq("[.goob]") - self.assertEquals(jqt.filter, "[.goob]") + self.assertEqual(jqt.filter, "[.goob]") self.assertIsNone(jqt.cmd.library) - self.assertEquals(jqt.args, {}) + self.assertEqual(jqt.args, {}) jqt = jq.Jq("[.goob]", jqlibdir) - self.assertEquals(jqt.filter, "[.goob]") - self.assertEquals(jqt.cmd.library, jqlibdir) - self.assertEquals(jqt.args, {}) + self.assertEqual(jqt.filter, "[.goob]") + self.assertEqual(jqt.cmd.library, jqlibdir) + self.assertEqual(jqt.args, {}) jqt = jq.Jq("[.goob]", jqlibdir, ["gurn"]) - self.assertEquals(jqt.filter, 'import "gurn" as gurn; [.goob]') - self.assertEquals(jqt.cmd.library, jqlibdir) - self.assertEquals(jqt.args, {}) + self.assertEqual(jqt.filter, 'import "gurn" as gurn; [.goob]') + self.assertEqual(jqt.cmd.library, jqlibdir) + self.assertEqual(jqt.args, {}) with self.assertRaises(ValueError): jqt = jq.Jq("[.goob]", modules=["gurn"]) @@ -91,51 +93,51 @@ def test_ctr(self): jqt = jq.Jq("[.goob]", args=["gurn"]) jqt = jq.Jq("[.goob]", jqlibdir, ["gurn", "pod2nerdm:nerdm"]) - self.assertEquals(jqt.filter, + self.assertEqual(jqt.filter, 'import "gurn" as gurn; import "pod2nerdm" as nerdm; [.goob]') - self.assertEquals(jqt.cmd.library, jqlibdir) - self.assertEquals(jqt.args, {}) + self.assertEqual(jqt.cmd.library, jqlibdir) + self.assertEqual(jqt.args, {}) jqt = jq.Jq("[.goob]", jqlibdir, ["gurn"], {"id": "ark:ID", "goob": "gurn"}) - self.assertEquals(jqt.filter, 'import 
"gurn" as gurn; [.goob]') - self.assertEquals(jqt.cmd.library, jqlibdir) - self.assertEquals(jqt.args, {"id": "ark:ID", "goob": "gurn"}) + self.assertEqual(jqt.filter, 'import "gurn" as gurn; [.goob]') + self.assertEqual(jqt.cmd.library, jqlibdir) + self.assertEqual(jqt.args, {"id": "ark:ID", "goob": "gurn"}) jqt = jq.Jq("[.goob]", args={"id": "ark:ID", "goob": "gurn"}) - self.assertEquals(jqt.filter, '[.goob]') + self.assertEqual(jqt.filter, '[.goob]') self.assertIsNone(jqt.cmd.library) - self.assertEquals(jqt.args, {"id": "ark:ID", "goob": "gurn"}) + self.assertEqual(jqt.args, {"id": "ark:ID", "goob": "gurn"}) def test_transform(self): jqt = jq.Jq("[.goob]") data = {"id": "ID", "goob": "gurn"} out = jqt.transform(json.dumps(data)) - self.assertEquals(out, ["gurn"]) + self.assertEqual(out, ["gurn"]) def test_transform_w_args(self): data = {"id": "ID", "goob": "gurn"} jqt = jq.Jq("[$goob]", args=data) out = jqt.transform(json.dumps({})) - self.assertEquals(out, ["gurn"]) + self.assertEqual(out, ["gurn"]) out = jqt.transform(json.dumps({}), {"goob": "hank"}) - self.assertEquals(out, ["hank"]) + self.assertEqual(out, ["hank"]) def test_transform_file(self): jqt = jq.Jq(".accessLevel") out = jqt.transform_file(janaffile) - self.assertEquals(out, 'public') + self.assertEqual(out, 'public') def test_transform_file_w_mod(self): jqt = jq.Jq('nerdm::podds2resource | .["@id"]', jqlibdir, ["pod2nerdm:nerdm"], {"id": "ID", "goob": "gurn"}) out = jqt.transform_file(janaffile) - self.assertEquals(out, 'ID') + self.assertEqual(out, 'ID') out = jqt.transform_file(janaffile, {"id": "ark:ID"}) - self.assertEquals(out, 'ark:ID') + self.assertEqual(out, 'ark:ID') diff --git a/python/nistoar/id/tests/test_minter.py b/python/tests/nistoar/id/test_minter.py similarity index 51% rename from python/nistoar/id/tests/test_minter.py rename to python/tests/nistoar/id/test_minter.py index 170ecdf..73c4f0b 100644 --- a/python/nistoar/id/tests/test_minter.py +++ 
b/python/tests/nistoar/id/test_minter.py @@ -9,100 +9,100 @@ def test_seqFor(self): mask = "ede" reg = minter.NoidMinter.seqreg(0, "ede") - self.assertEquals(reg.seqFor("000"), 0) - self.assertEquals(reg.seqFor("001"), 1) - self.assertEquals(reg.seqFor("008"), 8) - self.assertEquals(reg.seqFor("00b"), 10) - self.assertEquals(reg.seqFor("01f"), 42) - self.assertEquals(reg.seqFor("10z"), 318) - self.assertEquals(reg.seqFor("34g"), 1000) - self.assertEquals(reg.seqFor("m79"), 5432) + self.assertEqual(reg.seqFor("000"), 0) + self.assertEqual(reg.seqFor("001"), 1) + self.assertEqual(reg.seqFor("008"), 8) + self.assertEqual(reg.seqFor("00b"), 10) + self.assertEqual(reg.seqFor("01f"), 42) + self.assertEqual(reg.seqFor("10z"), 318) + self.assertEqual(reg.seqFor("34g"), 1000) + self.assertEqual(reg.seqFor("m79"), 5432) for mask in "dddd eeee eded".split(): reg = minter.NoidMinter.seqreg(0, mask) for i in range(5, 6000, 5): n = reg.seqFor(noid.mint(mask, i)) - self.assertEquals(n, i, + self.assertEqual(n, i, "{0} != {1} for mask={2}".format(n, i, repr(mask))) def testseqFor_k(self): reg = minter.NoidMinter.seqreg(0, "edek") - self.assertEquals(reg.seqFor("0000"), 0) - self.assertEquals(reg.seqFor("0013"), 1) - self.assertEquals(reg.seqFor("008t"), 8) - self.assertEquals(reg.seqFor("00b1"), 10) - self.assertEquals(reg.seqFor("m791"), 5432) + self.assertEqual(reg.seqFor("0000"), 0) + self.assertEqual(reg.seqFor("0013"), 1) + self.assertEqual(reg.seqFor("008t"), 8) + self.assertEqual(reg.seqFor("00b1"), 10) + self.assertEqual(reg.seqFor("m791"), 5432) def testseqFor_rs(self): reg = minter.NoidMinter.seqreg(0, "rede") - self.assertEquals(reg.seqFor("000"), 0) - self.assertEquals(reg.seqFor("001"), 1) - self.assertEquals(reg.seqFor("008"), 8) - self.assertEquals(reg.seqFor("00b"), 10) - self.assertEquals(reg.seqFor("m79"), 5432) + self.assertEqual(reg.seqFor("000"), 0) + self.assertEqual(reg.seqFor("001"), 1) + self.assertEqual(reg.seqFor("008"), 8) + 
self.assertEqual(reg.seqFor("00b"), 10) + self.assertEqual(reg.seqFor("m79"), 5432) reg = minter.NoidMinter.seqreg(0, "sede") - self.assertEquals(reg.seqFor("000"), 0) - self.assertEquals(reg.seqFor("001"), 1) - self.assertEquals(reg.seqFor("008"), 8) - self.assertEquals(reg.seqFor("00b"), 10) - self.assertEquals(reg.seqFor("m79"), 5432) + self.assertEqual(reg.seqFor("000"), 0) + self.assertEqual(reg.seqFor("001"), 1) + self.assertEqual(reg.seqFor("008"), 8) + self.assertEqual(reg.seqFor("00b"), 10) + self.assertEqual(reg.seqFor("m79"), 5432) def testseqFor_pre(self): reg = minter.NoidMinter.seqreg(0, "pref.rede") - self.assertEquals(reg.seqFor("000"), 0) - self.assertEquals(reg.seqFor("001"), 1) - self.assertEquals(reg.seqFor("008"), 8) - self.assertEquals(reg.seqFor("00b"), 10) - self.assertEquals(reg.seqFor("m79"), 5432) - - self.assertEquals(reg.seqFor("pref000"), 0) - self.assertEquals(reg.seqFor("pref001"), 1) - self.assertEquals(reg.seqFor("pref008"), 8) - self.assertEquals(reg.seqFor("pref00b"), 10) - self.assertEquals(reg.seqFor("prefm79"), 5432) + self.assertEqual(reg.seqFor("000"), 0) + self.assertEqual(reg.seqFor("001"), 1) + self.assertEqual(reg.seqFor("008"), 8) + self.assertEqual(reg.seqFor("00b"), 10) + self.assertEqual(reg.seqFor("m79"), 5432) + + self.assertEqual(reg.seqFor("pref000"), 0) + self.assertEqual(reg.seqFor("pref001"), 1) + self.assertEqual(reg.seqFor("pref008"), 8) + self.assertEqual(reg.seqFor("pref00b"), 10) + self.assertEqual(reg.seqFor("prefm79"), 5432) def testseqFor_z(self): reg = minter.NoidMinter.seqreg(0, "zede") - self.assertEquals(reg.seqFor("000"), 0) - self.assertEquals(reg.seqFor("001"), 1) - self.assertEquals(reg.seqFor("008"), 8) - self.assertEquals(reg.seqFor("00b"), 10) - self.assertEquals(reg.seqFor("m79"), 5432) - self.assertEquals(reg.seqFor("16x768"), 8765432) + self.assertEqual(reg.seqFor("000"), 0) + self.assertEqual(reg.seqFor("001"), 1) + self.assertEqual(reg.seqFor("008"), 8) + 
self.assertEqual(reg.seqFor("00b"), 10) + self.assertEqual(reg.seqFor("m79"), 5432) + self.assertEqual(reg.seqFor("16x768"), 8765432) reg = minter.NoidMinter.seqreg(0, "zdee") - self.assertEquals(reg.seqFor("000"), 0) - self.assertEquals(reg.seqFor("001"), 1) - self.assertEquals(reg.seqFor("008"), 8) - self.assertEquals(reg.seqFor("00b"), 10) - self.assertEquals(reg.seqFor("6f9"), 5432) - self.assertEquals(reg.seqFor("10422m8"), 8765432) + self.assertEqual(reg.seqFor("000"), 0) + self.assertEqual(reg.seqFor("001"), 1) + self.assertEqual(reg.seqFor("008"), 8) + self.assertEqual(reg.seqFor("00b"), 10) + self.assertEqual(reg.seqFor("6f9"), 5432) + self.assertEqual(reg.seqFor("10422m8"), 8765432) def testseqFor_pzk(self): reg = minter.NoidMinter.seqreg(0, "zedek") - self.assertEquals(reg.seqFor("0000"), 0) - self.assertEquals(reg.seqFor("0013"), 1) - self.assertEquals(reg.seqFor("008t"), 8) - self.assertEquals(reg.seqFor("00b1"), 10) - self.assertEquals(reg.seqFor("m791"), 5432) - self.assertEquals(reg.seqFor("16x768w"), 8765432) + self.assertEqual(reg.seqFor("0000"), 0) + self.assertEqual(reg.seqFor("0013"), 1) + self.assertEqual(reg.seqFor("008t"), 8) + self.assertEqual(reg.seqFor("00b1"), 10) + self.assertEqual(reg.seqFor("m791"), 5432) + self.assertEqual(reg.seqFor("16x768w"), 8765432) reg = minter.NoidMinter.seqreg(0, "zub0.zdeek") - self.assertEquals(reg.seqFor("zub00000"), 0) - self.assertEquals(reg.seqFor("zub00013"), 1) - self.assertEquals(reg.seqFor("zub0008t"), 8) - self.assertEquals(reg.seqFor("zub000b1"), 10) - self.assertEquals(reg.seqFor("zub06f91"), 5432) - self.assertEquals(reg.seqFor("zub010422m8q"), 8765432) + self.assertEqual(reg.seqFor("zub00000"), 0) + self.assertEqual(reg.seqFor("zub00013"), 1) + self.assertEqual(reg.seqFor("zub0008t"), 8) + self.assertEqual(reg.seqFor("zub000b1"), 10) + self.assertEqual(reg.seqFor("zub06f91"), 5432) + self.assertEqual(reg.seqFor("zub010422m8q"), 8765432) class TestNoidMinter(unittest.TestCase): @@ -117,17 
+117,17 @@ def test_unissued(self): self.assertFalse(self.minter.issued("000")) def test_mint(self): - self.assertEquals(self.minter.mint(), "000") - self.assertEquals(self.minter.mint(), "001") - self.assertEquals(self.minter.mint(), "002") + self.assertEqual(self.minter.mint(), "000") + self.assertEqual(self.minter.mint(), "001") + self.assertEqual(self.minter.mint(), "002") self.minter.nextn = 42 - self.assertEquals(self.minter.mint(), "01f") + self.assertEqual(self.minter.mint(), "01f") self.minter.nextn = 5432 - self.assertEquals(self.minter.mint(), "m79") + self.assertEqual(self.minter.mint(), "m79") self.minter.nextn = 5420 - self.assertEquals(self.minter.mint(), "m7b") + self.assertEqual(self.minter.mint(), "m7b") self.minter.nextn = 8765432 - self.assertEquals(self.minter.mint(), "16x768") + self.assertEqual(self.minter.mint(), "16x768") def test_mint_k(self): self.minter = minter.NoidMinter(self.minter.mask+'k', 0) @@ -157,7 +157,7 @@ def test_masks(self): "id {0} prematurely issued for mask={1}".format(repr(id), repr(mask))) mid = self.minter.mint() - self.assertEquals(mid, id, + self.assertEqual(mid, id, "{0} != {1} for mask={2}".format(repr(mid), repr(id), repr(mask))) @@ -173,7 +173,7 @@ def setUp(self): def testMint(self): self.minter.nextn = 5432 id = self.minter.mint() - self.assertEquals(id, "ark:/88434/pdr06f90") + self.assertEqual(id, "ark:/88434/pdr06f90") self.assertTrue(noid.validate(id)) with self.assertRaises(noid.ValidationError): noid.validate("ark:/88434/pdr06t90") diff --git a/python/nistoar/id/tests/test_persist.py b/python/tests/nistoar/id/test_persist.py similarity index 99% rename from python/nistoar/id/tests/test_persist.py rename to python/tests/nistoar/id/test_persist.py index 28519dc..293c26c 100644 --- a/python/nistoar/id/tests/test_persist.py +++ b/python/tests/nistoar/id/test_persist.py @@ -2,7 +2,7 @@ from random import randint from nistoar.id import persist -from nistoar.tests import * +from nistoar.testing import * loghdlr 
= None rootlog = None diff --git a/python/tests/nistoar/nerdm/__init__.py b/python/tests/nistoar/nerdm/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/python/tests/nistoar/nerdm/convert/__init__.py b/python/tests/nistoar/nerdm/convert/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/python/tests/nistoar/nerdm/convert/test_latest.py b/python/tests/nistoar/nerdm/convert/test_latest.py new file mode 100644 index 0000000..b06ded6 --- /dev/null +++ b/python/tests/nistoar/nerdm/convert/test_latest.py @@ -0,0 +1,314 @@ +import os, sys, pdb, shutil, logging, json +import unittest as test +from copy import deepcopy + +from nistoar.nerdm.convert import latest +import nistoar.nerdm.constants as const + +basedir = os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname( + os.path.dirname(__file__)))))) +schemadir = os.path.join(basedir, 'model') +datadir1 = os.path.join(schemadir, "examples") +datadir2 = os.path.join(basedir, "jq", "tests", "data") +hitsc = os.path.join(datadir1, "hitsc-0.2.json") +simplenerd = os.path.join(datadir2, "simple-nerdm.json") + +NERDM_SCH_ID_BASE = const.core_schema_base + +class TestNERDm2Latest(test.TestCase): + def test_schuripat(self): + self.assertTrue(latest._nrdpat.match("https://data.nist.gov/od/dm/nerdm-schema/pub/v1.0#Res") ) + self.assertTrue(latest._nrdpat.match("https://data.nist.gov/od/dm/nerdm-schema/goob/v0.3") ) + self.assertTrue(latest._nrdpat.match("https://data.nist.gov/od/dm/nerdm-schema/foo/bar/v0.3#")) + + self.assertFalse(latest._nrdpat.match("https://www.nist.gov/od/id/nerdm-schema/blue/v0.3#")) + self.assertFalse(latest._nrdpat.match("https://data.nist.gov/od/dm/nerdm-schema/v0.3#Res")) + + pat = latest._schuripatfor(NERDM_SCH_ID_BASE) + self.assertTrue(pat.match("https://data.nist.gov/od/dm/nerdm-schema/v0.3#pub")) + pat = latest._schuripatfor(NERDM_SCH_ID_BASE+"pub/") + self.assertTrue(pat.match("https://data.nist.gov/od/dm/nerdm-schema/pub/v0.3#pub")) + 
self.assertFalse(pat.match("https://data.nist.gov/od/dm/nerdm-schema/goob/v0.3#pub")) + + def test_upd_schema_ver(self): + cvtr = latest.NERDm2Latest(defver="1.0", byext={}) + self.assertEqual(cvtr._upd_schema_ver("https://data.nist.gov/od/dm/nerdm-schema/pub/v0.3#Res", + cvtr.byext, cvtr.defver), + "https://data.nist.gov/od/dm/nerdm-schema/pub/1.0#Res") + + byext = { + latest._schuripatfor("http://example.com/anext/"): "v0.1", + latest._schuripatfor(NERDM_SCH_ID_BASE+"pub/"): "v1.2", + latest._schuripatfor(NERDM_SCH_ID_BASE): "v2.2" + } + + self.assertEqual(cvtr._upd_schema_ver("https://data.nist.gov/od/dm/nerdm-schema/pub/v0.3#Res", + byext, cvtr.defver), + "https://data.nist.gov/od/dm/nerdm-schema/pub/v1.2#Res") + self.assertEqual(cvtr._upd_schema_ver("https://data.nist.gov/od/dm/nerdm-schema/v0.3", + byext, cvtr.defver), + "https://data.nist.gov/od/dm/nerdm-schema/v2.2") + self.assertEqual(cvtr._upd_schema_ver("http://example.com/anext/v88#goob", byext, cvtr.defver), + "http://example.com/anext/v0.1#goob") + self.assertEqual(cvtr._upd_schema_ver("https://data.nist.gov/od/dm/nerdm-schema/blue/v0.3#Res", + byext, cvtr.defver), + "https://data.nist.gov/od/dm/nerdm-schema/blue/1.0#Res") + + def test_upd_schema_ver_on_node(self): + defver = "1.0" + byext = { + latest._schuripatfor("http://example.com/anext/"): "v0.1", + latest._schuripatfor(NERDM_SCH_ID_BASE+"pub/"): "v1.2", + latest._schuripatfor(NERDM_SCH_ID_BASE): "v2.2" + } + cvtr = latest.NERDm2Latest(defver=defver, byext={}) + + data = { + "goob": "https://data.nist.gov/od/dm/nerdm-schema/v0.3", + "foo": [{ + "goob": [ "http://example.com/anext/v88#goob", + "http://goober.com/foop/v99#big" ], + "blah": "snooze" + }], + "bar": { + "hank": "aaron", + "tex": { + "goob": "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.3#Contact", + "blah": "blah" + }, + "goob": [] + } + } + + cvtr._upd_schema_ver_on_node(data, "goob", byext, "1.0") + self.assertEqual(data['goob'], + 
"https://data.nist.gov/od/dm/nerdm-schema/v2.2") + self.assertEqual(data['foo'][0]['goob'], + [ "http://example.com/anext/v0.1#goob", + "http://goober.com/foop/v99#big" ]) + self.assertEqual(data['bar']['tex']['goob'], + "https://data.nist.gov/od/dm/nerdm-schema/pub/v1.2#Contact") + self.assertEqual(data['bar']['goob'], []) + + cvtr._upd_schema_ver_on_node(data, "goob", {}, "1.0") + self.assertEqual(data['goob'], + "https://data.nist.gov/od/dm/nerdm-schema/v2.2") + self.assertEqual(data['foo'][0]['goob'], + [ "http://example.com/anext/v0.1#goob", + "http://goober.com/foop/v99#big" ]) + self.assertEqual(data['bar']['tex']['goob'], + "https://data.nist.gov/od/dm/nerdm-schema/pub/1.0#Contact") + self.assertEqual(data['bar']['goob'], []) + + def test_update_nerdm_schema(self): + defver = "v1.0" + byext = { + "http://example.com/anext/": "v0.1", + "pub": "v1.2", + "bib": "v0.8", + "": "v2.2" + } + cvtr = latest.NERDm2Latest(defver=defver, byext=byext) + + nerdmd = { + "$schema": "https://data.nist.gov/od/dm/nerdm-schema/v0.3", + "foo": { + "$extensionSchemas": [ "http://example.com/anext/v88#goob", + "http://goober.com/foop/v99#big" ], + "blah": "snooze" + }, + "bar": { + "hank": "aaron", + "tex": { + "$extensionSchemas": "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.3#Contact", + "blah": "blah" + }, + "$extensionSchemas": [] + }, + "references": [ + { + 'location': 'https://tinyurl.com/asdf', + "$extensionSchemas": [ + "https://data.nist.gov/od/dm/nerdm-schema/v0.3#/definitions/BibliographicReference", + "https://data.nist.gov/od/dm/nerdm-schema/v0.3#/definitions/DCiteReference" + ] + } + ] + } + + data = cvtr.update_nerdm_schema(nerdmd) + self.assertEqual(data['$schema'], + "https://data.nist.gov/od/dm/nerdm-schema/v2.2") + self.assertEqual(nerdmd['$schema'], "https://data.nist.gov/od/dm/nerdm-schema/v0.3") + self.assertEqual(data['foo']['$extensionSchemas'], + [ "http://example.com/anext/v0.1#goob", + "http://goober.com/foop/v99#big" ]) + 
self.assertEqual(data['bar']['tex']['$extensionSchemas'], + "https://data.nist.gov/od/dm/nerdm-schema/pub/v1.2#Contact") + self.assertEqual(data['bar']['$extensionSchemas'], []) + self.assertEqual(data['references'][0]['$extensionSchemas'][0], + "https://data.nist.gov/od/dm/nerdm-schema/v2.2#/definitions/BibliographicReference") + self.assertEqual(data['references'][0]['$extensionSchemas'][1], + "https://data.nist.gov/od/dm/nerdm-schema/bib/v0.8#/definitions/DCiteReference") + + nerdmd = { + "_schema": "https://data.nist.gov/od/dm/nerdm-schema/v0.3", + "foo": { + "_extensionSchemas": [ "http://example.com/anext/v88#goob", + "http://goober.com/foop/v99#big" ], + "blah": "snooze" + }, + "bar": { + "hank": "aaron", + "tex": { + "_extensionSchemas": "https://data.nist.gov/od/dm/nerdm-schema/pub/v0.3#Contact", + "blah": "blah" + }, + "_extensionSchemas": [] + } + } + + cvtr = latest.NERDm2Latest() + data = cvtr.update_nerdm_schema(nerdmd) + self.assertEqual(data['_schema'], const.CORE_SCHEMA_URI) + self.assertEqual(nerdmd['_schema'], "https://data.nist.gov/od/dm/nerdm-schema/v0.3") + self.assertEqual(data['foo']['_extensionSchemas'], + [ "http://example.com/anext/v88#goob", + "http://goober.com/foop/v99#big" ]) + self.assertEqual(data['bar']['tex']['_extensionSchemas'], const.PUB_SCHEMA_URI+"#Contact") + self.assertEqual(data['bar']['_extensionSchemas'], []) + + latest.update_nerdm_schema(nerdmd) + self.assertEqual(nerdmd['_schema'], const.CORE_SCHEMA_URI) + + def test_create_release_history(self): + cvtr = latest.NERDm2Latest() + nerdm = { + "$schema": "https://data.nist.gov/od/dm/nerdm-schema/v0.3", + "@id": "ark:/88888/goob", + "versionHistory": [ + { + "version": "1.0.0" + }, + { + "version": "1.0.1" + } + ] + } + hist = cvtr.create_release_history(nerdm, "_r") + self.assertEqual(hist.get("@id"), nerdm['@id']+"_r") + self.assertEqual(len(hist['hasRelease']), 2) + self.assertEqual(hist['hasRelease'][0]['version'], "1.0.0") + 
self.assertEqual(hist['hasRelease'][1]['version'], "1.0.1") + hist = cvtr.create_release_history(nerdm) + self.assertEqual(hist.get("@id"), nerdm['@id']+"/pdr:v") + + nerdm = { + "$schema": "https://data.nist.gov/od/dm/nerdm-schema/v0.3", + "@id": "ark:/88888/goob", + "modified": "2021-08-21" + } + hist = cvtr.create_release_history(nerdm) + self.assertEqual(hist.get("@id"), nerdm['@id']+"/pdr:v") + self.assertEqual(len(hist['hasRelease']), 1) + self.assertEqual(hist['hasRelease'][0]['version'], "1.0.0") + self.assertEqual(hist['hasRelease'][0]['@id'], "ark:/88888/goob/pdr:v/1.0.0") + self.assertEqual(hist['hasRelease'][0]['issued'], "2021-08-21") + + + def test_create_release_ref(self): + cvtr = latest.NERDm2Latest() + nerdm = { + "@id": "ark:/88888/goob" + } + ref = cvtr.create_release_ref(nerdm) + self.assertEqual(ref['version'], '1.0.0') + self.assertEqual(ref['@id'], "ark:/88888/goob/pdr:v/1.0.0") + self.assertNotIn('issued', ref) + self.assertNotIn('location', ref) + self.assertEqual(ref['description'], "initial release") + + nerdm = { + "@id": "ark:/88888/goob", + "version": "2.4.2", + "landingPage": "https://testdata.nist.gov/od/id/ark:/88888/goob", + "annotated": "2021-08-24" + } + ref = cvtr.create_release_ref(nerdm) + self.assertEqual(ref['version'], '2.4.2') + self.assertEqual(ref['@id'], "ark:/88888/goob/pdr:v/2.4.2") + self.assertEqual(ref['issued'], "2021-08-24") + self.assertEqual(ref['location'], "https://testdata.nist.gov/od/id/ark:/88888/goob") + self.assertEqual(ref['description'], "metadata update") + + nerdm = { + "@id": "ark:/88888/goob/pdr:v/2.3.0", + "landingPage": "https://testdata.nist.gov/od/id/ark:/88888/goob", + "issued": "2021-08-25" + } + ref = cvtr.create_release_ref(nerdm, "2.4.0") + self.assertEqual(ref['version'], '2.4.0') + self.assertEqual(ref['@id'], "ark:/88888/goob/pdr:v/2.3.0") + self.assertEqual(ref['issued'], "2021-08-25") + self.assertEqual(ref['location'], "https://testdata.nist.gov/od/id/ark:/88888/goob") + 
self.assertEqual(ref['description'], "data update") + + def test_convert(self): + cvtr = latest.NERDm2Latest() + nerdm = { + "$schema": "https://data.nist.gov/od/dm/nerdm-schema/v0.3", + "@id": "ark:/88888/goob", + "versionHistory": [ + { + "version": "1.0.0" + }, + { + "version": "1.0.1" + } + ] + } + data = cvtr.convert(nerdm) + self.assertIn("$schema", data) + self.assertNotIn("_schema", data) + self.assertEqual(data["$schema"], const.CORE_SCHEMA_URI) + self.assertNotIn("versionHistory", data) + self.assertIn("releaseHistory", data) + self.assertEqual(data['releaseHistory']['@id'], nerdm['@id']+"/pdr:v") + + def test_convert2(self): + cvtr = latest.NERDm2Latest() + with open(hitsc) as fd: + nerdm = json.load(fd) + self.assertTrue(nerdm['_schema'].endswith("/v0.2#")) + self.assertIn('versionHistory', nerdm) + + ltst = cvtr.convert(nerdm) + + self.assertEqual(ltst['@id'], nerdm['@id']) + self.assertEqual(ltst['_schema'], const.CORE_SCHEMA_URI+"#") + self.assertEqual(ltst['version'], "1.0") + self.assertIn('version', ltst) + self.assertNotIn('versionHistory', ltst) + self.assertIn('releaseHistory', ltst) + self.assertEqual(len(ltst['releaseHistory']['hasRelease']), 1) + + def test_convert3(self): + cvtr = latest.NERDm2Latest() + with open(simplenerd) as fd: + nerdm = json.load(fd) + self.assertTrue(nerdm['_schema'].endswith("/v0.1#")) + self.assertNotIn('versionHistory', nerdm) + self.assertNotIn('version', nerdm) + + ltst = cvtr.convert(nerdm) + + self.assertEqual(ltst['@id'], nerdm['@id']) + self.assertEqual(ltst['_schema'], const.CORE_SCHEMA_URI+"#") + self.assertEqual(ltst['_extensionSchemas'], [const.PUB_SCHEMA_URI+"#/definitions/PublicDataResource"]) + self.assertNotIn('versionHistory', ltst) + self.assertNotIn('version', ltst) + self.assertNotIn('releaseHistory', ltst) + + +if __name__ == '__main__': + test.main() diff --git a/python/nistoar/nerdm/convert/tests/test_pod.py b/python/tests/nistoar/nerdm/convert/test_pod.py similarity index 95% rename from 
python/nistoar/nerdm/convert/tests/test_pod.py rename to python/tests/nistoar/nerdm/convert/test_pod.py index d89fab8..934cb28 100644 --- a/python/nistoar/nerdm/convert/tests/test_pod.py +++ b/python/tests/nistoar/nerdm/convert/test_pod.py @@ -65,8 +65,8 @@ def test_convert(self): data = fd.read() res = cvtr.convert(data, "ark:ID") - self.assertEquals(res["@id"], "ark:ID") - self.assertEquals(res["accessLevel"], "public") + self.assertEqual(res["@id"], "ark:ID") + self.assertEqual(res["accessLevel"], "public") def test_convert_data(self): cvtr = cvt.PODds2Res(jqlibdir) @@ -77,8 +77,8 @@ def test_convert_data(self): data['theme'] = ['optical physics'] res = cvtr.convert_data(data, "ark:ID") - self.assertEquals(res["@id"], "ark:ID") - self.assertEquals(res["accessLevel"], "public") + self.assertEqual(res["@id"], "ark:ID") + self.assertEqual(res["accessLevel"], "public") self.assertEqual(len(res['references']), 1) self.assertNotIn('citation', res["references"][0]) @@ -187,8 +187,8 @@ def test_enrich_refs(self): data['references'].append("https://doi.org/10.1126/science.169.3946.635") res = cvtr.convert_data(data, "ark:ID") - self.assertEquals(res["@id"], "ark:ID") - self.assertEquals(res["accessLevel"], "public") + self.assertEqual(res["@id"], "ark:ID") + self.assertEqual(res["accessLevel"], "public") self.assertEqual(len(res['references']), 2) self.assertNotIn('citation', res["references"][0]) @@ -317,8 +317,8 @@ def test_ctor(self): def test_convert_file(self): cvtr = cvt.Res2PODds(jqlibdir) pod = cvtr.convert_file(os.path.join(nerddir,"janaf.json")) - self.assertEquals(pod["accessLevel"], "public") - self.assertEquals(pod["@type"], "dcat:Dataset") + self.assertEqual(pod["accessLevel"], "public") + self.assertEqual(pod["@type"], "dcat:Dataset") self.assertEqual(len(pod['references']), 2) self.assertEqual(len(pod['distribution']), 319) @@ -331,8 +331,8 @@ def test_convert(self): with open(os.path.join(nerddir,"janaf.json")) as fd: data = json.load(fd) pod = 
cvtr.convert(json.dumps(data)) - self.assertEquals(pod["accessLevel"], "public") - self.assertEquals(pod["@type"], "dcat:Dataset") + self.assertEqual(pod["accessLevel"], "public") + self.assertEqual(pod["@type"], "dcat:Dataset") self.assertEqual(len(pod['references']), 2) self.assertEqual(len(pod['distribution']), 319) @@ -345,8 +345,8 @@ def test_convert_data(self): with open(os.path.join(nerddir,"janaf.json")) as fd: data = json.load(fd) pod = cvtr.convert_data(data) - self.assertEquals(pod["accessLevel"], "public") - self.assertEquals(pod["@type"], "dcat:Dataset") + self.assertEqual(pod["accessLevel"], "public") + self.assertEqual(pod["@type"], "dcat:Dataset") self.assertEqual(len(pod['references']), 2) self.assertEqual(len(pod['distribution']), 319) @@ -376,7 +376,7 @@ class TestHierarchyBuilder(unittest.TestCase): def test_build_hierarchy(self): hb = cvt.HierarchyBuilder(jqlibdir) hier = hb.build_hierarchy(simplenerd['components']) - self.assertEquals(hier, simplehier) + self.assertEqual(hier, simplehier) if __name__ == '__main__': unittest.main() diff --git a/python/nistoar/nerdm/convert/tests/test_pod_doi.py b/python/tests/nistoar/nerdm/convert/test_pod_doi.py similarity index 93% rename from python/nistoar/nerdm/convert/tests/test_pod_doi.py rename to python/tests/nistoar/nerdm/convert/test_pod_doi.py index cfd0007..1f1b27f 100644 --- a/python/nistoar/nerdm/convert/tests/test_pod_doi.py +++ b/python/tests/nistoar/nerdm/convert/test_pod_doi.py @@ -2,20 +2,21 @@ from collections import OrderedDict import nistoar.nerdm.convert.pod as cvt +import nistoar.doi.resolving.common as res from nistoar.doi.resolving import DOIInfo from nistoar.nerdm.constants import CORE_SCHEMA_URI, PUB_SCHEMA_URI, BIB_SCHEMA_URI citeproc_auths = [ - {u'affiliation': [], u'given': u'Carmen', u'family': - u'Galen Acedo', u'sequence': u'first'}, - {u'authenticated-orcid': False, u'given': u'Victor', - u'family': u'Arroyo', u'sequence': u'additional', - u'affiliation': ["The Institute"], 
- u'ORCID': u'http://orcid.org/0000-0002-0858-0324'}, - {u'affiliation': [], u'given': u'Ellen', u'family': u'Andresen', - u'sequence': u'additional'}, - {u'affiliation': [], u'given': u'Ricard', u'family': u'Arasa-Gisbert', - u'sequence': u'additional'} + {'affiliation': [], 'given': 'Carmen', 'family': + 'Galen Acedo', 'sequence': 'first'}, + {'authenticated-orcid': False, 'given': 'Victor', + 'family': 'Arroyo', 'sequence': 'additional', + 'affiliation': ["The Institute"], + 'ORCID': 'http://orcid.org/0000-0002-0858-0324'}, + {'affiliation': [], 'given': 'Ellen', 'family': 'Andresen', + 'sequence': 'additional'}, + {'affiliation': [], 'given': 'Ricard', 'family': 'Arasa-Gisbert', + 'sequence': 'additional'} ] datacite_auths = [ @@ -175,7 +176,7 @@ def test_datacite_creators2nerdm_authors(self): self.assertNotIn('affiliation', authors[2]) crossref = { - "title": "Ecological traits of the world\u2019s primates", + "title": "Ecological traits of the world\\u2019s primates", "URL": "http://dx.doi.org/10.1038/s41597-019-0059-9", "publisher": "Springer Science and Business Media LLC", "issued": { @@ -246,7 +247,7 @@ def test_crossref_doiinfo2reference(self): self.assertEqual(ref['@id'], 'doi:10.10/XXX') self.assertEqual(ref['refType'], 'IsCitedBy') self.assertEqual(ref['title'], - "Ecological traits of the world\u2019s primates") + "Ecological traits of the world\\u2019s primates") self.assertEqual(ref['location'], "https://goober.org/10.10/XXX") self.assertEqual(ref['issued'], '2019-05-13') self.assertEqual(ref['citation'], 'ibid') @@ -272,10 +273,21 @@ def test_datacite_doiinfo2reference(self): class TestDOIResolver(unittest.TestCase): + def setUp(self): + res._client_info = None + + def tearDown(self): + res._client_info = None + def test_ctor(self): rslvr = cvt.DOIResolver() self.assertIsNone(rslvr.resolver._client_info) self.assertEqual(rslvr.resolver._resolver, "https://doi.org/") + + res.set_client_info('d', 'c', 'b', 'a') + rslvr = cvt.DOIResolver() + 
self.assertEqual(rslvr.resolver._client_info, ('d', 'c', 'b', 'a')) + self.assertEqual(rslvr.resolver._resolver, "https://doi.org/") rslvr = cvt.DOIResolver(resolver="https://goob.org/") self.assertEqual(rslvr.resolver._resolver, "https://goob.org/") diff --git a/python/tests/nistoar/nerdm/convert/test_rmm.py b/python/tests/nistoar/nerdm/convert/test_rmm.py new file mode 100644 index 0000000..e4c8955 --- /dev/null +++ b/python/tests/nistoar/nerdm/convert/test_rmm.py @@ -0,0 +1,186 @@ +import os, sys, pdb, shutil, logging, json +import unittest as test +from copy import deepcopy + +from nistoar.nerdm.convert import rmm +import nistoar.nerdm.constants as const +from nistoar.nerdm import validate + +basedir = os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname( + os.path.dirname(os.path.abspath(__file__))))))) +schemadir = os.path.join(basedir, 'model') +datadir1 = os.path.join(schemadir, "examples") +datadir2 = os.path.join(basedir, "jq", "tests", "data") +hitsc = os.path.join(datadir1, "hitsc-0.2.json") +pdrrec = os.path.join(datadir1, "mds2-2106.json") +simplenerd = os.path.join(datadir2, "simple-nerdm.json") + +NERDM_SCH_ID_BASE = const.core_schema_base + +class TestNERDm2RMM(test.TestCase): + + def test_ctor(self): + cvtr = rmm.NERDmForRMM() + self.assertEqual(cvtr._lpsbase, "https://data.nist.gov/od/id/") + + cvtr = rmm.NERDmForRMM(pubeps={"portalBase": "https://testdata.nist.gov/"}) + self.assertEqual(cvtr._lpsbase, "https://testdata.nist.gov/od/id/") + + cvtr = rmm.NERDmForRMM(pubeps={"portalBase": "https://bit.ly/pdr/", + "landingPageService": "lps/show"}) + self.assertEqual(cvtr._lpsbase, "https://bit.ly/pdr/lps/show") + + cvtr = rmm.NERDmForRMM(pubeps={"portalBase": "https://oardev.nist.gov/pdr/", + "landingPageService": "http://localhost/lps/show?id="}) + self.assertEqual(cvtr._lpsbase, "http://localhost/lps/show?id=") + + self.assertTrue(os.path.isdir(schemadir)) + 
self.assertTrue(os.path.isfile(os.path.join(schemadir,"nerdm-schema.json"))) + cvtr = rmm.NERDmForRMM(schemadir=schemadir) + self.assertTrue(cvtr._valid8r) + + def test_to_rmm(self): + lpsep = "https://testdata.nist.gov/od/id/" + cvtr = rmm.NERDmForRMM(pubeps={"landingPageService": lpsep}) + + with open(hitsc) as fd: + nerdm = json.load(fd) + self.assertTrue(nerdm['_schema'].endswith("/v0.2#")) + self.assertIn('versionHistory', nerdm) + + ing = cvtr.to_rmm(nerdm) + + for prop in "record version releaseSet".split(): + self.assertIn(prop, ing) + + self.assertEqual(ing['record']['@id'], nerdm['@id']) + self.assertEqual(ing['record']['title'], nerdm['title']) + self.assertEqual(ing['record']['_schema'], const.CORE_SCHEMA_URI+"#") + self.assertEqual(ing['record']['version'], "1.0") + self.assertNotIn('versionHistory', ing['record']) + self.assertIn('releaseHistory', ing['record']) + self.assertEqual(len(ing['record']['releaseHistory']['hasRelease']), 1) + self.assertEqual(ing['version']['@id'], nerdm['@id']+"/pdr:v/1.0") + self.assertEqual(ing['version']['releaseHistory']['hasRelease'][0]['version'], "1.0") + self.assertEqual(ing['version']['releaseHistory']['hasRelease'][0]['description'], "initial release") + self.assertEqual(ing['version']['landingPage'], "http://srdata.nist.gov/CeramicDataPortal/hit") + + self.assertEqual(ing['releaseSet']['@id'], nerdm['@id']+"/pdr:v") + self.assertEqual(ing['releaseSet']['title'], nerdm['title']) + self.assertEqual(ing['releaseSet']['ediid'], nerdm['ediid']) + self.assertEqual(ing['releaseSet']['_schema'], const.CORE_SCHEMA_URI+"#") + self.assertEqual(ing['releaseSet']['version'], "1.0") + self.assertIn('version', ing['releaseSet']) + self.assertNotIn('versionHistory', ing['releaseSet']) + self.assertNotIn('releaseHistory', ing['releaseSet']) + self.assertIn('hasRelease', ing['releaseSet']) + self.assertEqual(len(ing['releaseSet']['hasRelease']), 1) + self.assertEqual(ing['releaseSet']['hasRelease'][0]['version'], "1.0") + 
self.assertEqual(ing['releaseSet']['hasRelease'][0]['description'], "initial release") + + def test_to_rmm_tweakurls(self): + lpsep = "https://testdata.nist.gov/od/id/" + cvtr = rmm.NERDmForRMM(pubeps={"landingPageService": lpsep}) + + with open(pdrrec) as fd: + nerdm = json.load(fd) + self.assertTrue(nerdm['_schema'].endswith("/v0.7#")) + self.assertNotIn('versionHistory', nerdm) + + ing = cvtr.to_rmm(nerdm) + + for prop in "record version releaseSet".split(): + self.assertIn(prop, ing) + + self.assertEqual(ing['record']['@id'], nerdm['@id']) + self.assertEqual(ing['record']['title'], nerdm['title']) + self.assertEqual(ing['record']['_schema'], const.CORE_SCHEMA_URI+"#") + self.assertEqual(ing['record']['version'], "1.6.0") + self.assertEqual(ing['record']['landingPage'], "https://data.nist.gov/od/id/mds2-2106") + self.assertEqual(ing['record']['components'][2]['downloadURL'], + "https://data.nist.gov/od/ds/mds2-2106/Readme.txt") + self.assertNotIn('versionHistory', ing['record']) + self.assertIn('releaseHistory', ing['record']) + self.assertEqual(len(ing['record']['releaseHistory']['hasRelease']), 7) + self.assertEqual(ing['version']['@id'], nerdm['@id']+"/pdr:v/1.6.0") + self.assertEqual(ing['version']['releaseHistory']['hasRelease'][0]['version'], "1.0.0") + self.assertEqual(ing['version']['releaseHistory']['hasRelease'][0]['description'], "initial release") + self.assertEqual(ing['version']['landingPage'], "https://data.nist.gov/od/id/mds2-2106/pdr:v/1.6.0") + self.assertEqual(ing['version']['components'][2]['downloadURL'], + "https://data.nist.gov/od/ds/mds2-2106/_v/1.6.0/Readme.txt") + + self.assertEqual(ing['releaseSet']['@id'], nerdm['@id']+"/pdr:v") + self.assertEqual(ing['releaseSet']['title'], nerdm['title']) + self.assertEqual(ing['releaseSet']['ediid'], nerdm['ediid']) + self.assertEqual(ing['releaseSet']['_schema'], const.CORE_SCHEMA_URI+"#") + self.assertEqual(ing['releaseSet']['version'], "1.6.0") + self.assertIn('version', ing['releaseSet']) + 
self.assertNotIn('versionHistory', ing['releaseSet']) + self.assertNotIn('releaseHistory', ing['releaseSet']) + self.assertIn('hasRelease', ing['releaseSet']) + self.assertEqual(len(ing['releaseSet']['hasRelease']), 7) + self.assertEqual(ing['releaseSet']['hasRelease'][0]['version'], "1.0.0") + self.assertEqual(ing['releaseSet']['hasRelease'][0]['description'], "initial release") + + def test_validate_rmm(self): + lpsep = "https://testdata.nist.gov/od/id/" + cvtr = rmm.NERDmForRMM() + + with open(hitsc) as fd: + nerdm = json.load(fd) + self.assertTrue(nerdm['_schema'].endswith("/v0.2#")) + self.assertIn('versionHistory', nerdm) + + ing = cvtr.to_rmm(nerdm) + with self.assertRaises(RuntimeError): + cvtr.validate_rmm(ing) # validator not configured + + cvtr = rmm.NERDmForRMM(schemadir=schemadir) + cvtr.validate_rmm(ing) # should not raise exception + + with self.assertRaises(validate.ValidationError): + cvtr.validate_rmm(ing['version']) + + ing['record'] = ing['releaseSet'] + with self.assertRaises(validate.ValidationError): + cvtr.validate_rmm(ing) + + def test_convert(self): + lpsep = "https://testdata.nist.gov/od/id/" + cvtr = rmm.NERDmForRMM(schemadir=schemadir, pubeps={"landingPageService": lpsep}) + + with open(hitsc) as fd: + nerdm = json.load(fd) + self.assertTrue(nerdm['_schema'].endswith("/v0.2#")) + self.assertIn('versionHistory', nerdm) + + ing = cvtr.convert(nerdm, True) + + self.assertEqual(ing['record']['@id'], nerdm['@id']) + self.assertEqual(ing['record']['title'], nerdm['title']) + self.assertEqual(ing['record']['_schema'], const.CORE_SCHEMA_URI+"#") + self.assertEqual(ing['record']['version'], "1.0") + self.assertNotIn('versionHistory', ing['record']) + self.assertIn('releaseHistory', ing['record']) + self.assertEqual(len(ing['record']['releaseHistory']['hasRelease']), 1) + self.assertEqual(ing['version']['releaseHistory']['hasRelease'][0]['version'], "1.0") + self.assertEqual(ing['version']['releaseHistory']['hasRelease'][0]['description'], 
"initial release") + + self.assertEqual(ing['releaseSet']['@id'], nerdm['@id']+"/pdr:v") + self.assertEqual(ing['releaseSet']['title'], nerdm['title']) + self.assertEqual(ing['releaseSet']['_schema'], const.CORE_SCHEMA_URI+"#") + self.assertEqual(ing['releaseSet']['version'], "1.0") + self.assertIn('version', ing['releaseSet']) + self.assertNotIn('versionHistory', ing['releaseSet']) + self.assertNotIn('releaseHistory', ing['releaseSet']) + self.assertIn('hasRelease', ing['releaseSet']) + self.assertEqual(len(ing['releaseSet']['hasRelease']), 1) + self.assertEqual(ing['releaseSet']['hasRelease'][0]['version'], "1.0") + self.assertEqual(ing['releaseSet']['hasRelease'][0]['description'], "initial release") + + + + + +if __name__ == '__main__': + test.main() diff --git a/python/nistoar/nerdm/tests/data/janaf-annot.json b/python/tests/nistoar/nerdm/data/janaf-annot.json similarity index 100% rename from python/nistoar/nerdm/tests/data/janaf-annot.json rename to python/tests/nistoar/nerdm/data/janaf-annot.json diff --git a/python/nistoar/nerdm/tests/data/janaf-orig.json b/python/tests/nistoar/nerdm/data/janaf-orig.json similarity index 100% rename from python/nistoar/nerdm/tests/data/janaf-orig.json rename to python/tests/nistoar/nerdm/data/janaf-orig.json diff --git a/python/nistoar/nerdm/tests/test_merge.py b/python/tests/nistoar/nerdm/test_merge.py similarity index 94% rename from python/nistoar/nerdm/tests/test_merge.py rename to python/tests/nistoar/nerdm/test_merge.py index 3c2f706..94d5137 100644 --- a/python/nistoar/nerdm/tests/test_merge.py +++ b/python/tests/nistoar/nerdm/test_merge.py @@ -112,7 +112,7 @@ def test_merge(self): head = [ "b", "i", "z", "a" ] mrgd = merger.merge(base, head) self.assertIsInstance(mrgd, list) - self.assertEquals(mrgd, [ "a", "e", "i", "b", "z" ]) + self.assertEqual(mrgd, [ "a", "e", "i", "b", "z" ]) def test_incompat(self): strat = mrg.UniqueArray() @@ -129,7 +129,7 @@ def test_incompat(self): head = [ "b", "i", 4, 5, "z" ] mrgd = 
merger.merge(base, head) self.assertIsInstance(mrgd, list) - self.assertEquals(mrgd, [ "c", "b", "i", 4, 5, "z" ]) + self.assertEqual(mrgd, [ "c", "b", "i", 4, 5, "z" ]) class TestArrayMergeByMultiId(unittest.TestCase): @@ -151,7 +151,7 @@ def test_merge_def(self): { "@id": "bob", "tells": "alice" } ] mrgd = merger.merge(base, head) self.assertIsInstance(mrgd, list) - self.assertEquals(mrgd, [ + self.assertEqual(mrgd, [ { "@id": "goob", "foo": "bar", "gurn": "cranston" }, { "@id": "hank", "foo": "bin" }, { "@id": "bob", "tells": "alice" } @@ -174,7 +174,7 @@ def test_merge(self): { "@id": "bob", "tells": "alice" } ] mrgd = merger.merge(base, head) self.assertIsInstance(mrgd, list) - self.assertEquals(mrgd, [ + self.assertEqual(mrgd, [ { "@id": "goob", "foo": "bar", "gurn": "cranston" }, { "@id": "goob", "foo": "bin" }, { "@id": "goob", "gurn": "cranston" }, @@ -198,7 +198,7 @@ def test_merge_ignore(self): { "@id": "bob", "tells": "alice" } ] mrgd = merger.merge(base, head) self.assertIsInstance(mrgd, list) - self.assertEquals(mrgd, [ + self.assertEqual(mrgd, [ { "@id": "goob", "foo": "bar", "gurn": "cranston" }, { "@id": "goob", "foo": "bin" }, { "@id": "bob", "tells": "alice" } @@ -232,7 +232,7 @@ def test_merge_def(self): mrgd = merger.merge(base, head) self.assertIsInstance(mrgd, list) - self.assertEquals(mrgd, [ + self.assertEqual(mrgd, [ { "@id": "goob", "tag": "Physics" }, { "@id": "gurn", "scheme": "hsr", "tag": "physics", "lab": "MML" }, { "scheme": "hsr", "tag": "biology" } @@ -260,7 +260,7 @@ def test_merge_def(self): mrgd = merger.merge(base, head) self.assertIsInstance(mrgd, list) - self.assertEquals(mrgd, [ + self.assertEqual(mrgd, [ { "@id": "goob", "foo": "bar", "gurn": "cranston", "blue": "blah" }, { "@id": "bob", "tells": "alice" }, { "@id": "hank", "foo": "bin" } @@ -279,7 +279,7 @@ def test_merge_undef_base(self): mrgd = merger.merge(base, head) self.assertIsInstance(mrgd, list) - self.assertEquals(mrgd, [ + self.assertEqual(mrgd, [ { "@id": 
"goob", "gurn": "cranston" }, { "@id": "bob", "tells": "alice" } ]) @@ -300,7 +300,7 @@ def test_merge_ignore(self): { "@id": "bob", "tells": "alice" } ] mrgd = merger.merge(base, head) self.assertIsInstance(mrgd, list) - self.assertEquals(mrgd, [ + self.assertEqual(mrgd, [ { "@id": "goob", "foo": "bar", "gurn": "cranston" }, { "@id": "bob", "tells": "alice" }, { "@id": "hank", "foo": "bin" } @@ -346,18 +346,18 @@ def test_merge(self): self.assertNotIn("postalAddress", orig["contactPoint"]) self.assertIn("postalAddress", merged["contactPoint"]) - self.assertEquals(len(orig['description']), 1) - self.assertEquals(len(annot['description']), 1) - self.assertEquals(len(merged['description']), 2) + self.assertEqual(len(orig['description']), 1) + self.assertEqual(len(annot['description']), 1) + self.assertEqual(len(merged['description']), 2) self.assertNotIn("abbrev", orig) self.assertIn("abbrev", merged) self.assertNotIn("authors", orig) self.assertIn("authors", merged) self.assertNotEqual(orig["ediid"], annot["ediid"]) self.assertEqual(orig["ediid"], merged["ediid"]) - self.assertEquals(len(orig['references']), 1) - self.assertEquals(len(annot['references']), 2) - self.assertEquals(len(merged['references']), 3) + self.assertEqual(len(orig['references']), 1) + self.assertEqual(len(annot['references']), 2) + self.assertEqual(len(merged['references']), 3) comp = [c for c in merged['components'] if c['@id'] == "#cmp/cryolite/srd13_Al-053.json"][0] self.assertIn("title", comp) diff --git a/python/nistoar/nerdm/tests/test_taxonomy.py b/python/tests/nistoar/nerdm/test_taxonomy.py similarity index 100% rename from python/nistoar/nerdm/tests/test_taxonomy.py rename to python/tests/nistoar/nerdm/test_taxonomy.py diff --git a/python/tests/nistoar/nerdm/test_utils.py b/python/tests/nistoar/nerdm/test_utils.py new file mode 100644 index 0000000..11fdab2 --- /dev/null +++ b/python/tests/nistoar/nerdm/test_utils.py @@ -0,0 +1,183 @@ +import os, sys, pdb, shutil, logging, json +import 
unittest as test + +from nistoar.nerdm import utils +from nistoar.nerdm import constants as const + +class TestUtils(test.TestCase): + + def test_meta_prop_ch(self): + data = { "_schema": "uri", "$schema": "URI", "*schema": "URB", + "_goob": "gurn", "$goob": "GURN", "#goob": "Gurn" } + self.assertEqual(utils.meta_prop_ch(data), "_") + self.assertEqual(utils.meta_prop_ch(data, "goob"), "_") + self.assertEqual(utils.meta_prop_ch(data, "goob", "#$_"), "#") + del data["#goob"] + self.assertEqual(utils.meta_prop_ch(data, "goob", "#$_"), "$") + del data["_schema"] + self.assertEqual(utils.meta_prop_ch(data), "$") + self.assertEqual(utils.meta_prop_ch(data, prefixchs="*$"), "*") + del data["$schema"] + with self.assertRaises(ValueError): + utils.meta_prop_ch(data) + + def test_is_type(self): + data = { "name": "Bob", "@type": [ "nrd:Resource", "dcat:Dataset", "Metadata" ]} + self.assertTrue(utils.is_type(data, "Resource")) + self.assertTrue(utils.is_type(data, "schema:Resource")) + self.assertTrue(utils.is_type(data, "Dataset")) + self.assertTrue(utils.is_type(data, "goob:Metadata")) + self.assertTrue(not utils.is_type(data, "Randy")) + data['@type'] = "dcat:Dataset" + self.assertTrue(not utils.is_type(data, "Resource")) + self.assertTrue(not utils.is_type(data, "schema:Resource")) + self.assertTrue(utils.is_type(data, "Dataset")) + + def test_which_type(self): + data = { "name": "Bob", "@type": [ "nrd:Resource", "dcat:Dataset", "Metadata" ]} + self.assertEqual(utils.which_type(data, ["Goob", "Dataset", "Resource"]), "Dataset") + self.assertIsNone(utils.which_type(data, ["Goob", "Foo"])) + with self.assertRaises(TypeError): + utils.which_type(data, "Goob") + + def test_is_any_type(self): + data = { "name": "Bob", "@type": [ "nrd:Resource", "dcat:Dataset", "Metadata" ]} + self.assertTrue(utils.is_any_type(data, ["Goob", "Dataset", "Resource"])) + self.assertFalse(utils.is_any_type(data, ["Goob", "Foo"])) + with self.assertRaises(TypeError): + utils.is_any_type(data, 
"Goob") + + def test_insert_before_val(self): + vals = [] + self.assertEqual(utils._insert_before_val(vals, 'r'), ['r']) + self.assertEqual(utils._insert_before_val(vals, 'g', 'u', 'r', 'n'), ['g', 'r']) + self.assertEqual(utils._insert_before_val(vals, 'u', 'r', 'n'), ['g', 'u', 'r']) + self.assertEqual(utils._insert_before_val(vals, 'n'), ['g', 'u', 'r', 'n']) + self.assertEqual(utils._insert_before_val(vals, 'y', ['o', 'o', 'b']), ['g', 'u', 'r', 'n', 'y']) + + def test_insert_type(self): + nerdm = {} + utils.insert_type(nerdm, "nrd:Resource") + self.assertIn('@type', nerdm) + self.assertEqual(nerdm['@type'], ['nrd:Resource']) + + nerdm = {} + utils.insert_type(nerdm, "nrd:Resource", "gb:Gurn") + self.assertIn('@type', nerdm) + self.assertEqual(nerdm['@type'], ['nrd:Resource']) + + utils.insert_type(nerdm, "nrdp:DataPublication", "nrd:PublicDataResource", "nrd:Resource") + self.assertEqual(nerdm['@type'], ['nrdp:DataPublication', 'nrd:Resource']) + utils.insert_type(nerdm, "nrd:Resource", "gb:Gurn") + self.assertEqual(nerdm['@type'], ['nrdp:DataPublication', 'nrd:Resource', 'nrd:Resource']) + utils.insert_type(nerdm, "gb:Gurn") + self.assertEqual(nerdm['@type'], ['nrdp:DataPublication', 'nrd:Resource', 'nrd:Resource', 'gb:Gurn']) + + def test_nerdm_schema_version(self): + self.assertEqual(utils.nerdm_schema_version(const.CORE_SCHEMA_URI), const.core_ver.lstrip('v')) + self.assertEqual(utils.nerdm_schema_version("urn:goober/v3.0"), "3.0") + self.assertEqual(utils.nerdm_schema_version("urn:goober/v3.0#"), "3.0") + self.assertEqual(utils.nerdm_schema_version("urn:goober/v3.0#/definitions/Gurn"), "3.0") + self.assertEqual(utils.nerdm_schema_version("urn:goober/beta"), "beta") + with self.assertRaises(ValueError): + utils.nerdm_schema_version("foo") + + + def test_cmp_versions(self): + self.assertEqual(utils.cmp_versions("1.0.0", "1.0.2"), -1) + self.assertEqual(utils.cmp_versions("1.0.1", "1.0.1"), 0) + self.assertEqual(utils.cmp_versions("1.0.2", "1.0.1"), 1) + 
self.assertEqual(utils.cmp_versions("1.0", "1.0.2"), -1) + self.assertEqual(utils.cmp_versions("1.0.0", "1.0"), 1) + self.assertEqual(utils.cmp_versions("1", "1.0"), -1) + self.assertEqual(utils.cmp_versions("1.0.2", "1.1.0"), -1) + self.assertEqual(utils.cmp_versions("1.2.1", "1.0.1"), 1) + self.assertEqual(utils.cmp_versions("1.0.2", "4.0.1"), -1) + self.assertEqual(utils.cmp_versions("12.0.2", "4.0.1"), 1) + + def test_schema_version_cmp(self): + data = {"_schema": "https://data.nist.gov/od/dm/nerdm-schema/pub/v1.3"} + self.assertEqual(utils.cmp_versions(utils.get_nerdm_schema_version(data), "0.5"), 1) + self.assertEqual(utils.cmp_versions(utils.get_nerdm_schema_version(data), "2.5"), -1) + self.assertEqual(utils.cmp_versions(utils.get_nerdm_schema_version(data), "1.3"), 0) + +class TestVersion(test.TestCase): + + def test_ctor(self): + ver = utils.Version("3.3.5.0") + self.assertEqual(ver._vs, "3.3.5.0") + self.assertEqual(ver.fields, [3,3,5,0]) + + def testEQ(self): + ver = utils.Version("3.3.0") + self.assertEqual(ver, utils.Version("3.3.0")) + self.assertTrue(ver == "3.3.0") + self.assertFalse(ver == "3.3.1") + self.assertFalse(ver == "1.3") + + def testNE(self): + ver = utils.Version("3.3.0") + self.assertNotEqual(ver, utils.Version("3.3.2")) + self.assertFalse(ver != "3.3.0") + self.assertTrue(ver != "3.3.1") + self.assertTrue(ver != "1.3") + + def testGE(self): + ver = utils.Version("3.3.0") + self.assertTrue(ver >= "3.2.0") + self.assertTrue(ver >= "3.3.0") + self.assertTrue(ver >= "1.3") + + self.assertFalse(ver >= "5.3") + self.assertFalse(ver >= utils.Version("5.3")) + + def testGT(self): + ver = utils.Version("3.3.0") + self.assertTrue(ver > "3.2.0") + self.assertTrue(ver > "1.3") + + self.assertFalse(ver > "3.3.0") + self.assertFalse(ver >= "5.3") + self.assertFalse(ver >= utils.Version("5.3")) + + def testLE(self): + ver = utils.Version("3.3.0") + self.assertTrue(ver <= "3.5.0") + self.assertTrue(ver <= "3.3.1") + self.assertTrue(ver <= "3.3.0") + 
self.assertTrue(ver <= "5.3") + + self.assertFalse(ver <= "1.3") + self.assertFalse(ver <= utils.Version("2.3")) + + def testLT(self): + ver = utils.Version("3.3.0") + self.assertTrue(ver < "3.5.0") + self.assertTrue(ver < "3.3.1") + self.assertTrue(ver < "5.3") + + self.assertFalse(ver < "3.3.0") + self.assertFalse(ver < "1.3") + self.assertFalse(ver < utils.Version("2.3")) + + def testIsProper(self): + self.assertTrue(utils.Version.is_proper_version("33")) + self.assertTrue(utils.Version.is_proper_version("3.3")) + self.assertTrue(utils.Version.is_proper_version("13_3_0")) + self.assertTrue(utils.Version.is_proper_version("1.23_400.10")) + + self.assertFalse(utils.Version.is_proper_version("-33")) + self.assertFalse(utils.Version.is_proper_version("3.3r23")) + self.assertFalse(utils.Version.is_proper_version("13.3.0-1")) + self.assertFalse(utils.Version.is_proper_version("dev")) + + def test_sorted(self): + vers = "2.0.1 3.0 0.1.1 0 12.3 2.0.1.0".split() + expect = "0 0.1.1 2.0.1 2.0.1.0 3.0 12.3".split() + self.assertEqual(sorted(vers, key=utils.Version), expect) + + + + +if __name__ == '__main__': + test.main() diff --git a/python/nistoar/nerdm/tests/test_validate.py b/python/tests/nistoar/nerdm/test_validate.py similarity index 100% rename from python/nistoar/nerdm/tests/test_validate.py rename to python/tests/nistoar/nerdm/test_validate.py diff --git a/python/tests/nistoar/rmm/__init__.py b/python/tests/nistoar/rmm/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/python/tests/nistoar/rmm/data/config.json b/python/tests/nistoar/rmm/data/config.json new file mode 100644 index 0000000..f3d0d7f --- /dev/null +++ b/python/tests/nistoar/rmm/data/config.json @@ -0,0 +1,6 @@ +{ + "working_dir": "/pdr/work", + "review_dir": "/midas/review", + "upload_dir": "/midas/upload", + "id_registry_dir": "/pdr" +} diff --git a/python/tests/nistoar/rmm/data/config.yaml b/python/tests/nistoar/rmm/data/config.yaml new file mode 100644 index 0000000..7687b29 --- 
/dev/null +++ b/python/tests/nistoar/rmm/data/config.yaml @@ -0,0 +1,6 @@ +working_dir: '/pdr/work' +review_dir: '/midas/review' +upload_dir: '/midas/upload' +id_registry_dir: '/pdr' + + diff --git a/python/nistoar/rmm/tests/data/csconfig.json b/python/tests/nistoar/rmm/data/csconfig.json similarity index 100% rename from python/nistoar/rmm/tests/data/csconfig.json rename to python/tests/nistoar/rmm/data/csconfig.json diff --git a/python/tests/nistoar/rmm/ingest/__init__.py b/python/tests/nistoar/rmm/ingest/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/python/tests/nistoar/rmm/ingest/postcomm.sh b/python/tests/nistoar/rmm/ingest/postcomm.sh new file mode 100755 index 0000000..db9500f --- /dev/null +++ b/python/tests/nistoar/rmm/ingest/postcomm.sh @@ -0,0 +1,14 @@ +#! /bin/bash +# +# Used by test_wsgi.py, this script prints arguments to a given output file +# +# USAGE: postcomm.sh OUTFILE [ARG ...] +# +set -e +[ -n "$1" ] || { + echo "${0}: Missing output filename" + exit 1 +} +out=$1; shift + +echo "$@" > $out diff --git a/python/nistoar/rmm/ingest/tests/test_wsgi.py b/python/tests/nistoar/rmm/ingest/test_wsgi.py similarity index 91% rename from python/nistoar/rmm/ingest/tests/test_wsgi.py rename to python/tests/nistoar/rmm/ingest/test_wsgi.py index 7461b0b..66103dd 100644 --- a/python/nistoar/rmm/ingest/tests/test_wsgi.py +++ b/python/tests/nistoar/rmm/ingest/test_wsgi.py @@ -1,10 +1,10 @@ -import pdb, os, json, urlparse, warnings, logging -from cStringIO import StringIO +import pdb, os, json, urllib.parse, warnings, logging +from io import StringIO from copy import deepcopy import unittest as test from ejsonschema import ExtValidator, SchemaValidator -from nistoar.tests import * +from nistoar.testing import * from nistoar.rmm.ingest import wsgi testdir = os.path.dirname(os.path.abspath(__file__)) @@ -75,8 +75,16 @@ def tearDown(self): if not hasattr(client, 'get_database'): client.get_database = client.get_default_database db = 
client.get_database() - if "record" in db.collection_names(): + if "record" in db.list_collection_names(): db.drop_collection("record") + if "versions" in db.list_collection_names(): + db.drop_collection("versions") + if "releaseSets" in db.list_collection_names(): + db.drop_collection("releaseSets") + if "taxonomy" in db.list_collection_names(): + db.drop_collection("taxonomy") + if "fields" in db.list_collection_names(): + db.drop_collection("fields") tmpfiles.clean() def test_ctor(self): @@ -95,7 +103,7 @@ def test_get_types(self): body = self.svc(req, self.start) self.assertGreater(len(self.resp), 0) self.assertIn("200", self.resp[0]) - self.assertEqual(body[0].strip(), '["nerdm"]') + self.assertEqual(body[0].decode().strip(), '["nerdm"]') self.assertFalse(os.path.exists(self.commitfile), "Commit file created unexpectedly") def test_is_ready(self): @@ -107,7 +115,7 @@ def test_is_ready(self): body = self.svc(req, self.start) self.assertGreater(len(self.resp), 0) self.assertIn("200", self.resp[0]) - self.assertEqual(body[0], 'Service ready\n') + self.assertEqual(body[0].decode(), 'Service ready\n') self.assertFalse(os.path.exists(self.commitfile), "Commit file created unexpectedly") def test_auth(self): @@ -136,7 +144,7 @@ def test_auth(self): self.assertGreater(len(self.resp), 0) self.assertIn("200", self.resp[0]) self.assertGreater(len(body), 0) - self.assertEqual(body[0], 'Service ready\n') + self.assertEqual(body[0].decode(), 'Service ready\n') # test single rejection req['QUERY_STRING'] = 'goob=able&auth=gurn' @@ -169,7 +177,7 @@ def test_auth(self): self.assertGreater(len(self.resp), 0) self.assertIn("200", self.resp[0]) self.assertGreater(len(body), 0) - self.assertEqual(body[0], 'Service ready\n') + self.assertEqual(body[0].decode(), 'Service ready\n') self.resp = [] req['HTTP_AUTHORIZATION'] = 'Token 9e73' @@ -269,9 +277,9 @@ def test_good_post(self): client.get_database = client.get_default_database try: db = client.get_database() - if "record" in 
db.collection_names(): + if "record" in db.list_collection_names(): recs = db['record'].find() - self.assertEqual(recs.count(), 0) + self.assertEqual(db['record'].count_documents(), 0) client.close() finally: client.close() @@ -286,10 +294,9 @@ def test_good_post(self): 'CONTENT_LENGTH': clen, 'wsgi.input': doc } - body = self.svc(req, self.start) - archfile = os.path.join(self.archdir, "sdp0fjspek351.json") + archfile = os.path.join(self.archdir, "sdp0fjspek351-v1_0_0.json") self.assertTrue(os.path.isfile(archfile)) self.assertIn("200", self.resp[0]) @@ -304,9 +311,9 @@ def test_good_post(self): if not hasattr(client, 'get_database'): client.get_database = client.get_default_database db = client.get_database() - self.assertIn("record", db.collection_names()) + self.assertIn("record", db.list_collection_names()) recs = db['record'].find() - self.assertEqual(recs.count(), 1) + self.assertEqual(db['record'].count_documents({}), 1) self.assertIn("JANAF", recs[0]['title']) finally: client.close() @@ -362,7 +369,7 @@ def test_nerdm_archive_cache(self): self.assertTrue(rec) recid = self.hdlr.nerdm_archive_cache(rec) - self.assertEqual(recid, "ark:/88434/sdp0fjspek351") + self.assertEqual(recid, "sdp0fjspek351-v1_0_0") cachefile = os.path.join(self.archdir, "_cache", os.path.basename(recid)+".json") diff --git a/python/tests/nistoar/rmm/mongo/__init__.py b/python/tests/nistoar/rmm/mongo/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/python/nistoar/rmm/mongo/tests/test_fields.py b/python/tests/nistoar/rmm/mongo/test_fields.py similarity index 80% rename from python/nistoar/rmm/mongo/tests/test_fields.py rename to python/tests/nistoar/rmm/mongo/test_fields.py index 2c57956..1013a8b 100644 --- a/python/nistoar/rmm/mongo/tests/test_fields.py +++ b/python/tests/nistoar/rmm/mongo/test_fields.py @@ -1,6 +1,6 @@ -import pdb, os, sys, json, urlparse #, warnings -from nistoar.rmm.mongo.tests import warnings -sys.modules['warnings'] = warnings +import pdb, os, 
sys, json, urllib.parse, warnings +# from nistoar.rmm.mongo.tests import warnings +# sys.modules['warnings'] = warnings import unittest as test from pymongo import MongoClient from ejsonschema import ExtValidator, SchemaValidator @@ -46,17 +46,17 @@ def tearDown(self): if not hasattr(client, 'get_database'): client.get_database = client.get_default_database db = client.get_database() - if "fields" in db.collection_names(): + if "fields" in db.list_collection_names(): db.drop_collection("fields") def test_ctor(self): - self.assertEquals(self.ldr.coll, "fields") + self.assertEqual(self.ldr.coll, "fields") def test_connect(self): self.assertIsNone(self.ldr._client) self.ldr.connect() self.assertIsNotNone(self.ldr._client) - self.assertEqual(self.ldr._client.get_database().collection_names(), []) + self.assertNotIn("fields" , self.ldr._client.get_database().list_collection_names()) self.ldr.disconnect() self.assertIsNone(self.ldr._client) @@ -72,46 +72,45 @@ def test_validate(self): def test_load_keyless_data(self): data = { "name": "title", "type": "string" } self.assertEqual(self.ldr.load_data(data), 1) - self.assertEqual(self.ldr._client.get_database().fields.find().count(), 1) + self.assertEqual(self.ldr._client.get_database().fields.count_documents({}), 1) data = { "name": "title", "type": "string" } self.assertEqual(self.ldr.load_data(data), 1) - self.assertEqual(self.ldr._client.get_database().fields.find().count(), 2) + self.assertEqual(self.ldr._client.get_database().fields.count_documents({}), 2) def test_load_data(self): key = { "name": "title" } data = { "name": "title", "type": "string" } self.assertEqual(self.ldr.load_data(data, key, 'fail'), 1) + self.assertEqual(self.ldr._client.get_database().fields.count_documents({}), 1) c = self.ldr._client.get_database().fields.find() - self.assertEqual(c.count(), 1) self.assertEqual(c[0]['type'], 'string') data = { "name": "title", "type": "array" } with self.assertRaises(fields.RecordIngestError): 
self.ldr.load_data(data, key, 'fail') + self.assertEqual(self.ldr._client.get_database().fields.count_documents({}), 1) c = self.ldr._client.get_database().fields.find() - self.assertEqual(c.count(), 1) self.assertEqual(c[0]['type'], 'string') data = { "name": "title", "type": "array" } - with warnings.catch_warnings(record=True, reset=True) as w: + with warnings.catch_warnings(record=True) as w: self.assertEqual(self.ldr.load_data(data, key, 'warn'), 1) self.assertEqual(len(w), 1) self.assertTrue(issubclass(w[-1].category, fields.UpdateWarning)) + self.assertEqual(self.ldr._client.get_database().fields.count_documents({}), 1) c = self.ldr._client.get_database().fields.find() - self.assertEqual(c.count(), 1) self.assertEqual(c[0]['type'], 'array') data = { "name": "title", "type": "bool" } - self.assertEqual(self.ldr.load_data(data, key, 'pass'), 1) + self.assertEqual(self.ldr.load_data(data, key, 'quiet'), 1) + self.assertEqual(self.ldr._client.get_database().fields.count_documents({}), 1) c = self.ldr._client.get_database().fields.find() - self.assertEqual(c.count(), 1) self.assertEqual(c[0]['type'], 'bool') key = { "name": "description" } data = { "name": "description", "type": "bool" } - self.assertEqual(self.ldr.load_data(data, key, 'pass'), 1) - c = self.ldr._client.get_database().fields.find() - self.assertEqual(c.count(), 2) + self.assertEqual(self.ldr.load_data(data, key, 'quiet'), 1) + self.assertEqual(self.ldr._client.get_database().fields.count_documents({}), 2) def test_load_simple_obj(self): data = { "name": "title", "type": "string" } @@ -122,8 +121,8 @@ def test_load_simple_obj(self): self.assertEqual(res.failure_count, 0) self.assertTrue(res.succeeded({'name': "title"})) self.assertFalse(res.failed({'name': "title"})) + self.assertEqual(self.ldr._client.get_database().fields.count_documents({}), 1) c = self.ldr._client.get_database().fields.find() - self.assertEqual(c.count(), 1) self.assertEqual(c[0]['type'], 'string') data = { "name": "title", 
"type": "array" } @@ -133,8 +132,8 @@ def test_load_simple_obj(self): self.assertEqual(res.failure_count, 0) self.assertTrue(res.succeeded({'name': "title"})) self.assertFalse(res.failed({'name': "title"})) + self.assertEqual(self.ldr._client.get_database().fields.count_documents({}), 1) c = self.ldr._client.get_database().fields.find() - self.assertEqual(c.count(), 1) self.assertEqual(c[0]['type'], 'array') def test_load_array(self): @@ -146,9 +145,8 @@ def test_load_array(self): self.assertEqual(res.success_count, 2) self.assertEqual(res.failure_count, 1) self.assertTrue(res.succeeded({'name': "title"})) - c = self.ldr._client.get_database().fields.find() - self.assertEqual(c.count(), 2) - self.assertEquals(len(res.failures()), 1) + self.assertEqual(self.ldr._client.get_database().fields.count_documents({}), 2) + self.assertEqual(len(res.failures()), 1) def test_load_wrapped_array(self): data = { @@ -161,9 +159,8 @@ def test_load_wrapped_array(self): self.assertEqual(res.success_count, 2) self.assertEqual(res.failure_count, 1) self.assertTrue(res.succeeded({'name': "title"})) - c = self.ldr._client.get_database().fields.find() - self.assertEqual(c.count(), 2) - self.assertEquals(len(res.failures()), 1) + self.assertEqual(self.ldr._client.get_database().fields.count_documents({}), 2) + self.assertEqual(len(res.failures()), 1) def test_load(self): data = { @@ -176,10 +173,10 @@ def test_load(self): self.assertEqual(res.success_count, 2) self.assertEqual(res.failure_count, 1) self.assertTrue(res.succeeded({'name': "title"})) + self.assertEqual(self.ldr._client.get_database().fields.count_documents({}), 2) c = self.ldr._client.get_database().fields.find() - self.assertEqual(c.count(), 2) - self.assertEquals(len(res.failures()), 1) self.assertEqual(c[0]['type'], 'string') + self.assertEqual(len(res.failures()), 1) data = data['fields'][:2] data[0]['type'] = 'array' @@ -189,8 +186,8 @@ def test_load(self): self.assertEqual(res.success_count, 4) 
self.assertEqual(res.failure_count, 1) self.assertTrue(res.succeeded({'name': "title"})) + self.assertEqual(self.ldr._client.get_database().fields.count_documents({}), 2) c = self.ldr._client.get_database().fields.find() - self.assertEqual(c.count(), 2) self.assertNotEqual(c[0]['type'], 'string') self.assertNotEqual(c[1]['type'], 'string') @@ -200,8 +197,8 @@ def test_load(self): self.assertEqual(res.success_count, 5) self.assertEqual(res.failure_count, 1) self.assertTrue(res.succeeded({'name': "description"})) + self.assertEqual(self.ldr._client.get_database().fields.count_documents({}), 2) c = self.ldr._client.get_database().fields.find() - self.assertEqual(c.count(), 2) self.assertEqual(c[0]['type'], 'bool') self.assertEqual(c[1]['type'], 'bool') @@ -212,8 +209,8 @@ def test_load_from_file(self): self.assertEqual(res.failure_count, 0) self.assertTrue(res.succeeded({'name': "title"})) + self.assertEqual(self.ldr._client.get_database().fields.count_documents({}), 28) c = self.ldr._client.get_database().fields.find({'name':'title'}) - self.assertEqual(c.count(), 1) self.assertEqual(c[0]['type'], 'string') self.assertIn('searchable', c[0]['tags']) diff --git a/python/nistoar/rmm/mongo/tests/test_loader.py b/python/tests/nistoar/rmm/mongo/test_loader.py similarity index 100% rename from python/nistoar/rmm/mongo/tests/test_loader.py rename to python/tests/nistoar/rmm/mongo/test_loader.py diff --git a/python/tests/nistoar/rmm/mongo/test_nerdm.py b/python/tests/nistoar/rmm/mongo/test_nerdm.py new file mode 100644 index 0000000..c1409b7 --- /dev/null +++ b/python/tests/nistoar/rmm/mongo/test_nerdm.py @@ -0,0 +1,183 @@ +import pdb, os, json, urllib.parse, warnings, logging +import unittest as test +from pymongo import MongoClient +from ejsonschema import ExtValidator, SchemaValidator + +from nistoar.rmm.mongo import nerdm +from nistoar.rmm.mongo import loader + +pydir = 
os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))) +basedir = os.path.dirname(pydir) +schemadir = os.path.join(basedir, "model") +exdir = os.path.join(schemadir, "examples") +janaffile = os.path.join(exdir, "janaf.json") + +dburl = None +if os.environ.get('MONGO_TESTDB_URL'): + dburl = os.environ.get('MONGO_TESTDB_URL') + +assert os.path.exists(schemadir), schemadir + +# logger = logging.getLogger("test") + +@test.skipIf(not os.environ.get('MONGO_TESTDB_URL'), + "test mongodb not available") +class TestNERDmLoader(test.TestCase): + + def setUp(self): + self.ldr = nerdm.NERDmLoader(dburl, schemadir) + + def tearDown(self): + client = MongoClient(dburl) + if not hasattr(client, 'get_database'): + client.get_database = client.get_default_database + db = client.get_database() + if "record" in db.list_collection_names(): + db.drop_collection("record") + if "versions" in db.list_collection_names(): + db.drop_collection("versions") + if "releasesets" in db.list_collection_names(): + db.drop_collection("releasesets") + + def test_ctor(self): + self.assertEqual(self.ldr.coll, "versions") + + def test_validate(self): + with open(janaffile) as fd: + data = json.load(fd) + res = self.ldr.validate(data, schemauri=nerdm.DEF_SCHEMA) + self.assertEqual(res, []) + + del data['landingPage'] + res = self.ldr.validate(data, schemauri=nerdm.DEF_SCHEMA) + self.assertEqual(len(res), 2) + + def test_load_data(self): + with open(janaffile) as fd: + data = json.load(fd) + key = { '@id': "ark:/88434/sdp0fjspek351" } + self.assertEqual(self.ldr.load_data(data, key, 'fail'), 1) + self.assertEqual(self.ldr._client.get_database().record.count_documents({}), 0) + self.assertEqual(self.ldr._client.get_database().releasesets.count_documents({}), 0) + self.assertEqual(self.ldr._client.get_database().versions.count_documents({}), 1) + c = self.ldr._client.get_database().versions.find() + self.assertEqual(c[0]['@id'], 
'ark:/88434/sdp0fjspek351') + + def test_load(self): + with open(janaffile) as fd: + data = json.load(fd) + data['title'] = "Version 1.0.0" + data['version'] = "1.0.0" + res = self.ldr.load(data) + self.assertEqual(res.attempt_count, 3) + self.assertEqual(res.success_count, 3) + self.assertEqual(res.failure_count, 0) + self.assertEqual(self.ldr._client.get_database().record.count_documents({}), 1) + c = self.ldr._client.get_database().record.find() + self.assertEqual(c[0]['@id'], 'ark:/88434/sdp0fjspek351') + self.assertEqual(c[0]['version'], '1.0.0') + self.assertEqual(c[0]['title'], 'Version 1.0.0') + self.assertEqual(self.ldr._client.get_database().releasesets.count_documents({}), 1) + c = self.ldr._client.get_database().releasesets.find() + self.assertEqual(c[0]['@id'], 'ark:/88434/sdp0fjspek351/pdr:v') + self.assertEqual(c[0]['version'], '1.0.0') + self.assertEqual(c[0]['title'], 'Version 1.0.0') + self.assertEqual(self.ldr._client.get_database().versions.count_documents({}), 1) + c = self.ldr._client.get_database().versions.find() + self.assertEqual(c[0]['@id'], 'ark:/88434/sdp0fjspek351/pdr:v/1.0.0') + self.assertEqual(c[0]['version'], '1.0.0') + self.assertEqual(c[0]['title'], 'Version 1.0.0') + + # update with next version + self.ldr.onupdate = 'quiet' + data['title'] = "Version 1.0.1" + data['version'] = "1.0.1" + res = self.ldr.load(data) + self.assertEqual(res.attempt_count, 3) + self.assertEqual(res.success_count, 3) + self.assertEqual(res.failure_count, 0) + self.assertEqual(self.ldr._client.get_database().record.count_documents({}), 1) + c = self.ldr._client.get_database().record.find() + self.assertEqual(c[0]['@id'], 'ark:/88434/sdp0fjspek351') + self.assertEqual(c[0]['version'], '1.0.1') + self.assertEqual(c[0]['title'], 'Version 1.0.1') + self.assertEqual(self.ldr._client.get_database().releasesets.count_documents({}), 1) + c = self.ldr._client.get_database().releasesets.find() + self.assertEqual(c[0]['@id'], 'ark:/88434/sdp0fjspek351/pdr:v') + 
self.assertEqual(c[0]['version'], '1.0.1') + self.assertEqual(c[0]['title'], 'Version 1.0.1') + self.assertEqual(self.ldr._client.get_database().versions.count_documents({}), 2) + c = self.ldr._client.get_database().versions.find() + for i in range(1): + v = c[i]['version'] + self.assertEqual(c[i]['@id'], 'ark:/88434/sdp0fjspek351/pdr:v/'+v) + self.assertEqual(c[0]['version'], v) + self.assertEqual(c[0]['title'], 'Version '+v) + + # update older version + self.ldr.onupdate = 'quiet' + data['title'] = "A Version 1.0.0" + data['version'] = "1.0.0" + res = self.ldr.load(data) + self.assertEqual(res.attempt_count, 1) + self.assertEqual(res.success_count, 1) + self.assertEqual(res.failure_count, 0) + self.assertEqual(self.ldr._client.get_database().record.count_documents({}), 1) + c = self.ldr._client.get_database().record.find() + self.assertEqual(c[0]['@id'], 'ark:/88434/sdp0fjspek351') + self.assertEqual(c[0]['version'], '1.0.1') + self.assertEqual(c[0]['title'], 'Version 1.0.1') + self.assertEqual(self.ldr._client.get_database().releasesets.count_documents({}), 1) + c = self.ldr._client.get_database().releasesets.find() + self.assertEqual(c[0]['@id'], 'ark:/88434/sdp0fjspek351/pdr:v') + self.assertEqual(c[0]['version'], '1.0.1') + self.assertEqual(c[0]['title'], 'Version 1.0.1') + self.assertEqual(self.ldr._client.get_database().versions.count_documents({}), 2) + c = self.ldr._client.get_database().versions.find() + for i in range(1): + if c[i]['version'] == "1.0.0": + self.assertEqual(c[0]['title'], 'A Version 1.0.0') + else: + self.assertEqual(c[0]['title'], 'Version 1.0.1') + + # update last version + self.ldr.onupdate = 'quiet' + data['title'] = "A Version 1.0.1" + data['version'] = "1.0.1" + res = self.ldr.load(data) + self.assertEqual(res.attempt_count, 3) + self.assertEqual(res.success_count, 3) + self.assertEqual(res.failure_count, 0) + self.assertEqual(self.ldr._client.get_database().record.count_documents({}), 1) + c = 
self.ldr._client.get_database().record.find() + self.assertEqual(c[0]['@id'], 'ark:/88434/sdp0fjspek351') + self.assertEqual(c[0]['version'], '1.0.1') + self.assertEqual(c[0]['title'], 'A Version 1.0.1') + self.assertEqual(self.ldr._client.get_database().releasesets.count_documents({}), 1) + c = self.ldr._client.get_database().releasesets.find() + self.assertEqual(c[0]['@id'], 'ark:/88434/sdp0fjspek351/pdr:v') + self.assertEqual(c[0]['version'], '1.0.1') + self.assertEqual(c[0]['title'], 'A Version 1.0.1') + self.assertEqual(self.ldr._client.get_database().versions.count_documents({}), 2) + c = self.ldr._client.get_database().versions.find() + for i in range(1): + v = c[i]['version'] + self.assertEqual(c[i]['@id'], 'ark:/88434/sdp0fjspek351/pdr:v/'+v) + self.assertEqual(c[0]['version'], v) + self.assertEqual(c[0]['title'], 'A Version '+v) + + def test_load_from_file(self): + res = self.ldr.load_from_file(janaffile) + self.assertEqual(res.attempt_count, 3) + self.assertEqual(res.success_count, 3) + self.assertEqual(res.failure_count, 0) + self.assertEqual(self.ldr._client.get_database().record.count_documents({}), 1) + c = self.ldr._client.get_database().record.find() + self.assertEqual(c[0]['@id'], 'ark:/88434/sdp0fjspek351') + self.assertEqual(self.ldr._client.get_database().versions.count_documents({}), 1) + self.assertEqual(self.ldr._client.get_database().releasesets.count_documents({}), 1) + + + +if __name__ == '__main__': + test.main() diff --git a/python/nistoar/rmm/mongo/tests/test_taxon.py b/python/tests/nistoar/rmm/mongo/test_taxon.py similarity index 82% rename from python/nistoar/rmm/mongo/tests/test_taxon.py rename to python/tests/nistoar/rmm/mongo/test_taxon.py index 9395b95..2321208 100644 --- a/python/nistoar/rmm/mongo/tests/test_taxon.py +++ b/python/tests/nistoar/rmm/mongo/test_taxon.py @@ -1,4 +1,4 @@ -import pdb, os, json, urlparse, warnings +import pdb, os, json, urllib.parse, warnings import unittest as test from pymongo import MongoClient from 
ejsonschema import ExtValidator, SchemaValidator @@ -44,17 +44,17 @@ def tearDown(self): if not hasattr(client, 'get_database'): client.get_database = client.get_default_database db = client.get_database() - if "taxonomy" in db.collection_names(): + if "taxonomy" in db.list_collection_names(): db.drop_collection("taxonomy") def test_ctor(self): - self.assertEquals(self.ldr.coll, "taxonomy") + self.assertEqual(self.ldr.coll, "taxonomy") def test_connect(self): self.assertIsNone(self.ldr._client) self.ldr.connect() self.assertIsNotNone(self.ldr._client) - self.assertEqual(self.ldr._client.get_database().collection_names(), []) + self.assertNotIn("taxonomy" , self.ldr._client.get_database().list_collection_names()) self.ldr.disconnect() self.assertIsNone(self.ldr._client) @@ -74,24 +74,24 @@ def test_validate(self): def test_load_keyless_data(self): data = { "term": "title", "parent": "goob", "level": 1 } self.assertEqual(self.ldr.load_data(data), 1) - self.assertEqual(self.ldr._client.get_database().taxonomy.find().count(), 1) + self.assertEqual(self.ldr._client.get_database().taxonomy.count_documents({}), 1) data = { "term": "title", "parent": "goob", "level": 2 } self.assertEqual(self.ldr.load_data(data), 1) - self.assertEqual(self.ldr._client.get_database().taxonomy.find().count(), 2) + self.assertEqual(self.ldr._client.get_database().taxonomy.count_documents({}), 2) def test_load_data(self): key = { "term": "title", "parent": "goob" } data = { "term": "title", "parent": "goob", "level": 2 } self.assertEqual(self.ldr.load_data(data, key, 'fail'), 1) + self.assertEqual(self.ldr._client.get_database().taxonomy.count_documents({}), 1) c = self.ldr._client.get_database().taxonomy.find() - self.assertEqual(c.count(), 1) self.assertEqual(c[0]['level'], 2) data = { "term": "title", "parent": "goob", "level": 3 } with self.assertRaises(taxon.RecordIngestError): self.ldr.load_data(data, key, 'fail') + 
self.assertEqual(self.ldr._client.get_database().taxonomy.count_documents({}), 1) c = self.ldr._client.get_database().taxonomy.find() - self.assertEqual(c.count(), 1) self.assertEqual(c[0]['level'], 2) data = { "term": "title", "parent": "goob", "level": 3 } @@ -103,21 +103,20 @@ def test_load_data(self): #self.assertEqual(len(w), 1) #self.assertTrue(issubclass(w[-1].category, taxon.UpdateWarning)) self.ldr.load_data(data, key, 'warn') + self.assertEqual(self.ldr._client.get_database().taxonomy.count_documents({}), 1) c = self.ldr._client.get_database().taxonomy.find() - self.assertEqual(c.count(), 1) self.assertEqual(c[0]['level'], 3) data = { "term": "title", "parent": "goob", "level": 1 } - self.assertEqual(self.ldr.load_data(data, key, 'pass'), 1) + self.assertEqual(self.ldr.load_data(data, key, 'quiet'), 1) + self.assertEqual(self.ldr._client.get_database().taxonomy.count_documents({}), 1) c = self.ldr._client.get_database().taxonomy.find() - self.assertEqual(c.count(), 1) self.assertEqual(c[0]['level'], 1) key = { "term": "description", "parent": "goob" } data = { "term": "description", "parent": "goob", "level": 2 } - self.assertEqual(self.ldr.load_data(data, key, 'pass'), 1) - c = self.ldr._client.get_database().taxonomy.find() - self.assertEqual(c.count(), 2) + self.assertEqual(self.ldr.load_data(data, key, 'quiet'), 1) + self.assertEqual(self.ldr._client.get_database().taxonomy.count_documents({}), 2) def test_load_simple_obj(self): key = { "term": "title", "parent": "goob" } @@ -129,8 +128,8 @@ def test_load_simple_obj(self): self.assertEqual(res.failure_count, 0) self.assertTrue(res.succeeded(key)) self.assertFalse(res.failed(key)) + self.assertEqual(self.ldr._client.get_database().taxonomy.count_documents({}), 1) c = self.ldr._client.get_database().taxonomy.find() - self.assertEqual(c.count(), 1) self.assertEqual(c[0]['level'], 2) self.assertEqual(c[0]['label'], c[0]['term']) @@ -141,8 +140,8 @@ def test_load_simple_obj(self): 
self.assertEqual(res.failure_count, 0) self.assertTrue(res.succeeded(key)) self.assertFalse(res.failed(key)) + self.assertEqual(self.ldr._client.get_database().taxonomy.count_documents({}), 1) c = self.ldr._client.get_database().taxonomy.find() - self.assertEqual(c.count(), 1) self.assertEqual(c[0]['level'], 3) data = { "term": "date", "level": 1 } @@ -152,10 +151,9 @@ def test_load_simple_obj(self): self.assertEqual(res.failure_count, 0) self.assertTrue(res.succeeded(key)) self.assertFalse(res.failed(key)) - c = self.ldr._client.get_database().taxonomy.find() - self.assertEqual(c.count(), 2) + self.assertEqual(self.ldr._client.get_database().taxonomy.count_documents({}), 2) + self.assertEqual(self.ldr._client.get_database().taxonomy.count_documents({'term':'date'}), 1) c = self.ldr._client.get_database().taxonomy.find({'term':'date'}) - self.assertEqual(c.count(), 1) self.assertEqual(c[0]['level'], 1) self.assertEqual(c[0]['parent'], "") self.assertEqual(c[0]['label'], "date") @@ -171,9 +169,8 @@ def test_load_array(self): self.assertEqual(res.success_count, 2) self.assertEqual(res.failure_count, 1) self.assertTrue(res.succeeded(key)) - c = self.ldr._client.get_database().taxonomy.find() - self.assertEqual(c.count(), 2) - self.assertEquals(len(res.failures()), 1) + self.assertEqual(self.ldr._client.get_database().taxonomy.count_documents({}), 2) + self.assertEqual(len(res.failures()), 1) def test_load_wrapped_array(self): data = { @@ -188,9 +185,8 @@ def test_load_wrapped_array(self): self.assertEqual(res.success_count, 2) self.assertEqual(res.failure_count, 1) self.assertTrue(res.succeeded(key)) - c = self.ldr._client.get_database().taxonomy.find() - self.assertEqual(c.count(), 2) - self.assertEquals(len(res.failures()), 1) + self.assertEqual(self.ldr._client.get_database().taxonomy.count_documents({}), 2) + self.assertEqual(len(res.failures()), 1) def test_load(self): data = { @@ -205,9 +201,9 @@ def test_load(self): self.assertEqual(res.success_count, 2) 
self.assertEqual(res.failure_count, 1) self.assertTrue(res.succeeded(key)) + self.assertEqual(self.ldr._client.get_database().taxonomy.count_documents({}), 2) c = self.ldr._client.get_database().taxonomy.find() - self.assertEqual(c.count(), 2) - self.assertEquals(len(res.failures()), 1) + self.assertEqual(len(res.failures()), 1) self.assertEqual(c[0]['parent'], 'goob') self.assertEqual(c[0]['level'], 2) @@ -219,8 +215,8 @@ def test_load(self): self.assertEqual(res.success_count, 4) self.assertEqual(res.failure_count, 1) self.assertTrue(res.succeeded(key)) + self.assertEqual(self.ldr._client.get_database().taxonomy.count_documents({}), 2) c = self.ldr._client.get_database().taxonomy.find() - self.assertEqual(c.count(), 2) self.assertNotEqual(c[0]['level'], 2) self.assertNotEqual(c[1]['level'], 2) @@ -230,8 +226,8 @@ def test_load(self): self.assertEqual(res.success_count, 5) self.assertEqual(res.failure_count, 1) self.assertTrue(res.succeeded(key)) + self.assertEqual(self.ldr._client.get_database().taxonomy.count_documents({}), 2) c = self.ldr._client.get_database().taxonomy.find() - self.assertEqual(c.count(), 2) self.assertEqual(c[0]['level'], 1) self.assertEqual(c[1]['level'], 1) @@ -243,8 +239,8 @@ def test_load_from_file(self): key = {'term': "Advanced Communications", "parent": ""} self.assertTrue(res.succeeded(key)) + self.assertEqual(self.ldr._client.get_database().taxonomy.count_documents({}), 249) c = self.ldr._client.get_database().taxonomy.find(key) - self.assertEqual(c.count(), 1) self.assertEqual(c[0]['level'], 1) diff --git a/python/tests/nistoar/testing/__init__.py b/python/tests/nistoar/testing/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/python/tests/nistoar/testing/test_tempfiles.py b/python/tests/nistoar/testing/test_tempfiles.py new file mode 100644 index 0000000..621cb3a --- /dev/null +++ b/python/tests/nistoar/testing/test_tempfiles.py @@ -0,0 +1,151 @@ +import os, pdb, shutil +import warnings as warn +import unittest 
as test + +from nistoar.testing import tmpdir, ensure_tmpdir, rmtmpdir, rmdir, Tempfiles + +class TestFunctions(test.TestCase): + + def test_tmpdir(self): + td = tmpdir() + self.assertEqual(os.path.dirname(td), os.getcwd()) + self.assertRegex(os.path.basename(td), r"^_test.\d+$") + + td = tmpdir("/tmp") + self.assertEqual(os.path.dirname(td), "/tmp") + self.assertRegex(os.path.basename(td), r"^_test.\d+$") + + td = tmpdir(dirname="_goob") + self.assertEqual(os.path.dirname(td), os.getcwd()) + self.assertEqual(os.path.basename(td), "_goob") + + td = tmpdir("/tmp", "_goob") + self.assertEqual(os.path.dirname(td), "/tmp") + self.assertEqual(os.path.basename(td), "_goob") + + def test_ensure_dir(self): + tdir = tmpdir() + base = os.path.basename(tdir) + + td = ensure_tmpdir() + self.assertEqual(td, tdir) + self.assertTrue(os.path.exists(td)) + self.assertTrue(os.path.isdir(td)) + self.assertEqual(os.path.dirname(td), os.getcwd()) + self.assertRegex(os.path.basename(tdir), r"^_test.\d+$") + shutil.rmtree(td) + assert not os.path.exists(td) + + td = ensure_tmpdir("/tmp") + self.assertEqual(td, os.path.join("/tmp", base)) + self.assertTrue(os.path.exists(td)) + self.assertTrue(os.path.isdir(td)) + self.assertEqual(os.path.dirname(td), "/tmp") + self.assertRegex(os.path.basename(td), r"^_test.\d+$") + shutil.rmtree(td) + assert not os.path.exists(td) + + td = ensure_tmpdir(dirname="_goob") + self.assertTrue(os.path.exists(td)) + self.assertTrue(os.path.isdir(td)) + self.assertEqual(os.path.dirname(td), os.getcwd()) + self.assertEqual(os.path.basename(td), "_goob") + shutil.rmtree(td) + assert not os.path.exists(td) + + td = ensure_tmpdir("/tmp", "_goob") + self.assertTrue(os.path.exists(td)) + self.assertTrue(os.path.isdir(td)) + self.assertEqual(os.path.dirname(td), "/tmp") + self.assertEqual(os.path.basename(td), "_goob") + shutil.rmtree(td) + assert not os.path.exists(td) + + def test_rmdir(self): + td = ensure_tmpdir() + self.assertTrue(os.path.exists(td)) + 
self.assertTrue(os.path.isdir(td)) + f = os.path.join(td, "junk.txt") + with open(f, 'w') as fd: + fd.write("Hello world!\n") + self.assertTrue(os.path.exists(f)) + + rmdir(td) + self.assertFalse(os.path.exists(f)) + self.assertFalse(os.path.exists(td)) + + def test_rmtmpdir(self): + td = ensure_tmpdir() + self.assertTrue(os.path.exists(td)) + self.assertTrue(os.path.isdir(td)) + f = os.path.join(td, "junk.txt") + with open(f, 'w') as fd: + fd.write("Hello world!\n") + self.assertTrue(os.path.exists(f)) + + rmtmpdir() + self.assertFalse(os.path.exists(f)) + self.assertFalse(os.path.exists(td)) + + td = ensure_tmpdir("/tmp", "_goob") + self.assertTrue(os.path.exists(td)) + self.assertTrue(os.path.isdir(td)) + self.assertEqual(os.path.dirname(td), "/tmp") + self.assertEqual(os.path.basename(td), "_goob") + f = os.path.join(td, "junk.txt") + with open(f, 'w') as fd: + fd.write("Hello world!\n") + self.assertTrue(os.path.exists(f)) + + rmtmpdir("/tmp", "_goob") + self.assertFalse(os.path.exists(f)) + self.assertFalse(os.path.exists(td)) + +class TestTempfiles(test.TestCase): + + def tearDown(self): + tempdir = tmpdir() + if os.path.exists(tempdir): + shutil.rmtree(tempdir) + + def test_ctor(self): + tf = Tempfiles() + self.assertEqual(tf.root, tmpdir()) + self.assertTrue(os.path.exists(tf.root)) + self.assertFalse(tf._autoclean) + self.assertEqual(len(tf._files), 0) + + def test_path(self): + tf = Tempfiles() + path = tf("goob") + self.assertEqual(path, os.path.join(tf.root, "goob")) + self.assertTrue(os.path.exists(tf.root)) + self.assertFalse(os.path.exists(path)) + + def test_mkdir(self): + tf = Tempfiles() + path = tf.mkdir("goob") + self.assertEqual(path, os.path.join(tf.root, "goob")) + self.assertTrue(os.path.exists(tf.root)) + self.assertTrue(os.path.exists(path)) + + def test_clean(self): + tf = Tempfiles() + dir = tf.mkdir("goob") + self.assertTrue(os.path.exists(dir)) + f = tf.track("goob/junk.txt") + self.assertEqual(f, os.path.join(dir, "junk.txt")) + with 
open(f, 'w') as fd: + fd.write("Hello world!\n") + self.assertTrue(os.path.exists(f)) + + tf.clean() + self.assertFalse(os.path.exists(f)) + self.assertFalse(os.path.exists(dir)) + + self.assertTrue(os.path.exists(tf.root)) + + +if __name__ == '__main__': + test.main() + diff --git a/scripts/ingest-field-info.py b/scripts/ingest-field-info.py index 2b11596..93bbfd2 100755 --- a/scripts/ingest-field-info.py +++ b/scripts/ingest-field-info.py @@ -1,10 +1,9 @@ -#! /usr/bin/python +#! /usr/bin/env python # # Usage: ingest-field-info.py [-i START] [-c COUNT] [-V] FIELDDATAFILE # # Load the field information from a file into the MongoDB 'fields' collections. # -from __future__ import print_function import os, sys, errno, json, re, warnings from argparse import ArgumentParser @@ -26,7 +25,7 @@ sys.path.extend(oarpypath.split(os.pathsep)) try: import nistoar -except ImportError, e: +except ImportError as e: nistoardir = os.path.join(basedir, "python") sys.path.append(nistoardir) import nistoar @@ -82,7 +81,7 @@ def main(args): try: with open(fldfile) as fd: doc = json.load(fd) - except ValueError, ex: + except ValueError as ex: stat = 1 totres.add(fldfile, [ JSONEncodingError(ex) ]) if not opts.silent: diff --git a/scripts/ingest-nerdm-res.py b/scripts/ingest-nerdm-res.py index bf63d2d..d5ff866 100755 --- a/scripts/ingest-nerdm-res.py +++ b/scripts/ingest-nerdm-res.py @@ -1,11 +1,11 @@ -#! /usr/bin/python +#! /usr/bin/env python # -# Usage: ingest-field-info.py [-i START] [-c COUNT] [-V] FIELDDATAFILE +# Usage: ingest-nerdm-res.py [-VqsU] [-M URL] NERD_FILE_OR_DIR [...] +# See help details via: ingest-nerdm-res.py -h # -# Load the field information from a file into the MongoDB 'fields' collections. 
+# Load NERDm JSON files into the RMM # -from __future__ import print_function -import os, sys, errno, json, re, warnings +import os, sys, errno, json, re, warnings, shutil from argparse import ArgumentParser basedir = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) @@ -26,7 +26,7 @@ sys.path.extend(oarpypath.split(os.pathsep)) try: import nistoar -except ImportError, e: +except ImportError as e: nistoardir = os.path.join(basedir, "python") sys.path.append(nistoardir) import nistoar @@ -52,6 +52,10 @@ def define_opts(progname=None): action="store_false", help="do not attempt to validate the records before "+ "ingesting them") + parser.add_argument('-A', '--archive-records', dest='archdir', metavar="DIR", + action='store', default=None, + help="after successfully loading each record, move the "+ + "record file to the archive directory DIR") parser.add_argument('-q', '--quiet', dest='quiet', default=False, action="store_true", help="do not print non-fatal status messages") @@ -64,7 +68,7 @@ def define_opts(progname=None): parser.add_argument('-M', '--mongodb-url', metavar='URL',type=str,dest='url', action='store', default="mongodb://mongodb:3333/TestDB", help="the URL to the MongoDB database to load into (in "+ - "the form 'mongodb://HOST:PORT/DBNAME'") + "the form 'mongodb://HOST:PORT/DBNAME')") return parser @@ -73,6 +77,10 @@ def main(args): opts = parser.parse_args(args) if opts.silent: opts.quiet = True + if opts.archdir and (not os.path.isdir(opts.archdir) or not os.access(opts.archdir, os.W_OK)): + print("{0}: {1}: not a directory with write permission".format(parser.prog, opts.archdir), + file=sys.stderr) + return 3 stat = 0 loader = NERDmLoader(opts.url, schemadir) @@ -85,9 +93,9 @@ def main(args): validate = opts.validate if os.path.isdir(nerdpath): - res = load_from_dir(nerdpath, loader, validate) + res = load_from_dir(nerdpath, loader, validate, opts.archdir) elif os.path.isfile(nerdpath): - res = load_from_file(nerdpath, loader, validate) + 
res = load_from_file(nerdpath, loader, validate, opts.archdir) elif not os.path.exists(nerdpath): res = LoadLog().add(nerdpath, [ "File not found." ]) else: @@ -117,11 +125,47 @@ def main(args): stat = 2 return stat -def load_from_dir(dirpath, loader, validate=True): - return loader.load_from_dir(dirpath, validate) +def load_from_dir(dirpath, loader, validate=True, archdir=None): + results = LoadLog() + + for root, dirs, files in os.walk(dirpath): + # don't look in .directorys + for i in range(len(dirs)-1, -1, -1): + if dirs[i].startswith('.'): + del dirs[i] + + for f in files: + if f.startswith('.') or not f.endswith('.json'): + continue + f = os.path.join(root, f) + load_from_file(f, loader, validate, archdir, results) + + return results + +def load_from_file(filepath, loader, validate=True, archdir=None, results=None): + with open(filepath) as fd: + try: + data = json.load(fd) + except ValueError as ex: + ex = JSONEncodingError(ex) + return LoadLog().add(filepath, ex) + + out = loader.load(data, validate=validate, results=results, id=filepath) + + if archdir and out.failure_count == 0: + recid = re.sub(r'/.*$', '', re.sub(r'ark:/\d+/', '', data.get('@id',''))) + if not recid: + # should not happen + recid = filepath + ver = data.get('version', '1.0.0').replace('.', '_') + outfile = os.path.join(archdir, "%s-v%s.json" % (os.path.basename(recid), ver)) + + # this should not raise errors, but if it does, let it bubble up + shutil.move(filepath, outfile) + + return out + -def load_from_file(filepath, loader, validate=True): - return loader.load_from_file(filepath, validate) def fmterrs(errs): msgs = str(errs[0]).split("\n") diff --git a/scripts/ingest-taxonomy.py b/scripts/ingest-taxonomy.py index 1f739d8..6b4e447 100755 --- a/scripts/ingest-taxonomy.py +++ b/scripts/ingest-taxonomy.py @@ -1,10 +1,10 @@ -#! /usr/bin/python +#! 
/usr/bin/env python # # Usage: ingest-taxonomy.py [-V] TAXONOMYFILE # # Load the taxonomy terms from a file into the MongoDB 'taxonomy' collections. # -from __future__ import print_function + import os, sys, errno, json, re, warnings from argparse import ArgumentParser @@ -26,7 +26,7 @@ sys.path.extend(oarpypath.split(os.pathsep)) try: import nistoar -except ImportError, e: +except ImportError as e: nistoardir = os.path.join(basedir, "python") sys.path.append(nistoardir) import nistoar @@ -82,7 +82,7 @@ def main(args): try: with open(taxfile) as fd: doc = json.load(fd) - except ValueError, ex: + except ValueError as ex: stat = 1 totres.add(taxfile, [ JSONEncodingError(ex) ]) if not opts.silent: diff --git a/scripts/ingest-uwsgi.py b/scripts/ingest-uwsgi.py index 3fd6505..c168dcd 100644 --- a/scripts/ingest-uwsgi.py +++ b/scripts/ingest-uwsgi.py @@ -1,7 +1,7 @@ """ The uWSGI script for launching the preservation service """ -from __future__ import print_function + import os, sys, yaml, json, logging try: import uwsgi @@ -23,17 +23,24 @@ def __init__(self): import nistoar from nistoar.rmm import config -from nistoar.rmm.exceptions import ConfigurationException from nistoar.rmm.ingest import wsgi +def get_uwsgi_opt(key, default=None): + out = uwsgi.opt.get(key) + if out is None: + return default + elif isinstance(out, bytes): + return out.decode('utf-8') + return out + confsrvc = None def get_confservice(): cfgsrvc = None if 'oar_config_service' in uwsgi.opt: # this service is based on uwsgi command-line inputs - cfgsrvc = config.ConfigService(uwsgi.opt.get('oar_config_service'), - uwsgi.opt.get('oar_config_env')) - timeout = int(uwsgi.opt.get('oar_config_timeout', 10)) + cfgsrvc = config.ConfigService(get_uwsgi_opt('oar_config_service'), + get_uwsgi_opt('oar_config_env')) + timeout = int(get_uwsgi_opt('oar_config_timeout', 10)) else: # this service is based on environment variables @@ -47,7 +54,7 @@ def get_confservice(): # determine where the configuration is coming 
from. Check first to see # files were provided via the uwsgi command line. cfg = None -confsrc = uwsgi.opt.get("oar_config_file") +confsrc = get_uwsgi_opt("oar_config_file") if confsrc: cfg = config.resolve_configuration("file:" + confsrc) @@ -55,13 +62,13 @@ def get_confservice(): # get the configuration from the config service confsrvc = get_confservice() if confsrvc: - appname = uwsgi.opt.get('oar_config_appname', + appname = get_uwsgi_opt('oar_config_appname', os.environ.get('OAR_CONFIG_APP', 'rmm-ingest')) cfg = confsrvc.get(appname) if not cfg: - raise ConfigurationException("ingester: nist-oar configuration not "+ - "provided") + raise config.ConfigurationException("ingester: nist-oar configuration not "+ + "provided") # set up logging if 'logfile' not in cfg: @@ -79,7 +86,7 @@ def get_confservice(): acfg = cfg['db_authn'] try: rmmcfg = None - rmmconfsrc = uwsgi.opt.get("oar_rmm_config_file", + rmmconfsrc = get_uwsgi_opt("oar_rmm_config_file", acfg.get("rmm_config_file")) if rmmconfsrc: rmmcfg = config.resolve_configuration(rmmconfsrc) @@ -88,8 +95,8 @@ def get_confservice(): if not confsrvc: convsrvc = get_confservice() if not confsrvc: - raise ConfigurationException("ingester: configuration not "+ - "available; set db_authn.rmm_config_file") + raise config.ConfigurationException("ingester: configuration not "+ + "available; set db_authn.rmm_config_file") rmmcfg = confsrvc.get(acfg.get('rmm_config_loc', 'oar-rmm'), flat=True) @@ -98,9 +105,8 @@ def get_confservice(): acfg['rouser'] = rmmcfg['oar.mongodb.read.user'] acfg['ropass'] = rmmcfg['oar.mongodb.read.password'] - except Exception, ex: - raise ConfigurationException("Failed to retrieve Mongo authentication "+ - "info: "+str(ex), cause=ex) + except Exception as ex: + raise config.ConfigurationException("Failed to retrieve Mongo authentication info: "+str(ex), cause=ex) application = wsgi.app(cfg) diff --git a/scripts/install.sh b/scripts/install.sh index 4a4a9ca..36db68b 100755 --- a/scripts/install.sh 
+++ b/scripts/install.sh @@ -13,7 +13,8 @@ execdir=`dirname $0` #install the nerdm library mkdir -p $PY_LIBDIR echo Installing python libraries into $PY_LIBDIR... -(cd $SOURCE_DIR/python && python setup.py install --install-purelib=$PY_LIBDIR --install-scripts=$BINDIR) +(cd $SOURCE_DIR/python && \ + python3 setup.py install --install-purelib=$PY_LIBDIR --install-scripts=$BINDIR --old-and-unmanageable) #install the JAVA jars # None at this time diff --git a/scripts/make_taxonomy.py b/scripts/make_taxonomy.py index 470f855..4c3c6e1 100755 --- a/scripts/make_taxonomy.py +++ b/scripts/make_taxonomy.py @@ -1,4 +1,4 @@ -#! /usr/bin/python +#! /usr/bin/env python # """ a script that reads taxonomy terms from an Excel spreadsheet and write them @@ -71,7 +71,7 @@ def fill(thisrow, lastrow): out.reverse() for i in range(len(out)): - if isinstance(out[i], (str, unicode)): + if isinstance(out[i], str): out[i] = out[i].strip() if not out[i] and i < len(lastrow): out[i] = lastrow[i] @@ -87,7 +87,7 @@ def get_header(): if os.path.exists(taxfile): with open(taxfile) as fd: header = json.load(fd, object_pairs_hook=OrderedDict) - except Exception, ex: + except Exception as ex: pass header['vocab'] = [] diff --git a/scripts/makedist.nerdmdocs b/scripts/makedist.nerdmdocs index cacd9e9..3c34540 100755 --- a/scripts/makedist.nerdmdocs +++ b/scripts/makedist.nerdmdocs @@ -10,6 +10,7 @@ execdir=`dirname $0` [ "$execdir" = "" -o "$execdir" = "." ] && execdir=$PWD PACKAGE_DIR=`(cd $execdir/.. > /dev/null 2>&1; pwd)` SOURCE_DIR=$PACKAGE_DIR +alias python=python3 # Update this list with the names of the individual component names # diff --git a/scripts/makedist.pdr b/scripts/makedist.pdr index 5e6c0e9..0486e37 100755 --- a/scripts/makedist.pdr +++ b/scripts/makedist.pdr @@ -10,6 +10,7 @@ execdir=`dirname $0` [ "$execdir" = "" -o "$execdir" = "." ] && execdir=$PWD PACKAGE_DIR=`(cd $execdir/.. 
> /dev/null 2>&1; pwd)` SOURCE_DIR=$PACKAGE_DIR +alias python=python3 # Update this list with the names of the individual component names # diff --git a/scripts/pdl2resources.py b/scripts/pdl2resources.py index a0ac60c..58bbcd5 100755 --- a/scripts/pdl2resources.py +++ b/scripts/pdl2resources.py @@ -1,4 +1,4 @@ -#! /usr/bin/python +#! /usr/bin/env python3 # # Usage: pdl2resources [-d DIR] [-i START] [-c COUNT] PDLFILE # @@ -6,7 +6,6 @@ # NERDm Resource records, and write them out into individual files. New ARK # identifiers will be assigned to each one. # -from __future__ import print_function import os, sys, errno, json, re from argparse import ArgumentParser from collections import OrderedDict @@ -39,7 +38,7 @@ sys.path.extend(oarpypath.split(os.pathsep)) try: import nistoar -except ImportError, e: +except ImportError as e: nistoardir = os.path.join(basedir, "python") sys.path.append(nistoardir) import nistoar @@ -96,13 +95,13 @@ def main(args): try: with open(opts.pdlfile) as fd: pdldata = json.load(fd) - except IOError, e: + except IOError as e: raise RuntimeError("Unable to read PDL file ({0}): {1}". format(opts.pdlfile, str(e))) - except ValueError, e: + except ValueError as e: raise RuntimeError("JSON Syntax error: "+str(e)) - if not pdldata.has_key('dataset'): + if 'dataset' not in pdldata: raise RuntimeError("PDL catalog document is missing its 'dataset' property") dss = pdldata['dataset'] @@ -193,7 +192,7 @@ def set_theme_as_topic(rec, tax): if count > 0: print("Wrote {0} files".format(count)) sys.exit(0) - except RuntimeError, e: + except RuntimeError as e: print("Error: ", str(e), file=sys.stderr) sys.exit(1) diff --git a/scripts/record_deps.py b/scripts/record_deps.py index 94ec83c..6bccde7 100755 --- a/scripts/record_deps.py +++ b/scripts/record_deps.py @@ -1,4 +1,4 @@ -#! /usr/bin/python +#! /usr/bin/env python3 # # record_deps.py -- encode the dependencies of a distribution as JSON object, # writing it to standard output. 
@@ -12,7 +12,6 @@ # The default package name (oar-sdp) can be over-ridden by the environment # variable PACKAGE_NAME # -from __future__ import print_function import os, sys, json, re from collections import OrderedDict diff --git a/scripts/test_pdl2resources.py b/scripts/test_pdl2resources.py index 9290366..4a1a86f 100755 --- a/scripts/test_pdl2resources.py +++ b/scripts/test_pdl2resources.py @@ -1,7 +1,5 @@ -#!/usr/bin/python +#!/usr/bin/env python # -from __future__ import print_function - import os, pdb, sys, shutil, json import unittest as test import ejsonschema as ejs @@ -40,8 +38,8 @@ def setUp(self): def test_convert(self): - script = "python {0} -d {1} -T {2}".format(cvtscript, outdir, pdlfile) - self.assertEquals(os.system(script), 0) + script = "python3 {0} -d {1} -T {2}".format(cvtscript, outdir, pdlfile) + self.assertEqual(os.system(script), 0) files = [f for f in os.listdir(outdir) if f.endswith(".json")] failed = [] @@ -69,7 +67,7 @@ def test_convert(self): self.assertTrue(all([':' in t for t in nerd['theme']])) sys.stderr.write("\nValidated {0} files".format(str(passed))) - self.assertEquals(len(failed), 0, + self.assertEqual(len(failed), 0, "{0} converted file(s) failed validation".format(str(len(failed)))) diff --git a/scripts/testall.py b/scripts/testall.py index 9052461..d393794 100755 --- a/scripts/testall.py +++ b/scripts/testall.py @@ -1,8 +1,8 @@ -#!/usr/bin/python +#!/usr/bin/env python3 # import os, sys, unittest, pdb -from nistoar.rmm.mongo.tests import warnings -sys.modules['warnings'] = warnings +# from nistoar.rmm.mongo.tests import warnings +# sys.modules['warnings'] = warnings basedir = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) jqlib = os.path.join(basedir, "jq") @@ -15,39 +15,42 @@ pdltest = os.path.join(basedir, "scripts", "test_pdl2resources.py") extest = os.path.join(basedir, "model", "tests", "test_examples.py") pydir = os.path.join(basedir, "python") +pytestdir = os.path.join(pydir, "tests") -print "Executing 
all tests..." +print("Executing all tests...") -print "Executing jq translation library tests..." +print("Executing jq translation library tests...") status = 0 notok = os.system("jq -L {0} --run-tests {1}".format(jqlib, jqtest[0])) notok2 = os.system("jq -L {0} --run-tests {1}".format(jqlib, jqtest[1])) notok3 = os.system("jq -L {0} --run-tests {1}".format(jqlib, jqtest[2])) if notok or notok2 or notok3: - print "**ERROR: some or all jq tests have failed" + print("**ERROR: some or all jq tests have failed") status += 1 -print "Executing validation tests..." +print("Executing validation tests...") -notok = os.system("python {0}".format(nerdmtest[0])) -notok2 = os.system("python {0}".format(nerdmtest[1])) +notok = os.system("/usr/bin/env python3 {0}".format(nerdmtest[0])) +notok2 = os.system("/usr/bin/env python3 {0}".format(nerdmtest[1])) if notok or notok2: - print "**ERROR: some or all basic validation tests have failed" + print("**ERROR: some or all basic validation tests have failed") status += 2 -notok = os.system("python {0}".format(extest)) +notok = os.system("/usr/bin/env python3 {0}".format(extest)) if notok: - print "**ERROR: some or all example files have failed validation" + print("**ERROR: some or all example files have failed validation") status += 4 -notok = os.system("python {0}".format(pdltest)) +notok = os.system("/usr/bin/env python3 {0}".format(pdltest)) if notok: - print "**ERROR: some or all pdl2resources output files have failed validation" + print("**ERROR: some or all pdl2resources output files have failed validation") status += 8 -print "Executing nistoar python tests..." +print("Executing nistoar python tests...") +os.environ.setdefault('OAR_TEST_INCLUDE', '') +os.environ['OAR_TEST_INCLUDE'] += " noreload" ldr = unittest.TestLoader() -suite = ldr.discover(pydir, "test_*.py") +suite = ldr.discover(pytestdir, "test_*.py", pydir) result = unittest.TextTestRunner().run(suite) if not result.wasSuccessful(): status += 16