fixing issue497
proccaserra committed Oct 10, 2023
2 parents 27c5797 + 6ce0008 commit 42b5d4b
Showing 281 changed files with 7,637 additions and 2,732 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/buildandtestpython.yml
@@ -10,7 +10,7 @@ jobs:
runs-on: ubuntu-latest
strategy:
matrix:
python-version: [3.8, 3.9]
python-version: [3.8, 3.9, '3.10', '3.11']

steps:
- uses: actions/checkout@v2
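
The added entries are quoted ('3.10', '3.11') because a bare YAML scalar such as 3.10 is parsed as the float 3.1, which would point setup-python at the wrong interpreter. A quick illustration with PyYAML (a sketch, not part of this diff):

import yaml  # PyYAML

print(yaml.safe_load("python-version: [3.8, 3.9, 3.10]"))
# {'python-version': [3.8, 3.9, 3.1]}  <- 3.10 collapses to the float 3.1
print(yaml.safe_load("python-version: [3.8, 3.9, '3.10', '3.11']"))
# {'python-version': [3.8, 3.9, '3.10', '3.11']}  <- quoting keeps the versions as strings
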
2 changes: 1 addition & 1 deletion .github/workflows/pythonpublish.yml
@@ -2,7 +2,7 @@ name: Upload Python Package

on:
release:
types: [created]
types: [published]

jobs:
deploy:
6 changes: 4 additions & 2 deletions .gitignore
@@ -139,6 +139,8 @@ performances/profiles/*
.github/workflows/_build.yml
course/
bin/
isa-reports/
isa-reports/*
isaRDF/
venv*/
venv*/
instance/*
instance/
@@ -24,7 +24,7 @@
},
{
"cell_type": "code",
"execution_count": null,
"execution_count": 17,
"outputs": [],
"source": [
"# Let's first import all the packages we need"
@@ -35,7 +35,7 @@
},
{
"cell_type": "code",
"execution_count": null,
"execution_count": 18,
"outputs": [],
"source": [
"from os import path\n",
@@ -61,7 +61,7 @@
},
{
"cell_type": "code",
"execution_count": null,
"execution_count": 19,
"outputs": [],
"source": [
"filepath = path.join('json', 'BII-S-3', 'BII-S-3.json')\n",
@@ -90,8 +90,16 @@
},
{
"cell_type": "code",
"execution_count": null,
"outputs": [],
"execution_count": 20,
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"['sample collection', 'nucleic acid extraction', 'reverse transcription', 'library construction', 'nucleic acid sequencing', 'data transformation']\n"
]
}
],
"source": [
"query = \"\"\"\n",
"{\n",
@@ -127,10 +135,10 @@
},
{
"cell_type": "code",
"execution_count": null,
"execution_count": 21,
"outputs": [],
"source": [
"set_context(new_context='wdt', combine=False, local=False)"
"set_context(vocab='wd', local=True, prepend_url='https://example.com', all_in_one=False)"
],
"metadata": {
"collapsed": false
@@ -139,10 +147,12 @@
{
"cell_type": "markdown",
"source": [
"The `set_context()` method takes three parameters:\n",
" - new_context: to choose the vocabulary to use between `sdo`, `obo` and `wdt`\n",
" - combine: if `True`, only one context will be used\n",
" - local: if `True`, uses local files else the GitHub contexts"
"The `set_context()` method takes five parameters:\n",
" - vocab: the vocabulary to use, one of `sdo`, `obo`, `wdt`, `wd` or `sio`\n",
" - local: if `True`, uses local context files; otherwise, uses the contexts hosted on GitHub\n",
" - prepend_url: the URL to prepend to the ISA identifiers (typically the URL of your SPARQL endpoint)\n",
" - all_in_one: if `True`, all the contexts are pulled from a single file instead of separate context files\n",
" - include_context: if `True`, the context is embedded in the JSON-LD serialization; otherwise it only contains the URL or local path to the context file."
],
"metadata": {
"collapsed": false
@@ -159,7 +169,7 @@
},
{
"cell_type": "code",
"execution_count": null,
"execution_count": 22,
"outputs": [],
"source": [
"ld = investigation.to_dict(ld=True)"
@@ -190,22 +200,20 @@
},
{
"cell_type": "code",
"execution_count": null,
"execution_count": 23,
"outputs": [],
"source": [
"# Creating the namespace\n",
"WDT = Namespace(\"http://www.wikidata.org/wiki/\")\n",
"WDTP = Namespace('https://www.wikidata.org/wiki/Property:')\n",
"WD = Namespace(\"http://www.wikidata.org/entity/\")\n",
"ISA = Namespace('https://isa.org/')\n",
"\n",
"ld_string = json.dumps(ld) # Get a string representation of the ld variable\n",
"graph = Graph() # Create an empty graph\n",
"graph.parse(data=ld_string, format='json-ld') # Load the data into the graph\n",
"\n",
"# Finally, bind the namespaces to the graph\n",
"graph.bind('wdt', WDT)\n",
"graph.bind('isa', ISA)\n",
"graph.bind('wdtp', WDTP)"
"graph.bind('wdt', WD)\n",
"graph.bind('isa', ISA)"
],
"metadata": {
"collapsed": false
@@ -222,23 +230,30 @@
},
{
"cell_type": "code",
"execution_count": null,
"outputs": [],
"execution_count": 24,
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"['sample collection', 'nucleic acid extraction', 'reverse transcription', 'library construction', 'nucleic acid sequencing', 'data transformation']\n"
]
}
],
"source": [
"query = \"\"\"\n",
"PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n",
"PREFIX owl: <http://www.w3.org/2002/07/owl#>\n",
"PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n",
"PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>\n",
"PREFIX wdtp: <https://www.wikidata.org/wiki/Property:>\n",
"PREFIX wdt: <http://www.wikidata.org/wiki/>\n",
"PREFIX wd: <http://www.wikidata.org/entity/>\n",
"\n",
"SELECT distinct ?protocolTypeName\n",
"WHERE {\n",
" ?p rdf:type wdt:Q41689629 . # Is a protocol\n",
" ?p wdtp:P7793 ?protocolType .\n",
" ?protocolType wdtp:P527 ?protocolTypeName . # Get each protocol type name\n",
" FILTER (?protocolTypeName!=\"\"^^wdt:Q1417099) # Filter out empty protocol type name\n",
" ?p rdf:type wd:Q41689629 . # Is a protocol\n",
" ?p wd:P7793 ?protocolType .\n",
" ?protocolType wd:P527 ?protocolTypeName . # Get each protocol type name\n",
" FILTER (?protocolTypeName!=\"\"^^wd:Q1417099) # Filter out empty protocol type name\n",
"}\n",
"\"\"\"\n",
"protocols_sparql = []\n",
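
The cell that actually runs this query is truncated in the diff; a sketch of what executing it with rdflib looks like (only graph, query and protocols_sparql come from the notebook itself, and attribute access on result rows is standard rdflib behaviour):

# Sketch: run the SPARQL query against the graph built earlier and collect the names.
for row in graph.query(query):
    protocols_sparql.append(str(row.protocolTypeName))
print(protocols_sparql)
# Expected to match the stream output shown above, e.g. ['sample collection', 'nucleic acid extraction', ...]
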
2 changes: 1 addition & 1 deletion isatools/convert/isatab2json.py
@@ -19,7 +19,7 @@
log = logging.getLogger('isatools')

SCHEMAS_PATH = join(os.path.dirname(os.path.realpath(__file__)),
"../resources/schemas/v1.0.1/")
"..", "resources", "schemas", "isa_model_version_1_0_schemas", "core")
INVESTIGATION_SCHEMA = "investigation_schema.json"

# REGEXES
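
The schema location is now built from individual path components (and points at the isa_model_version_1_0_schemas/core directory). Passing the components separately lets os.path.join insert the correct separator on every platform, instead of hard-coding '/'. A minimal illustration:

from os.path import join

# join() uses the platform separator, so the same call is valid on POSIX and Windows.
print(join("..", "resources", "schemas", "isa_model_version_1_0_schemas", "core"))
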
4 changes: 2 additions & 2 deletions isatools/create/connectors.py
@@ -119,9 +119,9 @@ def assay_ordered_dict_to_template(assay_ord_dict):
:return: dict, can be directly serialized to JSON
"""
res = dict()
res['measurement_type'] = _reverse_map_ontology_annotation(assay_ord_dict['measurement_type'],
res['measurement_type'] = _reverse_map_ontology_annotation(assay_ord_dict.get('measurement_type', None),
compress_strings=True)
res['technology_type'] = _reverse_map_ontology_annotation(assay_ord_dict['technology_type'],
res['technology_type'] = _reverse_map_ontology_annotation(assay_ord_dict.get('technology_type', None),
compress_strings=True)
res['workflow'] = []
for name, nodes in assay_ord_dict.items():
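
Switching from assay_ord_dict['measurement_type'] to assay_ord_dict.get('measurement_type', None) makes a missing key non-fatal: dict.get() returns the default instead of raising KeyError. For example (illustrative dictionary):

assay_ord_dict = {'technology_type': 'mass spectrometry'}   # no 'measurement_type' key

print(assay_ord_dict.get('measurement_type', None))  # prints None, no exception
# assay_ord_dict['measurement_type']                 # would raise KeyError
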
3 changes: 2 additions & 1 deletion isatools/create/model.py
@@ -7,7 +7,8 @@
import itertools
import json
import re
from collections import OrderedDict, Iterable
from collections import OrderedDict
from collections.abc import Iterable
from copy import deepcopy
import logging
from numbers import Number
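
The split import is what keeps this module importable on the Python 3.10/3.11 runners added above: Iterable has lived in collections.abc since Python 3.3, and the deprecated alias in the collections top-level namespace was removed in Python 3.10. A quick check:

from collections import OrderedDict         # still lives in collections
from collections.abc import Iterable        # its home since Python 3.3; gone from collections in 3.10

print(isinstance([1, 2, 3], Iterable))      # True
print(isinstance(OrderedDict(), Iterable))  # True -- mappings are iterable too
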
17 changes: 17 additions & 0 deletions isatools/database/__init__.py
@@ -0,0 +1,17 @@
"""
The isatools database package contains the SQLAlchemy models for the ISA tools library.
It works by dynamically adding methods to the models defined in the isatools.model package.
The database model is highly sensitive to identifiers. We suggest serializing all your ISA-Tab files to JSON
first, to enforce the generation of identifiers; this ensures that the database model can serialize the ISA
objects with SQLAlchemy.
Author: D. Batista (@Terazus)
"""

from isatools.database.utils import app, db
from isatools.database.models import (
Comment, Publication, Investigation, Study, OntologyAnnotation, OntologySource,
Parameter, Person, Process, Protocol, Source, Characteristic, Factor, Sample,
FactorValue, Material, ParameterValue, Assay, Datafile as DataFile
)
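
The names app and db are only shown being imported from isatools.database.utils. Assuming they are the Flask application and Flask-SQLAlchemy handle that the SQLAlchemy-based docstring suggests (this diff does not show their definitions), a minimal bootstrap sketch would be:

# Sketch only -- assumes `app` is a Flask app and `db` a Flask-SQLAlchemy instance.
from isatools.database import app, db

with app.app_context():
    db.create_all()   # create the tables backing the models listed in this package
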
92 changes: 92 additions & 0 deletions isatools/database/models/__init__.py
@@ -0,0 +1,92 @@
from isatools.database.models.comment import (
CommentModel as Comment, Comment as CommentTable, make_comment_methods
)
from isatools.database.models.publication import (
PublicationModel as Publication, Publication as PublicationTable, make_publication_methods
)
from isatools.database.models.investigation import (
InvestigationModel as Investigation, Investigation as InvestigationTable, make_investigation_methods
)
from isatools.database.models.study import (
StudyModel as Study, Study as StudyTable, make_study_methods
)
from isatools.database.models.ontology_annotation import (
OntologyAnnotationModel as OntologyAnnotation, OntologyAnnotation as OntologyAnnotationTable,
make_ontology_annotation_methods
)
from isatools.database.models.ontology_source import (
OntologySourceModel as OntologySource, OntologySource as OntologySourceTable, make_ontology_source_methods
)
from isatools.database.models.parameter import (
ParameterModel as Parameter, Parameter as ParameterTable, make_parameter_methods
)
from isatools.database.models.person import (
PersonModel as Person, Person as PersonTable, make_person_methods
)
from isatools.database.models.process import (
ProcessModel as Process, Process as ProcessTable, make_process_methods
)
from isatools.database.models.protocol import (
ProtocolModel as Protocol, Protocol as ProtocolTable, make_protocol_methods
)
from isatools.database.models.source import (
SourceModel as Source, Source as SourceTable, make_source_methods
)
from isatools.database.models.characteristic import (
CharacteristicModel as Characteristic, Characteristic as CharacteristicTable, make_characteristic_methods
)
from isatools.database.models.study_factor import (
StudyFactorModel as Factor, StudyFactor as FactorTable, make_study_factor_methods
)
from isatools.database.models.sample import (
SampleModel as Sample, Sample as SampleTable, make_sample_methods
)
from isatools.database.models.factor_value import (
FactorValueModel as FactorValue, FactorValue as FactorValueTable, make_factor_value_methods
)
from isatools.database.models.material import (
MaterialModel as Material, Material as MaterialTable, make_material_methods
)
from isatools.database.models.parameter_value import (
ParameterValueModel as ParameterValue, ParameterValue as ParameterValueTable, make_parameter_value_methods
)
from isatools.database.models.assay import (
AssayModel as Assay, Assay as AssayTable, make_assay_methods
)
from isatools.database.models.datafile import (
DataFileModel as Datafile, Datafile as DatafileTable, make_datafile_methods
)


def __make_methods():
# base methods
make_comment_methods()
make_ontology_source_methods()
make_ontology_annotation_methods()
make_publication_methods()
make_person_methods()

# studies methods
make_parameter_methods()
make_parameter_value_methods()
make_process_methods()
make_protocol_methods()
make_study_factor_methods()
make_factor_value_methods()

# materials methods
make_characteristic_methods()
make_source_methods()
make_sample_methods()
make_material_methods()
make_datafile_methods()

# assays
make_assay_methods()

# investigation methods
make_study_methods()
make_investigation_methods()


__make_methods()