Skip to content

Commit

Permalink
Fix mypy errors in docs/examples/
Browse files Browse the repository at this point in the history
Also:
- Update examples to use the ``invoke()`` method instead of the
  removed ``run()``.
- Use ``requests`` instead of ``scripts/api/common.py`` from Galaxy.
  • Loading branch information
nsoranzo committed Nov 10, 2023
1 parent dd18305 commit 49930ac
Show file tree
Hide file tree
Showing 10 changed files with 66 additions and 39 deletions.
3 changes: 3 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,9 @@
``GalaxyClient``, ``GalaxyInstance`` and ``ToolShedInstance`` classes are now
keyword-only.

* Classes defined in ``bioblend.galaxy.objects.wrappers`` are no longer
re-exported by ``bioblend.galaxy.objects``.

### BioBlend v1.2.0 - 2023-06-30

* Dropped support for Galaxy releases 17.09-19.01. Added support for Galaxy
Expand Down
3 changes: 2 additions & 1 deletion bioblend/galaxy/objects/__init__.py
Original file line number Diff line number Diff line change
@@ -1,2 +1,3 @@
from .galaxy_instance import GalaxyInstance # noqa: F401
from .wrappers import * # noqa: F401,F403

__all__ = ("GalaxyInstance",)
9 changes: 8 additions & 1 deletion bioblend/galaxy/objects/wrappers.py
Original file line number Diff line number Diff line change
Expand Up @@ -1827,6 +1827,8 @@ class DatasetContainerPreview(Wrapper):
"deleted",
"name",
)
deleted: bool
name: str


class LibraryPreview(DatasetContainerPreview):
Expand Down Expand Up @@ -1872,6 +1874,12 @@ class WorkflowPreview(Wrapper):
"show_in_tool_panel",
"tags",
)
deleted: bool
name: str
owner: str
published: bool
show_in_tool_panel: bool
tags: List[str]


class InvocationPreview(Wrapper):
Expand All @@ -1884,7 +1892,6 @@ class InvocationPreview(Wrapper):

BASE_ATTRS = Wrapper.BASE_ATTRS + (
"history_id",
"id",
"state",
"update_time",
"uuid",
Expand Down
10 changes: 9 additions & 1 deletion docs/examples/objects/common.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,12 @@
def get_one(iterable):
from typing import (
    Iterable,
    TypeVar,
)

T = TypeVar("T")


def get_one(iterable: Iterable[T]) -> T:
    """Return the single element yielded by *iterable*.

    Asserts that the iterable contains exactly one item.
    """
    items = [*iterable]
    assert len(items) == 1
    return items[0]
10 changes: 6 additions & 4 deletions docs/examples/objects/small.py
Original file line number Diff line number Diff line change
Expand Up @@ -42,13 +42,15 @@
history_name = "get_col output"
params = {"Cut1": {"columnList": "c2"}}
print(f"Running workflow: {wf.name} [{wf.id}]")
outputs, out_hist = wf.run(input_map, history_name, params=params, wait=True)
inv = wf.invoke(input_map, params=params, history=history_name, inputs_by="name")
out_hist = gi.histories.get(inv.history_id)
inv.wait()
print("Job has finished")
assert out_hist.name == history_name
print(f"Output history: {out_hist.name} [{out_hist.id}]")

# Save results to local disk
out_ds = get_one([_ for _ in outputs if _.name == "Cut on data 1"])
with tempfile.NamedTemporaryFile(prefix="bioblend_", delete=False) as f:
out_ds.download(f)
# Save the single matching output dataset to a local temporary file.
out_ds = get_one(out_hist.get_datasets(name="Cut on data 1"))
with tempfile.NamedTemporaryFile(prefix="bioblend_", delete=False) as tmp_f:
    out_ds.download(tmp_f)
# Bug fix: the handle was renamed to ``tmp_f`` above, so ``f`` is undefined
# here — report the temporary file's actual name.
print(f'Output downloaded to "{tmp_f.name}"')
3 changes: 2 additions & 1 deletion docs/examples/objects/w2_bacterial_reseq.py
Original file line number Diff line number Diff line change
Expand Up @@ -56,7 +56,8 @@

# Run the workflow on a new history with the selected datasets as inputs

outputs, out_hist = iw.run(input_map, h, params=params)
inv = iw.invoke(input_map, params=params, history=h, inputs_by="name")
out_hist = gi.histories.get(inv.history_id)
assert out_hist.name == history_name

print(f"Running workflow: {iw.name} [{iw.id}]")
Expand Down
9 changes: 7 additions & 2 deletions docs/examples/objects/w3_bacterial_denovo.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,10 @@
import json
import os
import sys
from typing import (
Any,
Dict,
)

from common import get_one # noqa:I100,I201

Expand Down Expand Up @@ -51,7 +55,7 @@
lengths = {"19", "23", "29"}
ws_ids = iw.tool_labels_to_ids["velveth"]
assert len(ws_ids) == len(lengths)
params = {id_: {"hash_length": v} for id_, v in zip(ws_ids, lengths)}
params: Dict[str, Any] = {id_: {"hash_length": v} for id_, v in zip(ws_ids, lengths)}

# Set the "ins_length" runtime parameter to the same value for the 3
# "velvetg" steps
Expand All @@ -70,7 +74,8 @@

# Run the workflow on a new history with the selected datasets as inputs

outputs, out_hist = iw.run(input_map, h, params=params)
inv = iw.invoke(input_map, params=params, history=h, inputs_by="name")
out_hist = gi.histories.get(inv.history_id)
assert out_hist.name == history_name

print(f"Running workflow: {iw.name} [{iw.id}]")
Expand Down
53 changes: 26 additions & 27 deletions docs/examples/objects/w5_galaxy_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,61 +2,59 @@
import os
import sys

import requests

# This example, provided for comparison with w5_metagenomics.py,
# contains the code required to run the metagenomics workflow
# *without* BioBlend.

URL = os.getenv("GALAXY_URL", "https://orione.crs4.it").rstrip("/")
API_URL = f"{URL}/api"
API_KEY = os.getenv("GALAXY_API_KEY", "YOUR_API_KEY")
if API_KEY == "YOUR_API_KEY":
API_KEY = os.getenv("GALAXY_API_KEY")
if not API_KEY:
sys.exit("API_KEY not set, see the README.txt file")

# Clone the galaxy git repository and replace
# YOUR_GALAXY_PATH with the clone's local path in the following code, e.g.:
# cd /tmp
# git clone https://github.com/galaxyproject/galaxy
# GALAXY_PATH = '/tmp/galaxy'

GALAXY_PATH = "YOUR_GALAXY_PATH"
sys.path.insert(1, os.path.join(GALAXY_PATH, "scripts/api"))
import common # noqa: E402,I100,I202
headers = {"Content-Type": "application/json", "x-api-key": API_KEY}

# Select "W5 - Metagenomics" from published workflows

workflow_name = "W5 - Metagenomics"
workflows = common.get(API_KEY, f"{API_URL}/workflows?show_published=True")
w = [_ for _ in workflows if _["published"] and _["name"] == workflow_name]
assert len(w) == 1
w = w[0]
r = requests.get(f"{API_URL}/workflows", params={"show_published": True}, headers=headers)
workflows = r.json()
filtered_workflows = [_ for _ in workflows if _["published"] and _["name"] == workflow_name]
assert len(filtered_workflows) == 1
w = filtered_workflows[0]

# Import the workflow to user space

data = {"workflow_id": w["id"]}
iw = common.post(API_KEY, f"{API_URL}/workflows/import", data)
iw_details = common.get(API_KEY, f"{API_URL}/workflows/{iw['id']}")
r = requests.post(f"{API_URL}/workflows/import", data=json.dumps(data), headers=headers)
iw = r.json()
r = requests.get(f"{API_URL}/workflows/{iw['id']}", headers=headers)
iw_details = r.json()

# Select the "Orione SupMat" library

library_name = "Orione SupMat"
libraries = common.get(API_KEY, f"{API_URL}/libraries")
r = requests.get(f"{API_URL}/libraries", headers=headers)
libraries = r.json()
filtered_libraries = [_ for _ in libraries if _["name"] == library_name]
assert len(filtered_libraries) == 1
library = filtered_libraries[0]

# Select the "/Metagenomics/MetagenomicsDataset.fq" dataset

ds_name = "/Metagenomics/MetagenomicsDataset.fq"
contents = common.get(API_KEY, f"{API_URL}/libraries/{library['id']}/contents")
ld = [_ for _ in contents if _["type"] == "file" and _["name"] == ds_name]
assert len(ld) == 1
ld = ld[0]
r = requests.get(f"{API_URL}/libraries/{library['id']}/contents", headers=headers)
contents = r.json()
filtered_contents = [_ for _ in contents if _["type"] == "file" and _["name"] == ds_name]
assert len(filtered_contents) == 1
ld = filtered_contents[0]

# Select the blastn step

ws = [_ for _ in iw_details["steps"].values() if _["tool_id"] and "blastn" in _["tool_id"]]
assert len(ws) == 1
ws = ws[0]
filtered_wf_steps = [_ for _ in iw_details["steps"].values() if _["tool_id"] and "blastn" in _["tool_id"]]
assert len(filtered_wf_steps) == 1
ws = filtered_wf_steps[0]
tool_id = ws["tool_id"]

# Get (a copy of) the parameters dict for the selected step
Expand All @@ -78,7 +76,8 @@
# Bug fix: in Python 3, dict views (``.keys()``) are not subscriptable, so
# ``iw_details["inputs"].keys()[0]`` raises TypeError. Take the first key by
# iterating the mapping instead.
input_step_id = next(iter(iw_details["inputs"]))
data["ds_map"] = {input_step_id: {"src": "ld", "id": ld["id"]}}
data["history"] = history_name
r_dict = common.post(API_KEY, f"{API_URL}/workflows", data)
r = requests.post(f"{API_URL}/workflows", data=json.dumps(data), headers=headers)
r_dict = r.json()

print(f"Running workflow: {iw['name']} [{iw['id']}]")
print(f"Output history: {history_name} [{r_dict['history']}]")
3 changes: 2 additions & 1 deletion docs/examples/objects/w5_metagenomics.py
Original file line number Diff line number Diff line change
Expand Up @@ -52,7 +52,8 @@

params = {tool_id: {"db_opts": json.loads(ws_parameters["db_opts"])}}
params[tool_id]["db_opts"]["database"] = "16SMicrobial-20131106"
outputs, out_hist = iw.run(input_map, h, params=params)
inv = iw.invoke(input_map, params=params, history=h, inputs_by="name")
out_hist = gi.histories.get(inv.history_id)
assert out_hist.name == history_name

print(f"Running workflow: {iw.name} [{iw.id}]")
Expand Down
2 changes: 1 addition & 1 deletion tox.ini
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ commands =
flake8 .
black --check --diff .
isort --check --diff .
mypy bioblend/
mypy bioblend/ docs/examples/
deps =
black
flake8
Expand Down

0 comments on commit 49930ac

Please sign in to comment.