From 45834a114fc36e649024379e60301251698ca4b3 Mon Sep 17 00:00:00 2001 From: kjungwoo5 Date: Fri, 2 Aug 2024 14:57:37 +0100 Subject: [PATCH 01/34] Make empty cuttlefish.py file --- brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py diff --git a/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py b/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py new file mode 100644 index 00000000..e69de29b From 2ace3d2bd7233f756a44688144a7d76fd7d7ada7 Mon Sep 17 00:00:00 2001 From: kjungwoo5 Date: Thu, 8 Aug 2024 22:32:58 +0100 Subject: [PATCH 02/34] Download cuttlefish hierarchy file from github and amend format --- .../atlas_scripts/cuttlefish.py | 87 +++++++++++++++++++ 1 file changed, 87 insertions(+) diff --git a/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py b/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py index e69de29b..ebbbd0e4 100644 --- a/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py +++ b/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py @@ -0,0 +1,87 @@ +__version__ = "0" + +import csv +import glob as glob +import time +from pathlib import Path + +import numpy as np +import pooch +import tifffile +from rich.progress import track + +from brainglobe_atlasapi import utils + +# from skimage import io +'''from brainglobe_atlasapi.atlas_generation.mesh_utils import ( + Region, + create_region_mesh, +)''' +from brainglobe_atlasapi.atlas_generation.wrapup import wrapup_atlas_from_data +from brainglobe_atlasapi.structure_tree_util import get_structures_tree + +def create_atlas(working_dir, resolution): + + HIERARCHY_FILE_URL = 'https://raw.githubusercontent.com/noisyneuron/cuttlebase-util/main/data/brain-hierarchy.csv' + + download_dir_path = working_dir / "downloads" + download_dir_path.mkdir(exist_ok=True) + atlas_path = download_dir_path / "atlas_files" + + + # download hierarchy files + utils.check_internet_connection() + csv_path = pooch.retrieve( + HIERARCHY_FILE_URL, + known_hash='023418e626bdefbd177d4bb8c08661bd63a95ccff47720e64bb7a71546935b77', + progressbar=True, + ) + + # create dictionaries + print("Creating structure tree") + with open( + csv_path, mode="r", encoding="utf-8-sig" + ) as cuttlefish_file: + cuttlefish_dict_reader = csv.DictReader(cuttlefish_file) + + # empty list to populate with dictionaries + hierarchy = [] + + # parse through csv file and populate hierarchy list + for row in cuttlefish_dict_reader: + hierarchy.append(row) + + + # remove 'hasSides' and 'function' keys, reorder and rename the remaining keys + for i in range(0, len(hierarchy)): + hierarchy[i]['acronym'] = hierarchy[i].pop('abbreviation') + hierarchy[i].pop('hasSides') + hierarchy[i].pop('function') + hierarchy[i]["structure_id_path"] = list( + (map(int, (hierarchy[i]["index"].split("-")))) + ) + hierarchy[i]["structure_id_path"].insert(0, 999) + hierarchy[i].pop('index') + hierarchy[i]['parent_structure_id']=hierarchy[i]["structure_id_path"][-2] + + # add the 'root' structure + hierarchy.append({ + "name":"Brain", + "acronym":"root", + "structure_id_path":[999], + "parent_structure_id":'', + }) + + # check the transformed version of the hierarchy.csv file + print(hierarchy) + + return None + + +if __name__ == "__main__": + res = 2, 2, 2 + home = str(Path.home()) + bg_root_dir = Path.home() / "brainglobe_workingdir" + bg_root_dir.mkdir(exist_ok=True, parents=True) 
+ + create_atlas(bg_root_dir, res) From 1edcf8289804ada2399e86984026c7322eee1be7 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Thu, 8 Aug 2024 21:33:35 +0000 Subject: [PATCH 03/34] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- .../atlas_scripts/cuttlefish.py | 57 +++++++++---------- 1 file changed, 26 insertions(+), 31 deletions(-) diff --git a/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py b/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py index ebbbd0e4..ecc7f508 100644 --- a/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py +++ b/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py @@ -2,46 +2,38 @@ import csv import glob as glob -import time from pathlib import Path -import numpy as np import pooch -import tifffile -from rich.progress import track from brainglobe_atlasapi import utils # from skimage import io -'''from brainglobe_atlasapi.atlas_generation.mesh_utils import ( +"""from brainglobe_atlasapi.atlas_generation.mesh_utils import ( Region, create_region_mesh, -)''' -from brainglobe_atlasapi.atlas_generation.wrapup import wrapup_atlas_from_data -from brainglobe_atlasapi.structure_tree_util import get_structures_tree +)""" + def create_atlas(working_dir, resolution): - - HIERARCHY_FILE_URL = 'https://raw.githubusercontent.com/noisyneuron/cuttlebase-util/main/data/brain-hierarchy.csv' + + HIERARCHY_FILE_URL = "https://raw.githubusercontent.com/noisyneuron/cuttlebase-util/main/data/brain-hierarchy.csv" download_dir_path = working_dir / "downloads" download_dir_path.mkdir(exist_ok=True) atlas_path = download_dir_path / "atlas_files" - # download hierarchy files utils.check_internet_connection() csv_path = pooch.retrieve( HIERARCHY_FILE_URL, - known_hash='023418e626bdefbd177d4bb8c08661bd63a95ccff47720e64bb7a71546935b77', + known_hash="023418e626bdefbd177d4bb8c08661bd63a95ccff47720e64bb7a71546935b77", progressbar=True, ) - + # create dictionaries print("Creating structure tree") - with open( - csv_path, mode="r", encoding="utf-8-sig" - ) as cuttlefish_file: + with open(csv_path, mode="r", encoding="utf-8-sig") as cuttlefish_file: cuttlefish_dict_reader = csv.DictReader(cuttlefish_file) # empty list to populate with dictionaries @@ -51,30 +43,33 @@ def create_atlas(working_dir, resolution): for row in cuttlefish_dict_reader: hierarchy.append(row) - # remove 'hasSides' and 'function' keys, reorder and rename the remaining keys for i in range(0, len(hierarchy)): - hierarchy[i]['acronym'] = hierarchy[i].pop('abbreviation') - hierarchy[i].pop('hasSides') - hierarchy[i].pop('function') + hierarchy[i]["acronym"] = hierarchy[i].pop("abbreviation") + hierarchy[i].pop("hasSides") + hierarchy[i].pop("function") hierarchy[i]["structure_id_path"] = list( (map(int, (hierarchy[i]["index"].split("-")))) ) hierarchy[i]["structure_id_path"].insert(0, 999) - hierarchy[i].pop('index') - hierarchy[i]['parent_structure_id']=hierarchy[i]["structure_id_path"][-2] - + hierarchy[i].pop("index") + hierarchy[i]["parent_structure_id"] = hierarchy[i][ + "structure_id_path" + ][-2] + # add the 'root' structure - hierarchy.append({ - "name":"Brain", - "acronym":"root", - "structure_id_path":[999], - "parent_structure_id":'', - }) - + hierarchy.append( + { + "name": "Brain", + "acronym": "root", + "structure_id_path": [999], + "parent_structure_id": "", + } + ) + # check the transformed version of the hierarchy.csv file 
print(hierarchy) - + return None From 4f16ca5ff4348b5b3e4b3927af923018989533cf Mon Sep 17 00:00:00 2001 From: kjungwoo5 Date: Sun, 11 Aug 2024 00:36:44 +0100 Subject: [PATCH 04/34] Tweaked region ID structure, loaded in region colour map from cuttlebase --- .../atlas_scripts/cuttlefish.py | 39 ++++++++++++++++--- 1 file changed, 33 insertions(+), 6 deletions(-) diff --git a/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py b/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py index ebbbd0e4..b18e8e42 100644 --- a/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py +++ b/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py @@ -4,6 +4,7 @@ import glob as glob import time from pathlib import Path +import json import numpy as np import pooch @@ -23,6 +24,7 @@ def create_atlas(working_dir, resolution): HIERARCHY_FILE_URL = 'https://raw.githubusercontent.com/noisyneuron/cuttlebase-util/main/data/brain-hierarchy.csv' + BRAIN_SCENE_URL = 'https://raw.githubusercontent.com/noisyneuron/cuttlebase-util/main/data/brain-scene.json' download_dir_path = working_dir / "downloads" download_dir_path.mkdir(exist_ok=True) @@ -31,16 +33,17 @@ def create_atlas(working_dir, resolution): # download hierarchy files utils.check_internet_connection() - csv_path = pooch.retrieve( + hierarchy_path = pooch.retrieve( HIERARCHY_FILE_URL, known_hash='023418e626bdefbd177d4bb8c08661bd63a95ccff47720e64bb7a71546935b77', progressbar=True, ) + # create dictionaries print("Creating structure tree") with open( - csv_path, mode="r", encoding="utf-8-sig" + hierarchy_path, mode="r", encoding="utf-8-sig" ) as cuttlefish_file: cuttlefish_dict_reader = csv.DictReader(cuttlefish_file) @@ -62,18 +65,42 @@ def create_atlas(working_dir, resolution): ) hierarchy[i]["structure_id_path"].insert(0, 999) hierarchy[i].pop('index') - hierarchy[i]['parent_structure_id']=hierarchy[i]["structure_id_path"][-2] + path_string = [str(i) for i in hierarchy[i]["structure_id_path"]] + hierarchy[i]['id'] = int("".join(path_string)) + hierarchy[i]['parent_structure_id']=int(str(hierarchy[i]['id'])[:-1]) + prev = "" + for index, id in enumerate(hierarchy[i]["structure_id_path"]): + hierarchy[i]["structure_id_path"][index] = (str(prev) + str(id)) + prev = hierarchy[i]["structure_id_path"][index] # add the 'root' structure hierarchy.append({ - "name":"Brain", + "name":"root", "acronym":"root", "structure_id_path":[999], - "parent_structure_id":'', + "id":999, + "parent_structure_id":None, }) + + # download region colour data + brain_scene_path = pooch.retrieve( + BRAIN_SCENE_URL, + known_hash='057fe98ea5ae24c5f9a10aebec072a12f6df19447c3c027f0f12ddba61a1bb90', + progressbar=True, + ) + + # apply colour map to each region + print("Applying colours:") + f = open(brain_scene_path) + brain_scene = json.load() + colormap = brain_scene['params']['colors'] + + print(colormap) + f.close() # check the transformed version of the hierarchy.csv file - print(hierarchy) + #print(hierarchy) + return None From e440e5b10b4206c26fb1bfd7af657fdc4ff3bfb7 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Sat, 10 Aug 2024 23:39:18 +0000 Subject: [PATCH 05/34] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- .../atlas_scripts/cuttlefish.py | 53 +++++++++---------- 1 file changed, 26 insertions(+), 27 deletions(-) diff --git a/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py 
b/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py index 1ac8f638..55540bd3 100644 --- a/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py +++ b/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py @@ -2,8 +2,8 @@ import csv import glob as glob -from pathlib import Path import json +from pathlib import Path import pooch @@ -17,9 +17,9 @@ def create_atlas(working_dir, resolution): - - HIERARCHY_FILE_URL = 'https://raw.githubusercontent.com/noisyneuron/cuttlebase-util/main/data/brain-hierarchy.csv' - BRAIN_SCENE_URL = 'https://raw.githubusercontent.com/noisyneuron/cuttlebase-util/main/data/brain-scene.json' + + HIERARCHY_FILE_URL = "https://raw.githubusercontent.com/noisyneuron/cuttlebase-util/main/data/brain-hierarchy.csv" + BRAIN_SCENE_URL = "https://raw.githubusercontent.com/noisyneuron/cuttlebase-util/main/data/brain-scene.json" download_dir_path = working_dir / "downloads" download_dir_path.mkdir(exist_ok=True) @@ -32,8 +32,7 @@ def create_atlas(working_dir, resolution): known_hash="023418e626bdefbd177d4bb8c08661bd63a95ccff47720e64bb7a71546935b77", progressbar=True, ) - - + # create dictionaries print("Creating structure tree") with open( @@ -57,44 +56,44 @@ def create_atlas(working_dir, resolution): (map(int, (hierarchy[i]["index"].split("-")))) ) hierarchy[i]["structure_id_path"].insert(0, 999) - hierarchy[i].pop('index') + hierarchy[i].pop("index") path_string = [str(i) for i in hierarchy[i]["structure_id_path"]] - hierarchy[i]['id'] = int("".join(path_string)) - hierarchy[i]['parent_structure_id']=int(str(hierarchy[i]['id'])[:-1]) + hierarchy[i]["id"] = int("".join(path_string)) + hierarchy[i]["parent_structure_id"] = int(str(hierarchy[i]["id"])[:-1]) prev = "" for index, id in enumerate(hierarchy[i]["structure_id_path"]): - hierarchy[i]["structure_id_path"][index] = (str(prev) + str(id)) + hierarchy[i]["structure_id_path"][index] = str(prev) + str(id) prev = hierarchy[i]["structure_id_path"][index] - + # add the 'root' structure - hierarchy.append({ - "name":"root", - "acronym":"root", - "structure_id_path":[999], - "id":999, - "parent_structure_id":None, - }) - - + hierarchy.append( + { + "name": "root", + "acronym": "root", + "structure_id_path": [999], + "id": 999, + "parent_structure_id": None, + } + ) + # download region colour data brain_scene_path = pooch.retrieve( BRAIN_SCENE_URL, - known_hash='057fe98ea5ae24c5f9a10aebec072a12f6df19447c3c027f0f12ddba61a1bb90', + known_hash="057fe98ea5ae24c5f9a10aebec072a12f6df19447c3c027f0f12ddba61a1bb90", progressbar=True, ) - + # apply colour map to each region print("Applying colours:") f = open(brain_scene_path) brain_scene = json.load() - colormap = brain_scene['params']['colors'] - + colormap = brain_scene["params"]["colors"] + print(colormap) f.close() # check the transformed version of the hierarchy.csv file - #print(hierarchy) - - + # print(hierarchy) + return None From 09ed09f91de148b5603f661c5f4122406f11c80f Mon Sep 17 00:00:00 2001 From: kjungwoo5 Date: Mon, 26 Aug 2024 16:51:10 +0100 Subject: [PATCH 06/34] Applied RGB triplets and fixed acronyms for all regions --- .../atlas_scripts/cuttlefish.py | 53 +++++++++++++++++-- 1 file changed, 49 insertions(+), 4 deletions(-) diff --git a/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py b/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py index 1ac8f638..d82927dc 100644 --- a/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py +++ b/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py @@ 
-9,12 +9,22 @@ from brainglobe_atlasapi import utils +import pandas as pd + # from skimage import io """from brainglobe_atlasapi.atlas_generation.mesh_utils import ( Region, create_region_mesh, )""" +def hex_to_rgb(hex): + hex = hex.lstrip('#') + rgb = [] + for i in (0, 2, 4): + decimal = int(hex[i:i+2], 16) + rgb.append(decimal) + + return rgb def create_atlas(working_dir, resolution): @@ -51,6 +61,8 @@ def create_atlas(working_dir, resolution): # remove 'hasSides' and 'function' keys, reorder and rename the remaining keys for i in range(0, len(hierarchy)): hierarchy[i]["acronym"] = hierarchy[i].pop("abbreviation") + if hierarchy[i]["hasSides"] == 'Y': + hierarchy[i]["acronym"] = hierarchy[i]["acronym"] + "l" hierarchy[i].pop("hasSides") hierarchy[i].pop("function") hierarchy[i]["structure_id_path"] = list( @@ -66,6 +78,13 @@ def create_atlas(working_dir, resolution): hierarchy[i]["structure_id_path"][index] = (str(prev) + str(id)) prev = hierarchy[i]["structure_id_path"][index] + # fix 'parent_structure_id' for VS and HR + hierarchy[55]['parent_structure_id']=int(str(hierarchy[i]['parent_structure_id'])[:-1]) + hierarchy[56]['parent_structure_id']=int(str(hierarchy[i]['parent_structure_id'])[:-1]) + + # remove erroneous key for the VS region (error due to commas being included in the 'function' column) + hierarchy[-2].pop(None) + # add the 'root' structure hierarchy.append({ "name":"root", @@ -86,14 +105,40 @@ def create_atlas(working_dir, resolution): # apply colour map to each region print("Applying colours:") f = open(brain_scene_path) - brain_scene = json.load() - colormap = brain_scene['params']['colors'] + brain_scene = json.load(f) + colourmap = brain_scene['params']['colors'] + + for index, region in enumerate(hierarchy): + for colour in colourmap: + if region['acronym'] == colour['name']: + hierarchy[index]['rgb_triplet'] = hex_to_rgb(colour['color']) + + # give random RGB triplets to regions without specified RGB triplet values + random_rgb_triplets = [[156, 23, 189],[45, 178, 75],[231, 98, 50],[12, 200, 155],[87, 34, 255],[190, 145, 66],[64, 199, 225], + [255, 120, 5],[10, 45, 90],[145, 222, 33],[35, 167, 204],[76, 0, 89], [27, 237, 236], [255, 255, 255]] + + n = 0 + for index, region in enumerate(hierarchy): + if 'rgb_triplet' not in region: + hierarchy[index]['rgb_triplet'] = random_rgb_triplets[n] + n = n+1 + + # give filler acronyms for regions without specified acronyms + missing_acronyms = ['SpEM', 'VLC', 'BLC', 'SbEM', 'PLC', 'McLC', 'PvLC', 'BLC', 'PeM', + 'OTC', 'NF'] + n = 0 + for index, region in enumerate(hierarchy): + if hierarchy[index]['acronym'] == '': + hierarchy[index]['acronym'] = missing_acronyms[n] + n = n+1 + + - print(colormap) f.close() # check the transformed version of the hierarchy.csv file #print(hierarchy) - + #df = pd.DataFrame(hierarchy) + #df.to_csv('hierarchy_test.csv') return None From 6d5244d389c62a05dae0c17408322e6e043c3419 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Thu, 29 Aug 2024 22:49:54 +0000 Subject: [PATCH 07/34] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- .../atlas_scripts/cuttlefish.py | 97 ++++++++++++------- 1 file changed, 62 insertions(+), 35 deletions(-) diff --git a/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py b/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py index eefe0fe7..6f9bb70c 100644 --- a/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py 
+++ b/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py @@ -9,23 +9,23 @@ from brainglobe_atlasapi import utils -import pandas as pd - # from skimage import io """from brainglobe_atlasapi.atlas_generation.mesh_utils import ( Region, create_region_mesh, )""" + def hex_to_rgb(hex): - hex = hex.lstrip('#') + hex = hex.lstrip("#") rgb = [] for i in (0, 2, 4): - decimal = int(hex[i:i+2], 16) + decimal = int(hex[i : i + 2], 16) rgb.append(decimal) - + return rgb + def create_atlas(working_dir, resolution): HIERARCHY_FILE_URL = "https://raw.githubusercontent.com/noisyneuron/cuttlebase-util/main/data/brain-hierarchy.csv" @@ -60,7 +60,7 @@ def create_atlas(working_dir, resolution): # remove 'hasSides' and 'function' keys, reorder and rename the remaining keys for i in range(0, len(hierarchy)): hierarchy[i]["acronym"] = hierarchy[i].pop("abbreviation") - if hierarchy[i]["hasSides"] == 'Y': + if hierarchy[i]["hasSides"] == "Y": hierarchy[i]["acronym"] = hierarchy[i]["acronym"] + "l" hierarchy[i].pop("hasSides") hierarchy[i].pop("function") @@ -76,14 +76,18 @@ def create_atlas(working_dir, resolution): for index, id in enumerate(hierarchy[i]["structure_id_path"]): hierarchy[i]["structure_id_path"][index] = str(prev) + str(id) prev = hierarchy[i]["structure_id_path"][index] - + # fix 'parent_structure_id' for VS and HR - hierarchy[55]['parent_structure_id']=int(str(hierarchy[i]['parent_structure_id'])[:-1]) - hierarchy[56]['parent_structure_id']=int(str(hierarchy[i]['parent_structure_id'])[:-1]) - + hierarchy[55]["parent_structure_id"] = int( + str(hierarchy[i]["parent_structure_id"])[:-1] + ) + hierarchy[56]["parent_structure_id"] = int( + str(hierarchy[i]["parent_structure_id"])[:-1] + ) + # remove erroneous key for the VS region (error due to commas being included in the 'function' column) hierarchy[-2].pop(None) - + # add the 'root' structure hierarchy.append( { @@ -106,40 +110,63 @@ def create_atlas(working_dir, resolution): print("Applying colours:") f = open(brain_scene_path) brain_scene = json.load(f) - colourmap = brain_scene['params']['colors'] - + colourmap = brain_scene["params"]["colors"] + for index, region in enumerate(hierarchy): - for colour in colourmap: - if region['acronym'] == colour['name']: - hierarchy[index]['rgb_triplet'] = hex_to_rgb(colour['color']) - + for colour in colourmap: + if region["acronym"] == colour["name"]: + hierarchy[index]["rgb_triplet"] = hex_to_rgb(colour["color"]) + # give random RGB triplets to regions without specified RGB triplet values - random_rgb_triplets = [[156, 23, 189],[45, 178, 75],[231, 98, 50],[12, 200, 155],[87, 34, 255],[190, 145, 66],[64, 199, 225], - [255, 120, 5],[10, 45, 90],[145, 222, 33],[35, 167, 204],[76, 0, 89], [27, 237, 236], [255, 255, 255]] - + random_rgb_triplets = [ + [156, 23, 189], + [45, 178, 75], + [231, 98, 50], + [12, 200, 155], + [87, 34, 255], + [190, 145, 66], + [64, 199, 225], + [255, 120, 5], + [10, 45, 90], + [145, 222, 33], + [35, 167, 204], + [76, 0, 89], + [27, 237, 236], + [255, 255, 255], + ] + n = 0 for index, region in enumerate(hierarchy): - if 'rgb_triplet' not in region: - hierarchy[index]['rgb_triplet'] = random_rgb_triplets[n] - n = n+1 - + if "rgb_triplet" not in region: + hierarchy[index]["rgb_triplet"] = random_rgb_triplets[n] + n = n + 1 + # give filler acronyms for regions without specified acronyms - missing_acronyms = ['SpEM', 'VLC', 'BLC', 'SbEM', 'PLC', 'McLC', 'PvLC', 'BLC', 'PeM', - 'OTC', 'NF'] + missing_acronyms = [ + "SpEM", + "VLC", + "BLC", + "SbEM", + "PLC", + "McLC", + 
"PvLC", + "BLC", + "PeM", + "OTC", + "NF", + ] n = 0 for index, region in enumerate(hierarchy): - if hierarchy[index]['acronym'] == '': - hierarchy[index]['acronym'] = missing_acronyms[n] - n = n+1 - + if hierarchy[index]["acronym"] == "": + hierarchy[index]["acronym"] = missing_acronyms[n] + n = n + 1 - f.close() # check the transformed version of the hierarchy.csv file - #print(hierarchy) - #df = pd.DataFrame(hierarchy) - #df.to_csv('hierarchy_test.csv') - + # print(hierarchy) + # df = pd.DataFrame(hierarchy) + # df.to_csv('hierarchy_test.csv') + return None From 74d47c19ddee06a9b206e1aeee1de4201f07b879 Mon Sep 17 00:00:00 2001 From: kjungwoo5 Date: Mon, 16 Sep 2024 00:37:13 +0100 Subject: [PATCH 08/34] Revised hierarchy creation using annotation data, and added both left and right sides of regions to hierarchy file --- .../atlas_scripts/cuttlefish.py | 121 +++++++++++++----- 1 file changed, 87 insertions(+), 34 deletions(-) diff --git a/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py b/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py index eefe0fe7..f4aae5c8 100644 --- a/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py +++ b/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py @@ -6,8 +6,11 @@ from pathlib import Path import pooch +import re from brainglobe_atlasapi import utils +from brainglobe_utils.IO.image import load + import pandas as pd @@ -29,7 +32,8 @@ def hex_to_rgb(hex): def create_atlas(working_dir, resolution): HIERARCHY_FILE_URL = "https://raw.githubusercontent.com/noisyneuron/cuttlebase-util/main/data/brain-hierarchy.csv" - BRAIN_SCENE_URL = "https://raw.githubusercontent.com/noisyneuron/cuttlebase-util/main/data/brain-scene.json" + TEMPLATE_URL = r"https://www.dropbox.com/scl/fo/fz8gnpt4xqduf0dnmgrat/ABflM0-v-b4_2WthGaeYM4s/Averaged%2C%20template%20brain/2023_FINAL-Cuttlebase_warped_template.nii.gz?rlkey=eklemeh57slu7v6j1gphqup4z&dl=1" + ANNOTATION_URL = r"https://www.dropbox.com/scl/fo/fz8gnpt4xqduf0dnmgrat/ALfSeAj81IM0v56bEeoTfUQ/Averaged%2C%20template%20brain/2023_FINAL-Cuttlebase_warped_template_lobe-labels.nii.seg.nrrd?rlkey=eklemeh57slu7v6j1gphqup4z&dl=1" download_dir_path = working_dir / "downloads" download_dir_path.mkdir(exist_ok=True) @@ -42,6 +46,35 @@ def create_atlas(working_dir, resolution): known_hash="023418e626bdefbd177d4bb8c08661bd63a95ccff47720e64bb7a71546935b77", progressbar=True, ) + + # import cuttlefish .nrrd file + annotation_path = pooch.retrieve( + ANNOTATION_URL, + known_hash="768973251b179902ab48499093a4cc870cb6507c09ce46ff76b8203daf243f82", + progressbar=True, + ) + + import nrrd + # process brain annotation file. There are a total of 70 segments. + print("Processing brain annotations:") + readdata, header = nrrd.read(annotation_path) + + # Extract annotation mapping information from nrrd headers, to be applied to hierarchy file later. + mapping = [] + for n in range(0,70): + mapping.append({'color':header[f'Segment{n}_Color'], 'ID':header[f'Segment{n}_LabelValue'], 'acronym':header[f'Segment{n}_Name']}) + + # convert the color information stored as a string of 3 RGB floats into a list of 3 RGB integers from 0 to 255. 
+ for index, Map in enumerate(mapping): + mapping[index]['color'] = Map['color'].split(' ') + mapping[index]['color'] = list(map(float, mapping[index]['color'])) + mapping[index]['color'] = [int(255*x) for x in mapping[index]['color']] + + #print(mapping) + #df = pd.DataFrame(mapping) + #df.to_csv('mappingtest.csv') + + # create dictionaries print("Creating structure tree") @@ -55,13 +88,23 @@ def create_atlas(working_dir, resolution): # parse through csv file and populate hierarchy list for row in cuttlefish_dict_reader: - hierarchy.append(row) + if row['hasSides'] == 'Y': + leftSide = dict(row) + leftSide['abbreviation'] = leftSide['abbreviation'] + 'l' + leftSide['name'] = leftSide['name'] + ' (left)' + + rightSide = dict(row) + rightSide['abbreviation'] = rightSide['abbreviation'] + 'r' + rightSide['name'] = rightSide['name'] + ' (right)' + + hierarchy.append(leftSide) + hierarchy.append(rightSide) + else: + hierarchy.append(row) # remove 'hasSides' and 'function' keys, reorder and rename the remaining keys for i in range(0, len(hierarchy)): hierarchy[i]["acronym"] = hierarchy[i].pop("abbreviation") - if hierarchy[i]["hasSides"] == 'Y': - hierarchy[i]["acronym"] = hierarchy[i]["acronym"] + "l" hierarchy[i].pop("hasSides") hierarchy[i].pop("function") hierarchy[i]["structure_id_path"] = list( @@ -69,20 +112,24 @@ def create_atlas(working_dir, resolution): ) hierarchy[i]["structure_id_path"].insert(0, 999) hierarchy[i].pop("index") - path_string = [str(i) for i in hierarchy[i]["structure_id_path"]] - hierarchy[i]["id"] = int("".join(path_string)) - hierarchy[i]["parent_structure_id"] = int(str(hierarchy[i]["id"])[:-1]) + #TODO: Fix error! still has issues with duplicate values. + if len(hierarchy[i]['structure_id_path']) < 4 and hierarchy[i]['structure_id_path'][-2] != '9993': + if len(hierarchy[i]['structure_id_path']) == 3: + hierarchy[i]['ID'] = int(hierarchy[i]['structure_id_path'][-1]) + 200 + elif len(hierarchy[i]['structure_id_path']) == 2: + hierarchy[i]['ID'] = int(hierarchy[i]['structure_id_path'][-1]) + 100 + #hierarchy[i]["parent_structure_id"] = int(str(hierarchy[i]["id"])[:-1]) prev = "" for index, id in enumerate(hierarchy[i]["structure_id_path"]): hierarchy[i]["structure_id_path"][index] = str(prev) + str(id) prev = hierarchy[i]["structure_id_path"][index] # fix 'parent_structure_id' for VS and HR - hierarchy[55]['parent_structure_id']=int(str(hierarchy[i]['parent_structure_id'])[:-1]) - hierarchy[56]['parent_structure_id']=int(str(hierarchy[i]['parent_structure_id'])[:-1]) + # remove erroneous key for the VS region (error due to commas being included in the 'function' column) - hierarchy[-2].pop(None) + hierarchy[-3].pop(None) + hierarchy[-4].pop(None) # add the 'root' structure hierarchy.append( @@ -90,55 +137,61 @@ def create_atlas(working_dir, resolution): "name": "root", "acronym": "root", "structure_id_path": [999], - "id": 999, + "ID": 999, "parent_structure_id": None, } ) + + #print(hierarchy) - # download region colour data - brain_scene_path = pooch.retrieve( - BRAIN_SCENE_URL, - known_hash="057fe98ea5ae24c5f9a10aebec072a12f6df19447c3c027f0f12ddba61a1bb90", - progressbar=True, - ) - # apply colour map to each region - print("Applying colours:") - f = open(brain_scene_path) - brain_scene = json.load(f) - colourmap = brain_scene['params']['colors'] - + # apply colour and ID map to each region for index, region in enumerate(hierarchy): - for colour in colourmap: - if region['acronym'] == colour['name']: - hierarchy[index]['rgb_triplet'] = 
hex_to_rgb(colour['color']) + for Map in mapping: + if region['acronym'] == Map['acronym']: + hierarchy[index]['rgb_triplet'] = Map['color'] + hierarchy[index]['ID'] = Map['ID'] - # give random RGB triplets to regions without specified RGB triplet values + # original atlas does not give colours to some regions, so we give random RGB triplets to regions without specified RGB triplet values random_rgb_triplets = [[156, 23, 189],[45, 178, 75],[231, 98, 50],[12, 200, 155],[87, 34, 255],[190, 145, 66],[64, 199, 225], [255, 120, 5],[10, 45, 90],[145, 222, 33],[35, 167, 204],[76, 0, 89], [27, 237, 236], [255, 255, 255]] n = 0 - for index, region in enumerate(hierarchy): + '''for index, region in enumerate(hierarchy): if 'rgb_triplet' not in region: hierarchy[index]['rgb_triplet'] = random_rgb_triplets[n] - n = n+1 + n = n+1''' # give filler acronyms for regions without specified acronyms missing_acronyms = ['SpEM', 'VLC', 'BLC', 'SbEM', 'PLC', 'McLC', 'PvLC', 'BLC', 'PeM', 'OTC', 'NF'] n = 0 - for index, region in enumerate(hierarchy): + '''for index, region in enumerate(hierarchy): if hierarchy[index]['acronym'] == '': hierarchy[index]['acronym'] = missing_acronyms[n] - n = n+1 + n = n+1''' + + # import cuttlefish .nii file + template_path = pooch.retrieve( + TEMPLATE_URL, + known_hash="195125305a11abe6786be1b32830a8aed1bc8f68948ad53fa84bf74efe7cbe9c", + progressbar=True, + ) + # process brain template MRI file + print("Processing brain template:") + brain_template = load.load_nii(template_path) + #print(brain_template) + + + + - f.close() # check the transformed version of the hierarchy.csv file #print(hierarchy) - #df = pd.DataFrame(hierarchy) - #df.to_csv('hierarchy_test.csv') + df = pd.DataFrame(hierarchy) + df.to_csv('hierarchy_test.csv') return None From 57d7f5b3041a8d54ef1cad064f9058244c1580bf Mon Sep 17 00:00:00 2001 From: kjungwoo5 Date: Sun, 29 Sep 2024 12:59:10 +0100 Subject: [PATCH 09/34] Fixed structure ID path of all regions using the new IDs found from annotation data --- .../atlas_scripts/cuttlefish.py | 71 +++++++++++-------- 1 file changed, 43 insertions(+), 28 deletions(-) diff --git a/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py b/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py index f4aae5c8..37ce3f52 100644 --- a/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py +++ b/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py @@ -101,7 +101,10 @@ def create_atlas(working_dir, resolution): hierarchy.append(rightSide) else: hierarchy.append(row) - + + # use layer1 and layer2 to give IDs to regions which do not have existing IDs. + layer1 = 100 + layer2 = 200 # remove 'hasSides' and 'function' keys, reorder and rename the remaining keys for i in range(0, len(hierarchy)): hierarchy[i]["acronym"] = hierarchy[i].pop("abbreviation") @@ -112,20 +115,18 @@ def create_atlas(working_dir, resolution): ) hierarchy[i]["structure_id_path"].insert(0, 999) hierarchy[i].pop("index") - #TODO: Fix error! still has issues with duplicate values. 
- if len(hierarchy[i]['structure_id_path']) < 4 and hierarchy[i]['structure_id_path'][-2] != '9993': + if len(hierarchy[i]['structure_id_path']) < 4 and hierarchy[i]['structure_id_path'][-2] != 3: if len(hierarchy[i]['structure_id_path']) == 3: - hierarchy[i]['ID'] = int(hierarchy[i]['structure_id_path'][-1]) + 200 + hierarchy[i]['ID'] = layer2 + layer2 += 1 elif len(hierarchy[i]['structure_id_path']) == 2: - hierarchy[i]['ID'] = int(hierarchy[i]['structure_id_path'][-1]) + 100 - #hierarchy[i]["parent_structure_id"] = int(str(hierarchy[i]["id"])[:-1]) - prev = "" - for index, id in enumerate(hierarchy[i]["structure_id_path"]): - hierarchy[i]["structure_id_path"][index] = str(prev) + str(id) - prev = hierarchy[i]["structure_id_path"][index] - - # fix 'parent_structure_id' for VS and HR - + hierarchy[i]['ID'] = layer1 + layer1 += 1 + if hierarchy[i]['acronym'] == 'SB': + hierarchy[i]['ID'] = 71 + elif hierarchy[i]['acronym'] == 'IB': + hierarchy[i]['ID'] = 72 + # remove erroneous key for the VS region (error due to commas being included in the 'function' column) hierarchy[-3].pop(None) @@ -141,8 +142,6 @@ def create_atlas(working_dir, resolution): "parent_structure_id": None, } ) - - #print(hierarchy) # apply colour and ID map to each region @@ -150,26 +149,46 @@ def create_atlas(working_dir, resolution): for Map in mapping: if region['acronym'] == Map['acronym']: hierarchy[index]['rgb_triplet'] = Map['color'] - hierarchy[index]['ID'] = Map['ID'] + hierarchy[index]['ID'] = int(Map['ID']) + + # amend each region's structure_id_path by iterating through entire list, and replacing dummy values with actual ID values. + for i in range(0, len(hierarchy)): + if len(hierarchy[i]['structure_id_path']) == 2: + hierarchy[i]['structure_id_path'][1] = hierarchy[i]['ID'] + len2_shortest_index = i + + elif len(hierarchy[i]['structure_id_path']) == 3: + hierarchy[i]['structure_id_path'][1] = hierarchy[len2_shortest_index]['ID'] + hierarchy[i]['structure_id_path'][2] = hierarchy[i]['ID'] + len3_shortest_index = i + + elif len(hierarchy[i]['structure_id_path']) == 4: + hierarchy[i]['structure_id_path'][1] = hierarchy[len2_shortest_index]['ID'] + hierarchy[i]['structure_id_path'][2] = hierarchy[len3_shortest_index]['ID'] + hierarchy[i]['structure_id_path'][3] = hierarchy[i]["ID"] + # find parent_structure_id using resulting structure_id_path + if hierarchy[i]['name'] != 'root': + hierarchy[i]['parent_structure_id'] = hierarchy[i]['structure_id_path'][-2] + # original atlas does not give colours to some regions, so we give random RGB triplets to regions without specified RGB triplet values random_rgb_triplets = [[156, 23, 189],[45, 178, 75],[231, 98, 50],[12, 200, 155],[87, 34, 255],[190, 145, 66],[64, 199, 225], [255, 120, 5],[10, 45, 90],[145, 222, 33],[35, 167, 204],[76, 0, 89], [27, 237, 236], [255, 255, 255]] n = 0 - '''for index, region in enumerate(hierarchy): + for index, region in enumerate(hierarchy): if 'rgb_triplet' not in region: hierarchy[index]['rgb_triplet'] = random_rgb_triplets[n] - n = n+1''' + n = n+1 # give filler acronyms for regions without specified acronyms - missing_acronyms = ['SpEM', 'VLC', 'BLC', 'SbEM', 'PLC', 'McLC', 'PvLC', 'BLC', 'PeM', + missing_acronyms = ['SpEM', 'VLC', 'BsLC', 'SbEM', 'PLC', 'McLC', 'PvLC', 'BLC', 'PeM', 'OTC', 'NF'] n = 0 - '''for index, region in enumerate(hierarchy): + for index, region in enumerate(hierarchy): if hierarchy[index]['acronym'] == '': hierarchy[index]['acronym'] = missing_acronyms[n] - n = n+1''' + n = n+1 # import cuttlefish .nii file @@ 
-182,16 +201,12 @@ def create_atlas(working_dir, resolution): # process brain template MRI file print("Processing brain template:") brain_template = load.load_nii(template_path) - #print(brain_template) - - - # check the transformed version of the hierarchy.csv file - #print(hierarchy) - df = pd.DataFrame(hierarchy) - df.to_csv('hierarchy_test.csv') + print(hierarchy) + #df = pd.DataFrame(hierarchy) + #df.to_csv('hierarchy_test.csv') return None From ede9d78a54a1baff4e8a38e8d7b73363229fba94 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Sun, 29 Sep 2024 12:01:14 +0000 Subject: [PATCH 10/34] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- .../atlas_scripts/cuttlefish.py | 199 ++++++++++-------- 1 file changed, 116 insertions(+), 83 deletions(-) diff --git a/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py b/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py index 450315ad..9165bcf9 100644 --- a/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py +++ b/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py @@ -2,17 +2,12 @@ import csv import glob as glob -import json from pathlib import Path import pooch -import re - -from brainglobe_atlasapi import utils from brainglobe_utils.IO.image import load - -import pandas as pd +from brainglobe_atlasapi import utils # from skimage import io """from brainglobe_atlasapi.atlas_generation.mesh_utils import ( @@ -48,35 +43,42 @@ def create_atlas(working_dir, resolution): known_hash="023418e626bdefbd177d4bb8c08661bd63a95ccff47720e64bb7a71546935b77", progressbar=True, ) - - # import cuttlefish .nrrd file + + # import cuttlefish .nrrd file annotation_path = pooch.retrieve( ANNOTATION_URL, known_hash="768973251b179902ab48499093a4cc870cb6507c09ce46ff76b8203daf243f82", progressbar=True, ) - + import nrrd - # process brain annotation file. There are a total of 70 segments. + + # process brain annotation file. There are a total of 70 segments. print("Processing brain annotations:") readdata, header = nrrd.read(annotation_path) - - # Extract annotation mapping information from nrrd headers, to be applied to hierarchy file later. + + # Extract annotation mapping information from nrrd headers, to be applied to hierarchy file later. mapping = [] - for n in range(0,70): - mapping.append({'color':header[f'Segment{n}_Color'], 'ID':header[f'Segment{n}_LabelValue'], 'acronym':header[f'Segment{n}_Name']}) - + for n in range(0, 70): + mapping.append( + { + "color": header[f"Segment{n}_Color"], + "ID": header[f"Segment{n}_LabelValue"], + "acronym": header[f"Segment{n}_Name"], + } + ) + # convert the color information stored as a string of 3 RGB floats into a list of 3 RGB integers from 0 to 255. 
for index, Map in enumerate(mapping): - mapping[index]['color'] = Map['color'].split(' ') - mapping[index]['color'] = list(map(float, mapping[index]['color'])) - mapping[index]['color'] = [int(255*x) for x in mapping[index]['color']] - - #print(mapping) - #df = pd.DataFrame(mapping) - #df.to_csv('mappingtest.csv') - - + mapping[index]["color"] = Map["color"].split(" ") + mapping[index]["color"] = list(map(float, mapping[index]["color"])) + mapping[index]["color"] = [ + int(255 * x) for x in mapping[index]["color"] + ] + + # print(mapping) + # df = pd.DataFrame(mapping) + # df.to_csv('mappingtest.csv') # create dictionaries print("Creating structure tree") @@ -90,21 +92,21 @@ def create_atlas(working_dir, resolution): # parse through csv file and populate hierarchy list for row in cuttlefish_dict_reader: - if row['hasSides'] == 'Y': + if row["hasSides"] == "Y": leftSide = dict(row) - leftSide['abbreviation'] = leftSide['abbreviation'] + 'l' - leftSide['name'] = leftSide['name'] + ' (left)' - + leftSide["abbreviation"] = leftSide["abbreviation"] + "l" + leftSide["name"] = leftSide["name"] + " (left)" + rightSide = dict(row) - rightSide['abbreviation'] = rightSide['abbreviation'] + 'r' - rightSide['name'] = rightSide['name'] + ' (right)' - + rightSide["abbreviation"] = rightSide["abbreviation"] + "r" + rightSide["name"] = rightSide["name"] + " (right)" + hierarchy.append(leftSide) hierarchy.append(rightSide) else: hierarchy.append(row) - - # use layer1 and layer2 to give IDs to regions which do not have existing IDs. + + # use layer1 and layer2 to give IDs to regions which do not have existing IDs. layer1 = 100 layer2 = 200 # remove 'hasSides' and 'function' keys, reorder and rename the remaining keys @@ -117,23 +119,25 @@ def create_atlas(working_dir, resolution): ) hierarchy[i]["structure_id_path"].insert(0, 999) hierarchy[i].pop("index") - if len(hierarchy[i]['structure_id_path']) < 4 and hierarchy[i]['structure_id_path'][-2] != 3: - if len(hierarchy[i]['structure_id_path']) == 3: - hierarchy[i]['ID'] = layer2 + if ( + len(hierarchy[i]["structure_id_path"]) < 4 + and hierarchy[i]["structure_id_path"][-2] != 3 + ): + if len(hierarchy[i]["structure_id_path"]) == 3: + hierarchy[i]["ID"] = layer2 layer2 += 1 - elif len(hierarchy[i]['structure_id_path']) == 2: - hierarchy[i]['ID'] = layer1 + elif len(hierarchy[i]["structure_id_path"]) == 2: + hierarchy[i]["ID"] = layer1 layer1 += 1 - if hierarchy[i]['acronym'] == 'SB': - hierarchy[i]['ID'] = 71 - elif hierarchy[i]['acronym'] == 'IB': - hierarchy[i]['ID'] = 72 - - + if hierarchy[i]["acronym"] == "SB": + hierarchy[i]["ID"] = 71 + elif hierarchy[i]["acronym"] == "IB": + hierarchy[i]["ID"] = 72 + # remove erroneous key for the VS region (error due to commas being included in the 'function' column) hierarchy[-3].pop(None) hierarchy[-4].pop(None) - + # add the 'root' structure hierarchy.append( { @@ -145,38 +149,58 @@ def create_atlas(working_dir, resolution): } ) - # apply colour and ID map to each region for index, region in enumerate(hierarchy): - for Map in mapping: - if region['acronym'] == Map['acronym']: - hierarchy[index]['rgb_triplet'] = Map['color'] - hierarchy[index]['ID'] = int(Map['ID']) - - # amend each region's structure_id_path by iterating through entire list, and replacing dummy values with actual ID values. 
+ for Map in mapping: + if region["acronym"] == Map["acronym"]: + hierarchy[index]["rgb_triplet"] = Map["color"] + hierarchy[index]["ID"] = int(Map["ID"]) + + # amend each region's structure_id_path by iterating through entire list, and replacing dummy values with actual ID values. for i in range(0, len(hierarchy)): - if len(hierarchy[i]['structure_id_path']) == 2: - hierarchy[i]['structure_id_path'][1] = hierarchy[i]['ID'] + if len(hierarchy[i]["structure_id_path"]) == 2: + hierarchy[i]["structure_id_path"][1] = hierarchy[i]["ID"] len2_shortest_index = i - - elif len(hierarchy[i]['structure_id_path']) == 3: - hierarchy[i]['structure_id_path'][1] = hierarchy[len2_shortest_index]['ID'] - hierarchy[i]['structure_id_path'][2] = hierarchy[i]['ID'] + + elif len(hierarchy[i]["structure_id_path"]) == 3: + hierarchy[i]["structure_id_path"][1] = hierarchy[ + len2_shortest_index + ]["ID"] + hierarchy[i]["structure_id_path"][2] = hierarchy[i]["ID"] len3_shortest_index = i - - elif len(hierarchy[i]['structure_id_path']) == 4: - hierarchy[i]['structure_id_path'][1] = hierarchy[len2_shortest_index]['ID'] - hierarchy[i]['structure_id_path'][2] = hierarchy[len3_shortest_index]['ID'] - hierarchy[i]['structure_id_path'][3] = hierarchy[i]["ID"] + + elif len(hierarchy[i]["structure_id_path"]) == 4: + hierarchy[i]["structure_id_path"][1] = hierarchy[ + len2_shortest_index + ]["ID"] + hierarchy[i]["structure_id_path"][2] = hierarchy[ + len3_shortest_index + ]["ID"] + hierarchy[i]["structure_id_path"][3] = hierarchy[i]["ID"] # find parent_structure_id using resulting structure_id_path - if hierarchy[i]['name'] != 'root': - hierarchy[i]['parent_structure_id'] = hierarchy[i]['structure_id_path'][-2] - - + if hierarchy[i]["name"] != "root": + hierarchy[i]["parent_structure_id"] = hierarchy[i][ + "structure_id_path" + ][-2] + # original atlas does not give colours to some regions, so we give random RGB triplets to regions without specified RGB triplet values - random_rgb_triplets = [[156, 23, 189],[45, 178, 75],[231, 98, 50],[12, 200, 155],[87, 34, 255],[190, 145, 66],[64, 199, 225], - [255, 120, 5],[10, 45, 90],[145, 222, 33],[35, 167, 204],[76, 0, 89], [27, 237, 236], [255, 255, 255]] - + random_rgb_triplets = [ + [156, 23, 189], + [45, 178, 75], + [231, 98, 50], + [12, 200, 155], + [87, 34, 255], + [190, 145, 66], + [64, 199, 225], + [255, 120, 5], + [10, 45, 90], + [145, 222, 33], + [35, 167, 204], + [76, 0, 89], + [27, 237, 236], + [255, 255, 255], + ] + n = 0 for index, region in enumerate(hierarchy): if "rgb_triplet" not in region: @@ -184,16 +208,26 @@ def create_atlas(working_dir, resolution): n = n + 1 # give filler acronyms for regions without specified acronyms - missing_acronyms = ['SpEM', 'VLC', 'BsLC', 'SbEM', 'PLC', 'McLC', 'PvLC', 'BLC', 'PeM', - 'OTC', 'NF'] + missing_acronyms = [ + "SpEM", + "VLC", + "BsLC", + "SbEM", + "PLC", + "McLC", + "PvLC", + "BLC", + "PeM", + "OTC", + "NF", + ] n = 0 for index, region in enumerate(hierarchy): - if hierarchy[index]['acronym'] == '': - hierarchy[index]['acronym'] = missing_acronyms[n] - n = n+1 - - - # import cuttlefish .nii file + if hierarchy[index]["acronym"] == "": + hierarchy[index]["acronym"] = missing_acronyms[n] + n = n + 1 + + # import cuttlefish .nii file template_path = pooch.retrieve( TEMPLATE_URL, known_hash="195125305a11abe6786be1b32830a8aed1bc8f68948ad53fa84bf74efe7cbe9c", @@ -203,13 +237,12 @@ def create_atlas(working_dir, resolution): # process brain template MRI file print("Processing brain template:") brain_template = 
load.load_nii(template_path) - - + # check the transformed version of the hierarchy.csv file print(hierarchy) - #df = pd.DataFrame(hierarchy) - #df.to_csv('hierarchy_test.csv') - + # df = pd.DataFrame(hierarchy) + # df.to_csv('hierarchy_test.csv') + return None From b42a409938153aed0f77daee07b92a3b4dea2e5f Mon Sep 17 00:00:00 2001 From: alessandrofelder Date: Fri, 4 Oct 2024 12:04:57 +0100 Subject: [PATCH 11/34] add wrapup function + minor tweaks --- .../atlas_scripts/cuttlefish.py | 108 +++++++++++------- 1 file changed, 67 insertions(+), 41 deletions(-) diff --git a/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py b/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py index 9165bcf9..f49beb05 100644 --- a/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py +++ b/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py @@ -8,12 +8,7 @@ from brainglobe_utils.IO.image import load from brainglobe_atlasapi import utils - -# from skimage import io -"""from brainglobe_atlasapi.atlas_generation.mesh_utils import ( - Region, - create_region_mesh, -)""" +from brainglobe_atlasapi.atlas_generation.wrapup import wrapup_atlas_from_data def hex_to_rgb(hex): @@ -27,14 +22,22 @@ def hex_to_rgb(hex): def create_atlas(working_dir, resolution): + ATLAS_NAME = "columbia_cuttlefish" + SPECIES = "Sepia bandensis" + ATLAS_LINK = "https://www.cuttlebase.org/" + CITATION = ( + "Montague et al, 2023, https://doi.org/10.1016/j.cub.2023.06.007" + ) + ORIENTATION = "srp" + ATLAS_PACKAGER = "Jung Woo Kim" + ADDITIONAL_METADATA = {} - HIERARCHY_FILE_URL = "https://raw.githubusercontent.com/noisyneuron/cuttlebase-util/main/data/brain-hierarchy.csv" - TEMPLATE_URL = r"https://www.dropbox.com/scl/fo/fz8gnpt4xqduf0dnmgrat/ABflM0-v-b4_2WthGaeYM4s/Averaged%2C%20template%20brain/2023_FINAL-Cuttlebase_warped_template.nii.gz?rlkey=eklemeh57slu7v6j1gphqup4z&dl=1" - ANNOTATION_URL = r"https://www.dropbox.com/scl/fo/fz8gnpt4xqduf0dnmgrat/ALfSeAj81IM0v56bEeoTfUQ/Averaged%2C%20template%20brain/2023_FINAL-Cuttlebase_warped_template_lobe-labels.nii.seg.nrrd?rlkey=eklemeh57slu7v6j1gphqup4z&dl=1" + HIERARCHY_FILE_URL = "https://raw.githubusercontent.com/noisyneuron/cuttlebase-util/main/data/brain-hierarchy.csv" # noqa E501 + TEMPLATE_URL = r"https://www.dropbox.com/scl/fo/fz8gnpt4xqduf0dnmgrat/ABflM0-v-b4_2WthGaeYM4s/Averaged%2C%20template%20brain/2023_FINAL-Cuttlebase_warped_template.nii.gz?rlkey=eklemeh57slu7v6j1gphqup4z&dl=1" # noqa E501 + ANNOTATION_URL = r"https://www.dropbox.com/scl/fo/fz8gnpt4xqduf0dnmgrat/ALfSeAj81IM0v56bEeoTfUQ/Averaged%2C%20template%20brain/2023_FINAL-Cuttlebase_warped_template_lobe-labels.nii.seg.nrrd?rlkey=eklemeh57slu7v6j1gphqup4z&dl=1" # noqa E501 download_dir_path = working_dir / "downloads" download_dir_path.mkdir(exist_ok=True) - atlas_path = download_dir_path / "atlas_files" # download hierarchy files utils.check_internet_connection() @@ -57,18 +60,20 @@ def create_atlas(working_dir, resolution): print("Processing brain annotations:") readdata, header = nrrd.read(annotation_path) - # Extract annotation mapping information from nrrd headers, to be applied to hierarchy file later. + # Extract annotation mapping information from nrrd headers, + # to be applied to hierarchy file later. 
mapping = [] for n in range(0, 70): mapping.append( { "color": header[f"Segment{n}_Color"], - "ID": header[f"Segment{n}_LabelValue"], + "id": header[f"Segment{n}_LabelValue"], "acronym": header[f"Segment{n}_Name"], } ) - # convert the color information stored as a string of 3 RGB floats into a list of 3 RGB integers from 0 to 255. + # convert the color information stored as a string of 3 RGB floats + # into a list of 3 RGB integers from 0 to 255. for index, Map in enumerate(mapping): mapping[index]["color"] = Map["color"].split(" ") mapping[index]["color"] = list(map(float, mapping[index]["color"])) @@ -106,10 +111,11 @@ def create_atlas(working_dir, resolution): else: hierarchy.append(row) - # use layer1 and layer2 to give IDs to regions which do not have existing IDs. + # use layers to give IDs to regions which do not have existing IDs. layer1 = 100 layer2 = 200 - # remove 'hasSides' and 'function' keys, reorder and rename the remaining keys + # remove 'hasSides' and 'function' keys, + # reorder and rename the remaining keys for i in range(0, len(hierarchy)): hierarchy[i]["acronym"] = hierarchy[i].pop("abbreviation") hierarchy[i].pop("hasSides") @@ -124,17 +130,18 @@ def create_atlas(working_dir, resolution): and hierarchy[i]["structure_id_path"][-2] != 3 ): if len(hierarchy[i]["structure_id_path"]) == 3: - hierarchy[i]["ID"] = layer2 + hierarchy[i]["id"] = layer2 layer2 += 1 elif len(hierarchy[i]["structure_id_path"]) == 2: - hierarchy[i]["ID"] = layer1 + hierarchy[i]["id"] = layer1 layer1 += 1 if hierarchy[i]["acronym"] == "SB": - hierarchy[i]["ID"] = 71 + hierarchy[i]["id"] = 71 elif hierarchy[i]["acronym"] == "IB": - hierarchy[i]["ID"] = 72 + hierarchy[i]["id"] = 72 - # remove erroneous key for the VS region (error due to commas being included in the 'function' column) + # remove erroneous key for the VS region + # (error due to commas being included in the 'function' column) hierarchy[-3].pop(None) hierarchy[-4].pop(None) @@ -144,46 +151,42 @@ def create_atlas(working_dir, resolution): "name": "root", "acronym": "root", "structure_id_path": [999], - "ID": 999, - "parent_structure_id": None, + "id": 999, } ) - # apply colour and ID map to each region + # apply colour and id map to each region for index, region in enumerate(hierarchy): for Map in mapping: if region["acronym"] == Map["acronym"]: hierarchy[index]["rgb_triplet"] = Map["color"] - hierarchy[index]["ID"] = int(Map["ID"]) + hierarchy[index]["id"] = int(Map["id"]) - # amend each region's structure_id_path by iterating through entire list, and replacing dummy values with actual ID values. + # amend each region's structure_id_path by iterating through entire list, + # and replacing dummy values with actual ID values. 
for i in range(0, len(hierarchy)): if len(hierarchy[i]["structure_id_path"]) == 2: - hierarchy[i]["structure_id_path"][1] = hierarchy[i]["ID"] + hierarchy[i]["structure_id_path"][1] = hierarchy[i]["id"] len2_shortest_index = i elif len(hierarchy[i]["structure_id_path"]) == 3: hierarchy[i]["structure_id_path"][1] = hierarchy[ len2_shortest_index - ]["ID"] - hierarchy[i]["structure_id_path"][2] = hierarchy[i]["ID"] + ]["id"] + hierarchy[i]["structure_id_path"][2] = hierarchy[i]["id"] len3_shortest_index = i elif len(hierarchy[i]["structure_id_path"]) == 4: hierarchy[i]["structure_id_path"][1] = hierarchy[ len2_shortest_index - ]["ID"] + ]["id"] hierarchy[i]["structure_id_path"][2] = hierarchy[ len3_shortest_index - ]["ID"] - hierarchy[i]["structure_id_path"][3] = hierarchy[i]["ID"] - # find parent_structure_id using resulting structure_id_path - if hierarchy[i]["name"] != "root": - hierarchy[i]["parent_structure_id"] = hierarchy[i][ - "structure_id_path" - ][-2] - - # original atlas does not give colours to some regions, so we give random RGB triplets to regions without specified RGB triplet values + ]["id"] + hierarchy[i]["structure_id_path"][3] = hierarchy[i]["id"] + + # original atlas does not give colours to some regions, so we give + # random RGB triplets to regions without specified RGB triplet values random_rgb_triplets = [ [156, 23, 189], [45, 178, 75], @@ -230,20 +233,43 @@ def create_atlas(working_dir, resolution): # import cuttlefish .nii file template_path = pooch.retrieve( TEMPLATE_URL, - known_hash="195125305a11abe6786be1b32830a8aed1bc8f68948ad53fa84bf74efe7cbe9c", + known_hash="195125305a11abe6786be1b32830a8aed1bc8f68948ad53fa84bf74efe7cbe9c", # noqa E501 progressbar=True, ) # process brain template MRI file print("Processing brain template:") - brain_template = load.load_nii(template_path) + brain_template = load.load_nii(template_path, as_array=True) # check the transformed version of the hierarchy.csv file print(hierarchy) # df = pd.DataFrame(hierarchy) # df.to_csv('hierarchy_test.csv') - return None + output_filename = wrapup_atlas_from_data( + atlas_name=ATLAS_NAME, + atlas_minor_version=__version__, + citation=CITATION, + atlas_link=ATLAS_LINK, + species=SPECIES, + resolution=resolution, + orientation=ORIENTATION, + root_id=999, + reference_stack=brain_template, + annotation_stack=readdata, + structures_list=hierarchy, + meshes_dict={}, + scale_meshes=True, + working_dir=working_dir, + hemispheres_stack=None, + cleanup_files=False, + compress=True, + atlas_packager=ATLAS_PACKAGER, + additional_metadata=ADDITIONAL_METADATA, + additional_references={}, + ) + + return output_filename if __name__ == "__main__": From 6dd72b98837a4d0923ffe6e505fbe135cf957704 Mon Sep 17 00:00:00 2001 From: alessandrofelder Date: Fri, 4 Oct 2024 15:29:01 +0100 Subject: [PATCH 12/34] draft mesh creation --- .../atlas_scripts/cuttlefish.py | 97 +++++++++++++++++++ 1 file changed, 97 insertions(+) diff --git a/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py b/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py index f49beb05..56b0647b 100644 --- a/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py +++ b/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py @@ -3,9 +3,14 @@ import csv import glob as glob from pathlib import Path +from typing import Tuple +import numpy as np import pooch from brainglobe_utils.IO.image import load +from numpy.typing import NDArray +from pygltflib import GLTF2 +from vedo import Mesh, write from brainglobe_atlasapi 
import utils from brainglobe_atlasapi.atlas_generation.wrapup import wrapup_atlas_from_data @@ -21,6 +26,68 @@ def hex_to_rgb(hex): return rgb +def points_and_triangles_from_gltf( + gltf, mesh_index +) -> Tuple[NDArray, NDArray]: + """ + Extracts points and triangles from a GLTF mesh. + See "Decode numpy arrays from GLTF2" at + https://gitlab.com/dodgyville/pygltflib + + Parameters + ---------- + gltf : object + The GLTF object containing the mesh data. + mesh_index : int + The index of the mesh to extract data from. + + Returns + ------- + Tuple[NDArray, NDArray] + A tuple containing two numpy arrays: + - points: + An array of shape (n, 3) representing the vertex positions. + - triangles: + An array of shape (m, 3) representing the triangle indices. + """ + binary_blob = gltf.binary_blob() + + triangles_accessor = gltf.accessors[ + gltf.meshes[mesh_index].primitives[0].indices + ] + triangles_buffer_view = gltf.bufferViews[triangles_accessor.bufferView] + triangles = np.frombuffer( + binary_blob[ + triangles_buffer_view.byteOffset + + triangles_accessor.byteOffset : triangles_buffer_view.byteOffset + + triangles_buffer_view.byteLength + ], + dtype="uint16", # cuttlefish triangle indices are uint16 + count=triangles_accessor.count, + ).reshape((-1, 3)) + + points_accessor = gltf.accessors[ + gltf.meshes[mesh_index].primitives[0].attributes.POSITION + ] + points_buffer_view = gltf.bufferViews[points_accessor.bufferView] + points = np.frombuffer( + binary_blob[ + points_buffer_view.byteOffset + + points_accessor.byteOffset : points_buffer_view.byteOffset + + points_buffer_view.byteLength + ], + dtype="float32", + count=points_accessor.count * 3, + ).reshape((-1, 3)) + + return points, triangles + + +def write_obj(points, triangles, obj_filepath): + mesh = Mesh((points, triangles)) + write(mesh, str(obj_filepath)) + + def create_atlas(working_dir, resolution): ATLAS_NAME = "columbia_cuttlefish" SPECIES = "Sepia bandensis" @@ -35,6 +102,7 @@ def create_atlas(working_dir, resolution): HIERARCHY_FILE_URL = "https://raw.githubusercontent.com/noisyneuron/cuttlebase-util/main/data/brain-hierarchy.csv" # noqa E501 TEMPLATE_URL = r"https://www.dropbox.com/scl/fo/fz8gnpt4xqduf0dnmgrat/ABflM0-v-b4_2WthGaeYM4s/Averaged%2C%20template%20brain/2023_FINAL-Cuttlebase_warped_template.nii.gz?rlkey=eklemeh57slu7v6j1gphqup4z&dl=1" # noqa E501 ANNOTATION_URL = r"https://www.dropbox.com/scl/fo/fz8gnpt4xqduf0dnmgrat/ALfSeAj81IM0v56bEeoTfUQ/Averaged%2C%20template%20brain/2023_FINAL-Cuttlebase_warped_template_lobe-labels.nii.seg.nrrd?rlkey=eklemeh57slu7v6j1gphqup4z&dl=1" # noqa E501 + MESH_URL = r"https://www.cuttlebase.org/assets/models/cuttlefish_brain.glb" download_dir_path = working_dir / "downloads" download_dir_path.mkdir(exist_ok=True) @@ -246,6 +314,35 @@ def create_atlas(working_dir, resolution): # df = pd.DataFrame(hierarchy) # df.to_csv('hierarchy_test.csv') + # write meshes + atlas_dir_name = f"{ATLAS_NAME}_{resolution[0]}um_v1.{__version__}" + mesh_dir = Path(working_dir) / ATLAS_NAME / atlas_dir_name / "meshes" + mesh_dir.mkdir(exist_ok=True, parents=True) + glbfile = pooch.retrieve(MESH_URL, known_hash=None, progressbar=True) + gltf = GLTF2.load(glbfile) + for node in gltf.nodes: + # gltf stores meshes/nodes in alphabetical order of region name! 
+ mesh_index = ( + node.mesh + ) # needs to be matched to annotation label instead + # maybe useful for matching: + print( + f"writing mesh for region {gltf.meshes[mesh_index].name}" + f" and index {mesh_index}" + ) + points, triangles = points_and_triangles_from_gltf( + gltf=gltf, mesh_index=mesh_index + ) + # points need to be transformed from SRP to ASR + # see `map_points to` function in `brainglobe-space`, + # e.g. https://github.com/brainglobe/brainglobe-space?tab=readme-ov-file#the-anatomicalspace-class # noqa E501 + write_obj(points, triangles, mesh_dir / f"{mesh_index}.obj") + + # we need to think about the points' scale (should be in microns)! + + # create meshes for regions that don't have a premade mesh, e.g. the root? + # in a separate loop + output_filename = wrapup_atlas_from_data( atlas_name=ATLAS_NAME, atlas_minor_version=__version__, From 0bae2f62957bcb7bc7c7cff4799eab44a9d693fb Mon Sep 17 00:00:00 2001 From: kjungwoo5 Date: Mon, 21 Oct 2024 17:35:10 +0100 Subject: [PATCH 13/34] Added validation script to brainglobe main folder. --- validation.py | 97 +++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 97 insertions(+) create mode 100644 validation.py diff --git a/validation.py b/validation.py new file mode 100644 index 00000000..5f1cd972 --- /dev/null +++ b/validation.py @@ -0,0 +1,97 @@ +import shutil +from pathlib import Path + +import napari +from brainrender_napari.napari_atlas_representation import ( + NapariAtlasRepresentation, +) +from napari.viewer import Viewer + +from brainglobe_atlasapi import BrainGlobeAtlas +from brainglobe_atlasapi.atlas_generation.validate_atlases import ( + catch_missing_mesh_files, + catch_missing_structures, + open_for_visual_check, + validate_additional_references, + validate_atlas_files, + validate_checksum, + validate_image_dimensions, + validate_mesh_matches_image_extents, +) + +all_validation_functions = [ + validate_atlas_files, + #validate_mesh_matches_image_extents, + open_for_visual_check, + validate_checksum, + validate_image_dimensions, + validate_additional_references, + catch_missing_mesh_files, + catch_missing_structures, +] + + +# adapt this code block for newly packaged atlases +brainglobe_dir = Path.home() / ".brainglobe/" +working_dir = Path.home() / "brainglobe_workingdir/" +atlas_name = "columbia_cuttlefish" +resolution = 2 +minor_version = 0 + +# nothing below this needs changing + +# make sure we have the latest packaged version in .brainglobe +# by replacing it with the working_dir version if needed +#atlas_name_with_version = f"{atlas_name}_{resolution}um_v1.{minor_version}" +#source_dir = working_dir / atlas_name / atlas_name_with_version +#destination_dir = brainglobe_dir / atlas_name_with_version +#destination_dir = working_dir / atlas_name_with_version +#if destination_dir.exists() and destination_dir.is_dir(): +# shutil.rmtree(destination_dir) +#assert source_dir.exists() +#if source_dir.exists(): +# shutil.copytree(source_dir, destination_dir) +#assert destination_dir.exists() + +# run validation functions on the new atlas +atlas = BrainGlobeAtlas(f"{atlas_name}_{resolution}um") +validation_results = {atlas_name: []} + +for i, validation_function in enumerate(all_validation_functions): + try: + validation_function(atlas) + validation_results[atlas_name].append( + (validation_function.__name__, None, str("Pass")) + ) + except AssertionError as error: + validation_results[atlas_name].append( + (validation_function.__name__, str(error), str("Fail")) + ) + +# print validation results and open 
napari for a visual check +# in napari, we should see three layers: +# - the annotation +# - the reference image (visibility turned off by default) +# - the root mesh + +failed_validations = [ + (result[0], result[1]) + for result in validation_results[atlas_name] + if result[2] == "Fail" +] +if failed_validations: + print("Failed validations:") + for failed in failed_validations: + print(failed) +else: + print(f"{atlas_name} is a valid atlas") + +viewer = Viewer() +viewer.dims.ndisplay = 3 +napari_atlas = NapariAtlasRepresentation( + atlas, viewer +) +napari_atlas.add_structure_to_viewer("AAB") +napari_atlas.add_to_viewer() + +napari.run() \ No newline at end of file From f2b7d022a40180eef65ac9225bce31f0d4499e47 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 21 Oct 2024 16:35:23 +0000 Subject: [PATCH 14/34] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- validation.py | 26 +++++++++++--------------- 1 file changed, 11 insertions(+), 15 deletions(-) diff --git a/validation.py b/validation.py index 5f1cd972..693be21b 100644 --- a/validation.py +++ b/validation.py @@ -1,4 +1,3 @@ -import shutil from pathlib import Path import napari @@ -16,12 +15,11 @@ validate_atlas_files, validate_checksum, validate_image_dimensions, - validate_mesh_matches_image_extents, ) all_validation_functions = [ validate_atlas_files, - #validate_mesh_matches_image_extents, + # validate_mesh_matches_image_extents, open_for_visual_check, validate_checksum, validate_image_dimensions, @@ -42,16 +40,16 @@ # make sure we have the latest packaged version in .brainglobe # by replacing it with the working_dir version if needed -#atlas_name_with_version = f"{atlas_name}_{resolution}um_v1.{minor_version}" -#source_dir = working_dir / atlas_name / atlas_name_with_version -#destination_dir = brainglobe_dir / atlas_name_with_version -#destination_dir = working_dir / atlas_name_with_version -#if destination_dir.exists() and destination_dir.is_dir(): +# atlas_name_with_version = f"{atlas_name}_{resolution}um_v1.{minor_version}" +# source_dir = working_dir / atlas_name / atlas_name_with_version +# destination_dir = brainglobe_dir / atlas_name_with_version +# destination_dir = working_dir / atlas_name_with_version +# if destination_dir.exists() and destination_dir.is_dir(): # shutil.rmtree(destination_dir) -#assert source_dir.exists() -#if source_dir.exists(): +# assert source_dir.exists() +# if source_dir.exists(): # shutil.copytree(source_dir, destination_dir) -#assert destination_dir.exists() +# assert destination_dir.exists() # run validation functions on the new atlas atlas = BrainGlobeAtlas(f"{atlas_name}_{resolution}um") @@ -88,10 +86,8 @@ viewer = Viewer() viewer.dims.ndisplay = 3 -napari_atlas = NapariAtlasRepresentation( - atlas, viewer -) +napari_atlas = NapariAtlasRepresentation(atlas, viewer) napari_atlas.add_structure_to_viewer("AAB") napari_atlas.add_to_viewer() -napari.run() \ No newline at end of file +napari.run() From ce5fecd75f92a57bdca6a9aa2d52b27f657360c3 Mon Sep 17 00:00:00 2001 From: kjungwoo5 Date: Tue, 26 Nov 2024 15:32:29 +0000 Subject: [PATCH 15/34] Updated mesh generation code to store meshes by correct ID. 
(In previous code, mesh name did not match the region ID) --- .../atlas_scripts/cuttlefish.py | 82 +++++++++++++++++-- 1 file changed, 74 insertions(+), 8 deletions(-) diff --git a/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py b/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py index 56b0647b..90effc93 100644 --- a/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py +++ b/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py @@ -15,6 +15,8 @@ from brainglobe_atlasapi import utils from brainglobe_atlasapi.atlas_generation.wrapup import wrapup_atlas_from_data +import brainglobe_space as bg + def hex_to_rgb(hex): hex = hex.lstrip("#") @@ -310,22 +312,53 @@ def create_atlas(working_dir, resolution): brain_template = load.load_nii(template_path, as_array=True) # check the transformed version of the hierarchy.csv file - print(hierarchy) + #print(hierarchy) # df = pd.DataFrame(hierarchy) # df.to_csv('hierarchy_test.csv') + import matplotlib.pyplot as plt + sc = bg.AnatomicalSpace("srp") # origin for the stack to be plotted + + '''fig, axs = plt.subplots(1,3) + for i, (plane, labels) in enumerate(zip(sc.sections, sc.axis_labels)): + axs[i].imshow(brain_template.mean(i)) + axs[i].set_title(f"{plane.capitalize()} view") + axs[i].set_ylabel(labels[0]) + axs[i].set_xlabel(labels[1]) + plt.show()''' + # write meshes + source_origin = ("Superior", "Right", "Posterior") + source_space = bg.AnatomicalSpace(source_origin, brain_template.shape) atlas_dir_name = f"{ATLAS_NAME}_{resolution[0]}um_v1.{__version__}" mesh_dir = Path(working_dir) / ATLAS_NAME / atlas_dir_name / "meshes" mesh_dir.mkdir(exist_ok=True, parents=True) glbfile = pooch.retrieve(MESH_URL, known_hash=None, progressbar=True) gltf = GLTF2.load(glbfile) for node in gltf.nodes: + #print(node) # gltf stores meshes/nodes in alphabetical order of region name! - mesh_index = ( - node.mesh - ) # needs to be matched to annotation label instead - # maybe useful for matching: + # given that the gtlf meshes id don't match the region ids, + # match the mesh names to our region names to find the correct id + for region in hierarchy: + if node.name == region["acronym"]: + mesh_id = region["id"] + break + else: + mesh_id = -1 + + # the following code tests for which meshes did not have a corresponding region in + # our hierarchy region list. + # they are: C, GLASS and SK. + # manual checking on Blender shows that: + # SK is the cuttlefish body (unnecessary) + # GLASS is the overall mesh for the brain + # C is the cartilage behind the brain (unnecessary) + + #if mesh_id == -1: + # print("error for ", node) + + mesh_index = node.mesh print( f"writing mesh for region {gltf.meshes[mesh_index].name}" f" and index {mesh_index}" @@ -333,16 +366,49 @@ def create_atlas(working_dir, resolution): points, triangles = points_and_triangles_from_gltf( gltf=gltf, mesh_index=mesh_index ) + mapped_points = source_space.map_points_to("asr", points) + # points need to be transformed from SRP to ASR # see `map_points to` function in `brainglobe-space`, # e.g. https://github.com/brainglobe/brainglobe-space?tab=readme-ov-file#the-anatomicalspace-class # noqa E501 - write_obj(points, triangles, mesh_dir / f"{mesh_index}.obj") - - # we need to think about the points' scale (should be in microns)! 
+ + points = np.multiply(points, 1000) + write_obj(points, triangles, mesh_dir / f"{mesh_id}.obj") + test = np.asarray(points) + #print(test.shape) + #print(brain_template.shape) + #np.savetxt("footest.csv", test, delimiter=',') + # we need to think about the points' scale (should be in microns)! # create meshes for regions that don't have a premade mesh, e.g. the root? # in a separate loop + # create meshes_dict + + ############################## FIND A WAY TO MATCH THE MESH ID WITH THE ACRONYMS. + + + meshes_dict = dict() + structures_with_mesh = [] + for s in hierarchy: + # check if a mesh was created + mesh_path = mesh_dir / f"{s['id']}.obj" + if not mesh_path.exists(): + print(f"No mesh file exists for: {s}, ignoring it.") + continue + else: + # check that the mesh actually exists and isn't empty + if mesh_path.stat().st_size < 512: + print(f"obj file for {s} is too small, ignoring it.") + continue + structures_with_mesh.append(s) + meshes_dict[s["id"]] = mesh_path + + print( + f"In the end, {len(structures_with_mesh)} " + "structures with mesh are kept" + ) + output_filename = wrapup_atlas_from_data( atlas_name=ATLAS_NAME, atlas_minor_version=__version__, From 411fe4d00480f6339274396d099fa026c2f8180d Mon Sep 17 00:00:00 2001 From: kjungwoo5 Date: Mon, 2 Dec 2024 14:31:26 +0000 Subject: [PATCH 16/34] Scaled, rotated and inverted mesh to match the size of the annotation image. --- .../atlas_generation/atlas_scripts/cuttlefish.py | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py b/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py index 90effc93..4c64c081 100644 --- a/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py +++ b/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py @@ -335,6 +335,11 @@ def create_atlas(working_dir, resolution): mesh_dir.mkdir(exist_ok=True, parents=True) glbfile = pooch.retrieve(MESH_URL, known_hash=None, progressbar=True) gltf = GLTF2.load(glbfile) + + transformation_matrix = np.array([[0,0,-1], + [0,-1,0], + [1,0,0]]) + for node in gltf.nodes: #print(node) # gltf stores meshes/nodes in alphabetical order of region name! @@ -372,7 +377,11 @@ def create_atlas(working_dir, resolution): # see `map_points to` function in `brainglobe-space`, # e.g. https://github.com/brainglobe/brainglobe-space?tab=readme-ov-file#the-anatomicalspace-class # noqa E501 - points = np.multiply(points, 1000) + points = np.multiply(points, 2000) + #print("pre-transformation: ", points) + for index, point in enumerate(points): + points[index] = np.matmul(transformation_matrix,point) + #print("post-transformation: ", points) write_obj(points, triangles, mesh_dir / f"{mesh_id}.obj") test = np.asarray(points) #print(test.shape) From a8c23aab2a0c30d94526444cb260ddc94e972395 Mon Sep 17 00:00:00 2001 From: kjungwoo5 Date: Wed, 4 Dec 2024 13:58:40 +0000 Subject: [PATCH 17/34] Corrected meshes_dict in wrapup function, changed mesh generation to use map_points_to, but currently the meshes are still not correctly aligned. 
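Side note, a minimal sketch (not applied in this patch) of the intended reorientation using brainglobe-space only; the "rai" origin string and the stack shape are illustrative assumptions, not verified values for this template:

    import brainglobe_space as bg
    import numpy as np

    template_shape = (285, 268, 193)              # placeholder, not the real stack
    points = np.zeros((4, 3), dtype=np.float32)   # stand-in for glTF vertices

    mesh_space = bg.AnatomicalSpace("rai", template_shape)
    mapped_points = mesh_space.map_points_to("srp", points)  # into atlas orientation

Whether flipping against a voxel-count shape is meaningful for vertices that are not yet in voxel units is exactly the open alignment question in this patch.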
--- .../atlas_scripts/cuttlefish.py | 20 ++++++++++--------- 1 file changed, 11 insertions(+), 9 deletions(-) diff --git a/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py b/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py index 4c64c081..54db4118 100644 --- a/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py +++ b/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py @@ -328,8 +328,8 @@ def create_atlas(working_dir, resolution): plt.show()''' # write meshes - source_origin = ("Superior", "Right", "Posterior") - source_space = bg.AnatomicalSpace(source_origin, brain_template.shape) + mesh_source_origin = ("Right", "Anterior", "Inferior") + mesh_source_space = bg.AnatomicalSpace(mesh_source_origin, brain_template.shape) atlas_dir_name = f"{ATLAS_NAME}_{resolution[0]}um_v1.{__version__}" mesh_dir = Path(working_dir) / ATLAS_NAME / atlas_dir_name / "meshes" mesh_dir.mkdir(exist_ok=True, parents=True) @@ -371,19 +371,21 @@ def create_atlas(working_dir, resolution): points, triangles = points_and_triangles_from_gltf( gltf=gltf, mesh_index=mesh_index ) - mapped_points = source_space.map_points_to("asr", points) + mapped_points = mesh_source_space.map_points_to("srp", points) # points need to be transformed from SRP to ASR # see `map_points to` function in `brainglobe-space`, # e.g. https://github.com/brainglobe/brainglobe-space?tab=readme-ov-file#the-anatomicalspace-class # noqa E501 - points = np.multiply(points, 2000) + mapped_points = np.multiply(points, 1000) #print("pre-transformation: ", points) - for index, point in enumerate(points): - points[index] = np.matmul(transformation_matrix,point) + + #for index, point in enumerate(points): + # points[index] = np.matmul(transformation_matrix,point) + #print("post-transformation: ", points) - write_obj(points, triangles, mesh_dir / f"{mesh_id}.obj") - test = np.asarray(points) + write_obj(mapped_points, triangles, mesh_dir / f"{mesh_id}.obj") + #print(test.shape) #print(brain_template.shape) #np.savetxt("footest.csv", test, delimiter=',') @@ -430,7 +432,7 @@ def create_atlas(working_dir, resolution): reference_stack=brain_template, annotation_stack=readdata, structures_list=hierarchy, - meshes_dict={}, + meshes_dict=meshes_dict, scale_meshes=True, working_dir=working_dir, hemispheres_stack=None, From 6c45b7c4c8b05675a1a965e139efcdcc2d910769 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 4 Dec 2024 23:59:24 +0000 Subject: [PATCH 18/34] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- .../atlas_scripts/cuttlefish.py | 69 +++++++++---------- 1 file changed, 33 insertions(+), 36 deletions(-) diff --git a/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py b/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py index 54db4118..33220f71 100644 --- a/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py +++ b/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py @@ -5,6 +5,7 @@ from pathlib import Path from typing import Tuple +import brainglobe_space as bg import numpy as np import pooch from brainglobe_utils.IO.image import load @@ -15,8 +16,6 @@ from brainglobe_atlasapi import utils from brainglobe_atlasapi.atlas_generation.wrapup import wrapup_atlas_from_data -import brainglobe_space as bg - def hex_to_rgb(hex): hex = hex.lstrip("#") @@ -312,36 +311,35 @@ def create_atlas(working_dir, resolution): brain_template = 
load.load_nii(template_path, as_array=True) # check the transformed version of the hierarchy.csv file - #print(hierarchy) + # print(hierarchy) # df = pd.DataFrame(hierarchy) # df.to_csv('hierarchy_test.csv') - import matplotlib.pyplot as plt sc = bg.AnatomicalSpace("srp") # origin for the stack to be plotted - '''fig, axs = plt.subplots(1,3) + """fig, axs = plt.subplots(1,3) for i, (plane, labels) in enumerate(zip(sc.sections, sc.axis_labels)): axs[i].imshow(brain_template.mean(i)) axs[i].set_title(f"{plane.capitalize()} view") axs[i].set_ylabel(labels[0]) axs[i].set_xlabel(labels[1]) - plt.show()''' - + plt.show()""" + # write meshes mesh_source_origin = ("Right", "Anterior", "Inferior") - mesh_source_space = bg.AnatomicalSpace(mesh_source_origin, brain_template.shape) + mesh_source_space = bg.AnatomicalSpace( + mesh_source_origin, brain_template.shape + ) atlas_dir_name = f"{ATLAS_NAME}_{resolution[0]}um_v1.{__version__}" mesh_dir = Path(working_dir) / ATLAS_NAME / atlas_dir_name / "meshes" mesh_dir.mkdir(exist_ok=True, parents=True) glbfile = pooch.retrieve(MESH_URL, known_hash=None, progressbar=True) gltf = GLTF2.load(glbfile) - - transformation_matrix = np.array([[0,0,-1], - [0,-1,0], - [1,0,0]]) - + + transformation_matrix = np.array([[0, 0, -1], [0, -1, 0], [1, 0, 0]]) + for node in gltf.nodes: - #print(node) + # print(node) # gltf stores meshes/nodes in alphabetical order of region name! # given that the gtlf meshes id don't match the region ids, # match the mesh names to our region names to find the correct id @@ -351,18 +349,18 @@ def create_atlas(working_dir, resolution): break else: mesh_id = -1 - + # the following code tests for which meshes did not have a corresponding region in # our hierarchy region list. - # they are: C, GLASS and SK. - # manual checking on Blender shows that: + # they are: C, GLASS and SK. + # manual checking on Blender shows that: # SK is the cuttlefish body (unnecessary) - # GLASS is the overall mesh for the brain + # GLASS is the overall mesh for the brain # C is the cartilage behind the brain (unnecessary) - - #if mesh_id == -1: + + # if mesh_id == -1: # print("error for ", node) - + mesh_index = node.mesh print( f"writing mesh for region {gltf.meshes[mesh_index].name}" @@ -376,29 +374,28 @@ def create_atlas(working_dir, resolution): # points need to be transformed from SRP to ASR # see `map_points to` function in `brainglobe-space`, # e.g. https://github.com/brainglobe/brainglobe-space?tab=readme-ov-file#the-anatomicalspace-class # noqa E501 - + mapped_points = np.multiply(points, 1000) - #print("pre-transformation: ", points) - - #for index, point in enumerate(points): + # print("pre-transformation: ", points) + + # for index, point in enumerate(points): # points[index] = np.matmul(transformation_matrix,point) - - #print("post-transformation: ", points) + + # print("post-transformation: ", points) write_obj(mapped_points, triangles, mesh_dir / f"{mesh_id}.obj") - - #print(test.shape) - #print(brain_template.shape) - #np.savetxt("footest.csv", test, delimiter=',') + + # print(test.shape) + # print(brain_template.shape) + # np.savetxt("footest.csv", test, delimiter=',') # we need to think about the points' scale (should be in microns)! # create meshes for regions that don't have a premade mesh, e.g. the root? # in a separate loop # create meshes_dict - - ############################## FIND A WAY TO MATCH THE MESH ID WITH THE ACRONYMS. - - + + ############################## FIND A WAY TO MATCH THE MESH ID WITH THE ACRONYMS. 
+ meshes_dict = dict() structures_with_mesh = [] for s in hierarchy: @@ -419,7 +416,7 @@ def create_atlas(working_dir, resolution): f"In the end, {len(structures_with_mesh)} " "structures with mesh are kept" ) - + output_filename = wrapup_atlas_from_data( atlas_name=ATLAS_NAME, atlas_minor_version=__version__, From 98b3b3a9891ea572877964beed0f43f7cacc7b05 Mon Sep 17 00:00:00 2001 From: kjungwoo5 Date: Thu, 5 Dec 2024 00:01:13 +0000 Subject: [PATCH 19/34] Edited validation script --- validation.py | 29 ++++++++++++++++++----------- 1 file changed, 18 insertions(+), 11 deletions(-) diff --git a/validation.py b/validation.py index 693be21b..c0486237 100644 --- a/validation.py +++ b/validation.py @@ -1,3 +1,4 @@ +import shutil from pathlib import Path import napari @@ -15,11 +16,12 @@ validate_atlas_files, validate_checksum, validate_image_dimensions, + validate_mesh_matches_image_extents, ) all_validation_functions = [ validate_atlas_files, - # validate_mesh_matches_image_extents, + #validate_mesh_matches_image_extents, open_for_visual_check, validate_checksum, validate_image_dimensions, @@ -40,16 +42,16 @@ # make sure we have the latest packaged version in .brainglobe # by replacing it with the working_dir version if needed -# atlas_name_with_version = f"{atlas_name}_{resolution}um_v1.{minor_version}" -# source_dir = working_dir / atlas_name / atlas_name_with_version -# destination_dir = brainglobe_dir / atlas_name_with_version -# destination_dir = working_dir / atlas_name_with_version -# if destination_dir.exists() and destination_dir.is_dir(): +#atlas_name_with_version = f"{atlas_name}_{resolution}um_v1.{minor_version}" +#source_dir = working_dir / atlas_name / atlas_name_with_version +#destination_dir = brainglobe_dir / atlas_name_with_version +#destination_dir = working_dir / atlas_name_with_version +#if destination_dir.exists() and destination_dir.is_dir(): # shutil.rmtree(destination_dir) -# assert source_dir.exists() -# if source_dir.exists(): +#assert source_dir.exists() +#if source_dir.exists(): # shutil.copytree(source_dir, destination_dir) -# assert destination_dir.exists() +#assert destination_dir.exists() # run validation functions on the new atlas atlas = BrainGlobeAtlas(f"{atlas_name}_{resolution}um") @@ -86,8 +88,13 @@ viewer = Viewer() viewer.dims.ndisplay = 3 -napari_atlas = NapariAtlasRepresentation(atlas, viewer) +napari_atlas = NapariAtlasRepresentation( + atlas, viewer +) napari_atlas.add_structure_to_viewer("AAB") +napari_atlas.add_structure_to_viewer("V") +napari_atlas.add_structure_to_viewer("Or") +napari_atlas.add_structure_to_viewer("BPCl") napari_atlas.add_to_viewer() -napari.run() +napari.run() \ No newline at end of file From bbf96d4236a125d4ca171645ad109ce7670614c0 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Thu, 5 Dec 2024 00:01:52 +0000 Subject: [PATCH 20/34] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- validation.py | 26 +++++++++++--------------- 1 file changed, 11 insertions(+), 15 deletions(-) diff --git a/validation.py b/validation.py index c0486237..29969147 100644 --- a/validation.py +++ b/validation.py @@ -1,4 +1,3 @@ -import shutil from pathlib import Path import napari @@ -16,12 +15,11 @@ validate_atlas_files, validate_checksum, validate_image_dimensions, - validate_mesh_matches_image_extents, ) all_validation_functions = [ validate_atlas_files, - #validate_mesh_matches_image_extents, + # 
validate_mesh_matches_image_extents, open_for_visual_check, validate_checksum, validate_image_dimensions, @@ -42,16 +40,16 @@ # make sure we have the latest packaged version in .brainglobe # by replacing it with the working_dir version if needed -#atlas_name_with_version = f"{atlas_name}_{resolution}um_v1.{minor_version}" -#source_dir = working_dir / atlas_name / atlas_name_with_version -#destination_dir = brainglobe_dir / atlas_name_with_version -#destination_dir = working_dir / atlas_name_with_version -#if destination_dir.exists() and destination_dir.is_dir(): +# atlas_name_with_version = f"{atlas_name}_{resolution}um_v1.{minor_version}" +# source_dir = working_dir / atlas_name / atlas_name_with_version +# destination_dir = brainglobe_dir / atlas_name_with_version +# destination_dir = working_dir / atlas_name_with_version +# if destination_dir.exists() and destination_dir.is_dir(): # shutil.rmtree(destination_dir) -#assert source_dir.exists() -#if source_dir.exists(): +# assert source_dir.exists() +# if source_dir.exists(): # shutil.copytree(source_dir, destination_dir) -#assert destination_dir.exists() +# assert destination_dir.exists() # run validation functions on the new atlas atlas = BrainGlobeAtlas(f"{atlas_name}_{resolution}um") @@ -88,13 +86,11 @@ viewer = Viewer() viewer.dims.ndisplay = 3 -napari_atlas = NapariAtlasRepresentation( - atlas, viewer -) +napari_atlas = NapariAtlasRepresentation(atlas, viewer) napari_atlas.add_structure_to_viewer("AAB") napari_atlas.add_structure_to_viewer("V") napari_atlas.add_structure_to_viewer("Or") napari_atlas.add_structure_to_viewer("BPCl") napari_atlas.add_to_viewer() -napari.run() \ No newline at end of file +napari.run() From 84f9d477e541197bef8890bd08c854b4316f0626 Mon Sep 17 00:00:00 2001 From: kjungwoo5 Date: Sun, 8 Dec 2024 17:03:18 +0000 Subject: [PATCH 21/34] Corrected the orientation of the meshes using map_points_to rather than matrix multiplication, and attempted to translate meshes. 
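Side note, a minimal sketch (not applied in this patch): the per-vertex scale-and-translate loop can be done with numpy broadcasting; the constants below are the placeholders already tried in these patches, not calibrated values:

    import numpy as np

    points = np.zeros((4, 3), dtype=np.float32)           # stand-in mesh vertices
    displacement = np.array([11.150002, 14.350002, 0.0])  # offset tried here

    scaled = points * 2000.0            # same result as np.multiply(points, 2000)
    translated = scaled + displacement  # broadcasts the shift over all vertices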
--- .../atlas_scripts/cuttlefish.py | 40 ++++++++----------- 1 file changed, 16 insertions(+), 24 deletions(-) diff --git a/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py b/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py index 33220f71..3144e9ed 100644 --- a/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py +++ b/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py @@ -315,28 +315,25 @@ def create_atlas(working_dir, resolution): # df = pd.DataFrame(hierarchy) # df.to_csv('hierarchy_test.csv') - sc = bg.AnatomicalSpace("srp") # origin for the stack to be plotted + source_space = bg.AnatomicalSpace("srp") # origin for the stack to be plotted - """fig, axs = plt.subplots(1,3) - for i, (plane, labels) in enumerate(zip(sc.sections, sc.axis_labels)): - axs[i].imshow(brain_template.mean(i)) - axs[i].set_title(f"{plane.capitalize()} view") - axs[i].set_ylabel(labels[0]) - axs[i].set_xlabel(labels[1]) - plt.show()""" # write meshes mesh_source_origin = ("Right", "Anterior", "Inferior") mesh_source_space = bg.AnatomicalSpace( mesh_source_origin, brain_template.shape ) + print(brain_template.shape) atlas_dir_name = f"{ATLAS_NAME}_{resolution[0]}um_v1.{__version__}" mesh_dir = Path(working_dir) / ATLAS_NAME / atlas_dir_name / "meshes" mesh_dir.mkdir(exist_ok=True, parents=True) glbfile = pooch.retrieve(MESH_URL, known_hash=None, progressbar=True) gltf = GLTF2.load(glbfile) - transformation_matrix = np.array([[0, 0, -1], [0, -1, 0], [1, 0, 0]]) + transformation_matrix = np.array([[0, 0, -1], + [1, 0, 0], + [0, -1, 0]]) + displacement = np.array([11.150002, 14.350002, 0]) for node in gltf.nodes: # print(node) @@ -369,33 +366,28 @@ def create_atlas(working_dir, resolution): points, triangles = points_and_triangles_from_gltf( gltf=gltf, mesh_index=mesh_index ) - mapped_points = mesh_source_space.map_points_to("srp", points) - + + points = np.multiply(points, 2000) + mapped_points = mesh_source_space.map_points_to("pri", points) + print(mapped_points) + + for index, p in enumerate(mapped_points): + mapped_points[index] = np.add(p, displacement) + print(mapped_points) # points need to be transformed from SRP to ASR # see `map_points to` function in `brainglobe-space`, # e.g. https://github.com/brainglobe/brainglobe-space?tab=readme-ov-file#the-anatomicalspace-class # noqa E501 - mapped_points = np.multiply(points, 1000) - # print("pre-transformation: ", points) - - # for index, point in enumerate(points): - # points[index] = np.matmul(transformation_matrix,point) - # print("post-transformation: ", points) write_obj(mapped_points, triangles, mesh_dir / f"{mesh_id}.obj") - # print(test.shape) - # print(brain_template.shape) - # np.savetxt("footest.csv", test, delimiter=',') + # we need to think about the points' scale (should be in microns)! # create meshes for regions that don't have a premade mesh, e.g. the root? # in a separate loop # create meshes_dict - - ############################## FIND A WAY TO MATCH THE MESH ID WITH THE ACRONYMS. 
- meshes_dict = dict() structures_with_mesh = [] for s in hierarchy: @@ -430,7 +422,7 @@ def create_atlas(working_dir, resolution): annotation_stack=readdata, structures_list=hierarchy, meshes_dict=meshes_dict, - scale_meshes=True, + scale_meshes=False, working_dir=working_dir, hemispheres_stack=None, cleanup_files=False, From ee46a9c4e391bee7bcd8f53ec5954201be40882b Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Sun, 8 Dec 2024 17:03:32 +0000 Subject: [PATCH 22/34] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- .../atlas_generation/atlas_scripts/cuttlefish.py | 15 ++++++--------- 1 file changed, 6 insertions(+), 9 deletions(-) diff --git a/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py b/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py index 3144e9ed..2b672306 100644 --- a/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py +++ b/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py @@ -315,8 +315,9 @@ def create_atlas(working_dir, resolution): # df = pd.DataFrame(hierarchy) # df.to_csv('hierarchy_test.csv') - source_space = bg.AnatomicalSpace("srp") # origin for the stack to be plotted - + source_space = bg.AnatomicalSpace( + "srp" + ) # origin for the stack to be plotted # write meshes mesh_source_origin = ("Right", "Anterior", "Inferior") @@ -330,9 +331,7 @@ def create_atlas(working_dir, resolution): glbfile = pooch.retrieve(MESH_URL, known_hash=None, progressbar=True) gltf = GLTF2.load(glbfile) - transformation_matrix = np.array([[0, 0, -1], - [1, 0, 0], - [0, -1, 0]]) + transformation_matrix = np.array([[0, 0, -1], [1, 0, 0], [0, -1, 0]]) displacement = np.array([11.150002, 14.350002, 0]) for node in gltf.nodes: @@ -366,11 +365,11 @@ def create_atlas(working_dir, resolution): points, triangles = points_and_triangles_from_gltf( gltf=gltf, mesh_index=mesh_index ) - + points = np.multiply(points, 2000) mapped_points = mesh_source_space.map_points_to("pri", points) print(mapped_points) - + for index, p in enumerate(mapped_points): mapped_points[index] = np.add(p, displacement) print(mapped_points) @@ -378,10 +377,8 @@ def create_atlas(working_dir, resolution): # see `map_points to` function in `brainglobe-space`, # e.g. https://github.com/brainglobe/brainglobe-space?tab=readme-ov-file#the-anatomicalspace-class # noqa E501 - write_obj(mapped_points, triangles, mesh_dir / f"{mesh_id}.obj") - # we need to think about the points' scale (should be in microns)! # create meshes for regions that don't have a premade mesh, e.g. the root? From f2150426062b0c8d64408725f2efc62ce24cc536 Mon Sep 17 00:00:00 2001 From: kjungwoo5 Date: Mon, 9 Dec 2024 16:06:27 +0000 Subject: [PATCH 23/34] Edited the atlas generation to be in 50um resolution, and changed scaling of meshes to match the annotation volume. 
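Side note, a minimal sketch (not applied in this patch): the vertex scale factor could be derived from an assumed source unit and the packaging resolution instead of being hard-coded; the millimetre assumption below is a guess that still needs checking against the annotation volume:

    resolution_um = 50.0              # voxel size used when packaging this atlas
    assumed_vertex_unit_um = 1000.0   # ASSUMPTION: glTF vertices are in millimetres

    to_microns = assumed_vertex_unit_um                   # vertices -> microns
    to_voxels = assumed_vertex_unit_um / resolution_um    # vertices -> voxel indices
    # which factor is needed depends on what the downstream scale_meshes step
    # expects, which still has to be confirmed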
--- .../atlas_generation/atlas_scripts/cuttlefish.py | 6 +++--- validation.py | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py b/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py index 3144e9ed..6bcb6fa8 100644 --- a/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py +++ b/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py @@ -366,8 +366,8 @@ def create_atlas(working_dir, resolution): points, triangles = points_and_triangles_from_gltf( gltf=gltf, mesh_index=mesh_index ) - - points = np.multiply(points, 2000) + # figure out what number to put here + points = np.multiply(points, 40000) mapped_points = mesh_source_space.map_points_to("pri", points) print(mapped_points) @@ -436,7 +436,7 @@ def create_atlas(working_dir, resolution): if __name__ == "__main__": - res = 2, 2, 2 + res = 50, 50, 50 home = str(Path.home()) bg_root_dir = Path.home() / "brainglobe_workingdir" bg_root_dir.mkdir(exist_ok=True, parents=True) diff --git a/validation.py b/validation.py index 29969147..ac5e3004 100644 --- a/validation.py +++ b/validation.py @@ -33,7 +33,7 @@ brainglobe_dir = Path.home() / ".brainglobe/" working_dir = Path.home() / "brainglobe_workingdir/" atlas_name = "columbia_cuttlefish" -resolution = 2 +resolution = 50 minor_version = 0 # nothing below this needs changing From 92736a31d85dddac07acc05d69c7499c2771b15f Mon Sep 17 00:00:00 2001 From: kjungwoo5 Date: Thu, 16 Jan 2025 16:16:32 +0000 Subject: [PATCH 24/34] test to check for functionality of new branch --- test.txt | 1 + 1 file changed, 1 insertion(+) create mode 100644 test.txt diff --git a/test.txt b/test.txt new file mode 100644 index 00000000..9e018b37 --- /dev/null +++ b/test.txt @@ -0,0 +1 @@ +test \ No newline at end of file From 2101b6b07b33041408d6a1bda9e1444871769886 Mon Sep 17 00:00:00 2001 From: kjungwoo5 Date: Thu, 16 Jan 2025 16:17:14 +0000 Subject: [PATCH 25/34] Remove test of branch functionality --- test.txt | 1 - 1 file changed, 1 deletion(-) delete mode 100644 test.txt diff --git a/test.txt b/test.txt deleted file mode 100644 index 9e018b37..00000000 --- a/test.txt +++ /dev/null @@ -1 +0,0 @@ -test \ No newline at end of file From 13cb394f62bef64fe0bac1dd516808f55235459b Mon Sep 17 00:00:00 2001 From: kjungwoo5 Date: Thu, 16 Jan 2025 16:20:53 +0000 Subject: [PATCH 26/34] Remove mesh generation code from atlas generation script. --- .../atlas_scripts/cuttlefish.py | 126 +----------------- 1 file changed, 1 insertion(+), 125 deletions(-) diff --git a/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py b/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py index bacbc32e..aeb1427d 100644 --- a/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py +++ b/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py @@ -27,67 +27,6 @@ def hex_to_rgb(hex): return rgb -def points_and_triangles_from_gltf( - gltf, mesh_index -) -> Tuple[NDArray, NDArray]: - """ - Extracts points and triangles from a GLTF mesh. - See "Decode numpy arrays from GLTF2" at - https://gitlab.com/dodgyville/pygltflib - - Parameters - ---------- - gltf : object - The GLTF object containing the mesh data. - mesh_index : int - The index of the mesh to extract data from. - - Returns - ------- - Tuple[NDArray, NDArray] - A tuple containing two numpy arrays: - - points: - An array of shape (n, 3) representing the vertex positions. 
- - triangles: - An array of shape (m, 3) representing the triangle indices. - """ - binary_blob = gltf.binary_blob() - - triangles_accessor = gltf.accessors[ - gltf.meshes[mesh_index].primitives[0].indices - ] - triangles_buffer_view = gltf.bufferViews[triangles_accessor.bufferView] - triangles = np.frombuffer( - binary_blob[ - triangles_buffer_view.byteOffset - + triangles_accessor.byteOffset : triangles_buffer_view.byteOffset - + triangles_buffer_view.byteLength - ], - dtype="uint16", # cuttlefish triangle indices are uint16 - count=triangles_accessor.count, - ).reshape((-1, 3)) - - points_accessor = gltf.accessors[ - gltf.meshes[mesh_index].primitives[0].attributes.POSITION - ] - points_buffer_view = gltf.bufferViews[points_accessor.bufferView] - points = np.frombuffer( - binary_blob[ - points_buffer_view.byteOffset - + points_accessor.byteOffset : points_buffer_view.byteOffset - + points_buffer_view.byteLength - ], - dtype="float32", - count=points_accessor.count * 3, - ).reshape((-1, 3)) - - return points, triangles - - -def write_obj(points, triangles, obj_filepath): - mesh = Mesh((points, triangles)) - write(mesh, str(obj_filepath)) - def create_atlas(working_dir, resolution): ATLAS_NAME = "columbia_cuttlefish" @@ -315,75 +254,12 @@ def create_atlas(working_dir, resolution): # df = pd.DataFrame(hierarchy) # df.to_csv('hierarchy_test.csv') - source_space = bg.AnatomicalSpace( - "srp" - ) # origin for the stack to be plotted # write meshes - mesh_source_origin = ("Right", "Anterior", "Inferior") - mesh_source_space = bg.AnatomicalSpace( - mesh_source_origin, brain_template.shape - ) - print(brain_template.shape) atlas_dir_name = f"{ATLAS_NAME}_{resolution[0]}um_v1.{__version__}" mesh_dir = Path(working_dir) / ATLAS_NAME / atlas_dir_name / "meshes" mesh_dir.mkdir(exist_ok=True, parents=True) - glbfile = pooch.retrieve(MESH_URL, known_hash=None, progressbar=True) - gltf = GLTF2.load(glbfile) - - transformation_matrix = np.array([[0, 0, -1], [1, 0, 0], [0, -1, 0]]) - displacement = np.array([11.150002, 14.350002, 0]) - - for node in gltf.nodes: - # print(node) - # gltf stores meshes/nodes in alphabetical order of region name! - # given that the gtlf meshes id don't match the region ids, - # match the mesh names to our region names to find the correct id - for region in hierarchy: - if node.name == region["acronym"]: - mesh_id = region["id"] - break - else: - mesh_id = -1 - - # the following code tests for which meshes did not have a corresponding region in - # our hierarchy region list. - # they are: C, GLASS and SK. - # manual checking on Blender shows that: - # SK is the cuttlefish body (unnecessary) - # GLASS is the overall mesh for the brain - # C is the cartilage behind the brain (unnecessary) - - # if mesh_id == -1: - # print("error for ", node) - - mesh_index = node.mesh - print( - f"writing mesh for region {gltf.meshes[mesh_index].name}" - f" and index {mesh_index}" - ) - points, triangles = points_and_triangles_from_gltf( - gltf=gltf, mesh_index=mesh_index - ) - # figure out what number to put here - points = np.multiply(points, 40000) - mapped_points = mesh_source_space.map_points_to("pri", points) - print(mapped_points) - - for index, p in enumerate(mapped_points): - mapped_points[index] = np.add(p, displacement) - print(mapped_points) - # points need to be transformed from SRP to ASR - # see `map_points to` function in `brainglobe-space`, - # e.g. 
https://github.com/brainglobe/brainglobe-space?tab=readme-ov-file#the-anatomicalspace-class # noqa E501 - - write_obj(mapped_points, triangles, mesh_dir / f"{mesh_id}.obj") - - # we need to think about the points' scale (should be in microns)! - - # create meshes for regions that don't have a premade mesh, e.g. the root? - # in a separate loop - + # create meshes_dict meshes_dict = dict() structures_with_mesh = [] From 64c1fd3173a5970bef52d8dbe70cccbbf5284e61 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Thu, 16 Jan 2025 16:21:59 +0000 Subject: [PATCH 27/34] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- .../atlas_generation/atlas_scripts/cuttlefish.py | 10 +--------- 1 file changed, 1 insertion(+), 9 deletions(-) diff --git a/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py b/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py index aeb1427d..43cf39c6 100644 --- a/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py +++ b/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py @@ -3,15 +3,9 @@ import csv import glob as glob from pathlib import Path -from typing import Tuple -import brainglobe_space as bg -import numpy as np import pooch from brainglobe_utils.IO.image import load -from numpy.typing import NDArray -from pygltflib import GLTF2 -from vedo import Mesh, write from brainglobe_atlasapi import utils from brainglobe_atlasapi.atlas_generation.wrapup import wrapup_atlas_from_data @@ -27,7 +21,6 @@ def hex_to_rgb(hex): return rgb - def create_atlas(working_dir, resolution): ATLAS_NAME = "columbia_cuttlefish" SPECIES = "Sepia bandensis" @@ -254,12 +247,11 @@ def create_atlas(working_dir, resolution): # df = pd.DataFrame(hierarchy) # df.to_csv('hierarchy_test.csv') - # write meshes atlas_dir_name = f"{ATLAS_NAME}_{resolution[0]}um_v1.{__version__}" mesh_dir = Path(working_dir) / ATLAS_NAME / atlas_dir_name / "meshes" mesh_dir.mkdir(exist_ok=True, parents=True) - + # create meshes_dict meshes_dict = dict() structures_with_mesh = [] From 0b74776d53b05f89e2bd325a8a9785f0707582db Mon Sep 17 00:00:00 2001 From: kjungwoo5 Date: Tue, 28 Jan 2025 18:09:42 +0000 Subject: [PATCH 28/34] Added mesh generation code to create our own meshes, rather than using the Cuttlebase meshes. 
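Side note, a minimal sketch (not applied in this patch): once region meshes are generated from the annotation image, a per-region bounds comparison gives an early sanity check, roughly in the spirit of the currently skipped validate_mesh_matches_image_extents validation. Variable names follow the script; the region id is a placeholder:

    import numpy as np
    from vedo import load

    region_id = 4                                     # placeholder region id
    mesh = load(str(mesh_dir / f"{region_id}.obj"))   # mesh_dir as in the script
    voxels = np.argwhere(readdata == region_id)       # labelled voxel indices

    print("mesh bounds:", mesh.bounds())
    print("voxel extent:", voxels.min(axis=0), voxels.max(axis=0))
    # the two should agree up to the voxel size / mesh scaling factor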
--- .../atlas_scripts/cuttlefish.py | 58 ++++++++++++++++++- 1 file changed, 56 insertions(+), 2 deletions(-) diff --git a/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py b/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py index aeb1427d..f56da7a1 100644 --- a/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py +++ b/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py @@ -1,5 +1,6 @@ __version__ = "0" +import time import csv import glob as glob from pathlib import Path @@ -12,9 +13,15 @@ from numpy.typing import NDArray from pygltflib import GLTF2 from vedo import Mesh, write +from rich.progress import track from brainglobe_atlasapi import utils +from brainglobe_atlasapi.atlas_generation.mesh_utils import ( + Region, + create_region_mesh, +) from brainglobe_atlasapi.atlas_generation.wrapup import wrapup_atlas_from_data +from brainglobe_atlasapi.structure_tree_util import get_structures_tree def hex_to_rgb(hex): @@ -38,7 +45,7 @@ def create_atlas(working_dir, resolution): ORIENTATION = "srp" ATLAS_PACKAGER = "Jung Woo Kim" ADDITIONAL_METADATA = {} - + ROOT_ID = 999 HIERARCHY_FILE_URL = "https://raw.githubusercontent.com/noisyneuron/cuttlebase-util/main/data/brain-hierarchy.csv" # noqa E501 TEMPLATE_URL = r"https://www.dropbox.com/scl/fo/fz8gnpt4xqduf0dnmgrat/ABflM0-v-b4_2WthGaeYM4s/Averaged%2C%20template%20brain/2023_FINAL-Cuttlebase_warped_template.nii.gz?rlkey=eklemeh57slu7v6j1gphqup4z&dl=1" # noqa E501 ANNOTATION_URL = r"https://www.dropbox.com/scl/fo/fz8gnpt4xqduf0dnmgrat/ALfSeAj81IM0v56bEeoTfUQ/Averaged%2C%20template%20brain/2023_FINAL-Cuttlebase_warped_template_lobe-labels.nii.seg.nrrd?rlkey=eklemeh57slu7v6j1gphqup4z&dl=1" # noqa E501 @@ -238,6 +245,13 @@ def create_atlas(working_dir, resolution): hierarchy[index]["acronym"] = missing_acronyms[n] n = n + 1 + # generate hierarchy tree + tree = get_structures_tree(hierarchy) + print( + f"Number of brain regions: {tree.size()}, " + f"max tree depth: {tree.depth()}" + ) + # import cuttlefish .nii file template_path = pooch.retrieve( TEMPLATE_URL, @@ -254,12 +268,52 @@ def create_atlas(working_dir, resolution): # df = pd.DataFrame(hierarchy) # df.to_csv('hierarchy_test.csv') + # generate binary mask for mesh creation + labels = np.unique(readdata).astype(np.int_) + for key, node in tree.nodes.items(): + if key in labels: + is_label = True + else: + is_label = False + + node.data = Region(is_label) # write meshes atlas_dir_name = f"{ATLAS_NAME}_{resolution[0]}um_v1.{__version__}" mesh_dir = Path(working_dir) / ATLAS_NAME / atlas_dir_name / "meshes" mesh_dir.mkdir(exist_ok=True, parents=True) + closing_n_iters = 2 + decimate_fraction = 0.3 + smooth = True + + start = time.time() + + for node in track( + tree.nodes.values(), + total=tree.size(), + description="Creating meshes", + ): + create_region_mesh( + ( + mesh_dir, + node, + tree, + labels, + readdata, + ROOT_ID, + closing_n_iters, + decimate_fraction, + smooth, + ) + ) + + print( + "Finished mesh extraction in : ", + round((time.time() - start) / 60, 2), + " minutes", + ) + # create meshes_dict meshes_dict = dict() structures_with_mesh = [] @@ -295,7 +349,7 @@ def create_atlas(working_dir, resolution): annotation_stack=readdata, structures_list=hierarchy, meshes_dict=meshes_dict, - scale_meshes=False, + scale_meshes=True, working_dir=working_dir, hemispheres_stack=None, cleanup_files=False, From d7e5e3ca47b549a3220c6e65e428a442b8e4055d Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" 
<66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 28 Jan 2025 18:11:19 +0000 Subject: [PATCH 29/34] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- .../atlas_generation/atlas_scripts/cuttlefish.py | 13 +++++-------- 1 file changed, 5 insertions(+), 8 deletions(-) diff --git a/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py b/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py index 35ff9669..5383372f 100644 --- a/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py +++ b/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py @@ -1,15 +1,12 @@ __version__ = "0" -import time import csv import glob as glob +import time from pathlib import Path import pooch from brainglobe_utils.IO.image import load -from numpy.typing import NDArray -from pygltflib import GLTF2 -from vedo import Mesh, write from rich.progress import track from brainglobe_atlasapi import utils @@ -264,7 +261,7 @@ def create_atlas(working_dir, resolution): # df = pd.DataFrame(hierarchy) # df.to_csv('hierarchy_test.csv') - # generate binary mask for mesh creation + # generate binary mask for mesh creation labels = np.unique(readdata).astype(np.int_) for key, node in tree.nodes.items(): if key in labels: @@ -278,11 +275,11 @@ def create_atlas(working_dir, resolution): atlas_dir_name = f"{ATLAS_NAME}_{resolution[0]}um_v1.{__version__}" mesh_dir = Path(working_dir) / ATLAS_NAME / atlas_dir_name / "meshes" mesh_dir.mkdir(exist_ok=True, parents=True) - + closing_n_iters = 2 decimate_fraction = 0.3 smooth = True - + start = time.time() for node in track( @@ -309,7 +306,7 @@ def create_atlas(working_dir, resolution): round((time.time() - start) / 60, 2), " minutes", ) - + # create meshes_dict meshes_dict = dict() structures_with_mesh = [] From 8f5f5f7bcc0e78f7f800505fe4afe7984f9c5b19 Mon Sep 17 00:00:00 2001 From: kjungwoo5 Date: Fri, 31 Jan 2025 18:44:02 +0000 Subject: [PATCH 30/34] Removed temporary validation script, and cleaned up comments on atlas generation script --- .../atlas_scripts/cuttlefish.py | 24 ++--- validation.py | 96 ------------------- 2 files changed, 8 insertions(+), 112 deletions(-) delete mode 100644 validation.py diff --git a/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py b/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py index 35ff9669..f1aa0497 100644 --- a/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py +++ b/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py @@ -1,10 +1,12 @@ __version__ = "0" -import time import csv import glob as glob +import time from pathlib import Path +import nrrd +import numpy as np import pooch from brainglobe_utils.IO.image import load from numpy.typing import NDArray @@ -65,8 +67,6 @@ def create_atlas(working_dir, resolution): progressbar=True, ) - import nrrd - # process brain annotation file. There are a total of 70 segments. 
print("Processing brain annotations:") readdata, header = nrrd.read(annotation_path) @@ -92,11 +92,7 @@ def create_atlas(working_dir, resolution): int(255 * x) for x in mapping[index]["color"] ] - # print(mapping) - # df = pd.DataFrame(mapping) - # df.to_csv('mappingtest.csv') - - # create dictionaries + # create dictionaries for regions hierarchy print("Creating structure tree") with open( hierarchy_path, mode="r", encoding="utf-8-sig" @@ -162,7 +158,7 @@ def create_atlas(working_dir, resolution): "name": "root", "acronym": "root", "structure_id_path": [999], - "id": 999, + "id": ROOT_ID, } ) @@ -259,12 +255,7 @@ def create_atlas(working_dir, resolution): print("Processing brain template:") brain_template = load.load_nii(template_path, as_array=True) - # check the transformed version of the hierarchy.csv file - # print(hierarchy) - # df = pd.DataFrame(hierarchy) - # df.to_csv('hierarchy_test.csv') - - # generate binary mask for mesh creation + # generate binary mask for mesh creation labels = np.unique(readdata).astype(np.int_) for key, node in tree.nodes.items(): if key in labels: @@ -279,6 +270,7 @@ def create_atlas(working_dir, resolution): mesh_dir = Path(working_dir) / ATLAS_NAME / atlas_dir_name / "meshes" mesh_dir.mkdir(exist_ok=True, parents=True) + # define smoothing information for meshes closing_n_iters = 2 decimate_fraction = 0.3 smooth = True @@ -340,7 +332,7 @@ def create_atlas(working_dir, resolution): species=SPECIES, resolution=resolution, orientation=ORIENTATION, - root_id=999, + root_id=ROOT_ID, reference_stack=brain_template, annotation_stack=readdata, structures_list=hierarchy, diff --git a/validation.py b/validation.py deleted file mode 100644 index ac5e3004..00000000 --- a/validation.py +++ /dev/null @@ -1,96 +0,0 @@ -from pathlib import Path - -import napari -from brainrender_napari.napari_atlas_representation import ( - NapariAtlasRepresentation, -) -from napari.viewer import Viewer - -from brainglobe_atlasapi import BrainGlobeAtlas -from brainglobe_atlasapi.atlas_generation.validate_atlases import ( - catch_missing_mesh_files, - catch_missing_structures, - open_for_visual_check, - validate_additional_references, - validate_atlas_files, - validate_checksum, - validate_image_dimensions, -) - -all_validation_functions = [ - validate_atlas_files, - # validate_mesh_matches_image_extents, - open_for_visual_check, - validate_checksum, - validate_image_dimensions, - validate_additional_references, - catch_missing_mesh_files, - catch_missing_structures, -] - - -# adapt this code block for newly packaged atlases -brainglobe_dir = Path.home() / ".brainglobe/" -working_dir = Path.home() / "brainglobe_workingdir/" -atlas_name = "columbia_cuttlefish" -resolution = 50 -minor_version = 0 - -# nothing below this needs changing - -# make sure we have the latest packaged version in .brainglobe -# by replacing it with the working_dir version if needed -# atlas_name_with_version = f"{atlas_name}_{resolution}um_v1.{minor_version}" -# source_dir = working_dir / atlas_name / atlas_name_with_version -# destination_dir = brainglobe_dir / atlas_name_with_version -# destination_dir = working_dir / atlas_name_with_version -# if destination_dir.exists() and destination_dir.is_dir(): -# shutil.rmtree(destination_dir) -# assert source_dir.exists() -# if source_dir.exists(): -# shutil.copytree(source_dir, destination_dir) -# assert destination_dir.exists() - -# run validation functions on the new atlas -atlas = BrainGlobeAtlas(f"{atlas_name}_{resolution}um") -validation_results = {atlas_name: 
[]} - -for i, validation_function in enumerate(all_validation_functions): - try: - validation_function(atlas) - validation_results[atlas_name].append( - (validation_function.__name__, None, str("Pass")) - ) - except AssertionError as error: - validation_results[atlas_name].append( - (validation_function.__name__, str(error), str("Fail")) - ) - -# print validation results and open napari for a visual check -# in napari, we should see three layers: -# - the annotation -# - the reference image (visibility turned off by default) -# - the root mesh - -failed_validations = [ - (result[0], result[1]) - for result in validation_results[atlas_name] - if result[2] == "Fail" -] -if failed_validations: - print("Failed validations:") - for failed in failed_validations: - print(failed) -else: - print(f"{atlas_name} is a valid atlas") - -viewer = Viewer() -viewer.dims.ndisplay = 3 -napari_atlas = NapariAtlasRepresentation(atlas, viewer) -napari_atlas.add_structure_to_viewer("AAB") -napari_atlas.add_structure_to_viewer("V") -napari_atlas.add_structure_to_viewer("Or") -napari_atlas.add_structure_to_viewer("BPCl") -napari_atlas.add_to_viewer() - -napari.run() From 7d9fac562aeb102d250c2f41db1ecbd9b16fd095 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Fri, 31 Jan 2025 18:44:53 +0000 Subject: [PATCH 31/34] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- .../atlas_generation/atlas_scripts/cuttlefish.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py b/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py index 214dacc6..ac448605 100644 --- a/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py +++ b/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py @@ -3,10 +3,9 @@ import csv import glob as glob import time -import time from pathlib import Path -import nrrd +import nrrd import numpy as np import pooch from brainglobe_utils.IO.image import load @@ -268,7 +267,7 @@ def create_atlas(working_dir, resolution): atlas_dir_name = f"{ATLAS_NAME}_{resolution[0]}um_v1.{__version__}" mesh_dir = Path(working_dir) / ATLAS_NAME / atlas_dir_name / "meshes" mesh_dir.mkdir(exist_ok=True, parents=True) - + # define smoothing information for meshes closing_n_iters = 2 decimate_fraction = 0.3 From 3051ae370c71f11cf02f9ad17f6010430f35c377 Mon Sep 17 00:00:00 2001 From: kjungwoo5 Date: Tue, 4 Feb 2025 16:22:20 +0000 Subject: [PATCH 32/34] Removed mesh url, and added code to rescale template image to uint16 --- .../atlas_generation/atlas_scripts/cuttlefish.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py b/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py index 214dacc6..59e6efb0 100644 --- a/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py +++ b/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py @@ -45,7 +45,6 @@ def create_atlas(working_dir, resolution): HIERARCHY_FILE_URL = "https://raw.githubusercontent.com/noisyneuron/cuttlebase-util/main/data/brain-hierarchy.csv" # noqa E501 TEMPLATE_URL = r"https://www.dropbox.com/scl/fo/fz8gnpt4xqduf0dnmgrat/ABflM0-v-b4_2WthGaeYM4s/Averaged%2C%20template%20brain/2023_FINAL-Cuttlebase_warped_template.nii.gz?rlkey=eklemeh57slu7v6j1gphqup4z&dl=1" # noqa E501 ANNOTATION_URL = 
r"https://www.dropbox.com/scl/fo/fz8gnpt4xqduf0dnmgrat/ALfSeAj81IM0v56bEeoTfUQ/Averaged%2C%20template%20brain/2023_FINAL-Cuttlebase_warped_template_lobe-labels.nii.seg.nrrd?rlkey=eklemeh57slu7v6j1gphqup4z&dl=1" # noqa E501 - MESH_URL = r"https://www.cuttlebase.org/assets/models/cuttlefish_brain.glb" download_dir_path = working_dir / "downloads" download_dir_path.mkdir(exist_ok=True) @@ -252,6 +251,12 @@ def create_atlas(working_dir, resolution): # process brain template MRI file print("Processing brain template:") brain_template = load.load_nii(template_path, as_array=True) + dmin = np.min(brain_template) + dmax = np.max(brain_template) + drange = dmax - dmin + dscale = (2**16 - 1) / drange + brain_template = (brain_template - dmin) * dscale + brain_template = brain_template.astype(np.uint16) # generate binary mask for mesh creation # generate binary mask for mesh creation From 5e1bf8e2e63243a8b5c357090fc97b91f8ac042e Mon Sep 17 00:00:00 2001 From: Harry Carey Date: Wed, 5 Feb 2025 08:49:14 +0100 Subject: [PATCH 33/34] rename script to match api name --- .../atlas_scripts/{cuttlefish.py => columbia_cuttlefish.py} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename brainglobe_atlasapi/atlas_generation/atlas_scripts/{cuttlefish.py => columbia_cuttlefish.py} (100%) diff --git a/brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py b/brainglobe_atlasapi/atlas_generation/atlas_scripts/columbia_cuttlefish.py similarity index 100% rename from brainglobe_atlasapi/atlas_generation/atlas_scripts/cuttlefish.py rename to brainglobe_atlasapi/atlas_generation/atlas_scripts/columbia_cuttlefish.py From 8cf0d381549eb65dbfae38feca69e9005214dcab Mon Sep 17 00:00:00 2001 From: alessandrofelder Date: Mon, 24 Feb 2025 15:35:06 +0000 Subject: [PATCH 34/34] symmetrise annotations works up to regions 71,72 --- .../atlas_scripts/columbia_cuttlefish.py | 29 +++++++++---------- 1 file changed, 13 insertions(+), 16 deletions(-) diff --git a/brainglobe_atlasapi/atlas_generation/atlas_scripts/columbia_cuttlefish.py b/brainglobe_atlasapi/atlas_generation/atlas_scripts/columbia_cuttlefish.py index 66a872ba..dc78ad3e 100644 --- a/brainglobe_atlasapi/atlas_generation/atlas_scripts/columbia_cuttlefish.py +++ b/brainglobe_atlasapi/atlas_generation/atlas_scripts/columbia_cuttlefish.py @@ -65,7 +65,9 @@ def create_atlas(working_dir, resolution): # process brain annotation file. There are a total of 70 segments. print("Processing brain annotations:") - readdata, header = nrrd.read(annotation_path) + annotations, header = nrrd.read(annotation_path) + half_lr = annotations.shape[1] // 2 + annotations[:, :half_lr, :] = np.flip(annotations[:, half_lr:, :], axis=1) # Extract annotation mapping information from nrrd headers, # to be applied to hierarchy file later. 
@@ -102,15 +104,9 @@ def create_atlas(working_dir, resolution): for row in cuttlefish_dict_reader: if row["hasSides"] == "Y": leftSide = dict(row) - leftSide["abbreviation"] = leftSide["abbreviation"] + "l" - leftSide["name"] = leftSide["name"] + " (left)" - - rightSide = dict(row) - rightSide["abbreviation"] = rightSide["abbreviation"] + "r" - rightSide["name"] = rightSide["name"] + " (right)" - + leftSide["abbreviation"] = leftSide["abbreviation"] + leftSide["name"] = leftSide["name"] hierarchy.append(leftSide) - hierarchy.append(rightSide) else: hierarchy.append(row) @@ -143,10 +139,11 @@ def create_atlas(working_dir, resolution): elif hierarchy[i]["acronym"] == "IB": hierarchy[i]["id"] = 72 - # remove erroneous key for the VS region - # (error due to commas being included in the 'function' column) - hierarchy[-3].pop(None) - hierarchy[-4].pop(None) + # manually fix visceral nerve region + visceral_nerves = hierarchy[55] + for s in visceral_nerves[None]: + visceral_nerves["name"] += f", {s}" + visceral_nerves.pop(None) # add the 'root' structure hierarchy.append( @@ -259,7 +256,7 @@ def create_atlas(working_dir, resolution): # generate binary mask for mesh creation # generate binary mask for mesh creation - labels = np.unique(readdata).astype(np.int_) + labels = np.unique(annotations).astype(np.int_) for key, node in tree.nodes.items(): if key in labels: is_label = True @@ -291,7 +288,7 @@ def create_atlas(working_dir, resolution): node, tree, labels, - readdata, + annotations, ROOT_ID, closing_n_iters, decimate_fraction, @@ -337,7 +334,7 @@ def create_atlas(working_dir, resolution): orientation=ORIENTATION, root_id=ROOT_ID, reference_stack=brain_template, - annotation_stack=readdata, + annotation_stack=annotations, structures_list=hierarchy, meshes_dict=meshes_dict, scale_meshes=True,