diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 967c14d..521af38 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -1,6 +1,13 @@ Changelog ========= +Version 0.2.1 +------------- +* The following *cell-density* sub-commands can now optionally take a ``--group-ids-config-path``: + *cell-density*, *glia-cell-densities*, *inhibitory-and-excitatory-neuron-densities*, *fit-average-densities* + + Otherwise they use the default config for the AIBS atlas. + Version 0.2.0 ------------- diff --git a/atlas_densities/app/cell_densities.py b/atlas_densities/app/cell_densities.py index 2bec3d4..019145e 100644 --- a/atlas_densities/app/cell_densities.py +++ b/atlas_densities/app/cell_densities.py @@ -54,12 +54,18 @@ common_atlas_options, log_args, set_verbose, + verbose_option, ) from atlas_commons.typing import FloatArray from voxcell import RegionMap, VoxelData # type: ignore from atlas_densities.app.utils import AD_PATH, DATA_PATH -from atlas_densities.densities import excitatory_inhibitory_splitting +from atlas_densities.densities import ( + excitatory_inhibitory_splitting, + inhibitory_neuron_densities_optimization, + refined_inhibitory_neuron_densities, + utils, +) from atlas_densities.densities.cell_counts import ( extract_inhibitory_neurons_dataframe, glia_cell_counts, @@ -72,17 +78,11 @@ ) from atlas_densities.densities.fitting import linear_fitting from atlas_densities.densities.glia_densities import compute_glia_densities -from atlas_densities.densities.inhibitory_neuron_densities_optimization import ( - create_inhibitory_neuron_densities as linprog, -) from atlas_densities.densities.inhibitory_neuron_density import compute_inhibitory_neuron_density from atlas_densities.densities.measurement_to_density import ( measurement_to_average_density, remove_non_density_measurements, ) -from atlas_densities.densities.refined_inhibitory_neuron_densities import ( - create_inhibitory_neuron_densities as keep_proportions, -) from atlas_densities.exceptions import 
AtlasDensitiesError EXCITATORY_SPLIT_CORTEX_ALL_TO_EXC_MTYPES = ( @@ -95,8 +95,8 @@ LINPROG_PATH = "doc/source/bbpp82_628_linprog.pdf" ALGORITHMS: Dict[str, Callable] = { - "keep-proportions": keep_proportions, - "linprog": linprog, + "keep-proportions": refined_inhibitory_neuron_densities.create_inhibitory_neuron_densities, + "linprog": inhibitory_neuron_densities_optimization.create_inhibitory_neuron_densities, } L = logging.getLogger(__name__) @@ -155,7 +155,7 @@ def _get_voxel_volume_in_mm3(voxel_data: "VoxelData") -> float: @click.group() -@click.option("-v", "--verbose", count=True) +@verbose_option def app(verbose): """Run the cell densities CLI""" set_verbose(L, verbose) @@ -177,13 +177,14 @@ def app(verbose): "A voxel value is a number of cells per mm^3", ) @click.option( - "--root-region-name", - type=str, - default="root", - help="Name of the region in the hierarchy", + "--group-ids-config-path", + type=EXISTING_FILE_PATH, + default=utils.GROUP_IDS_PATH, + help="Path to density groups ids config", + show_default=True, ) @log_args(L) -def cell_density(annotation_path, hierarchy_path, nissl_path, output_path, root_region_name): +def cell_density(annotation_path, hierarchy_path, nissl_path, output_path, group_ids_config_path): """Compute and save the overall mouse brain cell density. 
The input Nissl stain volume of AIBS is turned into an actual density field complying with @@ -210,13 +211,14 @@ def cell_density(annotation_path, hierarchy_path, nissl_path, output_path, root_ assert_properties([annotation, nissl]) region_map = RegionMap.load_json(hierarchy_path) + group_ids_config = utils.load_json(group_ids_config_path) overall_cell_density = compute_cell_density( region_map, annotation.raw, _get_voxel_volume_in_mm3(annotation), nissl.raw, - root_region_name=root_region_name, + group_ids_config=group_ids_config, ) nissl.with_data(overall_cell_density).save_nrrd(output_path) @@ -268,6 +270,13 @@ def cell_density(annotation_path, hierarchy_path, nissl_path, output_path, root_ help="Path to the directory where to write the output cell density nrrd files." " It will be created if it doesn't exist already.", ) +@click.option( + "--group-ids-config-path", + type=EXISTING_FILE_PATH, + default=utils.GROUP_IDS_PATH, + help="Path to density groups ids config", + show_default=True, +) @log_args(L) def glia_cell_densities( annotation_path, @@ -279,6 +288,7 @@ def glia_cell_densities( microglia_density_path, glia_proportions_path, output_dir, + group_ids_config_path, ): # pylint: disable=too-many-arguments, too-many-locals """Compute and save the glia cell densities. 
@@ -338,20 +348,20 @@ def glia_cell_densities( "microglia": VoxelData.load_nrrd(microglia_density_path), } - atlases = list(glia_densities.values()) - atlases += [annotation, overall_cell_density] + atlases = list(glia_densities.values()) + [annotation, overall_cell_density] L.info("Checking input files consistency ...") assert_properties(atlases) L.info("Loading hierarchy ...") region_map = RegionMap.load_json(hierarchy_path) - with open(glia_proportions_path, "r", encoding="utf-8") as file_: - glia_proportions = json.load(file_) + glia_proportions = utils.load_json(glia_proportions_path) glia_densities = { - glia_cell_type: voxel_data.raw for (glia_cell_type, voxel_data) in glia_densities.items() + glia_cell_type: voxel_data.raw for glia_cell_type, voxel_data in glia_densities.items() } + group_ids_config = utils.load_json(group_ids_config_path) + L.info("Compute volumetric glia densities: started") glia_densities = compute_glia_densities( region_map, @@ -362,6 +372,7 @@ def glia_cell_densities( overall_cell_density.raw, glia_proportions, copy=False, + group_ids_config=group_ids_config, ) if not Path(output_dir).exists(): @@ -424,10 +435,11 @@ def glia_cell_densities( " It will be created if it doesn't exist already.", ) @click.option( - "--root-region-name", - type=str, - default="root", - help="Name of the region in the hierarchy", + "--group-ids-config-path", + type=EXISTING_FILE_PATH, + default=utils.GROUP_IDS_PATH, + help="Path to density groups ids config", + show_default=True, ) @log_args(L) def inhibitory_and_excitatory_neuron_densities( @@ -438,7 +450,7 @@ def inhibitory_and_excitatory_neuron_densities( neuron_density_path, inhibitory_neuron_counts_path, output_dir, - root_region_name, + group_ids_config_path, ): # pylint: disable=too-many-arguments """Compute and save the inhibitory and excitatory neuron densities. 
@@ -484,6 +496,7 @@ def inhibitory_and_excitatory_neuron_densities( region_map = RegionMap.load_json(hierarchy_path) inhibitory_df = extract_inhibitory_neurons_dataframe(inhibitory_neuron_counts_path) + group_ids_config = utils.load_json(group_ids_config_path) inhibitory_neuron_density = compute_inhibitory_neuron_density( region_map, annotation.raw, @@ -492,7 +505,7 @@ def inhibitory_and_excitatory_neuron_densities( VoxelData.load_nrrd(nrn1_path).raw, neuron_density.raw, inhibitory_data=inhibitory_data(inhibitory_df), - root_region_name=root_region_name, + group_ids_config=group_ids_config, ) if not Path(output_dir).exists(): @@ -775,6 +788,13 @@ def measurements_to_average_densities( help="Path to the json file containing the fitting coefficients and standard deviations" "for each region group and each cell type.", ) +@click.option( + "--group-ids-config-path", + type=EXISTING_FILE_PATH, + default=utils.GROUP_IDS_PATH, + help="Path to density groups ids config", + show_default=True, +) @log_args(L) def fit_average_densities( hierarchy_path, @@ -786,6 +806,7 @@ def fit_average_densities( homogenous_regions_path, fitted_densities_output_path, fitting_maps_output_path, + group_ids_config_path, ): # pylint: disable=too-many-arguments, too-many-locals """ Estimate average cell densities of brain regions in `hierarchy_path` for the cell types @@ -835,6 +856,7 @@ def fit_average_densities( - some regions can have NaN density values for one or more cell types because they are not covered by the selected slices of the volumetric gene marker intensities. 
""" + Path(fitted_densities_output_path).parent.mkdir(parents=True, exist_ok=True) L.info("Loading annotation ...") annotation = VoxelData.load_nrrd(annotation_path) @@ -857,17 +879,14 @@ def fit_average_densities( voxel_data += list(gene_voxeldata.values()) assert_properties(voxel_data) - with open(config["realignedSlicesPath"], "r", encoding="utf-8") as file_: - slices = json.load(file_) - - with open(config["cellDensityStandardDeviationsPath"], "r", encoding="utf-8") as file_: - cell_density_stddev = json.load(file_) - cell_density_stddev = { - # Use the AIBS name attribute as key (this is a unique identifier in 1.json) - # (Ex: former key "|root|Basic cell groups and regions|Cerebrum" -> new key: "Cerebrum") - name.split("|")[-1]: stddev - for (name, stddev) in cell_density_stddev.items() - } + slices = utils.load_json(config["realignedSlicesPath"]) + cell_density_stddev = utils.load_json(config["cellDensityStandardDeviationsPath"]) + cell_density_stddev = { + # Use the AIBS name attribute as key (this is a unique identifier in 1.json) + # (Ex: former key "|root|Basic cell groups and regions|Cerebrum" -> new key: "Cerebrum") + name.split("|")[-1]: stddev + for (name, stddev) in cell_density_stddev.items() + } gene_marker_volumes = { gene: { @@ -877,6 +896,8 @@ def fit_average_densities( for (gene, gene_data) in gene_voxeldata.items() } + group_ids_config = utils.load_json(group_ids_config_path) + L.info("Loading average densities dataframe ...") average_densities_df = pd.read_csv(average_densities_path) homogenous_regions_df = pd.read_csv(homogenous_regions_path) @@ -891,6 +912,7 @@ def fit_average_densities( homogenous_regions_df, cell_density_stddev, region_name=region_name, + group_ids_config=group_ids_config, ) # Turn index into column so as to ease off the save and load operations on csv files @@ -985,14 +1007,15 @@ def inhibitory_neuron_densities( neuron_density = VoxelData.load_nrrd(neuron_density_path) if np.any(neuron_density.raw < 0.0): raise 
AtlasDensitiesError(f"Negative density value found in {neuron_density_path}.") + L.info("Loading hierarchy ...") - with open(hierarchy_path, "r", encoding="utf-8") as file_: - hierarchy = json.load(file_) - if "msg" in hierarchy: - L.warning("Top-most object contains 'msg'; assuming AIBS JSON layout") - if len(hierarchy["msg"]) > 1: - raise AtlasDensitiesError("Unexpected JSON layout (more than one 'msg' child)") - hierarchy = hierarchy["msg"][0] + + hierarchy = utils.load_json(hierarchy_path) + if "msg" in hierarchy: + L.warning("Top-most object contains 'msg'; assuming AIBS JSON layout") + if len(hierarchy["msg"]) > 1: + raise AtlasDensitiesError("Unexpected JSON layout (more than one 'msg' child)") + hierarchy = hierarchy["msg"][0] # Consistency check L.info("Checking consistency ...") diff --git a/atlas_densities/app/combination.py b/atlas_densities/app/combination.py index a04f907..75b4461 100644 --- a/atlas_densities/app/combination.py +++ b/atlas_densities/app/combination.py @@ -10,7 +10,13 @@ import pandas as pd import voxcell # type: ignore import yaml # type: ignore -from atlas_commons.app_utils import EXISTING_FILE_PATH, common_atlas_options, log_args, set_verbose +from atlas_commons.app_utils import ( + EXISTING_FILE_PATH, + common_atlas_options, + log_args, + set_verbose, + verbose_option, +) from atlas_densities.combination import annotations_combinator, markers_combinator @@ -18,7 +24,7 @@ @click.group() -@click.option("-v", "--verbose", count=True) +@verbose_option def app(verbose): """Run the combination CLI""" set_verbose(L, verbose) diff --git a/atlas_densities/app/data/metadata/README.txt b/atlas_densities/app/data/metadata/README.txt new file mode 100644 index 0000000..dc8d151 --- /dev/null +++ b/atlas_densities/app/data/metadata/README.txt @@ -0,0 +1,47 @@ +The group_ids.json has the following layout: + [ + { + "name": "Cerebellum group", + "UNION": [ + { "name": "Cerebellum", "with_descendants": true }, + { "name": "arbor vitae", 
"with_descendants": true } + ] + }, + { + "name": "Molecular layer", + "INTERSECT": [ + "!Cerebellum group", + { "name": "@.*molecular layer", "with_descendants": true } + ] + }, + { + "name": "Rest", + "REMOVE": [ + { "name": "root", "with_descendants": true }, + { "UNION": [ "!Cerebellum group" ] } + ] + } + [....] + ] + +The config is an ordered list of stanzas; each stanza is a dictionary with a "name" key, whose value is the Group Name. +This is followed by a key with one of the following keywords, and a list of clauses: + * `UNION` keyword creates a union of the ids found by the list of clauses. + * `INTERSECT` keyword creates an intersection of the ids found by the list of clauses. + * `REMOVE` keyword removes the ids in the second clause from those of the first. + + +A clause is formed of a dictionary of the form: + {"$attribute_name": "$attribute_value", "with_descendants": true} +The attribute name is used for the atlas lookup, things like "name" or "acronym" are valid. + +Finally, one can refer to a previous stanza by preceding it with a `!`. + +The current `group_ids.json` was discussed here: + https://github.com/BlueBrain/atlas-densities/pull/51#issuecomment-1748445813 + In short, in older versions of the annotations, fibers and main regions were in separate files (ccfv2 / ccfbbp). + In the main regions, the space allocated to the fibers was annotated to Basic cell groups and regions. + For some weird reasons, the fibers do not take the entire space allocated for them. + This means that for most of the voxels annotated to Basic cell groups and regions, they correspond to fibers. + I say most of the voxels because there are also voxels at the frontier between two main regions (e.g. Cerebellum) that were not annotated to their closest leaf region. + These voxels are gone in ccfv3. 
diff --git a/atlas_densities/app/data/metadata/ca1_metadata.json b/atlas_densities/app/data/metadata/ca1_metadata.json index fbbe590..2124fd1 100644 --- a/atlas_densities/app/data/metadata/ca1_metadata.json +++ b/atlas_densities/app/data/metadata/ca1_metadata.json @@ -13,3 +13,4 @@ "with_descendants": true } } + diff --git a/atlas_densities/app/data/metadata/group_ids.json b/atlas_densities/app/data/metadata/group_ids.json new file mode 100644 index 0000000..4d8d918 --- /dev/null +++ b/atlas_densities/app/data/metadata/group_ids.json @@ -0,0 +1,52 @@ +[ + { + "name": "Cerebellum group", + "UNION": [ + { "name": "Cerebellum", "with_descendants": true }, + { "name": "arbor vitae", "with_descendants": true } + ] + }, + { + "name": "Isocortex group", + "UNION": [ + { "name": "Isocortex", "with_descendants": true }, + { "name": "Entorhinal area", "with_descendants": true }, + { "name": "Piriform area", "with_descendants": true } ] + }, + { + "name": "Fiber tracts group", + "UNION": [ + { "name": "fiber tracts", "with_descendants": true }, + { "name": "grooves", "with_descendants": true }, + { "name": "ventricular systems", "with_descendants": true }, + { "name": "Basic cell groups and regions", "with_descendants": false }, + { "name": "Cerebellum", "with_descendants": false } + ] + }, + { + "name": "Purkinje layer", + "UNION": [ + { "name": "@.*Purkinje layer", "with_descendants": true } + ] + }, + { + "name": "Cerebellar cortex", + "UNION": [ + { "name": "Cerebellar cortex", "with_descendants": true } + ] + }, + { + "name": "Molecular layer", + "INTERSECT": [ + "!Cerebellar cortex", + { "name": "@.*molecular layer", "with_descendants": true } + ] + }, + { + "name": "Rest", + "REMOVE": [ + { "name": "root", "with_descendants": true }, + { "UNION": [ "!Cerebellum group", "!Isocortex group" ] } + ] + } +] diff --git a/atlas_densities/app/mtype_densities.py b/atlas_densities/app/mtype_densities.py index e45f157..5aa5dd2 100644 --- a/atlas_densities/app/mtype_densities.py 
+++ b/atlas_densities/app/mtype_densities.py @@ -39,6 +39,7 @@ common_atlas_options, log_args, set_verbose, + verbose_option, ) from voxcell import RegionMap, VoxelData # type: ignore @@ -66,7 +67,7 @@ @click.group() -@click.option("-v", "--verbose", count=True) +@verbose_option def app(verbose): """Run the mtype densities CLI""" set_verbose(L, verbose) diff --git a/atlas_densities/densities/cell_density.py b/atlas_densities/densities/cell_density.py index dd1ebc1..7c806f4 100644 --- a/atlas_densities/densities/cell_density.py +++ b/atlas_densities/densities/cell_density.py @@ -59,7 +59,7 @@ def compute_cell_density( annotation: AnnotationT, voxel_volume: float, nissl: FloatArray, - root_region_name: str, + group_ids_config: dict, ) -> FloatArray: """ Compute the overall cell density based on Nissl staining and cell counts from literature. @@ -100,7 +100,7 @@ def compute_cell_density( nissl = utils.normalize_intensity(nissl, annotation, threshold_scale_factor=1.0, copy=False) nissl = utils.compensate_cell_overlap(nissl, annotation, gaussian_filter_stdv=-1.0, copy=False) - group_ids = utils.get_group_ids(region_map, root_region_name=root_region_name) + group_ids = utils.get_group_ids(region_map, config=group_ids_config) region_masks = utils.get_region_masks(group_ids, annotation) fix_purkinje_layer_intensity(group_ids, annotation, region_masks, nissl) non_zero_nissl = nissl > 0 diff --git a/atlas_densities/densities/fitting.py b/atlas_densities/densities/fitting.py index 5fd5289..1fa8dc6 100644 --- a/atlas_densities/densities/fitting.py +++ b/atlas_densities/densities/fitting.py @@ -527,25 +527,32 @@ def _check_average_densities_sanity(average_densities: pd.DataFrame) -> None: ) -def _get_group_names( - region_map: "RegionMap", cleanup_rest: bool = False, root_region_name: str | None = None -) -> dict[str, set[str]]: +def _get_group_names(region_map: "RegionMap", group_ids_config: dict) -> dict[str, set[str]]: """ Get AIBS names for regions in several region groups 
of interest. Args: region_map: object to navigate the mouse brain regions hierarchy (instantiated from AIBS 1.json). - cleanup_rest: (Optional) If True, the name of any ascendant region of the Cerebellum and - Isocortex groups are removed from the names of the Rest group. This makes sure that - the set of names of the Rest group is closed under taking descendants. + group_ids_config: mapping of regions to their constituent ids Returns: A dictionary whose keys are region group names and whose values are sets of brain region names. """ - group_ids = utils.get_group_ids(region_map, cleanup_rest, root_region_name) + group_ids = utils.get_group_ids(region_map, group_ids_config) + + # Make the Rest group stable under taking descendants + # The name of any ascendant region of the Cerebellum and + # Isocortex groups are removed from the names of the Rest group. This makes sure that + # the set of names of the Rest group is closed under taking descendants. + isocortex_id = region_map.find("Isocortex", attr="name").pop() + cerebellum_id = region_map.find("Cerebellum", attr="name").pop() + ascendant_ids = set(region_map.get(isocortex_id, attr="id", with_ascendants=True)) | set( + region_map.get(cerebellum_id, attr="id", with_ascendants=True) + ) + group_ids["Rest"] -= ascendant_ids return { group_name: {region_map.get(id_, attr="name") for id_ in group_ids[group_name]} @@ -562,6 +569,7 @@ def linear_fitting( # pylint: disable=too-many-arguments homogenous_regions: pd.DataFrame, cell_density_stddevs: Optional[dict[str, float]] = None, region_name: str = "root", + group_ids_config: dict | None = None, ) -> pd.DataFrame: """ Estimate the average densities of every region in `region_map` using a linear fitting @@ -600,6 +608,7 @@ def linear_fitting( # pylint: disable=too-many-arguments cell_density_stddevs: dict whose keys are brain regions names and whose values are standard deviations of average cell densities of the corresponding regions. 
region_name: (str) name of the root region of interest + group_ids_config: mapping of regions to their constituent ids Returns: tuple (densities, fitting_coefficients) @@ -610,6 +619,7 @@ def linear_fitting( # pylint: disable=too-many-arguments fitting_coefficients: dict returned by :fun:`atlas_densities.densities.fitting.compute_fitting_coefficients`. """ + assert group_ids_config is not None L.info("Checking input data frames sanity ...") _check_average_densities_sanity(average_densities) _check_homogenous_regions_sanity(homogenous_regions) @@ -647,7 +657,7 @@ def linear_fitting( # pylint: disable=too-many-arguments L.info("Getting group names ...") # We want group region names to be stable under taking descendants - groups = _get_group_names(region_map, cleanup_rest=True, root_region_name=region_name) + groups = _get_group_names(region_map, group_ids_config) L.info("Computing fitting coefficients ...") fitting_coefficients = compute_fitting_coefficients(groups, average_intensities, densities) diff --git a/atlas_densities/densities/glia_densities.py b/atlas_densities/densities/glia_densities.py index b9d4f37..0f3686c 100644 --- a/atlas_densities/densities/glia_densities.py +++ b/atlas_densities/densities/glia_densities.py @@ -19,6 +19,7 @@ def compute_glia_cell_counts_per_voxel( # pylint: disable=too-many-arguments glia_intensity: FloatArray, cell_counts_per_voxel: FloatArray, copy: bool = True, + group_ids_config: dict | None = None, ) -> FloatArray: """ Compute the overall glia cell counts per voxel using a prescribed total glia cell count and @@ -39,6 +40,7 @@ def compute_glia_cell_counts_per_voxel( # pylint: disable=too-many-arguments The overall cell density of the mouse brain expressed in number of cells per voxel. copy: If True, the input `glia_intensity` array is copied. Otherwise it is modified in-place and returned by the function. 
+ group_ids_config: mapping of regions to their constituent ids Returns: float array of shape (W, H, D) with non-negative entries. The overall glia cell counts per @@ -47,10 +49,13 @@ def compute_glia_cell_counts_per_voxel( # pylint: disable=too-many-arguments layer). """ - fiber_tracts_mask = np.isin(annotation, list(utils.get_fiber_tract_ids(region_map))) + assert group_ids_config is not None + fiber_tracts_mask = np.isin( + annotation, list(utils.get_fiber_tract_ids(region_map, group_ids_config)) + ) fiber_tracts_free_mask = np.isin( annotation, - list(utils.get_purkinje_layer_ids(region_map)), + list(utils.get_purkinje_layer_ids(region_map, group_ids_config)), ) return utils.constrain_cell_counts_per_voxel( @@ -72,6 +77,7 @@ def compute_glia_densities( # pylint: disable=too-many-arguments cell_density: FloatArray, glia_proportions: dict[str, str], copy: bool = False, + group_ids_config: dict | None = None, ) -> dict[str, FloatArray]: """ Compute the overall glia cell density as well as astrocyte, olgidendrocyte and microglia @@ -101,6 +107,7 @@ def compute_glia_densities( # pylint: disable=too-many-arguments which sums up to 1.0. copy: If True, the input `glia_intensities` arrays are copied. Otherwise they are modified in-place + group_ids_config: mapping of regions to their constituent ids Returns: A dict whose keys are glia cell types and whose values are float64 arrays of shape (W, H, D) @@ -119,6 +126,8 @@ def compute_glia_densities( # pylint: disable=too-many-arguments voxel volume. 
""" + assert group_ids_config is not None + glia_densities = glia_intensities.copy() # The algorithm constraining cell counts per voxel requires double precision for glia_type in glia_densities: @@ -142,6 +151,7 @@ def compute_glia_densities( # pylint: disable=too-many-arguments annotation, glia_densities["glia"], cell_density * voxel_volume, + group_ids_config=group_ids_config, ) placed_cells = np.zeros_like(glia_densities["glia"]) for glia_type in ["astrocyte", "oligodendrocyte"]: diff --git a/atlas_densities/densities/inhibitory_neuron_density.py b/atlas_densities/densities/inhibitory_neuron_density.py index 9b98931..5cbb71b 100644 --- a/atlas_densities/densities/inhibitory_neuron_density.py +++ b/atlas_densities/densities/inhibitory_neuron_density.py @@ -5,12 +5,7 @@ import numpy as np from atlas_commons.typing import AnnotationT, BoolArray, FloatArray -from atlas_densities.densities.utils import ( - compensate_cell_overlap, - constrain_cell_counts_per_voxel, - get_group_ids, - get_region_masks, -) +from atlas_densities.densities import utils from atlas_densities.exceptions import AtlasDensitiesError if TYPE_CHECKING: @@ -89,7 +84,7 @@ def compute_inhibitory_neuron_density( # pylint: disable=too-many-arguments neuron_density: FloatArray, inhibitory_proportion: Optional[float] = None, inhibitory_data: Optional[InhibitoryData] = None, - root_region_name: Optional[str] = None, + group_ids_config: Optional[dict] = None, ) -> FloatArray: """ Compute the inhibitory neuron density using a prescribed neuron count and the overall neuron @@ -116,7 +111,7 @@ def compute_inhibitory_neuron_density( # pylint: disable=too-many-arguments assigning the proportion of ihnibitory neurons in each group named by a key string. 'neuron_count': the total number of inhibitory neurons (float). Used only if `inhibitory_proportion` is None. 
- root_region_name(str): Name of the root region in the hierarchy + group_ids_config: mapping of regions to their constituent ids Returns: float64 array of shape (W, H, D) with non-negative entries. @@ -140,8 +135,9 @@ def compute_inhibitory_neuron_density( # pylint: disable=too-many-arguments "Either inhibitory_proportion or inhibitory_data should be provided" ". Both are None." ) - group_ids = get_group_ids(region_map, root_region_name=root_region_name) - inhibitory_data["region_masks"] = get_region_masks(group_ids, annotation) + assert group_ids_config is not None + group_ids = utils.get_group_ids(region_map, config=group_ids_config) + inhibitory_data["region_masks"] = utils.get_region_masks(group_ids, annotation) else: inhibitory_data = { "proportions": {"whole brain": inhibitory_proportion}, @@ -152,8 +148,8 @@ def compute_inhibitory_neuron_density( # pylint: disable=too-many-arguments } inhibitory_neuron_intensity = compute_inhibitory_neuron_intensity( - compensate_cell_overlap(gad1, annotation, gaussian_filter_stdv=1.0), - compensate_cell_overlap(nrn1, annotation, gaussian_filter_stdv=1.0), + utils.compensate_cell_overlap(gad1, annotation, gaussian_filter_stdv=1.0), + utils.compensate_cell_overlap(nrn1, annotation, gaussian_filter_stdv=1.0), inhibitory_data, ) @@ -199,7 +195,7 @@ def compute_inhibitory_neuron_density( # pylint: disable=too-many-arguments assert isinstance(inhibitory_data["neuron_count"], int) return ( - constrain_cell_counts_per_voxel( + utils.constrain_cell_counts_per_voxel( inhibitory_data["neuron_count"], inhibitory_neuron_intensity, neuron_density * voxel_volume, diff --git a/atlas_densities/densities/utils.py b/atlas_densities/densities/utils.py index f0421a9..bc6430b 100644 --- a/atlas_densities/densities/utils.py +++ b/atlas_densities/densities/utils.py @@ -1,6 +1,9 @@ """Utility functions for cell density computation.""" from __future__ import annotations +import json +from copy import deepcopy +from pathlib import Path from typing 
import TYPE_CHECKING from warnings import warn @@ -18,6 +21,16 @@ from voxcell import RegionMap # type: ignore +GROUP_IDS_PATH = Path(__file__).parent.parent / "app/data/metadata/group_ids.json" + + +def load_json(path): + """Load a json file and return its decoded contents""" + with open(path, "r", encoding="utf-8") as fd: + ret = json.load(fd) + return ret + + def normalize_intensity( marker_intensity: FloatArray, annotation: AnnotationT, @@ -308,35 +321,91 @@ def constrain_cell_counts_per_voxel( # pylint: disable=too-many-arguments, too- return cell_counts -def get_fiber_tract_ids(region_map: "RegionMap") -> set[int]: - """ +def get_fiber_tract_ids(region_map: "RegionMap", config: dict) -> set[int]: + """Ibid. + Args: region_map: object to navigate the mouse brain regions hierarchy + config: mapping of regions to their constituent ids """ - fiber_tracts_ids = ( - region_map.find("fiber tracts", attr="name", with_descendants=True) - | region_map.find("grooves", attr="name", with_descendants=True) - | region_map.find("ventricular systems", attr="name", with_descendants=True) - | region_map.find("Basic cell groups and regions", attr="name") - | region_map.find("Cerebellum", attr="name") - ) + fiber_tracts_ids = get_group_ids(region_map, config, _skip_check=True)["Fiber tracts group"] assert fiber_tracts_ids, "Missing ids in Fiber tracts" return fiber_tracts_ids -def get_purkinje_layer_ids(region_map: "RegionMap") -> set[int]: - """ +def get_purkinje_layer_ids(region_map: "RegionMap", config: dict) -> set[int]: + """Ibid. 
+ Args: region_map: object to navigate the mouse brain regions hierarchy + config: mapping of regions to their constituent ids """ - purkinje_layer_ids = region_map.find("@.*Purkinje layer", attr="name", with_descendants=True) + + purkinje_layer_ids = get_group_ids(region_map, config, _skip_check=True)["Purkinje layer"] assert purkinje_layer_ids, "Missing ids in Purkinje layer" return purkinje_layer_ids -def get_group_ids( - region_map: "RegionMap", cleanup_rest: bool = False, root_region_name: str | None = None -) -> dict[str, set[int]]: +UNION = "UNION" +INTERSECT = "INTERSECT" +REMOVE = "REMOVE" + + +def _interpret_region_groups(region_map, config): + # key: + # UNION - creates union of set of ids + # INTERSECT - creates intersection of ids + # REMOVE - removes ids + OPS = ( + UNION, + INTERSECT, + REMOVE, + ) + groups = {} + + def make_query(query): + if isinstance(query, dict) and any(op in query for op in OPS): + ids = materialize(query) + elif isinstance(query, dict): + query = dict(query) + with_descendants = query.pop("with_descendants") + assert len(query) == 1 + attr, value = next(iter(query.items())) + ids = region_map.find(value, attr=attr, with_descendants=with_descendants) + elif isinstance(query, str) and query[0] == "!": + ids = set(groups[query[1:]]) + else: + raise ValueError(f"unknown query: {query}") + return ids + + def materialize(node): + assert len(node) == 1 + op, queries = next(iter(node.items())) + assert op in OPS, f"{op} is not in {OPS}" + if op == UNION: + ids = set() + for query in queries: + ids |= make_query(query) + elif op == INTERSECT: + ids = make_query(queries[0]) + for query in queries[1:]: + ids &= make_query(query) + elif op == REMOVE: + ids = make_query(queries[0]) - make_query(queries[1]) + + return ids + + config = deepcopy(config) + for node in config: + name = node.pop("name") + assert name not in groups + groups[name] = materialize(node) + + return groups + + +def get_group_ids(region_map: "RegionMap", config: dict, 
_skip_check=False) -> dict[str, set[int]]: + """ Get AIBS structure ids for several region groups of interest. @@ -346,52 +415,15 @@ def get_group_ids( Args: region_map: object to navigate the mouse brain regions hierarchy (instantied from AIBS 1.json). - cleanup_rest: (Optional) If True, the id of any ascendant region of the Cerebellum and - Isocortex groups are removed from the ids of the Rest group. This makes sure that - the set of ids of the Rest group is closed under taking descendants. - + config: mapping of regions to their constituent ids Returns: A dictionary whose keys are region group names and whose values are sets of structure identifiers. """ - # pylint: disable=too-many-locals - cerebellum_group_ids = region_map.find( - "Cerebellum", attr="name", with_descendants=True - ) | region_map.find("arbor vitae", attr="name", with_descendants=True) - isocortex_group_ids = ( - region_map.find("Isocortex", attr="name", with_descendants=True) - | region_map.find("Entorhinal area", attr="name", with_descendants=True) - | region_map.find("Piriform area", attr="name", with_descendants=True) - ) - purkinje_layer_ids = get_purkinje_layer_ids(region_map) - fiber_tracts_ids = get_fiber_tract_ids(region_map) - cerebellar_cortex_ids = region_map.find("Cerebellar cortex", attr="name", with_descendants=True) - molecular_layer_ids = cerebellar_cortex_ids & region_map.find( - "@.*molecular layer", attr="name", with_descendants=True - ) - rest_ids = region_map.find(root_region_name, attr="name", with_descendants=True) - assert rest_ids, f"Did not find any ids in {root_region_name}" - rest_ids -= cerebellum_group_ids | isocortex_group_ids - - if cleanup_rest: - # Make the Rest group stable under taking descendants - isocortex_id = region_map.find("Isocortex", attr="name").pop() - cerebellum_id = region_map.find("Cerebellum", attr="name").pop() - ascendant_ids = set(region_map.get(isocortex_id, attr="id", with_ascendants=True)) - ascendant_ids |= 
set(region_map.get(cerebellum_id, attr="id", with_ascendants=True)) - rest_ids -= ascendant_ids - - ret = { - "Cerebellum group": cerebellum_group_ids, - "Isocortex group": isocortex_group_ids, - "Fiber tracts group": fiber_tracts_ids, - "Purkinje layer": purkinje_layer_ids, - "Molecular layer": molecular_layer_ids, - "Cerebellar cortex": cerebellar_cortex_ids, - "Rest": rest_ids, - } - for name, ids in ret.items(): - assert ids, f"Missing ids in {name}" + ret = _interpret_region_groups(region_map, config) + if not _skip_check: + for name, ids in ret.items(): + assert ids, f"Missing ids in {name}" return ret diff --git a/tests/app/test_cell_densities.py b/tests/app/test_cell_densities.py index 212999e..ddc894d 100644 --- a/tests/app/test_cell_densities.py +++ b/tests/app/test_cell_densities.py @@ -34,8 +34,8 @@ get_input_data as get_measurement_to_density_input_data, ) -TEST_PATH = Path(Path(__file__).parent.parent) -DATA_PATH = Path(TEST_PATH.parent, "atlas_densities", "app", "data") +TEST_PATH = Path(__file__).parent.parent +DATA_PATH = TEST_PATH.parent / "atlas_densities" / "app" / "data" MEASUREMENTS_PATH = DATA_PATH / "measurements" diff --git a/tests/app/test_combination.py b/tests/app/test_combination.py index 640cc8b..eddd708 100644 --- a/tests/app/test_combination.py +++ b/tests/app/test_combination.py @@ -9,10 +9,10 @@ import atlas_densities.app.combination as tested -TEST_PATH = Path(Path(__file__).parent.parent) +TEST_PATH = Path(__file__).parent.parent -def test_combine_markers(): +def test_combine_markers(tmp_path): expected_proportions = [4.0 / 23.0, 8.0 / 23.0, 11.0 / 23.0] input_ = { # Cerebellum ids: 512, 1143 @@ -98,20 +98,23 @@ def test_combine_markers(): ) voxel_dimensions = [25] * 3 runner = CliRunner() - with runner.isolated_filesystem(): + with runner.isolated_filesystem() as td: for name, array in input_.items(): VoxelData(array, voxel_dimensions=voxel_dimensions).save_nrrd(f"{name}.nrrd") result = runner.invoke( tested.app, [ + 
"--log-output-path", + str(td), "combine-markers", "--annotation-path", "annotation.nrrd", "--hierarchy-path", - str(Path(TEST_PATH, "1.json")), + str(TEST_PATH / "1.json"), "--config", - str(Path(TEST_PATH, "markers_config.yaml")), + str(TEST_PATH / "markers_config.yaml"), ], + catch_exceptions=False, ) assert result.exit_code == 0 with open("glia_proportions.json", encoding="utf-8") as file_: diff --git a/tests/app/test_mtype_densities.py b/tests/app/test_mtype_densities.py index e9d5f98..45610ed 100644 --- a/tests/app/test_mtype_densities.py +++ b/tests/app/test_mtype_densities.py @@ -3,7 +3,6 @@ import os from copy import deepcopy from pathlib import Path -from unittest.mock import patch import numpy as np import numpy.testing as npt @@ -26,10 +25,13 @@ ) -def get_result_from_profiles_(runner): +def get_result_from_profiles(runner, td): return runner.invoke( - tested.create_from_profile, + tested.app, [ + "--log-output-path", + str(td), + "create-from-profile", "--annotation-path", "annotation.nrrd", "--hierarchy-path", @@ -46,9 +48,9 @@ def get_result_from_profiles_(runner): ) -def test_mtype_densities_from_profiles(): +def test_mtype_densities_from_profiles(tmp_path): runner = CliRunner() - with runner.isolated_filesystem(): + with runner.isolated_filesystem(temp_dir=tmp_path) as td: create_excitatory_neuron_density().save_nrrd("excitatory_neuron_density.nrrd") create_inhibitory_neuron_density().save_nrrd("inhibitory_neuron_density.nrrd") data = create_slicer_data() @@ -68,7 +70,7 @@ def test_mtype_densities_from_profiles(): } yaml.dump(config, file_) - result = get_result_from_profiles_(runner) + result = get_result_from_profiles(runner, td) assert result.exit_code == 0 expected_cell_densities = create_expected_cell_densities() for mtype, expected_cell_density in expected_cell_densities.items(): @@ -86,15 +88,18 @@ def test_mtype_densities_from_profiles(): } yaml.dump(config, file_) - result = get_result_from_profiles_(runner) + result = 
get_result_from_profiles(runner, td) assert result.exit_code == 1 assert "neuron density file" in str(result.exception) -def get_result_from_probablity_map_(runner): +def get_result_from_probablity_map_(runner, td): return runner.invoke( - tested.create_from_probability_map, + tested.app, [ + "--log-output-path", + str(td), + "create-from-probability-map", "--annotation-path", "annotation.nrrd", "--hierarchy-path", @@ -135,8 +140,8 @@ def save_input_data_to_file(self): with open("hierarchy.json", "w", encoding="utf-8") as file: json.dump(self.data["hierarchy"], file) - self.data["probability_map01"].to_csv(f"probability_map01.csv", index=True) - self.data["probability_map02"].to_csv(f"probability_map02.csv", index=True) + self.data["probability_map01"].to_csv("probability_map01.csv", index=True) + self.data["probability_map02"].to_csv("probability_map02.csv", index=True) for molecular_type, data in self.data["molecular_type_densities"].items(): VoxelData( @@ -144,11 +149,11 @@ def save_input_data_to_file(self): voxel_dimensions=self.data["annotation"].voxel_dimensions, ).save_nrrd(f"{molecular_type}.nrrd") - def test_output(self): + def test_output(self, tmp_path): runner = CliRunner() - with runner.isolated_filesystem(): + with runner.isolated_filesystem(temp_dir=tmp_path) as td: self.save_input_data_to_file() - result = get_result_from_probablity_map_(runner) + result = get_result_from_probablity_map_(runner, td) assert result.exit_code == 0 BPbAC = VoxelData.load_nrrd(str(Path("output_dir") / "BP|bAC_EXC_densities.nrrd")) @@ -394,8 +399,6 @@ def test_create_from_composition( runner = CliRunner() with runner.isolated_filesystem(temp_dir=class_tmpdir): - from voxcell import RegionMap, VoxelData # type: ignore - result = runner.invoke( tested.create_from_composition, [ diff --git a/tests/densities/test_cell_density.py b/tests/densities/test_cell_density.py index b46ba0f..0d1e93d 100644 --- a/tests/densities/test_cell_density.py +++ 
b/tests/densities/test_cell_density.py @@ -9,8 +9,8 @@ from voxcell import RegionMap # type: ignore import atlas_densities.densities.cell_density as tested +from atlas_densities.densities import utils from atlas_densities.densities.cell_counts import cell_counts -from atlas_densities.densities.utils import get_group_ids, get_region_masks TESTS_PATH = Path(__file__).parent.parent HIERARCHY_PATH = Path(TESTS_PATH, "1.json") @@ -18,6 +18,8 @@ def test_compute_cell_density(): region_map = RegionMap.load_json(HIERARCHY_PATH) + group_ids_config = utils.load_json(utils.GROUP_IDS_PATH) + annotation = np.arange(8000).reshape(20, 20, 20) voxel_volume = 25**3 / 1e9 @@ -30,11 +32,11 @@ def test_compute_cell_density(): annotation, voxel_volume, nissl, - root_region_name="root", + group_ids_config=group_ids_config, ) # Each group has a prescribed cell count - group_ids = get_group_ids(region_map, root_region_name="root") - region_masks = get_region_masks(group_ids, annotation) + group_ids = utils.get_group_ids(region_map, config=group_ids_config) + region_masks = utils.get_region_masks(group_ids, annotation) for group, mask in region_masks.items(): npt.assert_array_almost_equal( np.sum(cell_density[mask]) * voxel_volume, cell_counts()[group] @@ -50,6 +52,7 @@ def test_compute_cell_density(): def test_cell_density_with_soma_correction(): region_map = RegionMap.load_json(HIERARCHY_PATH) + group_ids_config = utils.load_json(utils.GROUP_IDS_PATH) annotation = np.arange(8000).reshape(20, 20, 20) voxel_volume = 25**3 / 1e9 rng = np.random.default_rng(seed=42) @@ -61,11 +64,11 @@ def test_cell_density_with_soma_correction(): annotation, voxel_volume, nissl, - root_region_name="root", + group_ids_config=group_ids_config, ) # Each group has a prescribed cell count - group_ids = get_group_ids(region_map, root_region_name="root") - region_masks = get_region_masks(group_ids, annotation) + group_ids = utils.get_group_ids(region_map, config=group_ids_config) + region_masks = 
utils.get_region_masks(group_ids, annotation) for group, mask in region_masks.items(): npt.assert_array_almost_equal( np.sum(cell_density[mask]) * voxel_volume, cell_counts()[group] @@ -81,13 +84,14 @@ def test_cell_density_with_soma_correction(): def test_cell_density_options(): region_map = RegionMap.load_json(HIERARCHY_PATH) + group_ids_config = utils.load_json(utils.GROUP_IDS_PATH) annotation = np.arange(8000).reshape(20, 20, 20) voxel_volume = 25**3 / 1e9 rng = np.random.default_rng(seed=42) nissl = rng.random(annotation.shape) nissl[0][0][0] = 1e-5 # the outside voxels' intensity should be low - group_ids = get_group_ids(region_map, root_region_name="root") - region_masks = get_region_masks(group_ids, annotation) + group_ids = utils.get_group_ids(region_map, config=group_ids_config) + region_masks = utils.get_region_masks(group_ids, annotation) with patch( "atlas_densities.densities.utils.compensate_cell_overlap", @@ -98,7 +102,7 @@ def test_cell_density_options(): annotation, voxel_volume, nissl, - root_region_name="root", + group_ids_config=group_ids_config, ) expected_intensity = nissl.copy() tested.fix_purkinje_layer_intensity( diff --git a/tests/densities/test_excitatory_inhibitory_splitting.py b/tests/densities/test_excitatory_inhibitory_splitting.py index b212da5..c5f220f 100644 --- a/tests/densities/test_excitatory_inhibitory_splitting.py +++ b/tests/densities/test_excitatory_inhibitory_splitting.py @@ -133,7 +133,7 @@ def test_make_excitatory_density(): res = tested.make_excitatory_density(neuron_density, inhibitory_density) assert res.shape == neuron_density.shape - assert np.sum(res.raw) == np.product(neuron_density.shape) + assert np.sum(res.raw) == np.prod(neuron_density.shape) # this would create negative densities; make sure they are clipped to zero res = tested.make_excitatory_density(inhibitory_density, neuron_density) diff --git a/tests/densities/test_fitting.py b/tests/densities/test_fitting.py index 8b7e09d..6bf319d 100644 --- 
a/tests/densities/test_fitting.py +++ b/tests/densities/test_fitting.py @@ -8,15 +8,20 @@ import pandas as pd import pandas.testing as pdt import pytest -from voxcell import RegionMap # type: ignore +from voxcell import RegionMap import atlas_densities.densities.fitting as tested -from atlas_densities.densities.utils import get_hierarchy_info +from atlas_densities.densities import utils from atlas_densities.exceptions import AtlasDensitiesError, AtlasDensitiesWarning TESTS_PATH = Path(__file__).parent.parent +@pytest.fixture +def group_ids_config(): + return utils.load_json(utils.GROUP_IDS_PATH) + + def test_create_dataframe_from_known_densities(): average_densities = pd.DataFrame( { @@ -44,7 +49,8 @@ def test_create_dataframe_from_known_densities(): pdt.assert_frame_equal(actual, expected) -def get_hierarchy_info_(): +@pytest.fixture +def hierarchy_info(): hierarchy = { "id": 8, "name": "Basic cell groups and regions", @@ -98,12 +104,7 @@ def get_hierarchy_info_(): ], } - return get_hierarchy_info(RegionMap.from_dict(hierarchy)) - - -@pytest.fixture -def hierarchy_info(): - return get_hierarchy_info_() + return utils.get_hierarchy_info(RegionMap.from_dict(hierarchy)) def test_fill_in_homogenous_regions(hierarchy_info): @@ -278,14 +279,13 @@ def test_linear_fitting_xy(): assert not np.isinf(actual["standard_deviation"]) -def get_fitting_input_data_(): - h = get_hierarchy_info_() +def get_fitting_input_data_(hierarchy_info): intensities = pd.DataFrame( { "gad67": [7.0 / 4.0, 1.0, 2.0, 1.0, np.nan, np.nan, np.nan, np.nan], "pv": [10.0 / 7.0, 2.0, 1.0, 2.0, np.nan, np.nan, np.nan, np.nan], }, - index=h["brain_region"], + index=hierarchy_info["brain_region"], ) densities = pd.DataFrame( { @@ -294,7 +294,7 @@ def get_fitting_input_data_(): "pv+": [30.0 / 7.0, 6.0, 3.0, 6.0, np.nan, np.nan, np.nan, np.nan], "pv+_standard_deviation": [2.0, 1.0, 3.0, 4.0, np.nan, np.nan, np.nan, np.nan], }, - index=h["brain_region"], + index=hierarchy_info["brain_region"], ) data = { 
"groups": {"Whole": {"Lobule II", "Declive (VI)"}, "Central lobule": {"Lobule II"}}, @@ -305,8 +305,8 @@ def get_fitting_input_data_(): return data -def test_compute_fitting_coefficients(): - data = get_fitting_input_data_() +def test_compute_fitting_coefficients(hierarchy_info): + data = get_fitting_input_data_(hierarchy_info) actual = tested.compute_fitting_coefficients( data["groups"], data["intensities"], data["densities"] @@ -321,20 +321,20 @@ def test_compute_fitting_coefficients(): assert not np.isnan(actual[group_name]["pv+"]["standard_deviation"]) -def test_compute_fitting_coefficients_exceptions(): - data = get_fitting_input_data_() +def test_compute_fitting_coefficients_exceptions(hierarchy_info): + data = get_fitting_input_data_(hierarchy_info) data["densities"].drop(index=["Central lobule"], inplace=True) with pytest.raises(AtlasDensitiesError): tested.compute_fitting_coefficients(data["groups"], data["intensities"], data["densities"]) - data = get_fitting_input_data_() + data = get_fitting_input_data_(hierarchy_info) data["densities"].drop(columns=["pv+"], inplace=True) with pytest.raises(AtlasDensitiesError): tested.compute_fitting_coefficients(data["groups"], data["intensities"], data["densities"]) - data = get_fitting_input_data_() + data = get_fitting_input_data_(hierarchy_info) data["densities"].at["Lobule II", "pv+_standard_deviation"] = np.nan with pytest.raises(AssertionError): tested.compute_fitting_coefficients(data["groups"], data["intensities"], data["densities"]) @@ -499,7 +499,7 @@ def get_fitting_input_data(): } -def test_linear_fitting(): +def test_linear_fitting(group_ids_config): data = get_fitting_input_data() with warnings.catch_warnings(record=True) as warnings_: @@ -510,6 +510,7 @@ def test_linear_fitting(): data["gene_marker_volumes"], data["average_densities"], data["homogenous_regions"], + group_ids_config=group_ids_config, ) warnings_ = [w for w in warnings_ if isinstance(w.message, AtlasDensitiesWarning)] # Three warnings 
for recording NaN coefficients, three warnings for using them @@ -526,7 +527,7 @@ def test_linear_fitting(): assert not np.isinf(densities.at["Hippocampal formation", "pv+_standard_deviation"]) -def test_linear_fitting_exception_average_densities(): +def test_linear_fitting_exception_average_densities(group_ids_config): data = get_fitting_input_data() data["average_densities"].at["Thalamus", "measurement_type"] = "volume" @@ -538,6 +539,7 @@ def test_linear_fitting_exception_average_densities(): data["gene_marker_volumes"], data["average_densities"], data["homogenous_regions"], + group_ids_config=group_ids_config, ) data["average_densities"].at["Thalamus", "measurement_type"] = "cell density" @@ -551,10 +553,11 @@ def test_linear_fitting_exception_average_densities(): data["gene_marker_volumes"], data["average_densities"], data["homogenous_regions"], + group_ids_config=group_ids_config, ) -def test_linear_fitting_exception_homogenous_regions(): +def test_linear_fitting_exception_homogenous_regions(group_ids_config): data = get_fitting_input_data() data["homogenous_regions"].at["Thalamus", "cell_type"] = "Inhibitory" @@ -566,4 +569,12 @@ def test_linear_fitting_exception_homogenous_regions(): data["gene_marker_volumes"], data["average_densities"], data["homogenous_regions"], + group_ids_config=group_ids_config, ) + + +def test__get_group_names(group_ids_config): + region_map = RegionMap.load_json(Path(TESTS_PATH, "1.json")) + ret = tested._get_group_names(region_map, group_ids_config=group_ids_config) + for k, length in (("Isocortex group", 409), ("Cerebellum group", 88), ("Rest", 825)): + assert length == len(ret[k]) diff --git a/tests/densities/test_glia_densities.py b/tests/densities/test_glia_densities.py index ab7319b..2361986 100644 --- a/tests/densities/test_glia_densities.py +++ b/tests/densities/test_glia_densities.py @@ -5,6 +5,7 @@ from voxcell import RegionMap import atlas_densities.densities.glia_densities as tested +from atlas_densities.densities 
import utils TESTS_PATH = Path(__file__).parent.parent @@ -39,8 +40,16 @@ def test_compute_glia_cell_counts_per_voxel(): annotation = np.array([[[1, 10, 10, 2, 3]]], dtype=np.uint32) cell_density = np.array([[[0.1, 0.5, 0.75, 0.1, 1.0]]], dtype=float) glia_density = np.array([[[0.15, 0.1, 0.8, 0.2, 0.8]]], dtype=float) + group_ids_config = utils.load_json(utils.GROUP_IDS_PATH) + corrected_glia_density = tested.compute_glia_cell_counts_per_voxel( - 2, region_map, annotation, glia_density, cell_density, copy=False + 2, + region_map, + annotation, + glia_density, + cell_density, + copy=False, + group_ids_config=group_ids_config, ) expected = np.array([[[0.0, 0.25, 0.75, 0.0, 1.0]]], dtype=float) npt.assert_allclose(corrected_glia_density, expected, rtol=1e-2) @@ -51,7 +60,12 @@ def test_compute_glia_cell_counts_per_voxel(): glia_density = np.array([[[0.15, 0.1, 0.2, 0.8, 0.8]]], dtype=float) glia_density_copy = glia_density.copy() corrected_glia_density = tested.compute_glia_cell_counts_per_voxel( - 2, region_map, annotation, glia_density, cell_density + 2, + region_map, + annotation, + glia_density, + cell_density, + group_ids_config=group_ids_config, ) expected = np.array([[[0.0, 1.0 / 3.0, 2.0 / 3.0, 0.0, 1.0]]], dtype=float) npt.assert_allclose(corrected_glia_density, expected, rtol=1e-2) @@ -61,6 +75,7 @@ def test_compute_glia_cell_counts_per_voxel(): def test_glia_cell_counts_per_voxel_input(): shape = (20, 20, 20) annotation = np.arange(8000).reshape(shape) + group_ids_config = utils.load_json(utils.GROUP_IDS_PATH) rng = np.random.default_rng(seed=42) @@ -79,6 +94,7 @@ def test_glia_cell_counts_per_voxel_input(): glia_density, cell_density, copy=False, + group_ids_config=group_ids_config, ) assert np.all(output_glia_density <= cell_density) @@ -117,6 +133,7 @@ def get_glia_input_data(glia_cell_count): def test_compute_glia_densities(): glia_cell_count = 25000 data = get_glia_input_data(glia_cell_count) + group_ids_config = utils.load_json(utils.GROUP_IDS_PATH) 
output_glia_densities = tested.compute_glia_densities( data["region_map"], data["annotation"], @@ -126,7 +143,9 @@ def test_compute_glia_densities(): data["cell_density"], data["glia_proportions"], copy=True, + group_ids_config=group_ids_config, ) + assert output_glia_densities["glia"].dtype == np.float64 assert np.all(output_glia_densities["glia"] <= data["cell_density"]) diff --git a/tests/densities/test_inhibitory_neuron_density.py b/tests/densities/test_inhibitory_neuron_density.py index 05bd351..0e1dbbd 100644 --- a/tests/densities/test_inhibitory_neuron_density.py +++ b/tests/densities/test_inhibitory_neuron_density.py @@ -10,6 +10,7 @@ from voxcell import RegionMap import atlas_densities.densities.inhibitory_neuron_density as tested +from atlas_densities.densities import utils from atlas_densities.exceptions import AtlasDensitiesError TESTS_PATH = Path(__file__).parent.parent @@ -74,15 +75,15 @@ def test_compute_inhibitory_neuron_density(): data = get_intensity_data() with patch( - "atlas_densities.densities.inhibitory_neuron_density.get_group_ids", + "atlas_densities.densities.inhibitory_neuron_density.utils.get_group_ids", return_value=group_ids, ): with patch( - "atlas_densities.densities.inhibitory_neuron_density.get_region_masks", + "atlas_densities.densities.inhibitory_neuron_density.utils.get_region_masks", return_value=data["region_masks"], ): with patch( - "atlas_densities.densities.inhibitory_neuron_density.compensate_cell_overlap", + "atlas_densities.densities.inhibitory_neuron_density.utils.compensate_cell_overlap", side_effect=[data["gad1"], data["nrn1"]], ): # have any empty region map @@ -97,7 +98,7 @@ def test_compute_inhibitory_neuron_density(): data["nrn1"], neuron_density, inhibitory_data=data["inhibitory_data"], - root_region_name="root", + group_ids_config="NOT_USED_SINCE_MOCK", ) expected = np.array([[[0.0, 0.0, 4.0, 25.0 / 62.0, 99.0 / 62.0]]]) / voxel_volume npt.assert_almost_equal(inhibitory_neuron_density, expected) @@ -105,6 +106,7 
@@ def test_compute_inhibitory_neuron_density(): def test_compute_inhibitory_neuron_density_exception(): # At least one of `inhibitory_proportion` or `inhibitory_data` must be provided + group_ids_config = utils.load_json(utils.GROUP_IDS_PATH) with pytest.raises(AtlasDensitiesError) as error_: tested.compute_inhibitory_neuron_density( {}, @@ -113,7 +115,7 @@ def test_compute_inhibitory_neuron_density_exception(): np.array([[[1]]], dtype=float), np.array([[[1]]], dtype=float), np.array([[[1]]], dtype=float), - root_region_name="root", + group_ids_config=group_ids_config, ) assert "inhibitory_proportion" in str(error_) assert "inhibitory_data" in str(error_) @@ -153,6 +155,7 @@ def test_compute_inhibitory_density_large_input(): region_map = RegionMap.load_json(Path(TESTS_PATH, "1.json")) neuron_count = 30000 data = get_inhibitory_neuron_input_data(neuron_count) + group_ids_config = utils.load_json(utils.GROUP_IDS_PATH) inhibitory_neuron_density = tested.compute_inhibitory_neuron_density( region_map, @@ -162,7 +165,7 @@ def test_compute_inhibitory_density_large_input(): data["nrn1"], data["neuron_density"], inhibitory_data=data["inhibitory_data"], - root_region_name="root", + group_ids_config=group_ids_config, ) assert np.all(inhibitory_neuron_density <= data["neuron_density"]) diff --git a/tests/densities/test_mtype_densities_from_map.py b/tests/densities/test_mtype_densities_from_map.py index 8ca7cb7..ddd493c 100644 --- a/tests/densities/test_mtype_densities_from_map.py +++ b/tests/densities/test_mtype_densities_from_map.py @@ -8,7 +8,8 @@ import pytest from voxcell import RegionMap, VoxelData # type: ignore -import atlas_densities.densities.mtype_densities_from_map as tested +import atlas_densities.densities.mtype_densities_from_map.create as tested_create +import atlas_densities.densities.mtype_densities_from_map.utils as tested_utils from atlas_densities.exceptions import AtlasDensitiesError @@ -162,7 +163,7 @@ def setup_method(self, method): self.data = 
create_from_probability_map_data() def call_create(self, probability_maps_keys, synapse_class): - tested.create.create_from_probability_map( + tested_create.create_from_probability_map( self.data["annotation"], self.data["region_map"], self.data["molecular_type_densities"], @@ -197,7 +198,7 @@ def test_output_values(self, probability_maps_keys, synapse_class, metypes): with open(str(Path(tmpdir) / "metadata.json"), "r") as file: metadata = json.load(file) for metype in metypes: - mtype, etype = metype.split(tested.create.SEPARATOR) + mtype, etype = metype.split(tested_create.SEPARATOR) assert mtype in metadata["density_files"] assert etype in metadata["density_files"][mtype] assert synapse_class == metadata["synapse_class"] @@ -208,7 +209,7 @@ def test_empty_exception(self): self.tmpdir = tempfile.TemporaryDirectory() self.data = create_from_probability_map_data() with pytest.raises(AtlasDensitiesError): - tested.create.create_from_probability_map( + tested_create.create_from_probability_map( self.data["annotation"], self.data["region_map"], self.data["molecular_type_densities"], @@ -228,7 +229,7 @@ def teardown_method(self, method): self.tmpdir.cleanup() def create_densities(self): - tested.create.create_from_probability_map( + tested_create.create_from_probability_map( self.data["annotation"], self.data["region_map"], self.data["molecular_type_densities"], @@ -283,7 +284,7 @@ def test_index_intersection_success(self): }, ), ] - tested.utils._merge_probability_maps(probability_maps) + tested_utils._merge_probability_maps(probability_maps) def test_index_intersection_fail(self): probability_maps = [ @@ -311,7 +312,7 @@ def test_index_intersection_fail(self): ), ] with pytest.raises(ValueError): - tested.utils._merge_probability_maps(probability_maps) + tested_utils._merge_probability_maps(probability_maps) def test_merge(self): probability_maps = [ @@ -395,7 +396,7 @@ def test_merge(self): ), ] - result = tested.utils._merge_probability_maps(probability_maps) + 
result = tested_utils._merge_probability_maps(probability_maps) expected = self.create_probability_map( { diff --git a/tests/densities/test_utils.py b/tests/densities/test_utils.py index 7be242d..51b9ba8 100644 --- a/tests/densities/test_utils.py +++ b/tests/densities/test_utils.py @@ -101,7 +101,8 @@ def test_compensate_cell_overlap(): def test_get_group_ids(): region_map = RegionMap.load_json(Path(TESTS_PATH, "1.json")) - group_ids = tested.get_group_ids(region_map, root_region_name="root") + group_ids_config = tested.load_json(tested.GROUP_IDS_PATH) + group_ids = tested.get_group_ids(region_map, config=group_ids_config) for ids in group_ids.values(): assert len(ids) > 0 assert len(group_ids["Molecular layer"] & group_ids["Purkinje layer"]) == 0 @@ -113,11 +114,22 @@ def test_get_group_ids(): assert len(group_ids["Isocortex group"] & group_ids["Rest"]) == 0 assert len(group_ids["Cerebellum group"] & group_ids["Rest"]) == 0 assert group_ids["Cerebellar cortex"].issubset(group_ids["Cerebellum group"]) + for k, length in ( + ("Fiber tracts group", 227), + ("Isocortex group", 409), + ("Cerebellar cortex", 81), + ("Cerebellum group", 88), + ("Molecular layer", 19), + ("Purkinje layer", 19), + ("Rest", 830), + ): + assert length == len(group_ids[k]) def test_get_region_masks(): region_map = RegionMap.load_json(Path(TESTS_PATH, "1.json")) - group_ids = tested.get_group_ids(region_map, root_region_name="root") + group_ids_config = tested.load_json(tested.GROUP_IDS_PATH) + group_ids = tested.get_group_ids(region_map, config=group_ids_config) annotation_raw = np.arange(27000).reshape(30, 30, 30) region_masks = tested.get_region_masks(group_ids, annotation_raw) brain_mask = np.logical_or(