Commit 23849806 authored by Sean Fitzgibbon's avatar Sean Fitzgibbon

Added initial support for chart registration

parent 153da78c
#!/usr/bin/env python
import os
import os.path as op
import json
from scipy.ndimage.interpolation import affine_transform
from slider.external import neurolucida
import glymur
from skimage.measure import regionprops, label
from skimage.color import rgb2gray
from skimage import filters, transform
from matplotlib import patches
import numpy as np
import matplotlib.pyplot as plt
def register_chart_to_slide(chart, slide, out, rlevel=3, boundary_key='outline'):
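    """Register a Neurolucida chart (.ASC) to a JP2 slide image.
    Estimates an affine chart-to-image transform (bounding-box initialisation
    followed by boundary-based refinement) and writes the transform, the
    transformed contours, and the transformed cells to the output directory.
    """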
print(chart)
print(slide)
print(out)
# load chart
contour, cells = neurolucida.read(chart)
# load slide
jp2 = glymur.Jp2k(slide)
img = jp2.read(rlevel=rlevel)
    # initial scaling based on bounding boxes
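    # flip the y-axis of the chart coordinates (the chart and image y-axes appear to run in opposite directions)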
edge_crds = contour[boundary_key][:, :2] * [1, -1]
init = _init_scale(img, edge_crds, verbose=True, plots=False)
print(init)
print(
f'Rotation: {init.rotation}, Translation: {init.translation}, Scale: {init.scale}, Shear: {init.shear}')
# calculate normal line to boundary points
# TODO: move to function
xfm_edge_coords = _apply_xfm(init, edge_crds)
init_nrmls = normal(xfm_edge_coords, plot=False, distance=1, verbose=True)
edge_x, edge_y = np.round(xfm_edge_coords.T).astype(int)
nrml_x, nrml_y = init_nrmls.T
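    # sample offsets along each normal: 40 positions from -20 to +19 pixels about each edge point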
line_smpl = np.arange(-20, 20)
line_x = np.round(
edge_x[:, np.newaxis] + nrml_x[:, np.newaxis] * line_smpl
).astype(int)
line_y = np.round(
edge_y[:, np.newaxis] + nrml_y[:, np.newaxis] * line_smpl
).astype(int)
# --- refine edge coord by sampling along normal and looking for big change
# find threshold for background
image = rgb2gray(img)
threshold_value = filters.threshold_otsu(image)
print(f'threshold={threshold_value}')
# binarise foreground
# NOTE: this assumes a bright background!
labeled_foreground = (image < threshold_value).astype(int)
# calculate connected components
cc = label(labeled_foreground, background=0)
# get region properties for each cc and select the largest cc (in terms of area)
p = regionprops(cc, image)
ridx = np.argmax([p0.area for p0 in p])
p = p[ridx]
cc0 = (cc == p.label)
# plt.imshow(cc0)
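    # sample the largest-component mask along each normal line; np.diff flags where the
    # mask changes and argmax picks the first such change as the refined edge position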
line_int = cc0[line_y, line_x]
min_idx = np.argmax(np.diff(line_int, axis=-1), axis=-1)
refined_edge_coords = np.stack([
line_x[np.arange(min_idx.size), min_idx],
line_y[np.arange(min_idx.size), min_idx],
], axis=-1)
    # estimate optimised affine transform
opt = transform.AffineTransform()
opt.estimate(edge_crds, refined_edge_coords)
print(opt)
print(
f'Rotation:\t{opt.rotation}\nTranslation:\t{opt.translation}\nScale:\t\t{list(opt.scale)}\nShear:\t\t{opt.shear}')
# save opt transform
if not op.exists(out):
os.makedirs(out)
np.savetxt(f'{out}/chart-to-image.xfm', opt.params)
# apply opt-xfm to contours and cells and save
contour_xfm = {k: _apply_xfm(
opt, v[:, :2] * [1, -1]).tolist() for k, v in contour.items()}
with open(f'{out}/contour.json', 'w') as fp:
json.dump(contour_xfm, fp, indent=4)
if cells.size > 0:
        cells_xfm = _apply_xfm(opt, cells[:, :2] * [1, -1]).tolist()
with open(f'{out}/cells.json', 'w') as fp:
json.dump(cells_xfm, fp, indent=4)
def _img_props(img, verbose=False, plots=False):
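    """Threshold the image (Otsu), label connected components, and return the
    bounding box, centroid, and shape of the largest foreground component.
    """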
# convert to grayscale
image = rgb2gray(img)
# find threshold for background
threshold_value = filters.threshold_otsu(image)
if verbose:
print(f'threshold={threshold_value}')
# binarise foreground
# NOTE: this assumes a bright background!
labeled_foreground = (image < threshold_value).astype(int)
# calculate connected components
cc = label(labeled_foreground, background=0)
# get region properties for each cc and select the largest cc (in terms of area)
p = regionprops(cc, image)
ridx = np.argmax([p0.area for p0 in p])
p = p[ridx]
bbox = p.bbox
centroid = (
(bbox[0] + bbox[2])/2,
(bbox[1] + bbox[3])/2,
)
# TODO: make plots work again
if plots:
plt.imshow(cc == p.label)
plt.colorbar()
rect = patches.Rectangle(
(bbox[1], bbox[0]),
bbox[3]-bbox[1],
bbox[2]-bbox[0],
linewidth=1,
edgecolor='r',
facecolor='none'
)
plt.gca().add_patch(rect)
plt.plot(centroid[1], centroid[0], 'ro')
plt.show()
return {'bbox': bbox, 'bbox_centroid': centroid, 'shape': (bbox[2]-bbox[0], bbox[3]-bbox[1])}
def _pnt_props(pnts, verbose=False, plots=False):
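    """Return the bounding box, centroid, and shape of a 2D point set."""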
x = pnts[:, 0]
y = pnts[:, 1]
bbox = (
np.min(y),
np.min(x),
np.max(y),
np.max(x)
)
centroid = (
(bbox[0] + bbox[2])/2,
(bbox[1] + bbox[3])/2,
)
if verbose:
print(f'bbox={bbox}')
print(f'centroid={centroid}')
if plots:
plt.plot(x, y, 'b.')
plt.axis('equal')
plt.gca().invert_yaxis()
rect = patches.Rectangle(
(bbox[1], bbox[0]),
bbox[3]-bbox[1],
bbox[2]-bbox[0],
linewidth=1,
edgecolor='r',
facecolor='none'
)
plt.gca().add_patch(rect)
plt.plot(centroid[1], centroid[0], 'ro')
return {'bbox': bbox, 'bbox_centroid': centroid, 'shape': (bbox[2]-bbox[0], bbox[3]-bbox[1])}
def _init_scale(img, crd, verbose=False, plots=False):
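    """Estimate an initial affine transform by matching the bounding-box
    corners of the chart points to those of the image foreground.
    """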
img_p = _img_props(img)
crd_p = _pnt_props(crd)
if verbose:
print(f'image props = {img_p}')
print(f'chart props = {crd_p}')
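    # pick the four bounding-box corners as (x, y) pairs from the
    # (min_row, min_col, max_row, max_col) boxes of chart and image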
idx = np.array([[1, 2], [1, 0], [3, 2], [3, 0]])
src = np.array(crd_p['bbox'])[idx]
dest = np.array(img_p['bbox'])[idx]
a = transform.AffineTransform()
a.estimate(src, dest)
return a
def _apply_xfm(xfm, pnts):
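    """Apply an affine transform to an (N, 2) array of points using
    homogeneous coordinates.
    """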
return (xfm.params @ np.concatenate((pnts, np.ones((pnts.shape[0], 1))), axis=-1).T).T[:, :2]
def normal(pnts, distance=100, plot=False, verbose=False):
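    """Estimate unit normals to a closed contour from the difference of each
    point's neighbours. The `distance`, `plot`, and `verbose` arguments are
    currently unused.
    """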
d_pnts = np.roll(pnts, 1, axis=0) - np.roll(pnts, -1, axis=0)
normal = np.stack([d_pnts[:, 1], -d_pnts[:, 0]], axis=-1)
normal = normal / np.linalg.norm(normal, axis=-1, keepdims=True)
return normal
# Copyright (c) 2016, Ecole Polytechnique Federale de Lausanne, Blue Brain Project
# All rights reserved.
#
# This file is part of NeuroM <https://github.com/BlueBrain/NeuroM>
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the names of
# its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Adapted from: https://github.com/BlueBrain/NeuroM/blob/v1.0.0/neurom/io/neurolucida.py
"""Reader for Neurolucida .ASC files, v3.
reversed engineered from looking at output from Neuroludica
"""
import warnings
from io import open
import numpy as np
# WANTED_SECTIONS = {
# 'Asterix': 0,
# }
UNWANTED_SECTION_NAMES = [
    'Color', 'Closed', 'Stereology', 'StereologyPropertyExtension', 'MBFObjectType',
    'FillDensity', 'GUID', 'ImageCoords', 'Marker', 'Name', 'Resolution', 'Set',
    'Description', 'Cross', 'Dot', 'DoubleCircle', 'FilledCircle', 'FilledDownTriangle',
    'FilledSquare', 'FilledStar', 'FilledUpTriangle', 'Flower', 'Flower2', 'OpenCircle',
    'OpenDiamond', 'OpenDownTriangle', 'OpenSquare', 'OpenStar', 'OpenUpTriangle',
    'Plus', 'ShadedStar', 'Splat', 'TriStar',
]
UNWANTED_SECTIONS = {name: True for name in UNWANTED_SECTION_NAMES}
def _match_section(section, match):
    '''Check whether the `type` of the section is in the `match` dictionary.
    Works around the unknown ordering of s-expressions in each section.
    For instance, the `type` is the 3rd element for CellBodies:
("CellBody"
(Color Yellow)
(CellBody)
(Set "cell10")
)
Returns:
        value associated with match[section_type], or None if there is no match
'''
# TODO: rewrite this so it is more clear, and handles sets & dictionaries for matching
for i in range(5):
if i >= len(section):
return None
if isinstance(section[i], str) and section[i] in match:
return match[section[i]]
return None
def _get_tokens(morph_fd):
'''split a file-like into tokens: split on whitespace
Note: this also strips newlines and comments
'''
for line in morph_fd:
line = line.rstrip() # remove \r\n
line = line.split(';', 1)[0] # strip comments
squash_token = [] # quoted strings get squashed into one token
if '<(' in line: # skip spines, which exist on a single line
assert ')>' in line, 'Missing end of spine'
continue
for token in line.replace('(', ' ( ').replace(')', ' ) ').split():
if squash_token:
squash_token.append(token)
if token.endswith('"'):
token = ' '.join(squash_token)
squash_token = []
yield token
elif token.startswith('"') and not token.endswith('"'):
squash_token.append(token)
else:
yield token
def _parse_section(token_iter):
'''take a stream of tokens, and create the tree structure that is defined
by the s-expressions
'''
sexp = []
for token in token_iter:
if token == '(':
new_sexp = _parse_section(token_iter)
if not _match_section(new_sexp, UNWANTED_SECTIONS):
sexp.append(new_sexp)
elif token == ')':
return sexp
else:
sexp.append(token)
return sexp
def _parse_sections(morph_fd):
'''returns array of all the sections that exist
The format is nested lists that correspond to the s-expressions
'''
sections = []
token_iter = _get_tokens(morph_fd)
for token in token_iter:
if token == '(': # find top-level sections
section = _parse_section(token_iter)
if not _match_section(section, UNWANTED_SECTIONS):
sections.append(section)
return sections
def _flatten_subsection(subsection, _type, offset, parent):
'''Flatten a subsection from its nested version
Args:
subsection: Nested subsection as produced by _parse_section, except one level in
_type: type of section, ie: AXON, etc
        parent: first element has this as its parent
offset: position in the final array of the first element
Returns:
Generator of values corresponding to [X, Y, Z, R, TYPE, ID, PARENT_ID]
'''
for row in subsection:
# TODO: Figure out what these correspond to in neurolucida
if row in ('Low', 'Generated', 'High', ):
continue
elif isinstance(row[0], str):
if len(row) in (4, 5, ):
if len(row) == 5:
assert row[4][0] == 'S', \
'Only known usage of a fifth member is Sn, found: %s' % row[4][0]
yield (float(row[0]), float(row[1]), float(row[2]), float(row[3]) / 2.,
_type, offset, parent)
parent = offset
offset += 1
elif isinstance(row[0], list):
split_parent = offset - 1
start_offset = 0
slices = []
start = 0
for i, value in enumerate(row):
if value == '|':
slices.append(slice(start + start_offset, i))
start = i + 1
slices.append(slice(start + start_offset, len(row)))
for split_slice in slices:
for _row in _flatten_subsection(row[split_slice], _type, offset,
split_parent):
offset += 1
yield _row
def _to_data(sections):
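    '''Split parsed sections into cell markers ('Asterisk' sections) and a
    dict of contours keyed by their (unquoted) section name.
    '''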
cells = []
contour = {}
xyz = []
for section in sections:
        if section[0] == 'Asterisk':  # cell marker
            cells.append(np.asarray(section[2], dtype=np.float64))
        else:
            contour[section[0].replace('"', '')] = np.asarray(section[1:], dtype=np.float64)
cells = np.asarray(cells)
return contour, cells
def read(file):
    '''Parse a Neurolucida .ASC chart file.
    Returns a (contour, cells) tuple: `contour` maps region names to coordinate
    arrays and `cells` is an array of cell marker coordinates.
    '''
with open(file, encoding='utf-8', errors='replace') as fd:
sections = _parse_sections(fd)
contour, cells = _to_data(sections)
return contour, cells
@@ -3,6 +3,7 @@ import argparse
import sys
from slider.slide_reg import register_slide_to_slide
from slider.chart_reg import register_chart_to_slide
def add_slide_cli(subparsers):
"""
@@ -19,7 +20,7 @@ def add_slide_cli(subparsers):
parser.add_argument("fixed", metavar="<fixed>",
help="Fixed slide", type=str)
parser.add_argument("--out", metavar="<dir>",
help="Output directory", default=None, type=str,
help="Output directory", default='./slider.reg', type=str,
required=False)
parser.add_argument("--config", metavar="<default.json>",
help="configuration file", default='default.json', type=str,
@@ -39,10 +40,16 @@ def add_chart_cli(subparsers):
description='Register charting to 2D slide',
formatter_class=lambda prog: argparse.HelpFormatter(prog, max_help_position=55, width=100)
)
parser.add_argument("chart", metavar="<chart>",
help="Chart file", type=str)
parser.add_argument("slide", metavar="<slide>",
help="Fixed slide", type=str)
parser.add_argument("--out", metavar="<dir>",
help="Output directory", default='./slider.reg', type=str,
required=False)
parser.set_defaults(method='chart')
if __name__ == "__main__":
""" Main program code. """
......@@ -64,6 +71,6 @@ if __name__ == "__main__":
if method == 'slide':
register_slide_to_slide(**args)
elif method == 'chart':
pass
register_chart_to_slide(**args)
else:
raise RuntimeError(f'Unknown method: {method}')