Skip to content

Commit 34bfdf3

Browse files
authored
Merge pull request #21 from MetaCell/feature/update-default-layout
Feat: new examples
2 parents d2e1642 + 78dd888 commit 34bfdf3

19 files changed

Lines changed: 688 additions & 0 deletions

new_examples/README.md

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,7 @@
# New examples

These examples are for working with the `cryoet_data_portal_neuroglancer` package.

## TODO

Convert the hard-coded constants in the examples to command-line arguments.

new_examples/compare_decimated.py

Lines changed: 53 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,53 @@
1+
"""Compare mesh decimation levels by exporting each LOD and sharding them all.

Loads a GLB mesh, produces one decimated copy per level of detail (LOD),
exports each LOD as its own GLB, and packs all LODs into a single
multi-label sharded multiresolution mesh so they can be inspected
side by side in neuroglancer.
"""
import logging
from pathlib import Path

import trimesh

from cryoet_data_portal_neuroglancer.precompute.instance_mesh import (
    scale_and_decimate_mesh,
)
from cryoet_data_portal_neuroglancer.io import load_glb_file
from cryoet_data_portal_neuroglancer.precompute.mesh import (
    generate_standalone_sharded_multiresolution_mesh,
    generate_multilabel_sharded_multiresolution_mesh,
)

# TODO: convert these constants to command-line arguments (see README).
# NOTE(review): JSON_PATH is never read in this example — confirm whether it
# can be removed or is kept for symmetry with the other examples.
JSON_PATH = Path(
    r"/media/starfish/LargeSSD/data/cryoET/data/Annotations-test-run/100-proton_transporting_atp_synthase_complex-1.0.json"
)

MESH_PATH = Path(
    r"/media/starfish/LargeSSD/data/cryoET/data/meshes-oriented/atpase.glb"
)

OUTPUT_PATH = Path(r"/media/starfish/LargeSSD/data/cryoET/data/converted-01122021/")

logging.basicConfig(level=logging.INFO, force=True)
logger = logging.getLogger(__name__)

# Load the GLB scene and produce one decimated mesh per LOD level.
scene = load_glb_file(MESH_PATH)
scaled, decimated_meshes = scale_and_decimate_mesh(scene, 10, 4.5)

# Export each LOD as its own GLB so the decimation levels can be compared.
for i, mesh in enumerate(decimated_meshes):
    # Use the configured logging instead of print for consistency.
    logger.info("LOD %d: %d faces", i, len(mesh.faces))
    mesh.export(OUTPUT_PATH / f"mesh_lod{i}.glb")

# Treat each LOD as a separate label (labels start at 1) so that every LOD
# ends up in one multi-label sharded mesh for side-by-side inspection.
label_to_scene_dict = {k + 1: trimesh.Scene(v) for k, v in enumerate(decimated_meshes)}
generate_multilabel_sharded_multiresolution_mesh(
    label_to_scene_dict, OUTPUT_PATH / "mesh_lod", 0
)

# The commented-out snippets below show alternative ways of comparing LODs:
# translating each LOD into one scene, or sharding only the min/max LOD
# (this is why generate_standalone_sharded_multiresolution_mesh is imported).
# new_scene = trimesh.Scene()
# for i, mesh in enumerate(decimated_meshes):
#     new_scene.add_geometry(mesh.copy().apply_translation([i * 20, 0, 0]))
# new_scene.export(OUTPUT_PATH / "meshoutput.glb")
# new_scene.show()

# min_lod_mesh = decimated_meshes[0]
# max_lod_mesh = decimated_meshes[-1]
# print(len(decimated_meshes))

# generate_standalone_sharded_multiresolution_mesh(
#     trimesh.Scene(min_lod_mesh), OUTPUT_PATH / "min_lod_mesh", 0
# )
# generate_standalone_sharded_multiresolution_mesh(
#     trimesh.Scene(max_lod_mesh), OUTPUT_PATH / "max_lod_mesh", 0
# )

new_examples/convert_annotation.py

Lines changed: 48 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,48 @@
1+
"""Convert point annotations to neuroglancer precomputed format.

Reads a metadata json plus sibling ndjson annotations, encodes them as a
precomputed point annotation layer, and writes the neuroglancer layer
state to point_layer.json.
"""
import json
from pathlib import Path
from typing import Any

import ndjson

from cryoet_data_portal_neuroglancer.precompute.points import (
    encode_annotation,
)
from cryoet_data_portal_neuroglancer.state_generator import (
    generate_point_layer,
    combine_json_layers,
)


def load_data(
    metadata_path: Path, annotations_path: Path
) -> tuple[dict[str, Any], list[dict[str, Any]]]:
    """Load in the metadata (json) and annotations (ndjson) files."""
    with open(metadata_path, mode="r") as f:
        metadata = json.load(f)
    with open(annotations_path, mode="r") as f:
        annotations = ndjson.load(f)
    return metadata, annotations


# TODO: convert these constants to command-line arguments (see README).
JSON_PATH = Path(
    r"/media/starfish/LargeSSD/data/cryoET/data/10000_TS_26/Annotations/sara_goetz-fatty_acid_synthase-1.0.json"
)

OUTPUT_PATH = Path(r"/media/starfish/LargeSSD/data/cryoET/data/new_fatty_acid_synthase")

# The ndjson annotations sit next to the metadata json with the same stem.
metadata, data = load_data(JSON_PATH, JSON_PATH.with_suffix(".ndjson"))

# Encode the point annotations; shard_by_id=None means unsharded output.
encode_annotation(data, metadata, OUTPUT_PATH, 1.0, shard_by_id=None)

SOURCE = "http://127.0.0.1:9000/new_fatty_acid_synthase/"

output = generate_point_layer(
    source=SOURCE,
    name="Test Points",
    color="#FF00FF",
    point_size_multiplier=1.2,
    is_instance_segmentation=True,
)

layer_json = combine_json_layers([output], 1.0)

# Use a context manager so the output file is closed promptly (the original
# passed a bare open() directly to json.dump and leaked the handle).
with open("point_layer.json", "w") as f:
    json.dump(layer_json, f, indent=2)

new_examples/convert_mesh_seg.py

Lines changed: 42 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,42 @@
1+
"""Convert a zarr segmentation into neuroglancer precomputed format with a mesh.

Encodes the segmentation (including a multiresolution mesh), writes the
neuroglancer layer state to segmentation_with_mesh.json, then serves the
result locally with cloudvolume.
"""
import json
import logging
from pathlib import Path

from cryoet_data_portal_neuroglancer.precompute.segmentation_mask import (
    encode_segmentation,
)
from cryoet_data_portal_neuroglancer.state_generator import (
    generate_segmentation_mask_layer,
)

# Set up logging to info level so progress from the encoder is visible.
logging.basicConfig(level=logging.INFO, force=True)

# TODO: convert these constants to command-line arguments (see README).
INPUT_FILENAME = r"/media/starfish/LargeSSD/data/cryoET/data/00004_MT_ground_truth_zarr"
OUTPUT_PATH = Path(r"/media/starfish/LargeSSD/data/cryoET/data/new_MT_converted_mesh/")
SOURCE = "http://localhost:1337"

# Convert the segmentation to precomputed format, including a
# multiresolution mesh (max_lod=2 gives three levels of detail).
encode_segmentation(
    filename=INPUT_FILENAME,
    output_path=OUTPUT_PATH,
    resolution=(1.048, 1.048, 1.048),
    max_lod=2,
    include_mesh=True,
    delete_existing=True,
    fast_bounding_box=True,
    max_simplification_error_in_voxels=4,
    min_mesh_chunk_dim=8,
)

layer_json = generate_segmentation_mask_layer(
    source=SOURCE,
    name="Test segmentation with mesh",
    scale=1.048,
    color="#FF0000",
)

# Use a context manager so the output file is closed promptly (the original
# passed a bare open() directly to json.dump and leaked the handle).
with open("segmentation_with_mesh.json", "w") as f:
    json.dump(layer_json, f, indent=2)

# To serve the segmentation, can run something like:
from cloudvolume import CloudVolume

volume = CloudVolume(f"file://{OUTPUT_PATH.resolve()}")
volume.viewer(port=1337)
Lines changed: 74 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,74 @@
1+
"""Convert oriented point annotations plus an instance mesh for neuroglancer.

Encodes the oriented point annotations (sharded by id), places a copy of
the decimated GLB mesh at each oriented point per LOD, shards the result,
and writes the combined neuroglancer layer state to
oriented_point_layer.json.
"""
import json
import logging
from pathlib import Path

from cryoet_data_portal_neuroglancer.io import load_glb_file, load_oriented_point_data
from cryoet_data_portal_neuroglancer.precompute.instance_mesh import (
    encode_oriented_mesh,
)
from cryoet_data_portal_neuroglancer.precompute.mesh import (
    generate_mesh_from_lods,
)
from cryoet_data_portal_neuroglancer.precompute.points import encode_annotation
from cryoet_data_portal_neuroglancer.state_generator import (
    generate_oriented_point_layer,
    combine_json_layers,
    generate_segmentation_mask_layer,
)

# TODO: convert these constants to command-line arguments (see README).
JSON_PATH = Path(
    r"/media/starfish/LargeSSD/data/cryoET/data/Annotations-test-run/100-proton_transporting_atp_synthase_complex-1.0.json"
)

MESH_PATH = Path(
    r"/media/starfish/LargeSSD/data/cryoET/data/meshes-oriented/atpase.glb"
)

OUTPUT_PATH = Path(r"/media/starfish/LargeSSD/data/cryoET/data/converted-01122021/")

# The oriented-point ndjson shares the json's stem with an _orientedpoint suffix.
metadata, data = load_oriented_point_data(
    JSON_PATH, JSON_PATH.with_name(JSON_PATH.stem + "_orientedpoint.ndjson")
)

# NOTE(review): this call uses scale 0.784 * 1e-9 while combine_json_layers
# below uses 0.784 with units="nm" — confirm both describe the same
# physical resolution.
encode_annotation(
    data, metadata, OUTPUT_PATH, 0.784 * 1e-9, shard_by_id=(0, 10), is_oriented=True
)
logging.basicConfig(level=logging.DEBUG, force=True)
scene = load_glb_file(MESH_PATH)

# Place a (decimated) copy of the mesh at every oriented point, per LOD.
copy_pasted_lods = encode_oriented_mesh(
    scene,
    data,
    max_lod=2,
    max_faces_for_first_lod=10e6,
    decimation_aggressiveness=5.5,
)
generate_mesh_from_lods(
    copy_pasted_lods,
    OUTPUT_PATH / "meshoutput",
    min_mesh_chunk_dim=2,
    string_label="mesh",
)

SOURCE = "http://127.0.0.1:9000/converted-01122021/"

output = generate_oriented_point_layer(
    source=SOURCE,
    name="Test Oriented Points",
    color="#FFFFFF",
    point_size_multiplier=0.5,
    line_width=2.0,
    is_visible=True,
    is_instance_segmentation=False,
    scale=0.784 * 1e-9,
)

# Second layer: the instance mesh served separately (e.g. via cloudvolume).
output2 = generate_segmentation_mask_layer(
    source="http://localhost:1337",
    name="Test Mesh",
    color="#FF0000",
    scale=0.784 * 1e-9,
)

layer_json = combine_json_layers([output, output2], 0.784, units="nm")

# Use a context manager so the output file is closed promptly (the original
# passed a bare open() directly to json.dump and leaked the handle).
with open("oriented_point_layer.json", "w") as f:
    json.dump(layer_json, f, indent=2)
Lines changed: 52 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,52 @@
1+
"""Convert oriented point annotations to neuroglancer precomputed format.

Reads a metadata json plus its _orientedpoint ndjson sibling, encodes the
oriented points (sharded by id), and writes the neuroglancer layer state
to oriented_point_layer.json.
"""
import json
from pathlib import Path
from typing import Any

import ndjson

from cryoet_data_portal_neuroglancer.precompute.points import (
    encode_annotation,
)
from cryoet_data_portal_neuroglancer.state_generator import (
    generate_oriented_point_layer,
    combine_json_layers,
)


def load_data(
    metadata_path: Path, annotations_path: Path
) -> tuple[dict[str, Any], list[dict[str, Any]]]:
    """Load in the metadata (json) and annotations (ndjson) files."""
    with open(metadata_path, mode="r") as f:
        metadata = json.load(f)
    with open(annotations_path, mode="r") as f:
        annotations = ndjson.load(f)
    return metadata, annotations


# TODO: convert these constants to command-line arguments (see README).
JSON_PATH = Path(
    r"/media/starfish/LargeSSD/data/cryoET/data/Annotations-test-run/100-proton_transporting_atp_synthase_complex-1.0.json"
)

OUTPUT_PATH = Path(r"/media/starfish/LargeSSD/data/cryoET/data/converted-01122021/")

# The oriented-point ndjson shares the json's stem with an _orientedpoint suffix.
metadata, data = load_data(
    JSON_PATH, JSON_PATH.with_name(JSON_PATH.stem + "_orientedpoint.ndjson")
)

# Encode the oriented annotations, sharded by id.
encode_annotation(
    data, metadata, OUTPUT_PATH, 0.784, shard_by_id=(0, 10), is_oriented=True
)

SOURCE = "http://127.0.0.1:9000/converted-01122021/"

output = generate_oriented_point_layer(
    source=SOURCE,
    name="Test Oriented Points",
    color="#FF0000",
    point_size_multiplier=0.5,
    line_width=2.0,
    is_visible=True,
    is_instance_segmentation=False,
)

layer_json = combine_json_layers([output], 0.784)

# Use a context manager so the output file is closed promptly (the original
# passed a bare open() directly to json.dump and leaked the handle).
with open("oriented_point_layer.json", "w") as f:
    json.dump(layer_json, f, indent=2)
Lines changed: 53 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,53 @@
1+
"""Convert oriented point annotations (dataset 10155) for neuroglancer.

Reads a metadata json plus its _orientedpoint ndjson sibling, encodes the
oriented points (sharded by id) at a 1.6145 voxel size, and writes the
neuroglancer layer state to oriented_point_layer.json.
"""
import json
from pathlib import Path
from typing import Any

import ndjson

from cryoet_data_portal_neuroglancer.precompute.points import (
    encode_annotation,
)
from cryoet_data_portal_neuroglancer.state_generator import (
    generate_oriented_point_layer,
    combine_json_layers,
)


def load_data(
    metadata_path: Path, annotations_path: Path
) -> tuple[dict[str, Any], list[dict[str, Any]]]:
    """Load in the metadata (json) and annotations (ndjson) files."""
    with open(metadata_path, mode="r") as f:
        metadata = json.load(f)
    with open(annotations_path, mode="r") as f:
        annotations = ndjson.load(f)
    return metadata, annotations


# TODO: convert these constants to command-line arguments (see README).
JSON_PATH = Path(
    r"/media/starfish/LargeSSD/data/cryoET/data/Annotations-10155/103-type_iv_pilus-1.0.json"
)

OUTPUT_PATH = Path(r"/media/starfish/LargeSSD/data/cryoET/data/converted-10155/")

# The oriented-point ndjson shares the json's stem with an _orientedpoint suffix.
metadata, data = load_data(
    JSON_PATH, JSON_PATH.with_name(JSON_PATH.stem + "_orientedpoint.ndjson")
)

# Encode the oriented annotations, sharded by id.
encode_annotation(
    data, metadata, OUTPUT_PATH, 1.6145, shard_by_id=(0, 10), is_oriented=True
)

SOURCE = "http://127.0.0.1:9000/converted-10155/"

output = generate_oriented_point_layer(
    source=SOURCE,
    name="Test Oriented Points",
    color="#FFFFFF",
    point_size_multiplier=0.5,
    line_width=2.0,
    is_visible=True,
    is_instance_segmentation=False,
    scale=(1.6145e-9, 1.6145e-9, 1.6145e-9),
)

layer_json = combine_json_layers([output], 1.6145, units="nm")

# Use a context manager so the output file is closed promptly (the original
# passed a bare open() directly to json.dump and leaked the handle).
with open("oriented_point_layer.json", "w") as f:
    json.dump(layer_json, f, indent=2)
Lines changed: 54 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,54 @@
1+
"""Convert oriented point annotations (ribosome example) for neuroglancer.

Reads a metadata json plus a sibling ndjson (same stem), encodes the
oriented points (sharded by id), prints the generated layer for
inspection, and writes the layer state to oriented_point_layer.json.
"""
import json
from pathlib import Path
from typing import Any

import ndjson

from cryoet_data_portal_neuroglancer.precompute.points import (
    encode_annotation,
)
from cryoet_data_portal_neuroglancer.state_generator import (
    generate_oriented_point_layer,
    combine_json_layers,
)


def load_data(
    metadata_path: Path, annotations_path: Path
) -> tuple[dict[str, Any], list[dict[str, Any]]]:
    """Load in the metadata (json) and annotations (ndjson) files."""
    with open(metadata_path, mode="r") as f:
        metadata = json.load(f)
    with open(annotations_path, mode="r") as f:
        annotations = ndjson.load(f)
    return metadata, annotations


# TODO: convert these constants to command-line arguments (see README).
JSON_PATH = Path(
    r"/media/starfish/LargeSSD/data/cryoET/data/oriented/liang_xue-chloramphenicol_bound_70s_ribosome-1.0.json"
)

OUTPUT_PATH = Path(r"/media/starfish/LargeSSD/data/cryoET/data/new_oriented_points/")

# The ndjson annotations sit next to the metadata json with the same stem.
metadata, data = load_data(JSON_PATH, JSON_PATH.with_suffix(".ndjson"))

# Encode the oriented annotations, sharded by id.
encode_annotation(
    data, metadata, OUTPUT_PATH, 1.0, shard_by_id=(0, 10), is_oriented=True
)

SOURCE = "http://127.0.0.1:9000/new_oriented_points/"

output = generate_oriented_point_layer(
    source=SOURCE,
    name="Test Oriented Points",
    color="#FF0000",
    point_size_multiplier=0.5,
    line_width=2.0,
    is_visible=True,
    is_instance_segmentation=False,
)
print(output)

layer_json = combine_json_layers([output], 1.0)

# Use a context manager so the output file is closed promptly (the original
# passed a bare open() directly to json.dump and leaked the handle).
with open("oriented_point_layer.json", "w") as f:
    json.dump(layer_json, f, indent=2)

0 commit comments

Comments
 (0)