diff --git a/LICENSE b/LICENSE index d64569567..ae363a38c 100644 --- a/LICENSE +++ b/LICENSE @@ -1,202 +1,24 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. 
Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. 
- You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. +Copyright (c) 2024, MIT Lincoln Laboratory + +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER +OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. \ No newline at end of file diff --git a/NOTICES b/NOTICES new file mode 100644 index 000000000..896603084 --- /dev/null +++ b/NOTICES @@ -0,0 +1,52 @@ +The Large-Scale Brain Mapping +repository was originally forked from Neuroglancer commit: + f543a5749c63aee71d691a306e14471400e6ded0 + +This NOTICE file is included to satisfy the APACHE 2.0 redistribution +requirements of the original Neuroglancer Project. + +MIT has applied its copyright statement to its modifications and has exercised +the source license terms to use, reproduce, and distribute these modifications +as Derivative Works. + +The following MIT authored files were added to Neuroglancer: + NOTICES + src/annotation/linestring.ts + +To complete the integration of the new functionality the following +files from the original Neuroglancer commit were additionally modified: + src/annotation/backend.ts + src/annotation/bounding_box.ts + src/annotation/ellipsoid.ts + src/annotation/frontend_source.ts + src/annotation/index.ts + src/annotation/line.ts + src/annotation/point.ts + src/annotation/renderlayer.ts + src/annotation/type_handler.ts + src/ui/annotations.ts + +Copyright (C) 2024, MASSACHUSETTS INSTITUTE OF TECHNOLOGY +SPDX-License-Identifier: BSD-2-Clause +Subject to FAR 52.227-11 – Patent Rights – Ownership by the Contractor (May 2014) + +DISTRIBUTION STATEMENT A. Approved for public release: distribution unlimited. +This material is based upon work supported by the Defense Advanced Research Projects Agency and MIT under Air Force Contract No. FA8702-15-D-0001. Any opinions, findings, conclusions or recommendations expressed in this material are those of the author(s) and do not necessarily reflect the views of the Defense Advanced Research Projects Agency and MIT. 
+ +The software/firmware is provided to you on an As-Is basis + +Delivered to the U.S. Government with Unlimited Rights, as defined in DFARS Part 252.227-7013 or 7014 (Feb 2014). Notwithstanding any copyright notice, U.S. Government rights in this work are defined by DFARS 252.227-7013 or DFARS 252.227-7014 as detailed above. Use of this work other than as specifically authorized by the U.S. Government may violate any copyrights that exist in this work. + +======== +Neuroglancer is an open source project maintained by Google on GitHub + + +Copyright 2016 Google Inc. +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this software except in compliance with the License. +You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0. +Unless required by applicable law or agreed to in writing, +software distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. \ No newline at end of file diff --git a/NeuroglancerLicenseCopy b/NeuroglancerLicenseCopy new file mode 100644 index 000000000..d64569567 --- /dev/null +++ b/NeuroglancerLicenseCopy @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/README.md b/README.md index 74dc170d8..2126e0274 100644 --- a/README.md +++ b/README.md @@ -1,62 +1,76 @@ -## Neuroglancer: Web-based volumetric data visualization +![](docs/NeuroTrALE_logo_with_background.jpg) -[![License](https://img.shields.io/badge/License-Apache%202.0-blue.svg)](https://opensource.org/licenses/Apache-2.0) -[![PyPI](https://img.shields.io/pypi/v/neuroglancer)](https://pypi.org/project/neuroglancer) -![Build](https://github.com/google/neuroglancer/workflows/Build/badge.svg) -[![DOI](https://zenodo.org/badge/59798355.svg)](https://zenodo.org/badge/latestdoi/59798355) +# NeuroTrALE (Neuron Tracing and Active Learning Environment) -Neuroglancer is a WebGL-based viewer for volumetric data. It is capable of displaying arbitrary (non axis-aligned) cross-sectional views of volumetric data, as well as 3-D meshes and line-segment based models (skeletons). 
+[![License](https://img.shields.io/badge/License-BSD%202--Clause-orange.svg)](https://opensource.org/licenses/BSD-2-Clause) -This is not an official Google product. +NeuroTrALE is a scalable active learning pipeline prototype for large-scale brain mapping that leverages high performance computing power. It enables high-throughput evaluation of algorithm results, which, after human review, are used for iterative machine learning model training. -# Examples +NeuroTrALE is a derivative work building upon Google's open-source [Neuroglancer project](https://github.com/google/neuroglancer). Neuroglancer is a WebGL-based viewer for volumetric data. It is capable of displaying arbitrary (non axis-aligned) cross-sectional views of volumetric data, as well as 3-D meshes and line-segment based models (skeletons). NeuroTrALE adds features for creating polygon, point-wise, and linestring annotations, and editing algorithm-generated segmentations. -A live demo is hosted at . (The prior link opens the viewer without any preloaded dataset.) Use the viewer links below to open the viewer preloaded with an example dataset. +To satisfy the redistribution requirements of the original, APACHE 2.0-licensed Neuroglancer Project, a copy of the source LICENSE file (NeuroglancerLicenseCopy) is included in this repository. In addition, prominent changes and copyright notices have been added to a NOTICES file clearly indicating the files authored by MIT and those modified to support integration of the new functionality. -The four-pane view consists of 3 orthogonal cross-sectional views as well as a 3-D view (with independent orientation) that displays 3-D models (if available) for the selected objects. All four views maintain the same center position. The orientation of the 3 cross-sectional views can also be adjusted, although they maintain a fixed orientation relative to each other. (Try holding the shift key and either dragging with the left mouse button or pressing an arrow key.) +NeuroTrALE also leverages [Precomputed-tif](https://github.com/chunglabmit/precomputed-tif), a precomputed data source of TIF files along with a simple HTTP server of those files, developed by the Chung Lab at MIT. -- [FlyEM Hemibrain](https://www.janelia.org/project-team/flyem/hemibrain) (8x8x8 cubic nanometer resolution). Open viewer +# Key functionality -- [FAFB-FFN1 Full Adult Fly Brain Automated Segmentation](https://fafb-ffn1.storage.googleapis.com/landing.html) (4x4x40 cubic nanometer resolution). Open viewer +NeuroTrALE has been modified and extended in order to ingest, serve, and visualize raw images and algorithm results, as well as save any changes made by users for iterative machine learning model training. Specifically, we added the following capabilities to our web-based tool. These capabilities are typically only available in a desktop-based software: -- Kasthuri et al., 2014. Mouse somatosensory cortex (6x6x30 cubic nanometer resolution). Open viewer. 
+- Ingest raw image volumes and algorithm outputs, including neuron/glia/centroid/axon detections +- Enhance visualization of algorithm detections overlaid on the raw imagery (along with existing visualization of imagery) +- Editing tools for polygons, points, and linestring annotation types +- Automatic saving of updated annotation data after human review +- Provide data scalability by breaking up annotation data into blocks and while keeping imagery intact +- Support JSON, CSV, and HDF5 formats +- Support serving multiple annotated datasets at once - This dataset was copied from and is made available under the [Open Data Common Attribution License](http://opendatacommons.org/licenses/by/1.0/). Paper: Kasthuri, Narayanan, et al. "Saturated reconstruction of a volume of neocortex." Cell 162.3 (2015): 648-661. +NeuroTrALE augments the concept of layers in Neuroglancer to display brain microscopy image data from multiple channels (e.g., images from multiple fluorescent stains) and algorithm results in layers, which can be turned on and off dynamically by the user. -- Janelia FlyEM FIB-25. 7-column Drosophila medulla (8x8x8 cubic nanometer resolution). Open viewer. +# Example applications - This dataset was copied from , and is made available under the [Open Data Common Attribution License](http://opendatacommons.org/licenses/by/1.0/). Paper: Takemura, Shin-ya et al. "Synaptic Circuits and Their Variations within Different Columns in the Visual System of Drosophila." Proceedings of the National Academy of Sciences of the United States of America 112.44 (2015): 13711-13716. +Hemisphere region editing and navigation: -- Example of viewing 2D microscopy (coronal section of rat brain at 325 nanometer resolution). Open viewer. (Use Ctrl+MouseWheel to zoom out) +![](docs/NeuroTrALE_screenshot_Hemisphere.png) - This image is part of: Olsen et al., 2020. Anterogradely labeled axonal projections from the posterior parietal cortex in rat [Data set]. EBRAINS. +Nuclei segmentation: + +![](docs/NeuroTrALE_screenshot_Nuclei.png) + +Axon tracing: + +![](docs/NeuroTrALE_screenshot_Axons.png) + +# Significance + +Despite the success of new technologies, integrated reconstruction of the fine subcellular architectures, molecular details, and intercellular connectivity of diverse cell types in large-scale biological systems such as the human brain remains an unmet goal in biology. The top priority of the Brain Research through Advancing Innovative Neurotechnologies (BRAIN) Initiative sponsored by the National Institutes of Health (NIH) is to map the human brain at different scales with improved speed and accuracy. While deep learning-based approaches have shown effectiveness in neuron segmentation and tracing, one major challenge is the lack of annotated data, which often requires domain knowledge. The manual process is also laborious and time consuming. In addition, there is a lack of tools that allow domain experts to review the algorithm results at scale. In the scenarios where data may be abundant but labels are scarce or expensive to obtain, active learning is a viable solution and has been used in modern machine learning. Active learning is a special case of machine learning in which a learning algorithm can interactively cue a user to label new data points with the desired outputs. Active learning aims to achieve high accuracy using as few manually labeled instances as possible, thereby minimizing the cost of obtaining labeled data. 
+ +# Background reading + +- HPEC 2020 paper: +- ISBI 2021 paper: # Supported data sources -Neuroglancer itself is purely a client-side program, but it depends on data being accessible via HTTP in a suitable format. It is designed to easily support many different data sources, and there is existing support for the following data APIs/formats: +Like Neuroglancer, NeuroTrALE depends on data being accessible via HTTP in a suitable format. It is designed to easily support many different data sources, and there is existing support for the following data APIs/formats: -- [Neuroglancer precomputed format](src/datasource/precomputed) -- [N5](src/datasource/n5) -- [Zarr v2/v3](src/datasource/zarr) -- [Python in-memory volumes](python/README.md) (with automatic mesh generation) - BOSS - DVID - Render +- [Precomputed chunk/mesh fragments exposed over HTTP](src/neuroglancer/datasource/precomputed) - Single NIfTI files -- [Deep Zoom images](src/datasource/deepzoom) +- [Python in-memory volumes](python/README.md) (with automatic mesh generation) +- N5 # Supported browsers - Chrome >= 51 - Firefox >= 46 -- Safari >= 15.0 # Keyboard and mouse bindings For the complete set of bindings, see -[src/ui/default_input_event_bindings.ts](src/ui/default_input_event_bindings.ts), -or within Neuroglancer, press `h` or click on the button labeled `?` in the upper right corner. +[src/neuroglancer/ui/default_input_event_bindings.ts](src/neuroglancer/default_input_event_bindings.ts), +or within NeuroTrALE, press `h` or click on the button labeled `?` in the upper right corner. - Click on a layer name to toggle its visibility. @@ -64,58 +78,56 @@ or within Neuroglancer, press `h` or click on the button labeled `?` in the uppe - Hover over a segmentation layer name to see the current list of objects shown and to access the opacity sliders. -- Hover over an image layer name to access the opacity slider and the text editor for modifying the [rendering code](src/sliceview/image_layer_rendering.md). - +- Hover over an image layer name to access the opacity slider and the text editor for modifying the [rendering code](src/neuroglancer/sliceview/image_layer_rendering.md). + # Troubleshooting -- Neuroglancer doesn't appear to load properly. - - Neuroglancer requires WebGL (2.0) and the `EXT_color_buffer_float` extension. - - To troubleshoot, check the developer console, which is accessed by the keyboard shortcut `control-shift-i` in Firefox and Chrome. If there is a message regarding failure to initialize WebGL, you can take the following steps: +- NeuroTrALE doesn't appear to load properly. + NeuroTrALE requires WebGL (2.0) and the `EXT_color_buffer_float` extension. + + To troubleshoot, check the developer console, which is accessed by the keyboard shortcut `control-shift-i` in Firefox and Chrome. If there is a message regarding failure to initialize WebGL, you can take the following steps: + - Chrome - - Check `chrome://gpu` to see if your GPU is blacklisted. There may be a flag you can enable to make it work. - + + Check `chrome://gpu` to see if your GPU is blacklisted. There may be a flag you can enable to make it work. + - Firefox - Check `about:support`. There may be webgl-related properties in `about:config` that you can change to make it work. Possible settings: - + Check `about:support`. There may be webgl-related properties in `about:config` that you can change to make it work. 
Possible settings: - `webgl.disable-fail-if-major-performance-caveat = true` - `webgl.force-enabled = true` - `webgl.msaa-force = true` - + - Failure to access a data source. - As a security measure, browsers will in many prevent a webpage from accessing the true error code associated with a failed HTTP request. It is therefore often necessary to check the developer tools to see the true cause of any HTTP request error. + As a security measure, browsers will in many prevent a webpage from accessing the true error code associated with a failed HTTP request. It is therefore often necessary to check the developer tools to see the true cause of any HTTP request error. There are several likely causes: - + - [Cross-origin resource sharing (CORS)](https://en.wikipedia.org/wiki/Cross-origin_resource_sharing) - - Neuroglancer relies on cross-origin requests to retrieve data from third-party servers. As a security measure, if an appropriate `Access-Control-Allow-Origin` response header is not sent by the server, browsers prevent webpages from accessing any information about the response from a cross-origin request. In order to make the data accessible to Neuroglancer, you may need to change the cross-origin request sharing (CORS) configuration of the HTTP server. - - - Accessing an `http://` resource from a Neuroglancer client hosted at an `https://` URL - - As a security measure, recent versions of Chrome and Firefox prohibit webpages hosted at `https://` URLs from issuing requests to `http://` URLs. As a workaround, you can use a Neuroglancer client hosted at a `http://` URL, e.g. the demo client running at http://neuroglancer-demo.appspot.com, or one running on localhost. Alternatively, you can start Chrome with the `--disable-web-security` flag, but that should be done only with extreme caution. (Make sure to use a separate profile, and do not access any untrusted webpages when running with that flag enabled.) - + + Like Neuroglancer, NeuroTrALE relies on cross-origin requests to retrieve data from third-party servers. As a security measure, if an appropriate `Access-Control-Allow-Origin` response header is not sent by the server, browsers prevent webpages from accessing any information about the response from a cross-origin request. In order to make the data accessible to NeuroTrALE, you may need to change the cross-origin request sharing (CORS) configuration of the HTTP server. + + - Accessing an `http://` resource from a NeuroTrALE client hosted at an `https://` URL + + As a security measure, recent versions of Chrome and Firefox prohibit webpages hosted at `https://` URLs from issuing requests to `http://` URLs. As a workaround, you can use a NeuroTrALE client hosted at a `http://` URL, or one running on localhost. Alternatively, you can start Chrome with the `--disable-web-security` flag, but that should be done only with extreme caution. (Make sure to use a separate profile, and do not access any untrusted webpages when running with that flag enabled.) + # Multi-threaded architecture -In order to maintain a responsive UI and data display even during rapid navigation, work is split between the main UI thread (referred to as the "frontend") and a separate WebWorker thread (referred to as the "backend"). 
This introduces some complexity due to the fact that current browsers: - -- do not support any form of _shared_ memory or standard synchronization mechanism (although they do support relatively efficient _transfers_ of typed arrays between threads); -- require that all manipulation of the DOM and the WebGL context happens on the main UI thread. +In order to maintain a responsive UI and data display even during rapid navigation, work is split between the main UI thread (referred to as the "frontend") and a separate WebWorker thread (referred to as the "backend"). This introduces some complexity due to the fact that current browsers: + - do not support any form of *shared* memory or standard synchronization mechanism (although they do support relatively efficient *transfers* of typed arrays between threads); + - require that all manipulation of the DOM and the WebGL context happens on the main UI thread. The "frontend" UI thread handles user actions and rendering, while the "backend" WebWorker thread handle all queuing, downloading, and preprocessing of data needed for rendering. # Documentation Index -- [Image Layer Rendering](src/sliceview/image_layer_rendering.md) -- [Cross-sectional view implementation architecture](src/sliceview/README.md) -- [Compressed segmentation format](src/sliceview/compressed_segmentation/README.md) -- [Data chunk management](src/chunk_manager/) -- [On-GPU hashing](src/gpu_hash/) +- [Image Layer Rendering](src/neuroglancer/sliceview/image_layer_rendering.md) +- [Cross-sectional view implementation architecture](src/neuroglancer/sliceview/README.md) +- [Compressed segmentation format](src/neuroglancer/sliceview/compressed_segmentation/README.md) +- [Data chunk management](src/neuroglancer/chunk_manager/) +- [On-GPU hashing](src/neuroglancer/gpu_hash/) # Building @@ -123,12 +135,12 @@ node.js is required to build the viewer. 1. First install NVM (node version manager) per the instructions here: -https://github.com/creationix/nvm + https://github.com/creationix/nvm 2. Install a recent version of Node.js if you haven't already done so: - `nvm install stable` - + `nvm install stable` + 3. Install the dependencies required by this project: (From within this directory) @@ -141,49 +153,22 @@ https://github.com/creationix/nvm 4. To run a local server for development purposes: `npm run dev-server` - + This will start a server on . - + 5. To run the unit test suite on Chrome: - + `npm test` 6. See [package.json](package.json) for other commands available. -# Discussion Group - -There is a Google Group/mailing list for discussion related to Neuroglancer: -. - -# Related Projects - -- [TensorStore](https://github.com/google/tensorstore) - C++ and Python library for efficiently - reading and writing multi-dimensional arrays in formats supported by Neuroglancer. -- [4Quant/neuroglancer-docker](https://github.com/4Quant/neuroglancer-docker) - Example setup for - Docker deployment of the [Neuroglancer Python integration](python/README.md). -- [FZJ-INM1-BDA/neuroglancer-scripts](https://github.com/FZJ-INM1-BDA/neuroglancer-scripts) - - Scripts for converting the [BigBrain](https://bigbrain.loris.ca) dataset to the - Neuroglancer [precomputed data format](src/datasource/precomputed), which may serve - as a useful example for converting other datasets. -- [BigArrays.jl](https://github.com/seung-lab/BigArrays.jl) - Julia interface of neuroglancer precomputed data format. 
-- [cloudvolume](https://github.com/seung-lab/cloud-volume) - Python interface of neuroglancer precomputed data format. -- [multiresolution-mesh-creator](https://github.com/janelia-cosem/multiresolution-mesh-creator) - Python tool for creating [multi-resolution meshes](https://github.com/google/neuroglancer/blob/master/src/datasource/precomputed/meshes.md#multi-resolution-mesh-format) from single resolution - or multiscale - meshes. -- [Igneous](https://github.com/seung-lab/igneous) - Python pipeline for scalable meshing, skeletonizing, downsampling, and managment of large 3d images focusing on Neuroglancer Precomputed format. - -# Contributing - -Want to contribute? Great! First, read [CONTRIBUTING.md](CONTRIBUTING.md). - -# License +# Disclaimer -Copyright 2016 Google Inc. +DISTRIBUTION STATEMENT A. Approved for public release: distribution unlimited. +This material is based upon work supported by the Defense Advanced Research Projects Agency and MIT under Air Force Contract No. FA8702-15-D-0001. Any opinions, findings, conclusions or recommendations expressed in this material are those of the author(s) and do not necessarily reflect the views of the Defense Advanced Research Projects Agency and MIT. -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this software except in compliance with the License. -You may obtain a copy of the License at . +Copyright (C) 2024, MASSACHUSETTS INSTITUTE OF TECHNOLOGY + Subject to FAR 52.227-11 – Patent Rights – Ownership by the Contractor (May 2014) + SPDX-License-Identifier: BSD-2-Clause -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. +The software/firmware is provided to you on an As-Is basis \ No newline at end of file diff --git a/docs/NeuroTrALE_logo_with_background.jpg b/docs/NeuroTrALE_logo_with_background.jpg new file mode 100644 index 000000000..6dda4ea94 Binary files /dev/null and b/docs/NeuroTrALE_logo_with_background.jpg differ diff --git a/docs/NeuroTrALE_screenshot_Axons.png b/docs/NeuroTrALE_screenshot_Axons.png new file mode 100644 index 000000000..54639ea0d Binary files /dev/null and b/docs/NeuroTrALE_screenshot_Axons.png differ diff --git a/docs/NeuroTrALE_screenshot_Hemisphere.png b/docs/NeuroTrALE_screenshot_Hemisphere.png new file mode 100644 index 000000000..8b8c6c896 Binary files /dev/null and b/docs/NeuroTrALE_screenshot_Hemisphere.png differ diff --git a/docs/NeuroTrALE_screenshot_Nuclei.png b/docs/NeuroTrALE_screenshot_Nuclei.png new file mode 100644 index 000000000..42f9f700e Binary files /dev/null and b/docs/NeuroTrALE_screenshot_Nuclei.png differ diff --git a/src/annotation/backend.ts b/src/annotation/backend.ts index 779678863..1ccf594d2 100644 --- a/src/annotation/backend.ts +++ b/src/annotation/backend.ts @@ -12,6 +12,9 @@ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. + * + * @modifcations + * MIT modified this file. 
For more information see the NOTICES.txt file */ import type { AnnotationGeometryChunkSpecification } from "#src/annotation/base.js"; @@ -104,6 +107,7 @@ export class AnnotationGeometryData implements SerializedAnnotations { typeToOffset: number[]; typeToIds: string[][]; typeToIdMaps: Map[]; + typeToPrimitiveCount: number[]; serialize(msg: any, transfers: any[]) { msg.data = this.data; diff --git a/src/annotation/bounding_box.ts b/src/annotation/bounding_box.ts index 60100918f..3fa45630b 100644 --- a/src/annotation/bounding_box.ts +++ b/src/annotation/bounding_box.ts @@ -12,6 +12,9 @@ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. + * + * @modifcations + * MIT modified this file. For more information see the NOTICES.txt file */ /** @@ -28,6 +31,7 @@ import { AnnotationRenderHelper, registerAnnotationTypeRenderHandler, } from "#src/annotation/type_handler.js"; +import type { MouseSelectionState } from "#src/layer/index.js"; import { defineBoundingBoxCrossSectionShader, setBoundingBoxCrossSectionShaderViewportPlane, @@ -620,13 +624,19 @@ function snapPositionToCorner(position: Float32Array, corners: Float32Array) { registerAnnotationTypeRenderHandler( AnnotationType.AXIS_ALIGNED_BOUNDING_BOX, { + bytes: () => 6 * 4, sliceViewRenderHelper: SliceViewRenderHelper, perspectiveViewRenderHelper: PerspectiveViewRenderHelper, defineShaderNoOpSetters(builder) { addFaceNoOpSetters(builder); addBorderNoOpSetters(builder); }, - pickIdsPerInstance: PICK_IDS_PER_INSTANCE, + staticPickIdsPerInstance: PICK_IDS_PER_INSTANCE, + pickIdsPerInstance: (annotations) => Array(annotations.length).fill(PICK_IDS_PER_INSTANCE), + assignPickingInformation(mouseState:MouseSelectionState, pickIds:number[], pickedOffset:number) { + mouseState.pickedAnnotationIndex = Math.floor(pickedOffset / pickIds[0]); + mouseState.pickedOffset = pickedOffset % pickIds[0]; + }, snapPosition(position, data, offset, partIndex) { const rank = position.length; const corners = new Float32Array(data, offset, rank * 2); diff --git a/src/annotation/ellipsoid.ts b/src/annotation/ellipsoid.ts index 26d778575..502c20e66 100644 --- a/src/annotation/ellipsoid.ts +++ b/src/annotation/ellipsoid.ts @@ -12,6 +12,9 @@ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. + * + * @modifcations + * MIT modified this file. 
For more information see the NOTICES.txt file */ /** @@ -28,6 +31,7 @@ import { AnnotationRenderHelper, registerAnnotationTypeRenderHandler, } from "#src/annotation/type_handler.js"; +import type { MouseSelectionState } from "#src/layer/index.js"; import type { PerspectiveViewRenderContext } from "#src/perspective_view/render_layer.js"; import type { SliceViewPanelRenderContext } from "#src/sliceview/renderlayer.js"; import { mat3, mat4, vec3 } from "#src/util/geom.js"; @@ -338,6 +342,7 @@ emitAnnotation(vec4(vColor.rgb, vColor.a * vClipCoefficient)); } registerAnnotationTypeRenderHandler(AnnotationType.ELLIPSOID, { + bytes: () => 6 * 4, sliceViewRenderHelper: SliceViewRenderHelper, perspectiveViewRenderHelper: PerspectiveRenderHelper, defineShaderNoOpSetters(builder) { @@ -345,7 +350,12 @@ registerAnnotationTypeRenderHandler(AnnotationType.ELLIPSOID, { void setEllipsoidFillColor(vec4 color) {} `); }, - pickIdsPerInstance: 1, + staticPickIdsPerInstance: 1, + pickIdsPerInstance: (annotations) => Array(annotations.length).fill(1), + assignPickingInformation(mouseState:MouseSelectionState, pickIds: number[], pickedOffset:number) { + mouseState.pickedAnnotationIndex = Math.floor(pickedOffset / pickIds[0]); + mouseState.pickedOffset = pickedOffset % pickIds[0]; + }, snapPosition: (/*position, annotation, partIndex*/) => { // FIXME: snap to nearest point on ellipsoid surface }, diff --git a/src/annotation/frontend_source.ts b/src/annotation/frontend_source.ts index 1d6bc9423..cb454ef39 100644 --- a/src/annotation/frontend_source.ts +++ b/src/annotation/frontend_source.ts @@ -12,6 +12,9 @@ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. + * + * @modifcations + * MIT modified this file. For more information see the NOTICES.txt file */ import type { AnnotationGeometryChunkSpecification } from "#src/annotation/base.js"; @@ -29,6 +32,7 @@ import type { AnnotationId, AnnotationPropertySerializer, AnnotationPropertySpec, + AnnotationSource, AnnotationSourceSignals, SerializedAnnotations, } from "#src/annotation/index.js"; @@ -78,13 +82,16 @@ export interface AnnotationGeometryChunkSourceOptions export function computeNumPickIds( serializedAnnotations: SerializedAnnotations, + source: AnnotationSource | MultiscaleAnnotationSource, ) { + serializedAnnotations; let numPickIds = 0; - const { typeToIds } = serializedAnnotations; for (const annotationType of annotationTypes) { - numPickIds += - getAnnotationTypeRenderHandler(annotationType).pickIdsPerInstance * - typeToIds[annotationType].length; + const idMap = serializedAnnotations.typeToIdMaps[annotationType]; + const annotations: Annotation[] = []; + idMap.forEach((_, id) => annotations.push(source.getReference(id).value!)); + + numPickIds += getAnnotationTypeRenderHandler(annotationType).pickIdsPerInstance(annotations).reduce((a,b) => a + b, 0); } return numPickIds; } @@ -101,6 +108,7 @@ export class AnnotationGeometryData { typeToIds: x.typeToIds, typeToOffset: x.typeToOffset, typeToIdMaps: x.typeToIdMaps, + typeToPrimitiveCount: x.typeToPrimitiveCount, }; } freeGPUMemory(gl: GL) { diff --git a/src/annotation/index.ts b/src/annotation/index.ts index f920d1a96..f6073470f 100644 --- a/src/annotation/index.ts +++ b/src/annotation/index.ts @@ -12,6 +12,9 @@ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
* See the License for the specific language governing permissions and * limitations under the License. + * + * @modifcations + * MIT modified this file. For more information see the NOTICES.txt file */ /** @@ -77,6 +80,7 @@ export enum AnnotationType { LINE = 1, AXIS_ALIGNED_BOUNDING_BOX = 2, ELLIPSOID = 3, + LINE_STRING = 4, } export const annotationTypes = [ @@ -84,6 +88,7 @@ export const annotationTypes = [ AnnotationType.LINE, AnnotationType.AXIS_ALIGNED_BOUNDING_BOX, AnnotationType.ELLIPSOID, + AnnotationType.LINE_STRING, ]; export interface AnnotationPropertySpecBase { @@ -669,7 +674,12 @@ export interface Ellipsoid extends AnnotationBase { type: AnnotationType.ELLIPSOID; } -export type Annotation = Line | Point | AxisAlignedBoundingBox | Ellipsoid; +export interface LineString extends AnnotationBase { + points: Float32Array[]; + type: AnnotationType.LINE_STRING; +} + +export type Annotation = Line | Point | AxisAlignedBoundingBox | Ellipsoid | LineString; export interface AnnotationTypeHandler { icon: string; @@ -695,6 +705,7 @@ export interface AnnotationTypeHandler { annotation: T, callback: (vec: Float32Array, isVector: boolean) => void, ) => void; + getByteInstanceCount: (annotations: Annotation[]) => number; } function serializeFloatVector( @@ -724,6 +735,20 @@ function serializeTwoFloatVectors( return offset; } + +function serializeFloatVectorArray( + buffer: DataView, + offset: number, + isLittleEndian: boolean, + rank: number, + arr: Float32Array[] +) { + for (let i = 0; i < arr.length; ++i) { + offset = serializeFloatVector(buffer, offset, isLittleEndian, rank, arr[i]); + } + return offset; +} + function deserializeFloatVector( buffer: DataView, offset: number, @@ -814,6 +839,9 @@ export const annotationTypeHandlers: Record< callback(annotation.pointA, false); callback(annotation.pointB, false); }, + getByteInstanceCount(annotations: Line[]) { + return annotations.length; + }, }, [AnnotationType.POINT]: { icon: "⚬", @@ -858,6 +886,9 @@ export const annotationTypeHandlers: Record< visitGeometry(annotation: Point, callback) { callback(annotation.point, false); }, + getByteInstanceCount(annotations: Point[]) { + return annotations.length; + }, }, [AnnotationType.AXIS_ALIGNED_BOUNDING_BOX]: { icon: "❑", @@ -926,6 +957,9 @@ export const annotationTypeHandlers: Record< callback(annotation.pointA, false); callback(annotation.pointB, false); }, + getByteInstanceCount(annotations: AxisAlignedBoundingBox[]) { + return annotations.length; + }, }, [AnnotationType.ELLIPSOID]: { icon: "◎", @@ -994,6 +1028,50 @@ export const annotationTypeHandlers: Record< callback(annotation.center, false); callback(annotation.radii, true); }, + getByteInstanceCount(annotations: Ellipsoid[]) { + return annotations.length; + }, + }, + [AnnotationType.LINE_STRING]: { + icon: '┉', + description: 'Line String', + toJSON(annotation: LineString) { + return { + points: Array.from(annotation.points.map(point => Array.from(point))) + }; + }, + restoreState(annotation: LineString, obj: any, rank: number) { + annotation.points = verifyObjectProperty( + obj, 'points', x => x.map((xi: any) => parseFixedLengthArray(new Float32Array(rank), xi, verifyFiniteFloat))); + }, + serializedBytes(rank: number) { + return 2 * 4 * rank; + }, + serialize( + buffer: DataView, offset: number, isLittleEndian: boolean, rank: number, annotation: LineString) { + serializeFloatVectorArray(buffer, offset, isLittleEndian, rank, annotation.points); + }, + deserialize: + (buffer: DataView, offset: number, isLittleEndian: boolean, rank: 
number, id: string): + Line => { + const pointA = new Float32Array(rank); + const pointB = new Float32Array(rank); + deserializeTwoFloatVectors(buffer, offset, isLittleEndian, rank, pointA, pointB); + return {type: AnnotationType.LINE, pointA, pointB, id, properties: []}; + }, + // TODO How do we deserialize when we don't know the array length ahead of time? Always store the length in index zero? + // (buffer: DataView, offset: number, isLittleEndian: boolean, rank: number, id: string): + // LineString => { + // const points = [new Float32Array(rank)]; + // deserializeFloatVectorArray(buffer, offset, isLittleEndian, rank, points); + // return {type: AnnotationType.LINE_STRING, points, id, properties: []}; + // }, + visitGeometry(annotation: LineString, callback) { + callback(annotation.points[0], false); + }, + getByteInstanceCount(annotations: LineString[]) { + return annotations.reduce((a,c) => a + c.points.length / 2, 0); + }, }, }; @@ -1367,6 +1445,7 @@ export interface SerializedAnnotations { typeToIds: string[][]; typeToOffset: number[]; typeToIdMaps: Map[]; + typeToPrimitiveCount: number[]; } function serializeAnnotations( @@ -1374,14 +1453,18 @@ function serializeAnnotations( propertySerializers: AnnotationPropertySerializer[], ): SerializedAnnotations { let totalBytes = 0; + const typeToPrimitiveCount: number[] = []; const typeToOffset: number[] = []; for (const annotationType of annotationTypes) { const propertySerializer = propertySerializers[annotationType]; const serializedPropertiesBytes = propertySerializer.serializedBytes; typeToOffset[annotationType] = totalBytes; const annotations: Annotation[] = allAnnotations[annotationType]; - const count = annotations.length; - totalBytes += serializedPropertiesBytes * count; + const handler = annotationTypeHandlers[annotationType]; + const byteInstanceCount = handler.getByteInstanceCount(annotations); + + typeToPrimitiveCount[annotationType] = byteInstanceCount; + totalBytes += serializedPropertiesBytes * byteInstanceCount; } const typeToIds: string[][] = []; const typeToIdMaps: Map[] = []; @@ -1400,15 +1483,15 @@ function serializeAnnotations( const handler = annotationTypeHandlers[annotationType]; const serialize = handler.serialize; const offset = typeToOffset[annotationType]; - const geometryDataStride = propertySerializer.propertyGroupBytes[0]; + let geometryOffset = 0; for (let i = 0, count = annotations.length; i < count; ++i) { const annotation = annotations[i]; serialize( dataView, - offset + i * geometryDataStride, + offset + geometryOffset, isLittleEndian, rank, - annotation, + annotation ); serializeProperties( dataView, @@ -1418,13 +1501,16 @@ function serializeAnnotations( isLittleEndian, annotation.properties, ); + + geometryOffset += handler.getByteInstanceCount([annotation]) * handler.serializedBytes(rank); } } - return { data: new Uint8Array(data), typeToIds, typeToOffset, typeToIdMaps }; + return {data: new Uint8Array(data), typeToIds, typeToOffset, typeToIdMaps, typeToPrimitiveCount}; } export class AnnotationSerializer { - annotations: [Point[], Line[], AxisAlignedBoundingBox[], Ellipsoid[]] = [ + annotations: [Point[], Line[], AxisAlignedBoundingBox[], Ellipsoid[], LineString[]] = [ + [], [], [], [], diff --git a/src/annotation/line.ts b/src/annotation/line.ts index 053fd30e7..e7d6b44b3 100644 --- a/src/annotation/line.ts +++ b/src/annotation/line.ts @@ -12,6 +12,9 @@ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
* See the License for the specific language governing permissions and * limitations under the License. + * + * @modifcations + * MIT modified this file. For more information see the NOTICES.txt file */ /** @@ -28,6 +31,7 @@ import { AnnotationRenderHelper, registerAnnotationTypeRenderHandler, } from "#src/annotation/type_handler.js"; +import type { MouseSelectionState } from "#src/layer/index.js"; import { projectPointToLineSegment } from "#src/util/geom.js"; import { defineCircleShader, @@ -247,13 +251,19 @@ function snapPositionToEndpoint( } registerAnnotationTypeRenderHandler(AnnotationType.LINE, { + bytes: () => 6 * 4, sliceViewRenderHelper: RenderHelper, perspectiveViewRenderHelper: RenderHelper, defineShaderNoOpSetters(builder) { defineNoOpEndpointMarkerSetters(builder); defineNoOpLineSetters(builder); }, - pickIdsPerInstance: PICK_IDS_PER_INSTANCE, + staticPickIdsPerInstance: PICK_IDS_PER_INSTANCE, + pickIdsPerInstance: (annotations) => Array(annotations.length).fill(PICK_IDS_PER_INSTANCE), + assignPickingInformation(mouseState:MouseSelectionState, pickIds: number[], pickedOffset:number) { + mouseState.pickedAnnotationIndex = Math.floor(pickedOffset / pickIds[0]); + mouseState.pickedOffset = pickedOffset % pickIds[0]; + }, snapPosition(position, data, offset, partIndex) { const rank = position.length; const endpoints = new Float32Array(data, offset, rank * 2); diff --git a/src/annotation/linestring.ts b/src/annotation/linestring.ts new file mode 100644 index 000000000..00fed5ae4 --- /dev/null +++ b/src/annotation/linestring.ts @@ -0,0 +1,319 @@ +/** + * @file Support for rendering line string annotations. + */ + +import type { LineString } from "#src/annotation/index.js"; +import { AnnotationType } from "#src/annotation/index.js"; +import type { + AnnotationRenderContext, + AnnotationShaderGetter, +} from "#src/annotation/type_handler.js"; +import { + AnnotationRenderHelper, + registerAnnotationTypeRenderHandler, +} from "#src/annotation/type_handler.js"; +import type { MouseSelectionState } from "#src/layer/index.js"; +import { projectPointToLineSegment, vec3 } from "#src/util/geom.js"; +import { + defineCircleShader, + drawCircles, + initializeCircleShader, + VERTICES_PER_CIRCLE, +} from "#src/webgl/circles.js"; +import { + defineLineShader, + drawLines, + initializeLineShader, +} from "#src/webgl/lines.js"; +import type { ShaderBuilder, ShaderProgram } from "#src/webgl/shader.js"; +import { defineVectorArrayVertexShaderInput } from "#src/webgl/shader_lib.js"; +import { defineVertexId, VertexIdHelper } from "#src/webgl/vertex_id.js"; + +const FULL_OBJECT_PICK_OFFSET = 0; +const ENDPOINTS_PICK_OFFSET = FULL_OBJECT_PICK_OFFSET + 1; + +function defineNoOpControlPointMarkerSetters(builder: ShaderBuilder) { + builder.addVertexCode(` +void setControlPointMarkerSize(float startSize, float endSize) {} +void setControlPointMarkerBorderWidth(float startSize, float endSize) {} +void setControlPointMarkerColor(vec4 startColor, vec4 endColor) {} +void setControlPointMarkerBorderColor(vec4 startColor, vec4 endColor) {} +`); +} + +function defineNoOpLineSegmentSetters(builder: ShaderBuilder) { + builder.addVertexCode(` +void setLineSegmentWidth(float width) {} +void setLineSegmentColor(vec4 startColor, vec4 endColor) {} +`); +} + +class RenderHelper extends AnnotationRenderHelper { + defineShader(builder: ShaderBuilder) { + defineVertexId(builder); + // Position of endpoints in model coordinates. 
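+    // Each instance reads two rank-length vectors (getVertexPosition0 / getVertexPosition1), one per
+    // segment endpoint, so a LineString whose points array holds N entries is drawn as N / 2
+    // instanced segments sharing this buffer layout.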
+ const {rank} = this; + defineVectorArrayVertexShaderInput( + builder, 'float', WebGL2RenderingContext.FLOAT, /*normalized=*/ false, 'VertexPosition', + rank, 2); + } + + private vertexIdHelper = this.registerDisposer(VertexIdHelper.get(this.gl)); + + private edgeShaderGetter = + this.getDependentShader('annotation/linestring/edge', (builder: ShaderBuilder) => { + const {rank} = this; + this.defineShader(builder); + defineLineShader(builder); + builder.addVarying(`highp float[${rank}]`, 'vModelPosition'); + builder.addVertexCode(` +float ng_LineWidth; +`); + defineNoOpControlPointMarkerSetters(builder); + builder.addVertexCode(` +void setLineSegmentWidth(float width) { + ng_LineWidth = width; +} +void setLineSegmentColor(vec4 startColor, vec4 endColor) { + vColor = mix(startColor, endColor, getLineEndpointCoefficient()); +} +`); + builder.setVertexMain(` +float modelPositionA[${rank}] = getVertexPosition0(); +float modelPositionB[${rank}] = getVertexPosition1(); +for (int i = 0; i < ${rank}; ++i) { + vModelPosition[i] = mix(modelPositionA[i], modelPositionB[i], getLineEndpointCoefficient()); +} +ng_LineWidth = 1.0; +vColor = vec4(0.0, 0.0, 0.0, 0.0); +${this.invokeUserMain} +emitLine(uModelViewProjection * vec4(projectModelVectorToSubspace(modelPositionA), 1.0), + uModelViewProjection * vec4(projectModelVectorToSubspace(modelPositionB), 1.0), + ng_LineWidth); +${this.setPartIndex(builder)}; +`); + builder.setFragmentMain(` +float clipCoefficient = getSubspaceClipCoefficient(vModelPosition); +emitAnnotation(vec4(vColor.rgb, vColor.a * getLineAlpha() * + ${this.getCrossSectionFadeFactor()} * + clipCoefficient)); +`); + }); + + private endpointShaderGetter = + this.getDependentShader('annotation/linestring/endpoint', (builder: ShaderBuilder) => { + const {rank} = this; + this.defineShader(builder); + defineCircleShader(builder, this.targetIsSliceView); + builder.addVarying('highp float', 'vClipCoefficient'); + builder.addVarying('highp vec4', 'vBorderColor'); + defineNoOpLineSegmentSetters(builder); + builder.addVertexCode(` +float ng_markerDiameter; +float ng_markerBorderWidth; +int getEndpointIndex() { + return gl_VertexID / ${VERTICES_PER_CIRCLE}; +} +void setControlPointMarkerSize(float startSize, float endSize) { + ng_markerDiameter = mix(startSize, endSize, float(getEndpointIndex())); +} +void setControlPointMarkerBorderWidth(float startSize, float endSize) { + ng_markerBorderWidth = mix(startSize, endSize, float(getEndpointIndex())); +} +void setControlPointMarkerColor(vec4 startColor, vec4 endColor) { + vColor = mix(startColor, endColor, float(getEndpointIndex())); +} +void setControlPointMarkerBorderColor(vec4 startColor, vec4 endColor) { + vBorderColor = mix(startColor, endColor, float(getEndpointIndex())); +} +`); + + builder.setVertexMain(` +float modelPosition[${rank}] = getVertexPosition0(); +float modelPositionB[${rank}] = getVertexPosition1(); +for (int i = 0; i < ${rank}; ++i) { + modelPosition[i] = mix(modelPosition[i], modelPositionB[i], float(getEndpointIndex())); +} +vClipCoefficient = getSubspaceClipCoefficient(modelPosition); +vColor = vec4(0.0, 0.0, 0.0, 0.0); +vBorderColor = vec4(0.0, 0.0, 0.0, 1.0); +ng_markerDiameter = 5.0; +ng_markerBorderWidth = 1.0; +${this.invokeUserMain} +emitCircle(uModelViewProjection * vec4(projectModelVectorToSubspace(modelPosition), 1.0), ng_markerDiameter, ng_markerBorderWidth); +${this.setPartIndex(builder, 'uint(getEndpointIndex()) + 1u')}; +`); + builder.setFragmentMain(` +vec4 color = getCircleColor(vColor, vBorderColor); +color.a *= 
vClipCoefficient; +emitAnnotation(color); +`); + }); + + enable( + shaderGetter: AnnotationShaderGetter, context: AnnotationRenderContext, + callback: (shader: ShaderProgram) => void) { + super.enable(shaderGetter, context, shader => { + const binder = shader.vertexShaderInputBinders['VertexPosition']; + binder.enable(1); + this.gl.bindBuffer(WebGL2RenderingContext.ARRAY_BUFFER, context.buffer.buffer); + binder.bind(this.geometryDataStride, context.bufferOffset); + const {vertexIdHelper} = this; + vertexIdHelper.enable(); + callback(shader); + vertexIdHelper.disable(); + binder.disable(); + }); + } + + drawEdges(context: AnnotationRenderContext) { + this.enable(this.edgeShaderGetter, context, shader => { + initializeLineShader( + shader, context.renderContext.projectionParameters, /*featherWidthInPixels=*/ 1.0); + drawLines(shader.gl, 1, context.count); + }); + } + + drawEndpoints(context: AnnotationRenderContext) { + this.enable(this.endpointShaderGetter, context, shader => { + initializeCircleShader(shader, context.renderContext.projectionParameters, {featherWidthInPixels: 0.5}); + drawCircles(shader.gl, 1, context.count); + }); + } + + draw(context: AnnotationRenderContext) { + const basePickId = context.basePickId; + + this.drawEdges(context); + context.basePickId += context.count; + this.drawEndpoints(context); + + context.basePickId = basePickId; // Just in case the original value is needed downstream. + } +} + +function snapPositionToLine(position: Float32Array, endpoints: Float32Array) { + const rank = position.length; + projectPointToLineSegment( + position, endpoints.subarray(0, rank), endpoints.subarray(rank), position); +} + +function snapPositionToEndpoint( + position: Float32Array, endpoints: Float32Array, endpointIndex: number) { + const rank = position.length; + const startOffset = rank * endpointIndex; + for (let i = 0; i < rank; ++i) { + position[i] = endpoints[startOffset + i]; + } +} + +registerAnnotationTypeRenderHandler(AnnotationType.LINE_STRING, { + bytes: (annotation: LineString) => annotation.points.length * 3 * 4, + sliceViewRenderHelper: RenderHelper, + perspectiveViewRenderHelper: RenderHelper, + defineShaderNoOpSetters(builder) { + defineNoOpControlPointMarkerSetters(builder); + defineNoOpLineSegmentSetters(builder); + }, + staticPickIdsPerInstance: null, + pickIdsPerInstance: (annotations:LineString[]) => annotations.map(a => a.points.length * 2), + assignPickingInformation(mouseState:MouseSelectionState, pickIds:number[], pickedOffset:number) { + const pickIdCountLines = pickIds.reduce((a, b) => a + (b / 2) / 2, 0); + const typeIndex = pickedOffset >= pickIdCountLines ? pickedOffset - pickIdCountLines - 1 : pickedOffset; + let instanceIndex = 0; + let linePickIdSum = 0; + let pointPickIdSum = 0; + let partIndex = 0; + + // Given the pick ID and the annotations being rendered, determine which piece of geometry is being interacted with. + // Points are rendered after line segments, so they will have higher pick ID's; a modulo must be performed on point + // ID's using the number of line segments to calculate which control point is being picked. + for (const instancePickIds of pickIds) { + const instanceLinePickIds = (instancePickIds / 2) / 2; + const instancePointPickIds = instanceLinePickIds + 1; + + if (pickedOffset > pickIdCountLines) { // Picking an endpoint. 
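+        // pickedOffset lies beyond the pick IDs reserved for segments (points receive IDs after all
+        // segments), so translate it back into a per-annotation control-point index.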
+ const pickedPointInstance = pickedOffset - pickIdCountLines; + const pickedPointOffset = pickedOffset - pickIdCountLines - 1; + + if (pointPickIdSum + instancePointPickIds > pickedPointInstance) { + partIndex = pickedPointOffset - pointPickIdSum + instanceLinePickIds; + break; + } + } + else { // Picking a line. + if (linePickIdSum + instanceLinePickIds > pickedOffset) { + partIndex = typeIndex - linePickIdSum; + break; + } + } + + linePickIdSum += instanceLinePickIds; + pointPickIdSum += instancePointPickIds - 1; + ++instanceIndex; + } + + mouseState.pickedOffset = partIndex; + mouseState.pickedAnnotationIndex = instanceIndex; + }, + snapPosition(position, data, offset, partIndex) { + const rank = position.length; + const endpoints = new Float32Array(data, offset, rank * 2); + if (partIndex === FULL_OBJECT_PICK_OFFSET) { + snapPositionToLine(position, endpoints); + } else { + snapPositionToEndpoint(position, endpoints, partIndex - ENDPOINTS_PICK_OFFSET); + } + }, + getRepresentativePoint(out, ann, partIndex) { + if (partIndex >= (ann.points.length) / 2) { // An endpoint was selected; modulo away the line segments. + partIndex = partIndex - ((ann.points.length) / 2); + } + + out.set((ann.points[partIndex * 2])); + }, + updateViaRepresentativePoint(oldAnnotation, position, partIndex) { + const baseLine = {...oldAnnotation}; + const pointIndicesToMove = []; + let pointOffset = null; + + if (partIndex < (baseLine.points.length - 1) / 2) { // Moving an edge. + const pointIndex = partIndex * 2; + pointOffset = vec3.subtract(vec3.create(), [...position], [...baseLine.points[pointIndex]]); + + // Move both ends of the segment, plus the point that begins the subsequent segment. + pointIndicesToMove.push(pointIndex, pointIndex + 1, pointIndex + 2); + + if (pointIndex != 0) { // Move the point that the preceeding segment connects to, if the segment being moved is not the first one. + pointIndicesToMove.push(pointIndex - 1); + } + if (pointIndex == baseLine.points.length - 4) { // Special case for the last segment of a line, which has a terminal degenerate line. + pointIndicesToMove.push(pointIndex + 3); + } + } + else { // Moving a point. + const pointIndex = (partIndex - ((baseLine.points.length) / 2)) * 2; + pointOffset = vec3.subtract(vec3.create(), [...position], [...baseLine.points[pointIndex]]); + + // Move the point itself. + pointIndicesToMove.push(pointIndex); + + // If it's not the first point, also move the point before it which the previous segment connects to. + if (pointIndex != 0) { + pointIndicesToMove.push(pointIndex - 1) + } + + // If it's not the last point, move the point after it which the subsequent point connects to. + if (pointIndex == baseLine.points.length - 2) { + pointIndicesToMove.push(pointIndex + 1) + } + } + + // Move all points involved in either the point or segment shift. + for (const pointIndex of pointIndicesToMove) { + baseLine.points[pointIndex] = vec3.add(vec3.create(), [...baseLine.points[pointIndex]], pointOffset); + } + + return baseLine; + } +}); diff --git a/src/annotation/point.ts b/src/annotation/point.ts index 27b12f149..ebb89a081 100644 --- a/src/annotation/point.ts +++ b/src/annotation/point.ts @@ -12,6 +12,9 @@ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. + * + * @modifcations + * MIT modified this file. 
For more information see the NOTICES.txt file */ /** @@ -28,6 +31,7 @@ import { AnnotationRenderHelper, registerAnnotationTypeRenderHandler, } from "#src/annotation/type_handler.js"; +import type { MouseSelectionState } from "#src/layer/index.js"; import { defineCircleShader, drawCircles, @@ -214,6 +218,7 @@ emitAnnotation(vec4(color.rgb, color.a * ${this.getCrossSectionFadeFactor()})); } registerAnnotationTypeRenderHandler(AnnotationType.POINT, { + bytes: () => 3 * 4, sliceViewRenderHelper: RenderHelper, perspectiveViewRenderHelper: RenderHelper, defineShaderNoOpSetters(builder) { @@ -224,7 +229,12 @@ void setPointMarkerColor(vec4 color) {} void setPointMarkerBorderColor(vec4 color) {} `); }, - pickIdsPerInstance: 1, + staticPickIdsPerInstance: 1, + pickIdsPerInstance: (annotations) => Array(annotations.length).fill(1), + assignPickingInformation(mouseState:MouseSelectionState, pickIds: number[], pickedOffset:number) { + mouseState.pickedAnnotationIndex = Math.floor(pickedOffset / pickIds[0]); + mouseState.pickedOffset = pickedOffset % pickIds[0]; + }, snapPosition(position, data, offset) { position.set(new Float32Array(data, offset, position.length)); }, diff --git a/src/annotation/renderlayer.ts b/src/annotation/renderlayer.ts index 836fa8dd1..9e4ea5b4a 100644 --- a/src/annotation/renderlayer.ts +++ b/src/annotation/renderlayer.ts @@ -12,12 +12,16 @@ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. + * + * @modifcations + * MIT modified this file. For more information see the NOTICES.txt file */ import "#src/annotation/bounding_box.js"; import "#src/annotation/line.js"; import "#src/annotation/point.js"; import "#src/annotation/ellipsoid.js"; +import '#src/annotation/linestring.js'; import type { AnnotationLayerState, @@ -39,6 +43,7 @@ import { MultiscaleAnnotationSource, } from "#src/annotation/frontend_source.js"; import type { + Annotation, AnnotationBase, SerializedAnnotations, } from "#src/annotation/index.js"; @@ -374,7 +379,7 @@ export class AnnotationLayer extends RefCounted { segmentationFilter(this.segmentationStates.value), )); buffer.setData(this.serializedAnnotations.data); - this.numPickIds = computeNumPickIds(serializedAnnotations); + this.numPickIds = computeNumPickIds(serializedAnnotations, source); } } } @@ -499,6 +504,7 @@ function AnnotationRenderLayer< displayState.shaderError, )); helper.pickIdsPerInstance = handler.pickIdsPerInstance; + helper.staticPickIdsPerInstance = handler.staticPickIdsPerInstance; helper.targetIsSliceView = renderHelperType === "sliceViewRenderHelper"; } } @@ -610,7 +616,7 @@ function AnnotationRenderLayer< } const { serializedAnnotations } = chunk; buffer.setData(serializedAnnotations.data); - chunk.numPickIds = computeNumPickIds(serializedAnnotations); + chunk.numPickIds = computeNumPickIds(serializedAnnotations, this.base.source); chunk.bufferValid = true; } this.drawGeometry( @@ -630,7 +636,7 @@ function AnnotationRenderLayer< const { base } = this; const { chunkDisplayTransform } = state; const { serializedAnnotations } = chunk; - const { typeToIdMaps, typeToOffset } = serializedAnnotations; + const { typeToIdMaps, typeToOffset, typeToPrimitiveCount } = serializedAnnotations; let pickId = 0; if (renderContext.emitPickID) { pickId = renderContext.pickIDs.register( @@ -669,16 +675,22 @@ function AnnotationRenderLayer< .visibleHistograms > 0; for (const annotationType of annotationTypes) { const idMap = 
typeToIdMaps[annotationType]; - let count = idMap.size; + const annotations: Annotation[] = []; + idMap.forEach((_, id) => annotations.push(this.base.state.source.getReference(id).value!)); + let count = typeToPrimitiveCount[annotationType]; if (count > 0) { const handler = getAnnotationTypeRenderHandler(annotationType); let selectedIndex = 0xffffffff; + const pickIdsPerInstance = handler.pickIdsPerInstance(annotations); if (hoverValue !== undefined) { const index = idMap.get(hoverValue.id); if (index !== undefined) { - selectedIndex = index * handler.pickIdsPerInstance; + selectedIndex = 0; + for (let i = 0; i < index; ++i) { + selectedIndex += pickIdsPerInstance[i]; + } // If we wanted to include the partIndex, we would add: - // selectedIndex += hoverValue.partIndex; + selectedIndex += hoverValue.partIndex; } } count = Math.round(count * drawFraction); @@ -691,7 +703,7 @@ function AnnotationRenderLayer< renderHelper.computeHistograms(context, renderContext.frameNumber); renderContext.bindFramebuffer(); } - context.basePickId += count * handler.pickIdsPerInstance; + context.basePickId += pickIdsPerInstance.reduce((a, b) => a + b, 0); } } } @@ -704,27 +716,30 @@ function AnnotationRenderLayer< ) { const chunk = data as AnnotationGeometryDataInterface; const { serializedAnnotations } = chunk; - const { typeToIds, typeToOffset } = serializedAnnotations; + const { typeToIds } = serializedAnnotations; const rank = this.curRank; const chunkTransform = this.chunkTransform; if (chunkTransform.error !== undefined) return; for (const annotationType of annotationTypes) { const ids = typeToIds[annotationType]; + const annotations: Annotation[] = []; + ids.forEach((id) => annotations.push(this.base.state.source.getReference(id).value!)); const renderHandler = getAnnotationTypeRenderHandler(annotationType); - const { pickIdsPerInstance } = renderHandler; - if (pickedOffset < ids.length * pickIdsPerInstance) { - const instanceIndex = Math.floor(pickedOffset / pickIdsPerInstance); - const id = ids[instanceIndex]; - const partIndex = pickedOffset % pickIdsPerInstance; + const pickIds = renderHandler.pickIdsPerInstance(annotations); + const pickIdCount = pickIds.reduce((a, b) => a + b, 0); + if (pickIdCount != 0 && pickedOffset < pickIdCount) { + renderHandler.assignPickingInformation(mouseState, pickIds, pickedOffset); + + let bufferOffset = 0; + for (let i = 0; i < mouseState.pickedAnnotationIndex!; ++i) { + bufferOffset += renderHandler.bytes(this.base.state.source.getReference(ids[i]).value!); + } + const id = ids[mouseState.pickedAnnotationIndex!]; mouseState.pickedAnnotationId = id; mouseState.pickedAnnotationLayer = this.base.state; - mouseState.pickedOffset = partIndex; mouseState.pickedAnnotationBuffer = serializedAnnotations.data.buffer; mouseState.pickedAnnotationType = annotationType; - mouseState.pickedAnnotationBufferBaseOffset = - serializedAnnotations.data.byteOffset + - typeToOffset[annotationType]; - mouseState.pickedAnnotationIndex = instanceIndex; + mouseState.pickedAnnotationBufferBaseOffset = bufferOffset; mouseState.pickedAnnotationCount = ids.length; const chunkPosition = this.tempChunkPosition; const { @@ -746,15 +761,12 @@ function AnnotationRenderLayer< ) { return; } - const propertySerializer = - this.base.source.annotationPropertySerializers[annotationType]; + renderHandler.snapPosition( chunkPosition, mouseState.pickedAnnotationBuffer, - mouseState.pickedAnnotationBufferBaseOffset + - mouseState.pickedAnnotationIndex * - propertySerializer.propertyGroupBytes[0], - partIndex, 
+ mouseState.pickedAnnotationBufferBaseOffset, + mouseState.pickedOffset ); const globalRank = globalToRenderLayerDimensions.length; for (let globalDim = 0; globalDim < globalRank; ++globalDim) { @@ -773,7 +785,7 @@ function AnnotationRenderLayer< } return; } - pickedOffset -= ids.length * pickIdsPerInstance; + pickedOffset -= pickIdCount; } } diff --git a/src/annotation/type_handler.ts b/src/annotation/type_handler.ts index 4e6af1819..e3aa708e8 100644 --- a/src/annotation/type_handler.ts +++ b/src/annotation/type_handler.ts @@ -12,6 +12,9 @@ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. + * + * @modifcations + * MIT modified this file. For more information see the NOTICES.txt file */ import type { @@ -25,6 +28,7 @@ import { propertyTypeDataType, } from "#src/annotation/index.js"; import type { AnnotationLayer } from "#src/annotation/renderlayer.js"; +import type { MouseSelectionState } from "#src/layer/index.js"; import type { PerspectiveViewRenderContext } from "#src/perspective_view/render_layer.js"; import type { ChunkDisplayTransformParameters } from "#src/render_coordinate_transform.js"; import type { SliceViewPanelRenderContext } from "#src/sliceview/renderlayer.js"; @@ -307,7 +311,8 @@ class AnnotationRenderHelperBase extends RefCounted { } export abstract class AnnotationRenderHelper extends AnnotationRenderHelperBase { - pickIdsPerInstance: number; + staticPickIdsPerInstance: number|null; + pickIdsPerInstance: (annotations: Annotation[]) => number[]; targetIsSliceView: boolean; constructor( @@ -380,7 +385,7 @@ export abstract class AnnotationRenderHelper extends AnnotationRenderHelperBase builder.addVertexCode(` vec3 defaultColor() { return uColor; } -highp uint getPickBaseOffset() { return uint(gl_InstanceID) * ${this.pickIdsPerInstance}u; } +highp uint getPickBaseOffset() { return uint(gl_InstanceID) * ${this.staticPickIdsPerInstance == null ? 
1 : this.staticPickIdsPerInstance}u; } `); builder.addFragmentCode(` @@ -445,11 +450,19 @@ void ng_discard() { void setLineColor(vec4 startColor, vec4 endColor); void setLineWidth(float width); +void setLineSegmentColor(vec4 startColor, vec4 endColor); +void setLineSegmentWidth(float width); + void setEndpointMarkerColor(vec4 startColor, vec4 endColor); void setEndpointMarkerBorderColor(vec4 startColor, vec4 endColor); void setEndpointMarkerSize(float startSize, float endSize); void setEndpointMarkerBorderWidth(float startSize, float endSize); +void setControlPointMarkerColor(vec4 startColor, vec4 endColor); +void setControlPointMarkerBorderColor(vec4 startColor, vec4 endColor); +void setControlPointMarkerSize(float startSize, float endSize); +void setControlPointMarkerBorderWidth(float startSize, float endSize); + void setPointMarkerColor(vec4 color); void setPointMarkerColor(vec3 color) { setPointMarkerColor(vec4(color, 1.0)); } void setPointMarkerBorderColor(vec4 color); @@ -475,13 +488,35 @@ void setEndpointMarkerBorderColor(vec3 color) { setEndpointMarkerBorderColor(col void setEndpointMarkerBorderColor(vec4 color) { setEndpointMarkerBorderColor(color, color); } void setEndpointMarkerSize(float size) { setEndpointMarkerSize(size, size); } void setEndpointMarkerBorderWidth(float size) { setEndpointMarkerBorderWidth(size, size); } + + +void setControlPointMarkerColor(vec3 startColor, vec3 endColor) { + setControlPointMarkerColor(vec4(startColor, 1.0), vec4(endColor, 1.0)); +} +void setControlPointMarkerBorderColor(vec3 startColor, vec3 endColor) { + setControlPointMarkerBorderColor(vec4(startColor, 1.0), vec4(endColor, 1.0)); +} +void setControlPointMarkerColor(vec3 color) { setControlPointMarkerColor(color, color); } +void setControlPointMarkerColor(vec4 color) { setControlPointMarkerColor(color, color); } +void setControlPointMarkerBorderColor(vec3 color) { setControlPointMarkerBorderColor(color, color); } +void setControlPointMarkerBorderColor(vec4 color) { setControlPointMarkerBorderColor(color, color); } +void setControlPointMarkerSize(float size) { setControlPointMarkerSize(size, size); } +void setControlPointMarkerBorderWidth(float size) { setControlPointMarkerBorderWidth(size, size); } + void setLineColor(vec4 color) { setLineColor(color, color); } void setLineColor(vec3 color) { setLineColor(vec4(color, 1.0)); } void setLineColor(vec3 startColor, vec3 endColor) { setLineColor(vec4(startColor, 1.0), vec4(endColor, 1.0)); } + +void setLineSegmentColor(vec4 color) { setLineSegmentColor(color, color); } +void setLineSegmentColor(vec3 color) { setLineSegmentColor(vec4(color, 1.0)); } +void setLineSegmentColor(vec3 startColor, vec3 endColor) { setLineSegmentColor(vec4(startColor, 1.0), vec4(endColor, 1.0)); } + void setColor(vec4 color) { setPointMarkerColor(color); setLineColor(color); + setLineSegmentColor(color); setEndpointMarkerColor(color); + setControlPointMarkerColor(color); setBoundingBoxBorderColor(color); setEllipsoidFillColor(vec4(color.rgb, color.a * (PROJECTION_VIEW ? 
1.0 : 0.5))); } @@ -753,7 +788,16 @@ interface AnnotationTypeRenderHandler { defineShaderNoOpSetters: (builder: ShaderBuilder) => void; perspectiveViewRenderHelper: AnnotationRenderHelperConstructor; sliceViewRenderHelper: AnnotationRenderHelperConstructor; - pickIdsPerInstance: number; + bytes: (annotation: T) => number; + pickIdsPerInstance( + annotations: Annotation[] + ): number[]; + staticPickIdsPerInstance: null|number; + assignPickingInformation( + mouseState: MouseSelectionState, + pickIds: number[], + pickedOffset: number + ): void; getRepresentativePoint( out: Float32Array, annotation: T, diff --git a/src/ui/annotations.ts b/src/ui/annotations.ts index c799c90be..d4718993a 100644 --- a/src/ui/annotations.ts +++ b/src/ui/annotations.ts @@ -12,6 +12,9 @@ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. + * + * @modifcations + * MIT modified this file. For more information see the NOTICES.txt file */ /** @@ -31,6 +34,7 @@ import type { AxisAlignedBoundingBox, Ellipsoid, Line, + LineString, } from "#src/annotation/index.js"; import { AnnotationPropertySerializer, @@ -445,6 +449,16 @@ export class AnnotationLayerView extends Tab { }, }); mutableControls.appendChild(ellipsoidButton); + + const lineStringButton = makeIcon({ + text: annotationTypeHandlers[AnnotationType.LINE_STRING].icon, + title: 'Annotate line string', + onClick: () => { + this.layer.tool.value = new PlaceLineStringTool(this.layer, {}); + }, + }); + mutableControls.appendChild(lineStringButton); + toolbox.appendChild(mutableControls); this.element.appendChild(toolbox); @@ -1026,6 +1040,7 @@ const ANNOTATE_POINT_TOOL_ID = "annotatePoint"; const ANNOTATE_LINE_TOOL_ID = "annotateLine"; const ANNOTATE_BOUNDING_BOX_TOOL_ID = "annotateBoundingBox"; const ANNOTATE_ELLIPSOID_TOOL_ID = "annotateSphere"; +const ANNOTATE_LINE_STRING_TOOL_ID = 'annotateLineString'; export class PlacePointTool extends PlaceAnnotationTool { trigger(mouseState: MouseSelectionState) { @@ -1216,6 +1231,38 @@ abstract class PlaceTwoCornerAnnotationTool extends TwoStepAnnotationTool { } } +abstract class PlaceMultiPointAnnotationTool extends TwoStepAnnotationTool { + annotationType: AnnotationType.LINE_STRING; + + getInitialAnnotation(mouseState: MouseSelectionState, annotationLayer: AnnotationLayerState): + Annotation { + const point = getMousePositionInAnnotationCoordinates(mouseState, annotationLayer); + return { + id: '', + type: this.annotationType, + description: '', + points: [point, point], + properties: annotationLayer.source.properties.map(x => x.default), + }; + } + + getUpdatedAnnotation( + oldAnnotation: LineString, mouseState: MouseSelectionState, + annotationLayer: AnnotationLayerState): Annotation { + const point = getMousePositionInAnnotationCoordinates(mouseState, annotationLayer); + const lastPoint = oldAnnotation.points[oldAnnotation.points.length - 1]; + const newPoints = oldAnnotation.points.slice(); + + // Only record the new point if the cursor has moved. 
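+    // The points array stores segment endpoints in consecutive pairs, so the trailing entry is
+    // replaced by three copies of the new point: one closes the in-progress segment and the other
+    // two form a fresh degenerate trailing segment whose far endpoint tracks the next cursor move.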
+ if (point !== undefined && (point[0] != lastPoint[0] || point[1] != lastPoint[1] || point[2] != lastPoint[2])) { + newPoints.pop(); + newPoints.push(point, point, point); + } + + return {...oldAnnotation, points: newPoints}; + } +} + export class PlaceBoundingBoxTool extends PlaceTwoCornerAnnotationTool { get description() { return "annotate bounding box"; @@ -1355,6 +1402,46 @@ class PlaceEllipsoidTool extends TwoStepAnnotationTool { } } +export class PlaceLineStringTool extends PlaceMultiPointAnnotationTool { + get description() { + return `annotate line string`; + } + + private initialRelationships: Uint64[][]|undefined; + + getInitialAnnotation(mouseState: MouseSelectionState, annotationLayer: AnnotationLayerState): + Annotation { + const result = super.getInitialAnnotation(mouseState, annotationLayer); + this.initialRelationships = result.relatedSegments = + getSelectedAssociatedSegments(annotationLayer); + return result; + } + + getUpdatedAnnotation( + oldAnnotation: LineString, mouseState: MouseSelectionState, + annotationLayer: AnnotationLayerState) { + const result = super.getUpdatedAnnotation(oldAnnotation, mouseState, annotationLayer); + const initialRelationships = this.initialRelationships; + const newRelationships = getSelectedAssociatedSegments(annotationLayer); + if (initialRelationships === undefined) { + result.relatedSegments = newRelationships; + } else { + result.relatedSegments = Array.from(newRelationships, (newSegments, i) => { + const initialSegments = initialRelationships[i]; + newSegments = + newSegments.filter(x => initialSegments.findIndex(y => Uint64.equal(x, y)) === -1); + return [...initialSegments, ...newSegments]; + }); + } + return result; + } + + toJSON() { + return ANNOTATE_LINE_STRING_TOOL_ID; + } +} +PlaceLineStringTool.prototype.annotationType = AnnotationType.LINE_STRING; + registerLegacyTool( ANNOTATE_POINT_TOOL_ID, (layer, options) => @@ -1375,6 +1462,9 @@ registerLegacyTool( (layer, options) => new PlaceEllipsoidTool(layer, options), ); +registerLegacyTool( + ANNOTATE_LINE_STRING_TOOL_ID, + (layer, options) => new PlaceLineStringTool(layer, options)); const newRelatedSegmentKeyMap = EventActionMap.fromObject({ enter: { action: "commit" },